diff --git a/go.mod b/go.mod
index 7e6346a1fd..952c894919 100644
--- a/go.mod
+++ b/go.mod
@@ -1,73 +1,73 @@
module github.com/integrations/terraform-provider-github/v6

go 1.21
-
-toolchain go1.22.0
+toolchain go1.24.1

require (
github.com/client9/misspell v0.3.4
- github.com/go-jose/go-jose/v3 v3.0.3
- github.com/golangci/golangci-lint v1.59.1
+ github.com/go-jose/go-jose/v3 v3.0.4
+ github.com/golangci/golangci-lint v1.64.8
github.com/google/go-github/v66 v66.0.1-0.20241027130611-9e5757d5a766
github.com/google/uuid v1.6.0
- github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320
- github.com/hashicorp/terraform-plugin-sdk/v2 v2.34.0
+ github.com/hashicorp/go-cty v1.5.0
+ github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.1
github.com/shurcooL/githubv4 v0.0.0-20221126192849-0b5c4c7994eb
- github.com/stretchr/testify v1.9.0
- golang.org/x/crypto v0.31.0
- golang.org/x/oauth2 v0.22.0
+ github.com/stretchr/testify v1.10.0
+ golang.org/x/crypto v0.36.0
+ golang.org/x/oauth2 v0.26.0
)

require (
- 4d63.com/gocheckcompilerdirectives v1.2.1 // indirect
- 4d63.com/gochecknoglobals v0.2.1 // indirect
- github.com/4meepo/tagalign v1.3.4 // indirect
- github.com/Abirdcfly/dupword v0.0.14 // indirect
- github.com/Antonboom/errname v0.1.13 // indirect
- github.com/Antonboom/nilnil v0.1.9 // indirect
- github.com/Antonboom/testifylint v1.3.1 // indirect
- github.com/BurntSushi/toml v1.4.0 // indirect
- github.com/Crocmagnon/fatcontext v0.2.2 // indirect
+ 4d63.com/gocheckcompilerdirectives v1.3.0 // indirect
+ 4d63.com/gochecknoglobals v0.2.2 // indirect
+ github.com/4meepo/tagalign v1.4.2 // indirect
+ github.com/Abirdcfly/dupword v0.1.3 // indirect
+ github.com/Antonboom/errname v1.0.0 // indirect
+ github.com/Antonboom/nilnil v1.0.1 // indirect
+ github.com/Antonboom/testifylint v1.5.2 // indirect
+ github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c // indirect
+ github.com/Crocmagnon/fatcontext v0.7.1 // indirect
github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 // indirect
- github.com/GaijinEntertainment/go-exhaustruct/v3 v3.2.0 // indirect
- github.com/Masterminds/semver/v3 v3.2.1 // indirect
- github.com/OpenPeeDeeP/depguard/v2 v2.2.0 // indirect
- github.com/ProtonMail/go-crypto v1.1.0-alpha.2 // indirect
+ github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.1 // indirect
+ github.com/Masterminds/semver/v3 v3.3.0 // indirect
+ github.com/OpenPeeDeeP/depguard/v2 v2.2.1 // indirect
+ github.com/ProtonMail/go-crypto v1.1.3 // indirect
github.com/agext/levenshtein v1.2.2 // indirect
- github.com/alecthomas/go-check-sumtype v0.1.4 // indirect
- github.com/alexkohler/nakedret/v2 v2.0.4 // indirect
+ github.com/alecthomas/go-check-sumtype v0.3.1 // indirect
+ github.com/alexkohler/nakedret/v2 v2.0.5 // indirect
github.com/alexkohler/prealloc v1.0.0 // indirect
github.com/alingse/asasalint v0.0.11 // indirect
+ github.com/alingse/nilnesserr v0.1.2 // indirect
github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect
github.com/ashanbrown/forbidigo v1.6.0 // indirect
- github.com/ashanbrown/makezero v1.1.1 // indirect
+ github.com/ashanbrown/makezero v1.2.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
- github.com/bkielbasa/cyclop v1.2.1 // indirect
+ github.com/bkielbasa/cyclop v1.2.3 // indirect
github.com/blizzy78/varnamelen v0.8.0 // indirect
- github.com/bombsimon/wsl/v4 v4.2.1 // indirect
- github.com/breml/bidichk v0.2.7 // indirect
- github.com/breml/errchkjson v0.3.6 // indirect
- github.com/butuzov/ireturn v0.3.0 // indirect
- github.com/butuzov/mirror v1.2.0 // indirect
- github.com/catenacyber/perfsprint v0.7.1 // indirect
+ github.com/bombsimon/wsl/v4 v4.5.0 // indirect
+ github.com/breml/bidichk v0.3.2 // indirect
+ github.com/breml/errchkjson v0.4.0 // indirect
+ github.com/butuzov/ireturn v0.3.1 // indirect
+ github.com/butuzov/mirror v1.3.0 // indirect
+ github.com/catenacyber/perfsprint v0.8.2 // indirect
github.com/ccojocar/zxcvbn-go v1.0.2 // indirect
- github.com/cespare/xxhash/v2 v2.2.0 // indirect
+ github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/charithe/durationcheck v0.0.10 // indirect
github.com/chavacava/garif v0.1.0 // indirect
- github.com/ckaznocha/intrange v0.1.2 // indirect
+ github.com/ckaznocha/intrange v0.3.0 // indirect
github.com/cloudflare/circl v1.3.7 // indirect
- github.com/curioswitch/go-reassign v0.2.0 // indirect
- github.com/daixiang0/gci v0.13.4 // indirect
+ github.com/curioswitch/go-reassign v0.3.0 // indirect
+ github.com/daixiang0/gci v0.13.5 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/denis-tingaikin/go-header v0.5.0 // indirect
github.com/ettle/strcase v0.2.0 // indirect
- github.com/fatih/color v1.17.0 // indirect
+ github.com/fatih/color v1.18.0 // indirect
github.com/fatih/structtag v1.2.0 // indirect
github.com/firefart/nonamedreturns v1.0.5 // indirect
github.com/fsnotify/fsnotify v1.5.4 // indirect
github.com/fzipp/gocyclo v0.6.0 // indirect
- github.com/ghostiam/protogetter v0.3.6 // indirect
- github.com/go-critic/go-critic v0.11.4 // indirect
+ github.com/ghostiam/protogetter v0.3.9 // indirect
+ github.com/go-critic/go-critic v0.12.0 // indirect
github.com/go-toolsmith/astcast v1.1.0 // indirect
github.com/go-toolsmith/astcopy v1.1.0 // indirect
github.com/go-toolsmith/astequal v1.2.0 // indirect
@@ -75,133 +75,137 @@ require (
github.com/go-toolsmith/astp v1.1.0 // indirect
github.com/go-toolsmith/strparse v1.1.0 // indirect
github.com/go-toolsmith/typep v1.1.0 // indirect
- github.com/go-viper/mapstructure/v2 v2.0.0 // indirect
- github.com/go-xmlfmt/xmlfmt v1.1.2 // indirect
+ github.com/go-viper/mapstructure/v2 v2.2.1 // indirect
+ github.com/go-xmlfmt/xmlfmt v1.1.3 // indirect
github.com/gobwas/glob v0.2.3 // indirect
- github.com/gofrs/flock v0.8.1 // indirect
+ github.com/gofrs/flock v0.12.1 // indirect
github.com/golang/protobuf v1.5.4 // indirect
- github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a // indirect
- github.com/golangci/gofmt v0.0.0-20231018234816-f50ced29576e // indirect
+ github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 // indirect
+ github.com/golangci/go-printf-func-name v0.1.0 // indirect
+ github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d // indirect
github.com/golangci/misspell v0.6.0 // indirect
- github.com/golangci/modinfo v0.3.4 // indirect
github.com/golangci/plugin-module-register v0.1.1 // indirect
- github.com/golangci/revgrep v0.5.3 // indirect
+ github.com/golangci/revgrep v0.8.0 // indirect
github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed // indirect
- github.com/google/go-cmp v0.6.0 // indirect
+ github.com/google/go-cmp v0.7.0 // indirect
github.com/google/go-querystring v1.1.0 // indirect
github.com/gordonklaus/ineffassign v0.1.0 // indirect
github.com/gostaticanalysis/analysisutil v0.7.1 // indirect
- github.com/gostaticanalysis/comment v1.4.2 // indirect
- github.com/gostaticanalysis/forcetypeassert v0.1.0 // indirect
+ github.com/gostaticanalysis/comment v1.5.0 // indirect
+ github.com/gostaticanalysis/forcetypeassert v0.2.0 // indirect
github.com/gostaticanalysis/nilerr v0.1.1 // indirect
github.com/hashicorp/errwrap v1.0.0 // indirect
github.com/hashicorp/go-checkpoint v0.5.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
- github.com/hashicorp/go-hclog v1.5.0 // indirect
+ github.com/hashicorp/go-hclog v1.6.3 // indirect
+ github.com/hashicorp/go-immutable-radix/v2 v2.1.0 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
- github.com/hashicorp/go-plugin v1.6.0 // indirect
+ github.com/hashicorp/go-plugin v1.6.2 // indirect
+ github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
github.com/hashicorp/go-uuid v1.0.3 // indirect
github.com/hashicorp/go-version v1.7.0 // indirect
- github.com/hashicorp/hc-install v0.6.4 // indirect
+ github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
+ github.com/hashicorp/hc-install v0.9.1 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
- github.com/hashicorp/hcl/v2 v2.20.1 // indirect
+ github.com/hashicorp/hcl/v2 v2.23.0 // indirect
github.com/hashicorp/logutils v1.0.0 // indirect
- github.com/hashicorp/terraform-exec v0.21.0 // indirect
- github.com/hashicorp/terraform-json v0.22.1 // indirect
- github.com/hashicorp/terraform-plugin-go v0.23.0 // indirect
+ github.com/hashicorp/terraform-exec v0.22.0 // indirect
+ github.com/hashicorp/terraform-json v0.24.0 // indirect
+ github.com/hashicorp/terraform-plugin-go v0.26.0 // indirect
github.com/hashicorp/terraform-plugin-log v0.9.0 // indirect
- github.com/hashicorp/terraform-registry-address v0.2.3 // indirect
+ github.com/hashicorp/terraform-registry-address v0.2.4 // indirect
github.com/hashicorp/terraform-svchost v0.1.1 // indirect
github.com/hashicorp/yamux v0.1.1 // indirect
github.com/hexops/gotextdiff v1.0.3 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jgautheron/goconst v1.7.1 // indirect
github.com/jingyugao/rowserrcheck v1.1.1 // indirect
- github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af // indirect
- github.com/jjti/go-spancheck v0.6.1 // indirect
- github.com/julz/importas v0.1.0 // indirect
- github.com/karamaru-alpha/copyloopvar v1.1.0 // indirect
- github.com/kisielk/errcheck v1.7.0 // indirect
- github.com/kkHAIKE/contextcheck v1.1.5 // indirect
+ github.com/jjti/go-spancheck v0.6.4 // indirect
+ github.com/julz/importas v0.2.0 // indirect
+ github.com/karamaru-alpha/copyloopvar v1.2.1 // indirect
+ github.com/kisielk/errcheck v1.9.0 // indirect
+ github.com/kkHAIKE/contextcheck v1.1.6 // indirect
github.com/kulti/thelper v0.6.3 // indirect
github.com/kunwardeep/paralleltest v1.0.10 // indirect
- github.com/kyoh86/exportloopref v0.1.11 // indirect
- github.com/lasiar/canonicalheader v1.1.1 // indirect
- github.com/ldez/gomoddirectives v0.2.4 // indirect
- github.com/ldez/tagliatelle v0.5.0 // indirect
+ github.com/lasiar/canonicalheader v1.1.2 // indirect
+ github.com/ldez/exptostd v0.4.2 // indirect
+ github.com/ldez/gomoddirectives v0.6.1 // indirect
+ github.com/ldez/grignotin v0.9.0 // indirect
+ github.com/ldez/tagliatelle v0.7.1 // indirect
+ github.com/ldez/usetesting v0.4.2 // indirect
github.com/leonklingele/grouper v1.1.2 // indirect
- github.com/lufeee/execinquery v1.2.1 // indirect
github.com/macabu/inamedparam v0.1.3 // indirect
github.com/magiconair/properties v1.8.6 // indirect
github.com/maratori/testableexamples v1.0.0 // indirect
github.com/maratori/testpackage v1.1.1 // indirect
- github.com/matoous/godox v0.0.0-20230222163458-006bad1f9d26 // indirect
- github.com/mattn/go-colorable v0.1.13 // indirect
+ github.com/matoous/godox v1.1.0 // indirect
+ github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
- github.com/mattn/go-runewidth v0.0.9 // indirect
+ github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
- github.com/mgechev/revive v1.3.7 // indirect
+ github.com/mgechev/revive v1.7.0 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/mitchellh/go-testing-interface v1.14.1 // indirect
github.com/mitchellh/go-wordwrap v1.0.0 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/mitchellh/reflectwalk v1.0.2 // indirect
- github.com/moricho/tparallel v0.3.1 // indirect
+ github.com/moricho/tparallel v0.3.2 // indirect
github.com/nakabonne/nestif v0.3.1 // indirect
- github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect
github.com/nishanths/exhaustive v0.12.0 // indirect
github.com/nishanths/predeclared v0.2.2 // indirect
- github.com/nunnatsa/ginkgolinter v0.16.2 // indirect
+ github.com/nunnatsa/ginkgolinter v0.19.1 // indirect
github.com/oklog/run v1.0.0 // indirect
github.com/olekukonko/tablewriter v0.0.5 // indirect
github.com/pelletier/go-toml v1.9.5 // indirect
- github.com/pelletier/go-toml/v2 v2.2.2 // indirect
+ github.com/pelletier/go-toml/v2 v2.2.3 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
- github.com/polyfloyd/go-errorlint v1.5.2 // indirect
+ github.com/polyfloyd/go-errorlint v1.7.1 // indirect
github.com/prometheus/client_golang v1.12.1 // indirect
github.com/prometheus/client_model v0.2.0 // indirect
github.com/prometheus/common v0.32.1 // indirect
github.com/prometheus/procfs v0.7.3 // indirect
- github.com/quasilyte/go-ruleguard v0.4.2 // indirect
+ github.com/quasilyte/go-ruleguard v0.4.3-0.20240823090925-0fe6f58b47b1 // indirect
github.com/quasilyte/go-ruleguard/dsl v0.3.22 // indirect
github.com/quasilyte/gogrep v0.5.0 // indirect
github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect
github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect
- github.com/ryancurrah/gomodguard v1.3.2 // indirect
+ github.com/raeperd/recvcheck v0.2.0 // indirect
+ github.com/rivo/uniseg v0.4.7 // indirect
+ github.com/rogpeppe/go-internal v1.14.1 // indirect
+ github.com/ryancurrah/gomodguard v1.3.5 // indirect
github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect
- github.com/sanposhiho/wastedassign/v2 v2.0.7 // indirect
- github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 // indirect
+ github.com/sanposhiho/wastedassign/v2 v2.1.0 // indirect
+ github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 // indirect
github.com/sashamelentyev/interfacebloat v1.1.0 // indirect
- github.com/sashamelentyev/usestdlibvars v1.26.0 // indirect
- github.com/securego/gosec/v2 v2.20.1-0.20240525090044-5f0084eb01a9 // indirect
- github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c // indirect
+ github.com/sashamelentyev/usestdlibvars v1.28.0 // indirect
+ github.com/securego/gosec/v2 v2.22.2 // indirect
github.com/shurcooL/graphql v0.0.0-20220606043923-3cf50f8a0a29 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/sivchari/containedctx v1.0.3 // indirect
- github.com/sivchari/tenv v1.7.1 // indirect
- github.com/sonatard/noctx v0.0.2 // indirect
+ github.com/sivchari/tenv v1.12.1 // indirect
+ github.com/sonatard/noctx v0.1.0 // indirect
github.com/sourcegraph/go-diff v0.7.0 // indirect
- github.com/spf13/afero v1.11.0 // indirect
+ github.com/spf13/afero v1.12.0 // indirect
github.com/spf13/cast v1.5.0 // indirect
- github.com/spf13/cobra v1.7.0 // indirect
+ github.com/spf13/cobra v1.9.1 // indirect
github.com/spf13/jwalterweatherman v1.1.0 // indirect
- github.com/spf13/pflag v1.0.5 // indirect
+ github.com/spf13/pflag v1.0.6 // indirect
github.com/spf13/viper v1.12.0 // indirect
github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect
- github.com/stbenjam/no-sprintf-host-port v0.1.1 // indirect
+ github.com/stbenjam/no-sprintf-host-port v0.2.0 // indirect
github.com/stretchr/objx v0.5.2 // indirect
github.com/subosito/gotenv v1.4.1 // indirect
- github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c // indirect
- github.com/tdakkota/asciicheck v0.2.0 // indirect
- github.com/tetafro/godot v1.4.16 // indirect
- github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966 // indirect
- github.com/timonwong/loggercheck v0.9.4 // indirect
- github.com/tomarrell/wrapcheck/v2 v2.8.3 // indirect
+ github.com/tdakkota/asciicheck v0.4.1 // indirect
+ github.com/tetafro/godot v1.5.0 // indirect
+ github.com/timakin/bodyclose v0.0.0-20241017074812-ed6a65f985e3 // indirect
+ github.com/timonwong/loggercheck v0.10.1 // indirect
+ github.com/tomarrell/wrapcheck/v2 v2.10.0 // indirect
github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect
- github.com/ultraware/funlen v0.1.0 // indirect
- github.com/ultraware/whitespace v0.1.1 // indirect
- github.com/uudashr/gocognit v1.1.2 // indirect
+ github.com/ultraware/funlen v0.2.0 // indirect
+ github.com/ultraware/whitespace v0.2.0 // indirect
+ github.com/uudashr/gocognit v1.2.0 // indirect
+ github.com/uudashr/iface v1.3.1 // indirect
github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
@@ -209,31 +213,29 @@ require (
github.com/yagipy/maintidx v1.0.0 // indirect
github.com/yeya24/promlinter v0.3.0 // indirect
github.com/ykadowak/zerologlint v0.1.5 // indirect
- github.com/zclconf/go-cty v1.14.4 // indirect
+ github.com/zclconf/go-cty v1.16.2 // indirect
gitlab.com/bosi/decorder v0.4.2 // indirect
- go-simpler.org/musttag v0.12.2 // indirect
- go-simpler.org/sloglint v0.7.1 // indirect
+ go-simpler.org/musttag v0.13.0 // indirect
+ go-simpler.org/sloglint v0.9.0 // indirect
go.uber.org/atomic v1.7.0 // indirect
- go.uber.org/automaxprocs v1.5.3 // indirect
+ go.uber.org/automaxprocs v1.6.0 // indirect
go.uber.org/multierr v1.6.0 // indirect
go.uber.org/zap v1.24.0 // indirect
- golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc // indirect
- golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f // indirect
- golang.org/x/mod v0.18.0 // indirect
- golang.org/x/net v0.26.0 // indirect
- golang.org/x/sync v0.10.0 // indirect
- golang.org/x/sys v0.28.0 // indirect
- golang.org/x/text v0.21.0 // indirect
- golang.org/x/tools v0.22.0 // indirect
+ golang.org/x/exp/typeparams v0.0.0-20250210185358-939b2ce775ac // indirect
+ golang.org/x/mod v0.24.0 // indirect
+ golang.org/x/net v0.37.0 // indirect
+ golang.org/x/sync v0.12.0 // indirect
+ golang.org/x/sys v0.31.0 // indirect
+ golang.org/x/text v0.23.0 // indirect
+ golang.org/x/tools v0.31.0 // indirect
google.golang.org/appengine v1.6.8 // indirect
- google.golang.org/genproto/googleapis/rpc v0.0.0-20240227224415-6ceb2ff114de // indirect
- google.golang.org/grpc v1.63.2 // indirect
- google.golang.org/protobuf v1.34.0 // indirect
- gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect
+ google.golang.org/genproto/googleapis/rpc v0.0.0-20250219182151-9fdb1cabc7b2 // indirect
+ google.golang.org/grpc v1.70.0 // indirect
+ google.golang.org/protobuf v1.36.5 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
- honnef.co/go/tools v0.4.7 // indirect
- mvdan.cc/gofumpt v0.6.0 // indirect
+ honnef.co/go/tools v0.6.1 // indirect
+ mvdan.cc/gofumpt v0.7.0 // indirect
mvdan.cc/unparam v0.0.0-20240528143540-8a5130ca722f // indirect
)
diff --git a/go.sum b/go.sum
index 833e973361..1012aab554 100644
--- a/go.sum
+++ b/go.sum
@@ -1,7 +1,7 @@
-4d63.com/gocheckcompilerdirectives v1.2.1 h1:AHcMYuw56NPjq/2y615IGg2kYkBdTvOaojYCBcRE7MA=
-4d63.com/gocheckcompilerdirectives v1.2.1/go.mod h1:yjDJSxmDTtIHHCqX0ufRYZDL6vQtMG7tJdKVeWwsqvs=
-4d63.com/gochecknoglobals v0.2.1 h1:1eiorGsgHOFOuoOiJDy2psSrQbRdIHrlge0IJIkUgDc=
-4d63.com/gochecknoglobals v0.2.1/go.mod h1:KRE8wtJB3CXCsb1xy421JfTHIIbmT3U5ruxw2Qu8fSU=
+4d63.com/gocheckcompilerdirectives v1.3.0 h1:Ew5y5CtcAAQeTVKUVFrE7EwHMrTO6BggtEj8BZSjZ3A=
+4d63.com/gocheckcompilerdirectives v1.3.0/go.mod h1:ofsJ4zx2QAuIP/NO/NAh1ig6R1Fb18/GI7RVMwz7kAY=
+4d63.com/gochecknoglobals v0.2.2 h1:H1vdnwnMaZdQW/N+NrkT1SZMTBmcwHe9Vq8lJcYYTtU=
+4d63.com/gochecknoglobals v0.2.2/go.mod h1:lLxwTQjL5eIesRbvnzIP3jZtG140FnTdz+AlMa+ogt0=
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
@@ -37,91 +37,93 @@ cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9
dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
-github.com/4meepo/tagalign v1.3.4 h1:P51VcvBnf04YkHzjfclN6BbsopfJR5rxs1n+5zHt+w8=
-github.com/4meepo/tagalign v1.3.4/go.mod h1:M+pnkHH2vG8+qhE5bVc/zeP7HS/j910Fwa9TUSyZVI0=
-github.com/Abirdcfly/dupword v0.0.14 h1:3U4ulkc8EUo+CaT105/GJ1BQwtgyj6+VaBVbAX11Ba8=
-github.com/Abirdcfly/dupword v0.0.14/go.mod h1:VKDAbxdY8YbKUByLGg8EETzYSuC4crm9WwI6Y3S0cLI=
-github.com/Antonboom/errname v0.1.13 h1:JHICqsewj/fNckzrfVSe+T33svwQxmjC+1ntDsHOVvM=
-github.com/Antonboom/errname v0.1.13/go.mod h1:uWyefRYRN54lBg6HseYCFhs6Qjcy41Y3Jl/dVhA87Ns=
-github.com/Antonboom/nilnil v0.1.9 h1:eKFMejSxPSA9eLSensFmjW2XTgTwJMjZ8hUHtV4s/SQ=
-github.com/Antonboom/nilnil v0.1.9/go.mod h1:iGe2rYwCq5/Me1khrysB4nwI7swQvjclR8/YRPl5ihQ=
-github.com/Antonboom/testifylint v1.3.1 h1:Uam4q1Q+2b6H7gvk9RQFw6jyVDdpzIirFOOrbs14eG4=
-github.com/Antonboom/testifylint v1.3.1/go.mod h1:NV0hTlteCkViPW9mSR4wEMfwp+Hs1T3dY60bkvSfhpM=
+github.com/4meepo/tagalign v1.4.2 h1:0hcLHPGMjDyM1gHG58cS73aQF8J4TdVR96TZViorO9E=
+github.com/4meepo/tagalign v1.4.2/go.mod h1:+p4aMyFM+ra7nb41CnFG6aSDXqRxU/w1VQqScKqDARI=
+github.com/Abirdcfly/dupword v0.1.3 h1:9Pa1NuAsZvpFPi9Pqkd93I7LIYRURj+A//dFd5tgBeE=
+github.com/Abirdcfly/dupword v0.1.3/go.mod h1:8VbB2t7e10KRNdwTVoxdBaxla6avbhGzb8sCTygUMhw=
+github.com/Antonboom/errname v1.0.0 h1:oJOOWR07vS1kRusl6YRSlat7HFnb3mSfMl6sDMRoTBA=
+github.com/Antonboom/errname v1.0.0/go.mod h1:gMOBFzK/vrTiXN9Oh+HFs+e6Ndl0eTFbtsRTSRdXyGI=
+github.com/Antonboom/nilnil v1.0.1 h1:C3Tkm0KUxgfO4Duk3PM+ztPncTFlOf0b2qadmS0s4xs=
+github.com/Antonboom/nilnil v1.0.1/go.mod h1:CH7pW2JsRNFgEh8B2UaPZTEPhCMuFowP/e8Udp9Nnb0=
+github.com/Antonboom/testifylint v1.5.2 h1:4s3Xhuv5AvdIgbd8wOOEeo0uZG7PbDKQyKY5lGoQazk=
+github.com/Antonboom/testifylint v1.5.2/go.mod h1:vxy8VJ0bc6NavlYqjZfmp6EfqXMtBgQ4+mhCojwC1P8=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0=
-github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
+github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c h1:pxW6RcqyfI9/kWtOwnv/G+AzdKuy2ZrqINhenH4HyNs=
+github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
-github.com/Crocmagnon/fatcontext v0.2.2 h1:OrFlsDdOj9hW/oBEJBNSuH7QWf+E9WPVHw+x52bXVbk=
-github.com/Crocmagnon/fatcontext v0.2.2/go.mod h1:WSn/c/+MMNiD8Pri0ahRj0o9jVpeowzavOQplBJw6u0=
+github.com/Crocmagnon/fatcontext v0.7.1 h1:SC/VIbRRZQeQWj/TcQBS6JmrXcfA+BU4OGSVUt54PjM=
+github.com/Crocmagnon/fatcontext v0.7.1/go.mod h1:1wMvv3NXEBJucFGfwOJBxSVWcoIO6emV215SMkW9MFU=
github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 h1:sHglBQTwgx+rWPdisA5ynNEsoARbiCBOyGcJM4/OzsM=
github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs=
-github.com/GaijinEntertainment/go-exhaustruct/v3 v3.2.0 h1:sATXp1x6/axKxz2Gjxv8MALP0bXaNRfQinEwyfMcx8c=
-github.com/GaijinEntertainment/go-exhaustruct/v3 v3.2.0/go.mod h1:Nl76DrGNJTA1KJ0LePKBw/vznBX1EHbAZX8mwjR82nI=
-github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0=
-github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
+github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.1 h1:Sz1JIXEcSfhz7fUi7xHnhpIE0thVASYjvosApmHuD2k=
+github.com/GaijinEntertainment/go-exhaustruct/v3 v3.3.1/go.mod h1:n/LSCXNuIYqVfBlVXyHfMQkZDdp1/mmxfSjADd3z1Zg=
+github.com/Masterminds/semver/v3 v3.3.0 h1:B8LGeaivUe71a5qox1ICM/JLl0NqZSW5CHyL+hmvYS0=
+github.com/Masterminds/semver/v3 v3.3.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM=
-github.com/OpenPeeDeeP/depguard/v2 v2.2.0 h1:vDfG60vDtIuf0MEOhmLlLLSzqaRM8EMcgJPdp74zmpA=
-github.com/OpenPeeDeeP/depguard/v2 v2.2.0/go.mod h1:CIzddKRvLBC4Au5aYP/i3nyaWQ+ClszLIuVocRiCYFQ=
-github.com/ProtonMail/go-crypto v1.1.0-alpha.2 h1:bkyFVUP+ROOARdgCiJzNQo2V2kiB97LyUpzH9P6Hrlg=
-github.com/ProtonMail/go-crypto v1.1.0-alpha.2/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE=
+github.com/OpenPeeDeeP/depguard/v2 v2.2.1 h1:vckeWVESWp6Qog7UZSARNqfu/cZqvki8zsuj3piCMx4=
+github.com/OpenPeeDeeP/depguard/v2 v2.2.1/go.mod h1:q4DKzC4UcVaAvcfd41CZh0PWpGgzrVxUYBlgKNGquUo=
+github.com/ProtonMail/go-crypto v1.1.3 h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk=
+github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE=
github.com/agext/levenshtein v1.2.2 h1:0S/Yg6LYmFJ5stwQeRp6EeOcCbj7xiqQSdNelsXvaqE=
github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
-github.com/alecthomas/assert/v2 v2.2.2 h1:Z/iVC0xZfWTaFNE6bA3z07T86hd45Xe2eLt6WVy2bbk=
-github.com/alecthomas/assert/v2 v2.2.2/go.mod h1:pXcQ2Asjp247dahGEmsZ6ru0UVwnkhktn7S0bBDLxvQ=
-github.com/alecthomas/go-check-sumtype v0.1.4 h1:WCvlB3l5Vq5dZQTFmodqL2g68uHiSwwlWcT5a2FGK0c=
-github.com/alecthomas/go-check-sumtype v0.1.4/go.mod h1:WyYPfhfkdhyrdaligV6svFopZV8Lqdzn5pyVBaV6jhQ=
-github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk=
-github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
+github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0=
+github.com/alecthomas/assert/v2 v2.11.0/go.mod h1:Bze95FyfUr7x34QZrjL+XP+0qgp/zg8yS+TtBj1WA3k=
+github.com/alecthomas/go-check-sumtype v0.3.1 h1:u9aUvbGINJxLVXiFvHUlPEaD7VDULsrxJb4Aq31NLkU=
+github.com/alecthomas/go-check-sumtype v0.3.1/go.mod h1:A8TSiN3UPRw3laIgWEUOHHLPa6/r9MtoigdlP5h3K/E=
+github.com/alecthomas/repr v0.4.0 h1:GhI2A8MACjfegCPVq9f1FLvIBS+DrQ2KQBFZP1iFzXc=
+github.com/alecthomas/repr v0.4.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
-github.com/alexkohler/nakedret/v2 v2.0.4 h1:yZuKmjqGi0pSmjGpOC016LtPJysIL0WEUiaXW5SUnNg=
-github.com/alexkohler/nakedret/v2 v2.0.4/go.mod h1:bF5i0zF2Wo2o4X4USt9ntUWve6JbFv02Ff4vlkmS/VU=
+github.com/alexkohler/nakedret/v2 v2.0.5 h1:fP5qLgtwbx9EJE8dGEERT02YwS8En4r9nnZ71RK+EVU=
+github.com/alexkohler/nakedret/v2 v2.0.5/go.mod h1:bF5i0zF2Wo2o4X4USt9ntUWve6JbFv02Ff4vlkmS/VU=
github.com/alexkohler/prealloc v1.0.0 h1:Hbq0/3fJPQhNkN0dR95AVrr6R7tou91y0uHG5pOcUuw=
github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cvfHR56wJVlKE=
github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw=
github.com/alingse/asasalint v0.0.11/go.mod h1:nCaoMhw7a9kSJObvQyVzNTPBDbNpdocqrSP7t/cW5+I=
+github.com/alingse/nilnesserr v0.1.2 h1:Yf8Iwm3z2hUUrP4muWfW83DF4nE3r1xZ26fGWUKCZlo=
+github.com/alingse/nilnesserr v0.1.2/go.mod h1:1xJPrXonEtX7wyTq8Dytns5P2hNzoWymVUIaKm4HNFg=
github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec=
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4=
github.com/ashanbrown/forbidigo v1.6.0 h1:D3aewfM37Yb3pxHujIPSpTf6oQk9sc9WZi8gerOIVIY=
github.com/ashanbrown/forbidigo v1.6.0/go.mod h1:Y8j9jy9ZYAEHXdu723cUlraTqbzjKF1MUyfOKL+AjcU=
-github.com/ashanbrown/makezero v1.1.1 h1:iCQ87C0V0vSyO+M9E/FZYbu65auqH0lnsOkf5FcB28s=
-github.com/ashanbrown/makezero v1.1.1/go.mod h1:i1bJLCRSCHOcOa9Y6MyF2FTfMZMFdHvxKHxgO5Z1axI=
+github.com/ashanbrown/makezero v1.2.0 h1:/2Lp1bypdmK9wDIq7uWBlDF1iMUpIIS4A+pF6C9IEUU=
+github.com/ashanbrown/makezero v1.2.0/go.mod h1:dxlPhHbDMC6N6xICzFBSK+4njQDdK8euNO0qjQMtGY4=
github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=
github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
-github.com/bkielbasa/cyclop v1.2.1 h1:AeF71HZDob1P2/pRm1so9cd1alZnrpyc4q2uP2l0gJY=
-github.com/bkielbasa/cyclop v1.2.1/go.mod h1:K/dT/M0FPAiYjBgQGau7tz+3TMh4FWAEqlMhzFWCrgM=
+github.com/bkielbasa/cyclop v1.2.3 h1:faIVMIGDIANuGPWH031CZJTi2ymOQBULs9H21HSMa5w=
+github.com/bkielbasa/cyclop v1.2.3/go.mod h1:kHTwA9Q0uZqOADdupvcFJQtp/ksSnytRMe8ztxG8Fuo=
github.com/blizzy78/varnamelen v0.8.0 h1:oqSblyuQvFsW1hbBHh1zfwrKe3kcSj0rnXkKzsQ089M=
github.com/blizzy78/varnamelen v0.8.0/go.mod h1:V9TzQZ4fLJ1DSrjVDfl89H7aMnTvKkApdHeyESmyR7k=
-github.com/bombsimon/wsl/v4 v4.2.1 h1:Cxg6u+XDWff75SIFFmNsqnIOgob+Q9hG6y/ioKbRFiM=
-github.com/bombsimon/wsl/v4 v4.2.1/go.mod h1:Xu/kDxGZTofQcDGCtQe9KCzhHphIe0fDuyWTxER9Feo=
-github.com/breml/bidichk v0.2.7 h1:dAkKQPLl/Qrk7hnP6P+E0xOodrq8Us7+U0o4UBOAlQY=
-github.com/breml/bidichk v0.2.7/go.mod h1:YodjipAGI9fGcYM7II6wFvGhdMYsC5pHDlGzqvEW3tQ=
-github.com/breml/errchkjson v0.3.6 h1:VLhVkqSBH96AvXEyclMR37rZslRrY2kcyq+31HCsVrA=
-github.com/breml/errchkjson v0.3.6/go.mod h1:jhSDoFheAF2RSDOlCfhHO9KqhZgAYLyvHe7bRCX8f/U=
+github.com/bombsimon/wsl/v4 v4.5.0 h1:iZRsEvDdyhd2La0FVi5k6tYehpOR/R7qIUjmKk7N74A=
+github.com/bombsimon/wsl/v4 v4.5.0/go.mod h1:NOQ3aLF4nD7N5YPXMruR6ZXDOAqLoM0GEpLwTdvmOSc=
+github.com/breml/bidichk v0.3.2 h1:xV4flJ9V5xWTqxL+/PMFF6dtJPvZLPsyixAoPe8BGJs=
+github.com/breml/bidichk v0.3.2/go.mod h1:VzFLBxuYtT23z5+iVkamXO386OB+/sVwZOpIj6zXGos=
+github.com/breml/errchkjson v0.4.0 h1:gftf6uWZMtIa/Is3XJgibewBm2ksAQSY/kABDNFTAdk=
+github.com/breml/errchkjson v0.4.0/go.mod h1:AuBOSTHyLSaaAFlWsRSuRBIroCh3eh7ZHh5YeelDIk8=
github.com/bufbuild/protocompile v0.4.0 h1:LbFKd2XowZvQ/kajzguUp2DC9UEIQhIq77fZZlaQsNA=
github.com/bufbuild/protocompile v0.4.0/go.mod h1:3v93+mbWn/v3xzN+31nwkJfrEpAUwp+BagBSZWx+TP8=
-github.com/butuzov/ireturn v0.3.0 h1:hTjMqWw3y5JC3kpnC5vXmFJAWI/m31jaCYQqzkS6PL0=
-github.com/butuzov/ireturn v0.3.0/go.mod h1:A09nIiwiqzN/IoVo9ogpa0Hzi9fex1kd9PSD6edP5ZA=
-github.com/butuzov/mirror v1.2.0 h1:9YVK1qIjNspaqWutSv8gsge2e/Xpq1eqEkslEUHy5cs=
-github.com/butuzov/mirror v1.2.0/go.mod h1:DqZZDtzm42wIAIyHXeN8W/qb1EPlb9Qn/if9icBOpdQ=
-github.com/catenacyber/perfsprint v0.7.1 h1:PGW5G/Kxn+YrN04cRAZKC+ZuvlVwolYMrIyyTJ/rMmc=
-github.com/catenacyber/perfsprint v0.7.1/go.mod h1:/wclWYompEyjUD2FuIIDVKNkqz7IgBIWXIH3V0Zol50=
+github.com/butuzov/ireturn v0.3.1 h1:mFgbEI6m+9W8oP/oDdfA34dLisRFCj2G6o/yiI1yZrY=
+github.com/butuzov/ireturn v0.3.1/go.mod h1:ZfRp+E7eJLC0NQmk1Nrm1LOrn/gQlOykv+cVPdiXH5M=
+github.com/butuzov/mirror v1.3.0 h1:HdWCXzmwlQHdVhwvsfBb2Au0r3HyINry3bDWLYXiKoc=
+github.com/butuzov/mirror v1.3.0/go.mod h1:AEij0Z8YMALaq4yQj9CPPVYOyJQyiexpQEQgihajRfI=
+github.com/catenacyber/perfsprint v0.8.2 h1:+o9zVmCSVa7M4MvabsWvESEhpsMkhfE7k0sHNGL95yw=
+github.com/catenacyber/perfsprint v0.8.2/go.mod h1:q//VWC2fWbcdSLEY1R3l8n0zQCDPdE4IjZwyY1HMunM=
github.com/ccojocar/zxcvbn-go v1.0.2 h1:na/czXU8RrhXO4EZme6eQJLR4PzcGsahsBOAwU6I3Vg=
github.com/ccojocar/zxcvbn-go v1.0.2/go.mod h1:g1qkXtUSvHP8lhHp5GrSmTz6uWALGRMQdw6Qnz/hi60=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
-github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
-github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
+github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
+github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/charithe/durationcheck v0.0.10 h1:wgw73BiocdBDQPik+zcEoBG/ob8uyBHf2iyoHGPf5w4=
github.com/charithe/durationcheck v0.0.10/go.mod h1:bCWXb7gYRysD1CU3C+u4ceO49LoGOY1C1L6uouGNreQ=
github.com/chavacava/garif v0.1.0 h1:2JHa3hbYf5D9dsgseMKAmc/MZ109otzgNFk5s87H9Pc=
@@ -129,25 +131,27 @@ github.com/chavacava/garif v0.1.0/go.mod h1:XMyYCkEL58DF0oyW4qDjjnPWONs2HBqYKI+U
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
-github.com/ckaznocha/intrange v0.1.2 h1:3Y4JAxcMntgb/wABQ6e8Q8leMd26JbX2790lIss9MTI=
-github.com/ckaznocha/intrange v0.1.2/go.mod h1:RWffCw/vKBwHeOEwWdCikAtY0q4gGt8VhJZEEA5n+RE=
+github.com/ckaznocha/intrange v0.3.0 h1:VqnxtK32pxgkhJgYQEeOArVidIPg+ahLP7WBOXZd5ZY=
+github.com/ckaznocha/intrange v0.3.0/go.mod h1:+I/o2d2A1FBHgGELbGxzIcyd3/9l9DuwjM8FsbSS3Lo=
github.com/client9/misspell v0.3.4 h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJI=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vcU=
github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
-github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
-github.com/curioswitch/go-reassign v0.2.0 h1:G9UZyOcpk/d7Gd6mqYgd8XYWFMw/znxwGDUstnC9DIo=
-github.com/curioswitch/go-reassign v0.2.0/go.mod h1:x6OpXuWvgfQaMGks2BZybTngWjT84hqJfKoO8Tt/Roc=
-github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg=
-github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
-github.com/daixiang0/gci v0.13.4 h1:61UGkmpoAcxHM2hhNkZEf5SzwQtWJXTSws7jaPyqwlw=
-github.com/daixiang0/gci v0.13.4/go.mod h1:12etP2OniiIdP4q+kjUGrC/rUagga7ODbqsom5Eo5Yk=
+github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
+github.com/curioswitch/go-reassign v0.3.0 h1:dh3kpQHuADL3cobV/sSGETA8DOv457dwl+fbBAhrQPs=
+github.com/curioswitch/go-reassign v0.3.0/go.mod h1:nApPCCTtqLJN/s8HfItCcKV0jIPwluBOvZP+dsJGA88=
+github.com/cyphar/filepath-securejoin v0.2.5 h1:6iR5tXJ/e6tJZzzdMc1km3Sa7RRIVBKAK32O2s7AYfo=
+github.com/cyphar/filepath-securejoin v0.2.5/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
+github.com/daixiang0/gci v0.13.5 h1:kThgmH1yBmZSBCh1EJVxQ7JsHpm5Oms0AMed/0LaH4c=
+github.com/daixiang0/gci v0.13.5/go.mod h1:12etP2OniiIdP4q+kjUGrC/rUagga7ODbqsom5Eo5Yk=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/denis-tingaikin/go-header v0.5.0 h1:SRdnP5ZKvcO9KKRP1KJrhFR3RrlGuD+42t4429eC9k8=
github.com/denis-tingaikin/go-header v0.5.0/go.mod h1:mMenU5bWrok6Wl2UsZjy+1okegmwQ3UgWl4V1D8gjlY=
+github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI=
+github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
@@ -157,41 +161,45 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7
github.com/ettle/strcase v0.2.0 h1:fGNiVF21fHXpX1niBgk0aROov1LagYsOwV/xqKDKR/Q=
github.com/ettle/strcase v0.2.0/go.mod h1:DajmHElDSaX76ITe3/VHVyMin4LWSJN5Z909Wp+ED1A=
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
-github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4=
-github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI=
+github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
+github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4=
github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94=
github.com/firefart/nonamedreturns v1.0.5 h1:tM+Me2ZaXs8tfdDw3X6DOX++wMCOqzYUho6tUTYIdRA=
github.com/firefart/nonamedreturns v1.0.5/go.mod h1:gHJjDqhGM4WyPt639SOZs+G89Ko7QKH5R5BhnO6xJhw=
-github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
-github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
+github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE=
+github.com/frankban/quicktest v1.14.3/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps=
github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI=
github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU=
github.com/fzipp/gocyclo v0.6.0 h1:lsblElZG7d3ALtGMx9fmxeTKZaLLpU8mET09yN4BBLo=
github.com/fzipp/gocyclo v0.6.0/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA=
-github.com/ghostiam/protogetter v0.3.6 h1:R7qEWaSgFCsy20yYHNIJsU9ZOb8TziSRRxuAOTVKeOk=
-github.com/ghostiam/protogetter v0.3.6/go.mod h1:7lpeDnEJ1ZjL/YtyoN99ljO4z0pd3H0d18/t2dPBxHw=
-github.com/go-critic/go-critic v0.11.4 h1:O7kGOCx0NDIni4czrkRIXTnit0mkyKOCePh3My6OyEU=
-github.com/go-critic/go-critic v0.11.4/go.mod h1:2QAdo4iuLik5S9YG0rT4wcZ8QxwHYkrr6/2MWAiv/vc=
+github.com/ghostiam/protogetter v0.3.9 h1:j+zlLLWzqLay22Cz/aYwTHKQ88GE2DQ6GkWSYFOI4lQ=
+github.com/ghostiam/protogetter v0.3.9/go.mod h1:WZ0nw9pfzsgxuRsPOFQomgDVSWtDLJRfQJEhsGbmQMA=
+github.com/go-critic/go-critic v0.12.0 h1:iLosHZuye812wnkEz1Xu3aBwn5ocCPfc9yqmFG9pa6w=
+github.com/go-critic/go-critic v0.12.0/go.mod h1:DpE0P6OVc6JzVYzmM5gq5jMU31zLr4am5mB/VfFK64w=
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI=
github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic=
-github.com/go-git/go-billy/v5 v5.5.0 h1:yEY4yhzCDuMGSv83oGxiBotRzhwhNr8VZyphhiu+mTU=
-github.com/go-git/go-billy/v5 v5.5.0/go.mod h1:hmexnoNsr2SJU1Ju67OaNz5ASJY3+sHgFRpCtpDCKow=
-github.com/go-git/go-git/v5 v5.12.0 h1:7Md+ndsjrzZxbddRDZjF14qK+NN56sy6wkqaVrjZtys=
-github.com/go-git/go-git/v5 v5.12.0/go.mod h1:FTM9VKtnI2m65hNI/TenDDDnUf2Q9FHnXYjuz9i5OEY=
+github.com/go-git/go-billy/v5 v5.6.0 h1:w2hPNtoehvJIxR00Vb4xX94qHQi/ApZfX+nBE2Cjio8=
+github.com/go-git/go-billy/v5 v5.6.0/go.mod h1:sFDq7xD3fn3E0GOwUSZqHo9lrkmx8xJhA0ZrfvjBRGM=
+github.com/go-git/go-git/v5 v5.13.0 h1:vLn5wlGIh/X78El6r3Jr+30W16Blk0CTcxTYcYPWi5E=
+github.com/go-git/go-git/v5 v5.13.0/go.mod h1:Wjo7/JyVKtQgUNdXYXIepzWfJQkUEIGvkvVkiXRR/zw=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
-github.com/go-jose/go-jose/v3 v3.0.3 h1:fFKWeig/irsp7XD2zBxvnmA/XaRWp5V3CBsZXJF7G7k=
-github.com/go-jose/go-jose/v3 v3.0.3/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ=
+github.com/go-jose/go-jose/v3 v3.0.4 h1:Wp5HA7bLQcKnf6YYao/4kpRpVMp/yf6+pJKV8WFSaNY=
+github.com/go-jose/go-jose/v3 v3.0.4/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
-github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
-github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
+github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
+github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
+github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
+github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
+github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI=
+github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
@@ -216,14 +224,14 @@ github.com/go-toolsmith/strparse v1.1.0 h1:GAioeZUK9TGxnLS+qfdqNbA4z0SSm5zVNtCQi
github.com/go-toolsmith/strparse v1.1.0/go.mod h1:7ksGy58fsaQkGQlY8WVoBFNyEPMGuJin1rfoPS4lBSQ=
github.com/go-toolsmith/typep v1.1.0 h1:fIRYDyF+JywLfqzyhdiHzRop/GQDxxNhLGQ6gFUNHus=
github.com/go-toolsmith/typep v1.1.0/go.mod h1:fVIw+7zjdsMxDA3ITWnH1yOiw1rnTQKCsF/sk2H/qig=
-github.com/go-viper/mapstructure/v2 v2.0.0 h1:dhn8MZ1gZ0mzeodTG3jt5Vj/o87xZKuNAprG2mQfMfc=
-github.com/go-viper/mapstructure/v2 v2.0.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
-github.com/go-xmlfmt/xmlfmt v1.1.2 h1:Nea7b4icn8s57fTx1M5AI4qQT5HEM3rVUO8MuE6g80U=
-github.com/go-xmlfmt/xmlfmt v1.1.2/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM=
+github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss=
+github.com/go-viper/mapstructure/v2 v2.2.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
+github.com/go-xmlfmt/xmlfmt v1.1.3 h1:t8Ey3Uy7jDSEisW2K3somuMKIpzktkWptA0iFCnRUWY=
+github.com/go-xmlfmt/xmlfmt v1.1.3/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
-github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw=
-github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU=
+github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E=
+github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
@@ -257,20 +265,20 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
-github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a h1:w8hkcTqaFpzKqonE9uMCefW1WDie15eSP/4MssdenaM=
-github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk=
-github.com/golangci/gofmt v0.0.0-20231018234816-f50ced29576e h1:ULcKCDV1LOZPFxGZaA6TlQbiM3J2GCPnkx/bGF6sX/g=
-github.com/golangci/gofmt v0.0.0-20231018234816-f50ced29576e/go.mod h1:Pm5KhLPA8gSnQwrQ6ukebRcapGb/BG9iUkdaiCcGHJM=
-github.com/golangci/golangci-lint v1.59.1 h1:CRRLu1JbhK5avLABFJ/OHVSQ0Ie5c4ulsOId1h3TTks=
-github.com/golangci/golangci-lint v1.59.1/go.mod h1:jX5Oif4C7P0j9++YB2MMJmoNrb01NJ8ITqKWNLewThg=
+github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 h1:WUvBfQL6EW/40l6OmeSBYQJNSif4O11+bmWEz+C7FYw=
+github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32/go.mod h1:NUw9Zr2Sy7+HxzdjIULge71wI6yEg1lWQr7Evcu8K0E=
+github.com/golangci/go-printf-func-name v0.1.0 h1:dVokQP+NMTO7jwO4bwsRwLWeudOVUPPyAKJuzv8pEJU=
+github.com/golangci/go-printf-func-name v0.1.0/go.mod h1:wqhWFH5mUdJQhweRnldEywnR5021wTdZSNgwYceV14s=
+github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d h1:viFft9sS/dxoYY0aiOTsLKO2aZQAPT4nlQCsimGcSGE=
+github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d/go.mod h1:ivJ9QDg0XucIkmwhzCDsqcnxxlDStoTl89jDMIoNxKY=
+github.com/golangci/golangci-lint v1.64.8 h1:y5TdeVidMtBGG32zgSC7ZXTFNHrsJkDnpO4ItB3Am+I=
+github.com/golangci/golangci-lint v1.64.8/go.mod h1:5cEsUQBSr6zi8XI8OjmcY2Xmliqc4iYL7YoPrL+zLJ4=
github.com/golangci/misspell v0.6.0 h1:JCle2HUTNWirNlDIAUO44hUsKhOFqGPoC4LZxlaSXDs=
github.com/golangci/misspell v0.6.0/go.mod h1:keMNyY6R9isGaSAu+4Q8NMBwMPkh15Gtc8UCVoDtAWo=
-github.com/golangci/modinfo v0.3.4 h1:oU5huX3fbxqQXdfspamej74DFX0kyGLkw1ppvXoJ8GA=
-github.com/golangci/modinfo v0.3.4/go.mod h1:wytF1M5xl9u0ij8YSvhkEVPP3M5Mc7XLl1pxH3B2aUM=
github.com/golangci/plugin-module-register v0.1.1 h1:TCmesur25LnyJkpsVrupv1Cdzo+2f7zX0H6Jkw1Ol6c=
github.com/golangci/plugin-module-register v0.1.1/go.mod h1:TTpqoB6KkwOJMV8u7+NyXMrkwwESJLOkfl9TxR1DGFc=
-github.com/golangci/revgrep v0.5.3 h1:3tL7c1XBMtWHHqVpS5ChmiAAoe4PF/d5+ULzV9sLAzs=
-github.com/golangci/revgrep v0.5.3/go.mod h1:U4R/s9dlXZsg8uJmaR1GrloUr14D7qDl8gi2iPXJH8k=
+github.com/golangci/revgrep v0.8.0 h1:EZBctwbVd0aMeRnNUsFogoyayvKHyxlV3CdUA46FX2s=
+github.com/golangci/revgrep v0.8.0/go.mod h1:U4R/s9dlXZsg8uJmaR1GrloUr14D7qDl8gi2iPXJH8k=
github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed h1:IURFTjxeTfNFP0hTEi1YKjB/ub8zkpaOqFFMApi2EAs=
github.com/golangci/unconvert v0.0.0-20240309020433-c5143eacb3ed/go.mod h1:XLXN8bNw4CGRPaqgl3bv/lhz7bsGPh4/xSaMTbo2vkQ=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
@@ -288,8 +296,8 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
-github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
-github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
+github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/go-github/v66 v66.0.1-0.20241027130611-9e5757d5a766 h1:Ne77zoI0NHPnZ/GPRzTVsedUUrkrI2UcnS821dfqp28=
github.com/google/go-github/v66 v66.0.1-0.20241027130611-9e5757d5a766/go.mod h1:+4SO9Zkuyf8ytMj0csN1NR/5OTR+MfqPp8P8dVlcvY4=
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
@@ -304,8 +312,8 @@ github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hf
github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
-github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6 h1:k7nVchz72niMH6YLQNvHSdIE7iqsQxK1P41mySCvssg=
-github.com/google/pprof v0.0.0-20240424215950-a892ee059fd6/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw=
+github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad h1:a6HEuzUHeKH6hwfN/ZoQgRgVIWFJljSWa/zetS2WTvg=
+github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad/go.mod h1:vavhavw2zAxS5dIdcRluK6cSGGPlZynqzFM8NdvU144=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@@ -316,15 +324,16 @@ github.com/gordonklaus/ineffassign v0.1.0/go.mod h1:Qcp2HIAYhR7mNUVSIxZww3Guk4it
github.com/gostaticanalysis/analysisutil v0.7.1 h1:ZMCjoue3DtDWQ5WyU16YbjbQEQ3VuzwxALrpYd+HeKk=
github.com/gostaticanalysis/analysisutil v0.7.1/go.mod h1:v21E3hY37WKMGSnbsw2S/ojApNWb6C1//mXO48CXbVc=
github.com/gostaticanalysis/comment v1.4.1/go.mod h1:ih6ZxzTHLdadaiSnF5WY3dxUoXfXAlTaRzuaNDlSado=
-github.com/gostaticanalysis/comment v1.4.2 h1:hlnx5+S2fY9Zo9ePo4AhgYsYHbM2+eAv8m/s1JiCd6Q=
github.com/gostaticanalysis/comment v1.4.2/go.mod h1:KLUTGDv6HOCotCH8h2erHKmpci2ZoR8VPu34YA2uzdM=
-github.com/gostaticanalysis/forcetypeassert v0.1.0 h1:6eUflI3DiGusXGK6X7cCcIgVCpZ2CiZ1Q7jl6ZxNV70=
-github.com/gostaticanalysis/forcetypeassert v0.1.0/go.mod h1:qZEedyP/sY1lTGV1uJ3VhWZ2mqag3IkWsDHVbplHXak=
+github.com/gostaticanalysis/comment v1.5.0 h1:X82FLl+TswsUMpMh17srGRuKaaXprTaytmEpgnKIDu8=
+github.com/gostaticanalysis/comment v1.5.0/go.mod h1:V6eb3gpCv9GNVqb6amXzEUX3jXLVK/AdA+IrAMSqvEc=
+github.com/gostaticanalysis/forcetypeassert v0.2.0 h1:uSnWrrUEYDr86OCxWa4/Tp2jeYDlogZiZHzGkWFefTk=
+github.com/gostaticanalysis/forcetypeassert v0.2.0/go.mod h1:M5iPavzE9pPqWyeiVXSFghQjljW1+l/Uke3PXHS6ILY=
github.com/gostaticanalysis/nilerr v0.1.1 h1:ThE+hJP0fEp4zWLkWHWcRyI2Od0p7DlgYG3Uqrmrcpk=
github.com/gostaticanalysis/nilerr v0.1.1/go.mod h1:wZYb6YI5YAxxq0i1+VJbY0s2YONW0HU0GPE3+5PWN4A=
github.com/gostaticanalysis/testutil v0.3.1-0.20210208050101-bfb5c8eec0e4/go.mod h1:D+FIZ+7OahH3ePw/izIEeH5I06eKs1IKI4Xr64/Am3M=
-github.com/gostaticanalysis/testutil v0.4.0 h1:nhdCmubdmDF6VEatUNjgUZBJKWRqugoISdUv3PPQgHY=
-github.com/gostaticanalysis/testutil v0.4.0/go.mod h1:bLIoPefWXrRi/ssLFWX1dx7Repi5x3CuviD3dgAZaBU=
+github.com/gostaticanalysis/testutil v0.5.0 h1:Dq4wT1DdTwTGCQQv3rl3IvD5Ld0E6HiY+3Zh0sUGqw8=
+github.com/gostaticanalysis/testutil v0.5.0/go.mod h1:OLQSbuM6zw2EvCcXTz1lVq5unyoNft372msDY0nY5Hs=
github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-checkpoint v0.5.0 h1:MFYpPZCnQqQTE18jFwSII6eUQrD/oxMFp3mlgcqk5mU=
@@ -332,14 +341,18 @@ github.com/hashicorp/go-checkpoint v0.5.0/go.mod h1:7nfLNL10NsxqO4iWuW6tWW0HjZuD
github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
-github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 h1:1/D3zfFHttUKaCaGKZ/dR2roBXv0vKbSCnssIldfQdI=
-github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320/go.mod h1:EiZBMaudVLy8fmjf9Npq1dq9RalhveqZG5w/yz3mHWs=
-github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c=
-github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=
+github.com/hashicorp/go-cty v1.5.0 h1:EkQ/v+dDNUqnuVpmS5fPqyY71NXVgT5gf32+57xY8g0=
+github.com/hashicorp/go-cty v1.5.0/go.mod h1:lFUCG5kd8exDobgSfyj4ONE/dc822kiYMguVKdHGMLM=
+github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k=
+github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=
+github.com/hashicorp/go-immutable-radix/v2 v2.1.0 h1:CUW5RYIcysz+D3B+l1mDeXrQ7fUvGGCwJfdASSzbrfo=
+github.com/hashicorp/go-immutable-radix/v2 v2.1.0/go.mod h1:hgdqLXA4f6NIjRVisM1TJ9aOJVNRqKZj+xDGF6m7PBw=
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
-github.com/hashicorp/go-plugin v1.6.0 h1:wgd4KxHJTVGGqWBq4QPB1i5BZNEx9BR8+OFmHDmTk8A=
-github.com/hashicorp/go-plugin v1.6.0/go.mod h1:lBS5MtSSBZk0SHc66KACcjjlU6WzEVP/8pwz68aMkCI=
+github.com/hashicorp/go-plugin v1.6.2 h1:zdGAEd0V1lCaU0u+MxWQhtSDQmahpkwOun8U8EiRVog=
+github.com/hashicorp/go-plugin v1.6.2/go.mod h1:CkgLQ5CZqNmdL9U9JzM532t8ZiYQ35+pj3b1FD37R0Q=
+github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU=
+github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk=
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
@@ -348,26 +361,28 @@ github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKe
github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
-github.com/hashicorp/hc-install v0.6.4 h1:QLqlM56/+SIIGvGcfFiwMY3z5WGXT066suo/v9Km8e0=
-github.com/hashicorp/hc-install v0.6.4/go.mod h1:05LWLy8TD842OtgcfBbOT0WMoInBMUSHjmDx10zuBIA=
+github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
+github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
+github.com/hashicorp/hc-install v0.9.1 h1:gkqTfE3vVbafGQo6VZXcy2v5yoz2bE0+nhZXruCuODQ=
+github.com/hashicorp/hc-install v0.9.1/go.mod h1:pWWvN/IrfeBK4XPeXXYkL6EjMufHkCK5DvwxeLKuBf0=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
-github.com/hashicorp/hcl/v2 v2.20.1 h1:M6hgdyz7HYt1UN9e61j+qKJBqR3orTWbI1HKBJEdxtc=
-github.com/hashicorp/hcl/v2 v2.20.1/go.mod h1:TZDqQ4kNKCbh1iJp99FdPiUaVDDUPivbqxZulxDYqL4=
+github.com/hashicorp/hcl/v2 v2.23.0 h1:Fphj1/gCylPxHutVSEOf2fBOh1VE4AuLV7+kbJf3qos=
+github.com/hashicorp/hcl/v2 v2.23.0/go.mod h1:62ZYHrXgPoX8xBnzl8QzbWq4dyDsDtfCRgIq1rbJEvA=
github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y=
github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
-github.com/hashicorp/terraform-exec v0.21.0 h1:uNkLAe95ey5Uux6KJdua6+cv8asgILFVWkd/RG0D2XQ=
-github.com/hashicorp/terraform-exec v0.21.0/go.mod h1:1PPeMYou+KDUSSeRE9szMZ/oHf4fYUmB923Wzbq1ICg=
-github.com/hashicorp/terraform-json v0.22.1 h1:xft84GZR0QzjPVWs4lRUwvTcPnegqlyS7orfb5Ltvec=
-github.com/hashicorp/terraform-json v0.22.1/go.mod h1:JbWSQCLFSXFFhg42T7l9iJwdGXBYV8fmmD6o/ML4p3A=
-github.com/hashicorp/terraform-plugin-go v0.23.0 h1:AALVuU1gD1kPb48aPQUjug9Ir/125t+AAurhqphJ2Co=
-github.com/hashicorp/terraform-plugin-go v0.23.0/go.mod h1:1E3Cr9h2vMlahWMbsSEcNrOCxovCZhOOIXjFHbjc/lQ=
+github.com/hashicorp/terraform-exec v0.22.0 h1:G5+4Sz6jYZfRYUCg6eQgDsqTzkNXV+fP8l+uRmZHj64=
+github.com/hashicorp/terraform-exec v0.22.0/go.mod h1:bjVbsncaeh8jVdhttWYZuBGj21FcYw6Ia/XfHcNO7lQ=
+github.com/hashicorp/terraform-json v0.24.0 h1:rUiyF+x1kYawXeRth6fKFm/MdfBS6+lW4NbeATsYz8Q=
+github.com/hashicorp/terraform-json v0.24.0/go.mod h1:Nfj5ubo9xbu9uiAoZVBsNOjvNKB66Oyrvtit74kC7ow=
+github.com/hashicorp/terraform-plugin-go v0.26.0 h1:cuIzCv4qwigug3OS7iKhpGAbZTiypAfFQmw8aE65O2M=
+github.com/hashicorp/terraform-plugin-go v0.26.0/go.mod h1:+CXjuLDiFgqR+GcrM5a2E2Kal5t5q2jb0E3D57tTdNY=
github.com/hashicorp/terraform-plugin-log v0.9.0 h1:i7hOA+vdAItN1/7UrfBqBwvYPQ9TFvymaRGZED3FCV0=
github.com/hashicorp/terraform-plugin-log v0.9.0/go.mod h1:rKL8egZQ/eXSyDqzLUuwUYLVdlYeamldAHSxjUFADow=
-github.com/hashicorp/terraform-plugin-sdk/v2 v2.34.0 h1:kJiWGx2kiQVo97Y5IOGR4EMcZ8DtMswHhUuFibsCQQE=
-github.com/hashicorp/terraform-plugin-sdk/v2 v2.34.0/go.mod h1:sl/UoabMc37HA6ICVMmGO+/0wofkVIRxf+BMb/dnoIg=
-github.com/hashicorp/terraform-registry-address v0.2.3 h1:2TAiKJ1A3MAkZlH1YI/aTVcLZRu7JseiXNRHbOAyoTI=
-github.com/hashicorp/terraform-registry-address v0.2.3/go.mod h1:lFHA76T8jfQteVfT7caREqguFrW3c4MFSPhZB7HHgUM=
+github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.1 h1:WNMsTLkZf/3ydlgsuXePa3jvZFwAJhruxTxP/c1Viuw=
+github.com/hashicorp/terraform-plugin-sdk/v2 v2.36.1/go.mod h1:P6o64QS97plG44iFzSM6rAn6VJIC/Sy9a9IkEtl79K4=
+github.com/hashicorp/terraform-registry-address v0.2.4 h1:JXu/zHB2Ymg/TGVCRu10XqNa4Sh2bWcqCNyKWjnCPJA=
+github.com/hashicorp/terraform-registry-address v0.2.4/go.mod h1:tUNYTVyCtU4OIGXXMDp7WNcJ+0W1B4nmstVDgHMjfAU=
github.com/hashicorp/terraform-svchost v0.1.1 h1:EZZimZ1GxdqFRinZ1tpJwVxxt49xc/S52uzrw4x0jKQ=
github.com/hashicorp/terraform-svchost v0.1.1/go.mod h1:mNsjQfZyf/Jhz35v6/0LWcv26+X7JPS+buii2c9/ctc=
github.com/hashicorp/yamux v0.1.1 h1:yrQxtgseBDrq9Y652vSRDvsKCJKOUD+GzTS4Y0Y8pvE=
@@ -385,10 +400,8 @@ github.com/jhump/protoreflect v1.15.1 h1:HUMERORf3I3ZdX05WaQ6MIpd/NJ434hTp5YiKgf
github.com/jhump/protoreflect v1.15.1/go.mod h1:jD/2GMKKE6OqX8qTjhADU1e6DShO+gavG9e0Q693nKo=
github.com/jingyugao/rowserrcheck v1.1.1 h1:zibz55j/MJtLsjP1OF4bSdgXxwL1b+Vn7Tjzq7gFzUs=
github.com/jingyugao/rowserrcheck v1.1.1/go.mod h1:4yvlZSDb3IyDTUZJUmpZfm2Hwok+Dtp+nu2qOq+er9c=
-github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af h1:KA9BjwUk7KlCh6S9EAGWBt1oExIUv9WyNCiRz5amv48=
-github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0=
-github.com/jjti/go-spancheck v0.6.1 h1:ZK/wE5Kyi1VX3PJpUO2oEgeoI4FWOUm7Shb2Gbv5obI=
-github.com/jjti/go-spancheck v0.6.1/go.mod h1:vF1QkOO159prdo6mHRxak2CpzDpHAfKiPUDP/NeRnX8=
+github.com/jjti/go-spancheck v0.6.4 h1:Tl7gQpYf4/TMU7AT84MN83/6PutY21Nb9fuQjFTpRRc=
+github.com/jjti/go-spancheck v0.6.4/go.mod h1:yAEYdKJ2lRkDA8g7X+oKUHXOWVAXSBJRv04OhF+QUjk=
github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
@@ -398,17 +411,17 @@ github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
-github.com/julz/importas v0.1.0 h1:F78HnrsjY3cR7j0etXy5+TU1Zuy7Xt08X/1aJnH5xXY=
-github.com/julz/importas v0.1.0/go.mod h1:oSFU2R4XK/P7kNBrnL/FEQlDGN1/6WoxXEjSSXO0DV0=
-github.com/karamaru-alpha/copyloopvar v1.1.0 h1:x7gNyKcC2vRBO1H2Mks5u1VxQtYvFiym7fCjIP8RPos=
-github.com/karamaru-alpha/copyloopvar v1.1.0/go.mod h1:u7CIfztblY0jZLOQZgH3oYsJzpC2A7S6u/lfgSXHy0k=
+github.com/julz/importas v0.2.0 h1:y+MJN/UdL63QbFJHws9BVC5RpA2iq0kpjrFajTGivjQ=
+github.com/julz/importas v0.2.0/go.mod h1:pThlt589EnCYtMnmhmRYY/qn9lCf/frPOK+WMx3xiJY=
+github.com/karamaru-alpha/copyloopvar v1.2.1 h1:wmZaZYIjnJ0b5UoKDjUHrikcV0zuPyyxI4SVplLd2CI=
+github.com/karamaru-alpha/copyloopvar v1.2.1/go.mod h1:nFmMlFNlClC2BPvNaHMdkirmTJxVCY0lhxBtlfOypMM=
github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4=
github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
-github.com/kisielk/errcheck v1.7.0 h1:+SbscKmWJ5mOK/bO1zS60F5I9WwZDWOfRsC4RwfwRV0=
-github.com/kisielk/errcheck v1.7.0/go.mod h1:1kLL+jV4e+CFfueBmI1dSK2ADDyQnlrnrY/FqKluHJQ=
+github.com/kisielk/errcheck v1.9.0 h1:9xt1zI9EBfcYBvdU1nVrzMzzUPUtPKs9bVSIM3TAb3M=
+github.com/kisielk/errcheck v1.9.0/go.mod h1:kQxWMMVZgIkDq7U8xtG/n2juOjbLgZtedi0D+/VL/i8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
-github.com/kkHAIKE/contextcheck v1.1.5 h1:CdnJh63tcDe53vG+RebdpdXJTc9atMgGqdx8LXxiilg=
-github.com/kkHAIKE/contextcheck v1.1.5/go.mod h1:O930cpht4xb1YQpK+1+AgoM3mFsvxr7uyFptcnWTYUA=
+github.com/kkHAIKE/contextcheck v1.1.6 h1:7HIyRcnyzxL9Lz06NGhiKvenXq7Zw6Q0UQu/ttjfJCE=
+github.com/kkHAIKE/contextcheck v1.1.6/go.mod h1:3dDbMRNBFaq8HFXWC1JyvDSPm43CmE6IuHam8Wr0rkg=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
@@ -423,18 +436,20 @@ github.com/kulti/thelper v0.6.3 h1:ElhKf+AlItIu+xGnI990no4cE2+XaSu1ULymV2Yulxs=
github.com/kulti/thelper v0.6.3/go.mod h1:DsqKShOvP40epevkFrvIwkCMNYxMeTNjdWL4dqWHZ6I=
github.com/kunwardeep/paralleltest v1.0.10 h1:wrodoaKYzS2mdNVnc4/w31YaXFtsc21PCTdvWJ/lDDs=
github.com/kunwardeep/paralleltest v1.0.10/go.mod h1:2C7s65hONVqY7Q5Efj5aLzRCNLjw2h4eMc9EcypGjcY=
-github.com/kyoh86/exportloopref v0.1.11 h1:1Z0bcmTypkL3Q4k+IDHMWTcnCliEZcaPiIe0/ymEyhQ=
-github.com/kyoh86/exportloopref v0.1.11/go.mod h1:qkV4UF1zGl6EkF1ox8L5t9SwyeBAZ3qLMd6up458uqA=
-github.com/lasiar/canonicalheader v1.1.1 h1:wC+dY9ZfiqiPwAexUApFush/csSPXeIi4QqyxXmng8I=
-github.com/lasiar/canonicalheader v1.1.1/go.mod h1:cXkb3Dlk6XXy+8MVQnF23CYKWlyA7kfQhSw2CcZtZb0=
-github.com/ldez/gomoddirectives v0.2.4 h1:j3YjBIjEBbqZ0NKtBNzr8rtMHTOrLPeiwTkfUJZ3alg=
-github.com/ldez/gomoddirectives v0.2.4/go.mod h1:oWu9i62VcQDYp9EQ0ONTfqLNh+mDLWWDO+SO0qSQw5g=
-github.com/ldez/tagliatelle v0.5.0 h1:epgfuYt9v0CG3fms0pEgIMNPuFf/LpPIfjk4kyqSioo=
-github.com/ldez/tagliatelle v0.5.0/go.mod h1:rj1HmWiL1MiKQuOONhd09iySTEkUuE/8+5jtPYz9xa4=
+github.com/lasiar/canonicalheader v1.1.2 h1:vZ5uqwvDbyJCnMhmFYimgMZnJMjwljN5VGY0VKbMXb4=
+github.com/lasiar/canonicalheader v1.1.2/go.mod h1:qJCeLFS0G/QlLQ506T+Fk/fWMa2VmBUiEI2cuMK4djI=
+github.com/ldez/exptostd v0.4.2 h1:l5pOzHBz8mFOlbcifTxzfyYbgEmoUqjxLFHZkjlbHXs=
+github.com/ldez/exptostd v0.4.2/go.mod h1:iZBRYaUmcW5jwCR3KROEZ1KivQQp6PHXbDPk9hqJKCQ=
+github.com/ldez/gomoddirectives v0.6.1 h1:Z+PxGAY+217f/bSGjNZr/b2KTXcyYLgiWI6geMBN2Qc=
+github.com/ldez/gomoddirectives v0.6.1/go.mod h1:cVBiu3AHR9V31em9u2kwfMKD43ayN5/XDgr+cdaFaKs=
+github.com/ldez/grignotin v0.9.0 h1:MgOEmjZIVNn6p5wPaGp/0OKWyvq42KnzAt/DAb8O4Ow=
+github.com/ldez/grignotin v0.9.0/go.mod h1:uaVTr0SoZ1KBii33c47O1M8Jp3OP3YDwhZCmzT9GHEk=
+github.com/ldez/tagliatelle v0.7.1 h1:bTgKjjc2sQcsgPiT902+aadvMjCeMHrY7ly2XKFORIk=
+github.com/ldez/tagliatelle v0.7.1/go.mod h1:3zjxUpsNB2aEZScWiZTHrAXOl1x25t3cRmzfK1mlo2I=
+github.com/ldez/usetesting v0.4.2 h1:J2WwbrFGk3wx4cZwSMiCQQ00kjGR0+tuuyW0Lqm4lwA=
+github.com/ldez/usetesting v0.4.2/go.mod h1:eEs46T3PpQ+9RgN9VjpY6qWdiw2/QmfiDeWmdZdrjIQ=
github.com/leonklingele/grouper v1.1.2 h1:o1ARBDLOmmasUaNDesWqWCIFH3u7hoFlM84YrjT3mIY=
github.com/leonklingele/grouper v1.1.2/go.mod h1:6D0M/HVkhs2yRKRFZUoGjeDy7EZTfFBE9gl4kjmIGkA=
-github.com/lufeee/execinquery v1.2.1 h1:hf0Ems4SHcUGBxpGN7Jz78z1ppVkP/837ZlETPCEtOM=
-github.com/lufeee/execinquery v1.2.1/go.mod h1:EC7DrEKView09ocscGHC+apXMIaorh4xqSxS/dy8SbM=
github.com/macabu/inamedparam v0.1.3 h1:2tk/phHkMlEL/1GNe/Yf6kkR/hkcUdAEY3L0hjYV1Mk=
github.com/macabu/inamedparam v0.1.3/go.mod h1:93FLICAIk/quk7eaPPQvbzihUdn/QkGDwIZEoLtpH6I=
github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo=
@@ -443,25 +458,25 @@ github.com/maratori/testableexamples v1.0.0 h1:dU5alXRrD8WKSjOUnmJZuzdxWOEQ57+7s
github.com/maratori/testableexamples v1.0.0/go.mod h1:4rhjL1n20TUTT4vdh3RDqSizKLyXp7K2u6HgraZCGzE=
github.com/maratori/testpackage v1.1.1 h1:S58XVV5AD7HADMmD0fNnziNHqKvSdDuEKdPD1rNTU04=
github.com/maratori/testpackage v1.1.1/go.mod h1:s4gRK/ym6AMrqpOa/kEbQTV4Q4jb7WeLZzVhVVVOQMc=
-github.com/matoous/godox v0.0.0-20230222163458-006bad1f9d26 h1:gWg6ZQ4JhDfJPqlo2srm/LN17lpybq15AryXIRcWYLE=
-github.com/matoous/godox v0.0.0-20230222163458-006bad1f9d26/go.mod h1:1BELzlh859Sh1c6+90blK8lbYy0kwQf1bYlBhBysy1s=
+github.com/matoous/godox v1.1.0 h1:W5mqwbyWrwZv6OQ5Z1a/DHGMOvXYCBP3+Ht7KMoJhq4=
+github.com/matoous/godox v1.1.0/go.mod h1:jgE/3fUXiTurkdHOLT5WEkThTSuE7yxHv5iWPa80afs=
github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE=
github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU=
github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
-github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
-github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
+github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
+github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
-github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
-github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
+github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
+github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
-github.com/mgechev/revive v1.3.7 h1:502QY0vQGe9KtYJ9FpxMz9rL+Fc/P13CI5POL4uHCcE=
-github.com/mgechev/revive v1.3.7/go.mod h1:RJ16jUbF0OWC3co/+XTxmFNgEpUPwnnA0BRllX2aDNA=
+github.com/mgechev/revive v1.7.0 h1:JyeQ4yO5K8aZhIKf5rec56u0376h8AlKNQEmjfkjKlY=
+github.com/mgechev/revive v1.7.0/go.mod h1:qZnwcNhoguE58dfi96IJeSTPeZQejNeoMQLUZGi4SW4=
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
@@ -479,28 +494,26 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJ
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
-github.com/moricho/tparallel v0.3.1 h1:fQKD4U1wRMAYNngDonW5XupoB/ZGJHdpzrWqgyg9krA=
-github.com/moricho/tparallel v0.3.1/go.mod h1:leENX2cUv7Sv2qDgdi0D0fCftN8fRC67Bcn8pqzeYNI=
+github.com/moricho/tparallel v0.3.2 h1:odr8aZVFA3NZrNybggMkYO3rgPRcqjeQUlBBFVxKHTI=
+github.com/moricho/tparallel v0.3.2/go.mod h1:OQ+K3b4Ln3l2TZveGCywybl68glfLEwFGqvnjok8b+U=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/nakabonne/nestif v0.3.1 h1:wm28nZjhQY5HyYPx+weN3Q65k6ilSBxDb8v5S81B81U=
github.com/nakabonne/nestif v0.3.1/go.mod h1:9EtoZochLn5iUprVDmDjqGKPofoUEBL8U4Ngq6aY7OE=
-github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
-github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/nishanths/exhaustive v0.12.0 h1:vIY9sALmw6T/yxiASewa4TQcFsVYZQQRUQJhKRf3Swg=
github.com/nishanths/exhaustive v0.12.0/go.mod h1:mEZ95wPIZW+x8kC4TgC+9YCUgiST7ecevsVDTgc2obs=
github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm/w98Vk=
github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c=
-github.com/nunnatsa/ginkgolinter v0.16.2 h1:8iLqHIZvN4fTLDC0Ke9tbSZVcyVHoBs0HIbnVSxfHJk=
-github.com/nunnatsa/ginkgolinter v0.16.2/go.mod h1:4tWRinDN1FeJgU+iJANW/kz7xKN5nYRAOfJDQUS9dOQ=
+github.com/nunnatsa/ginkgolinter v0.19.1 h1:mjwbOlDQxZi9Cal+KfbEJTCz327OLNfwNvoZ70NJ+c4=
+github.com/nunnatsa/ginkgolinter v0.19.1/go.mod h1:jkQ3naZDmxaZMXPWaS9rblH+i+GWXQCaS/JFIWcOH2s=
github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw=
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
-github.com/onsi/ginkgo/v2 v2.17.3 h1:oJcvKpIb7/8uLpDDtnQuf18xVnwKp8DTD7DQ6gTd/MU=
-github.com/onsi/ginkgo/v2 v2.17.3/go.mod h1:nP2DPOQoNsQmsVyv5rDA8JkXQoCs6goXIvr/PRJ1eCc=
-github.com/onsi/gomega v1.33.1 h1:dsYjIxxSR755MDmKVsaFQTE22ChNBcuuTWgkUDSubOk=
-github.com/onsi/gomega v1.33.1/go.mod h1:U4R44UsT+9eLIaYRB2a5qajjtQYn0hauxvRm16AVYg0=
+github.com/onsi/ginkgo/v2 v2.22.2 h1:/3X8Panh8/WwhU/3Ssa6rCKqPLuAkVY2I0RoyDLySlU=
+github.com/onsi/ginkgo/v2 v2.22.2/go.mod h1:oeMosUL+8LtarXBHu/c0bx2D/K9zyQ6uX3cTyztHwsk=
+github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8=
+github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY=
github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw=
github.com/otiai10/copy v1.14.0 h1:dCI/t1iTdYGtkvCuBG2BgR6KZa83PTclw4U5n2wAllU=
github.com/otiai10/copy v1.14.0/go.mod h1:ECfuL02W+/FkTWZWgQqXPWZgW9oeKCSQ5qVfSc4qc4w=
@@ -510,8 +523,8 @@ github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT9
github.com/otiai10/mint v1.3.1/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc=
github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
-github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
-github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
+github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
+github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4=
github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
@@ -520,8 +533,8 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/polyfloyd/go-errorlint v1.5.2 h1:SJhVik3Umsjh7mte1vE0fVZ5T1gznasQG3PV7U5xFdA=
-github.com/polyfloyd/go-errorlint v1.5.2/go.mod h1:sH1QC1pxxi0fFecsVIzBmxtrgd9IF/SkJpA6wqyKAJs=
+github.com/polyfloyd/go-errorlint v1.7.1 h1:RyLVXIbosq1gBdk/pChWA8zWYLsq9UEw7a1L5TVMCnA=
+github.com/polyfloyd/go-errorlint v1.7.1/go.mod h1:aXjNb1x2TNhoLsk26iv1yl7a+zTnXPhwEMtEXukiLR8=
github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4g=
github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
@@ -546,8 +559,8 @@ github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4O
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU=
github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
-github.com/quasilyte/go-ruleguard v0.4.2 h1:htXcXDK6/rO12kiTHKfHuqR4kr3Y4M0J0rOL6CH/BYs=
-github.com/quasilyte/go-ruleguard v0.4.2/go.mod h1:GJLgqsLeo4qgavUoL8JeGFNS7qcisx3awV/w9eWTmNI=
+github.com/quasilyte/go-ruleguard v0.4.3-0.20240823090925-0fe6f58b47b1 h1:+Wl/0aFp0hpuHM3H//KMft64WQ1yX9LdJY64Qm/gFCo=
+github.com/quasilyte/go-ruleguard v0.4.3-0.20240823090925-0fe6f58b47b1/go.mod h1:GJLgqsLeo4qgavUoL8JeGFNS7qcisx3awV/w9eWTmNI=
github.com/quasilyte/go-ruleguard/dsl v0.3.22 h1:wd8zkOhSNr+I+8Qeciml08ivDt1pSXe60+5DqOpCjPE=
github.com/quasilyte/go-ruleguard/dsl v0.3.22/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU=
github.com/quasilyte/gogrep v0.5.0 h1:eTKODPXbI8ffJMN+W2aE0+oL0z/nh8/5eNdiO34SOAo=
@@ -556,28 +569,31 @@ github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 h1:TCg2WBOl
github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0=
github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 h1:M8mH9eK4OUR4lu7Gd+PU1fV2/qnDNfzT635KRSObncs=
github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567/go.mod h1:DWNGW8A4Y+GyBgPuaQJuWiy0XYftx4Xm/y5Jqk9I6VQ=
+github.com/raeperd/recvcheck v0.2.0 h1:GnU+NsbiCqdC2XX5+vMZzP+jAJC5fht7rcVTAhX74UI=
+github.com/raeperd/recvcheck v0.2.0/go.mod h1:n04eYkwIR0JbgD73wT8wL4JjPC3wm0nFtzBnWNocnYU=
+github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
+github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
+github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
-github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
-github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
+github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
+github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
-github.com/ryancurrah/gomodguard v1.3.2 h1:CuG27ulzEB1Gu5Dk5gP8PFxSOZ3ptSdP5iI/3IXxM18=
-github.com/ryancurrah/gomodguard v1.3.2/go.mod h1:LqdemiFomEjcxOqirbQCb3JFvSxH2JUYMerTFd3sF2o=
+github.com/ryancurrah/gomodguard v1.3.5 h1:cShyguSwUEeC0jS7ylOiG/idnd1TpJ1LfHGpV3oJmPU=
+github.com/ryancurrah/gomodguard v1.3.5/go.mod h1:MXlEPQRxgfPQa62O8wzK3Ozbkv9Rkqr+wKjSxTdsNJE=
github.com/ryanrolds/sqlclosecheck v0.5.1 h1:dibWW826u0P8jNLsLN+En7+RqWWTYrjCB9fJfSfdyCU=
github.com/ryanrolds/sqlclosecheck v0.5.1/go.mod h1:2g3dUjoS6AL4huFdv6wn55WpLIDjY7ZgUR4J8HOO/XQ=
-github.com/sanposhiho/wastedassign/v2 v2.0.7 h1:J+6nrY4VW+gC9xFzUc+XjPD3g3wF3je/NsJFwFK7Uxc=
-github.com/sanposhiho/wastedassign/v2 v2.0.7/go.mod h1:KyZ0MWTwxxBmfwn33zh3k1dmsbF2ud9pAAGfoLfjhtI=
-github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4=
-github.com/santhosh-tekuri/jsonschema/v5 v5.3.1/go.mod h1:uToXkOrWAZ6/Oc07xWQrPOhJotwFIyu2bBVN41fcDUY=
+github.com/sanposhiho/wastedassign/v2 v2.1.0 h1:crurBF7fJKIORrV85u9UUpePDYGWnwvv3+A96WvwXT0=
+github.com/sanposhiho/wastedassign/v2 v2.1.0/go.mod h1:+oSmSC+9bQ+VUAxA66nBb0Z7N8CK7mscKTDYC6aIek4=
+github.com/santhosh-tekuri/jsonschema/v6 v6.0.1 h1:PKK9DyHxif4LZo+uQSgXNqs0jj5+xZwwfKHgph2lxBw=
+github.com/santhosh-tekuri/jsonschema/v6 v6.0.1/go.mod h1:JXeL+ps8p7/KNMjDQk3TCwPpBy0wYklyWTfbkIzdIFU=
github.com/sashamelentyev/interfacebloat v1.1.0 h1:xdRdJp0irL086OyW1H/RTZTr1h/tMEOsumirXcOJqAw=
github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84dxiN1iBi9+G8zZIhPVoNjQ=
-github.com/sashamelentyev/usestdlibvars v1.26.0 h1:LONR2hNVKxRmzIrZR0PhSF3mhCAzvnr+DcUiHgREfXE=
-github.com/sashamelentyev/usestdlibvars v1.26.0/go.mod h1:9nl0jgOfHKWNFS43Ojw0i7aRoS4j6EBye3YBhmAIRF8=
-github.com/securego/gosec/v2 v2.20.1-0.20240525090044-5f0084eb01a9 h1:rnO6Zp1YMQwv8AyxzuwsVohljJgp4L0ZqiCgtACsPsc=
-github.com/securego/gosec/v2 v2.20.1-0.20240525090044-5f0084eb01a9/go.mod h1:dg7lPlu/xK/Ut9SedURCoZbVCR4yC7fM65DtH9/CDHs=
+github.com/sashamelentyev/usestdlibvars v1.28.0 h1:jZnudE2zKCtYlGzLVreNp5pmCdOxXUzwsMDBkR21cyQ=
+github.com/sashamelentyev/usestdlibvars v1.28.0/go.mod h1:9nl0jgOfHKWNFS43Ojw0i7aRoS4j6EBye3YBhmAIRF8=
+github.com/securego/gosec/v2 v2.22.2 h1:IXbuI7cJninj0nRpZSLCUlotsj8jGusohfONMrHoF6g=
+github.com/securego/gosec/v2 v2.22.2/go.mod h1:UEBGA+dSKb+VqM6TdehR7lnQtIIMorYJ4/9CW1KVQBE=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
-github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c h1:W65qqJCIOVP4jpqPQ0YvHYKwcMEMVWIzWC5iNQQfBTU=
-github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c/go.mod h1:/PevMnwAxekIXwN8qQyfc5gl2NlkB3CQlkizAbOkeBs=
github.com/shurcooL/githubv4 v0.0.0-20221126192849-0b5c4c7994eb h1:foJysa74+t41fG7adnt+TkfcNxQUWid8R/HlXe+Mmbw=
github.com/shurcooL/githubv4 v0.0.0-20221126192849-0b5c4c7994eb/go.mod h1:hAF0iLZy4td2EX+/8Tw+4nodhlMrwN3HupfaXj3zkGo=
github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk=
@@ -591,30 +607,31 @@ github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+Wwfd0XE=
github.com/sivchari/containedctx v1.0.3/go.mod h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4=
-github.com/sivchari/tenv v1.7.1 h1:PSpuD4bu6fSmtWMxSGWcvqUUgIn7k3yOJhOIzVWn8Ak=
-github.com/sivchari/tenv v1.7.1/go.mod h1:64yStXKSOxDfX47NlhVwND4dHwfZDdbp2Lyl018Icvg=
-github.com/skeema/knownhosts v1.2.2 h1:Iug2P4fLmDw9f41PB6thxUkNUkJzB5i+1/exaj40L3A=
-github.com/skeema/knownhosts v1.2.2/go.mod h1:xYbVRSPxqBZFrdmDyMmsOs+uX1UZC3nTN3ThzgDxUwo=
-github.com/sonatard/noctx v0.0.2 h1:L7Dz4De2zDQhW8S0t+KUjY0MAQJd6SgVwhzNIc4ok00=
-github.com/sonatard/noctx v0.0.2/go.mod h1:kzFz+CzWSjQ2OzIm46uJZoXuBpa2+0y3T36U18dWqIo=
+github.com/sivchari/tenv v1.12.1 h1:+E0QzjktdnExv/wwsnnyk4oqZBUfuh89YMQT1cyuvSY=
+github.com/sivchari/tenv v1.12.1/go.mod h1:1LjSOUCc25snIr5n3DtGGrENhX3LuWefcplwVGC24mw=
+github.com/skeema/knownhosts v1.3.0 h1:AM+y0rI04VksttfwjkSTNQorvGqmwATnvnAHpSgc0LY=
+github.com/skeema/knownhosts v1.3.0/go.mod h1:sPINvnADmT/qYH1kfv+ePMmOBTH6Tbl7b5LvTDjFK7M=
+github.com/sonatard/noctx v0.1.0 h1:JjqOc2WN16ISWAjAk8M5ej0RfExEXtkEyExl2hLW+OM=
+github.com/sonatard/noctx v0.1.0/go.mod h1:0RvBxqY8D4j9cTTTWE8ylt2vqj2EPI8fHmrxHdsaZ2c=
github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0=
github.com/sourcegraph/go-diff v0.7.0/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs=
-github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8=
-github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY=
+github.com/spf13/afero v1.12.0 h1:UcOPyRBYczmFn6yvphxkn9ZEOY65cpwGKb5mL36mrqs=
+github.com/spf13/afero v1.12.0/go.mod h1:ZTlWwG4/ahT8W7T0WQ5uYmjI9duaLQGy3Q2OAl4sk/4=
github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w=
github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
-github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
-github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
+github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
+github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
-github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
+github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.12.0 h1:CZ7eSOd3kZoaYDLbXnmzgQI5RlciuXBMA+18HwHRfZQ=
github.com/spf13/viper v1.12.0/go.mod h1:b6COn30jlNxbm/V2IqWiNWkJ+vZNiMNksliPCiuKtSI=
github.com/ssgreg/nlreturn/v2 v2.2.1 h1:X4XDI7jstt3ySqGU86YGAURbxw3oTDPK9sPEi6YEwQ0=
github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I=
-github.com/stbenjam/no-sprintf-host-port v0.1.1 h1:tYugd/yrm1O0dV+ThCbaKZh195Dfm07ysF0U6JQXczc=
-github.com/stbenjam/no-sprintf-host-port v0.1.1/go.mod h1:TLhvtIvONRzdmkFiio4O8LHsN9N74I+PhRquPsxpL0I=
+github.com/stbenjam/no-sprintf-host-port v0.2.0 h1:i8pxvGrt1+4G0czLr/WnmyH7zbZ8Bg8etvARQ1rpyl4=
+github.com/stbenjam/no-sprintf-host-port v0.2.0/go.mod h1:eL0bQ9PasS0hsyTyfTjjG+E80QIyPnBVQbYZyv20Jfk=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
@@ -630,34 +647,34 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
-github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
-github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
+github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
+github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs=
github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0=
-github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c h1:+aPplBwWcHBo6q9xrfWdMrT9o4kltkmmvpemgIjep/8=
-github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c/go.mod h1:SbErYREK7xXdsRiigaQiQkI9McGRzYMvlKYaP3Nimdk=
-github.com/tdakkota/asciicheck v0.2.0 h1:o8jvnUANo0qXtnslk2d3nMKTFNlOnJjRrNcj0j9qkHM=
-github.com/tdakkota/asciicheck v0.2.0/go.mod h1:Qb7Y9EgjCLJGup51gDHFzbI08/gbGhL/UVhYIPWG2rg=
+github.com/tdakkota/asciicheck v0.4.1 h1:bm0tbcmi0jezRA2b5kg4ozmMuGAFotKI3RZfrhfovg8=
+github.com/tdakkota/asciicheck v0.4.1/go.mod h1:0k7M3rCfRXb0Z6bwgvkEIMleKH3kXNz9UqJ9Xuqopr8=
github.com/tenntenn/modver v1.0.1 h1:2klLppGhDgzJrScMpkj9Ujy3rXPUspSjAcev9tSEBgA=
github.com/tenntenn/modver v1.0.1/go.mod h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0=
github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3 h1:f+jULpRQGxTSkNYKJ51yaw6ChIqO+Je8UqsTKN/cDag=
github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY=
-github.com/tetafro/godot v1.4.16 h1:4ChfhveiNLk4NveAZ9Pu2AN8QZ2nkUGFuadM9lrr5D0=
-github.com/tetafro/godot v1.4.16/go.mod h1:2oVxTBSftRTh4+MVfUaUXR6bn2GDXCaMcOG4Dk3rfio=
-github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966 h1:quvGphlmUVU+nhpFa4gg4yJyTRJ13reZMDHrKwYw53M=
-github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966/go.mod h1:27bSVNWSBOHm+qRp1T9qzaIpsWEP6TbUnei/43HK+PQ=
-github.com/timonwong/loggercheck v0.9.4 h1:HKKhqrjcVj8sxL7K77beXh0adEm6DLjV/QOGeMXEVi4=
-github.com/timonwong/loggercheck v0.9.4/go.mod h1:caz4zlPcgvpEkXgVnAJGowHAMW2NwHaNlpS8xDbVhTg=
-github.com/tomarrell/wrapcheck/v2 v2.8.3 h1:5ov+Cbhlgi7s/a42BprYoxsr73CbdMUTzE3bRDFASUs=
-github.com/tomarrell/wrapcheck/v2 v2.8.3/go.mod h1:g9vNIyhb5/9TQgumxQyOEqDHsmGYcGsVMOx/xGkqdMo=
+github.com/tetafro/godot v1.5.0 h1:aNwfVI4I3+gdxjMgYPus9eHmoBeJIbnajOyqZYStzuw=
+github.com/tetafro/godot v1.5.0/go.mod h1:2oVxTBSftRTh4+MVfUaUXR6bn2GDXCaMcOG4Dk3rfio=
+github.com/timakin/bodyclose v0.0.0-20241017074812-ed6a65f985e3 h1:y4mJRFlM6fUyPhoXuFg/Yu02fg/nIPFMOY8tOqppoFg=
+github.com/timakin/bodyclose v0.0.0-20241017074812-ed6a65f985e3/go.mod h1:mkjARE7Yr8qU23YcGMSALbIxTQ9r9QBVahQOBRfU460=
+github.com/timonwong/loggercheck v0.10.1 h1:uVZYClxQFpw55eh+PIoqM7uAOHMrhVcDoWDery9R8Lg=
+github.com/timonwong/loggercheck v0.10.1/go.mod h1:HEAWU8djynujaAVX7QI65Myb8qgfcZ1uKbdpg3ZzKl8=
+github.com/tomarrell/wrapcheck/v2 v2.10.0 h1:SzRCryzy4IrAH7bVGG4cK40tNUhmVmMDuJujy4XwYDg=
+github.com/tomarrell/wrapcheck/v2 v2.10.0/go.mod h1:g9vNIyhb5/9TQgumxQyOEqDHsmGYcGsVMOx/xGkqdMo=
github.com/tommy-muehle/go-mnd/v2 v2.5.1 h1:NowYhSdyE/1zwK9QCLeRb6USWdoif80Ie+v+yU8u1Zw=
github.com/tommy-muehle/go-mnd/v2 v2.5.1/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw=
-github.com/ultraware/funlen v0.1.0 h1:BuqclbkY6pO+cvxoq7OsktIXZpgBSkYTQtmwhAK81vI=
-github.com/ultraware/funlen v0.1.0/go.mod h1:XJqmOQja6DpxarLj6Jj1U7JuoS8PvL4nEqDaQhy22p4=
-github.com/ultraware/whitespace v0.1.1 h1:bTPOGejYFulW3PkcrqkeQwOd6NKOOXvmGD9bo/Gk8VQ=
-github.com/ultraware/whitespace v0.1.1/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8=
-github.com/uudashr/gocognit v1.1.2 h1:l6BAEKJqQH2UpKAPKdMfZf5kE4W/2xk8pfU1OVLvniI=
-github.com/uudashr/gocognit v1.1.2/go.mod h1:aAVdLURqcanke8h3vg35BC++eseDm66Z7KmchI5et4k=
+github.com/ultraware/funlen v0.2.0 h1:gCHmCn+d2/1SemTdYMiKLAHFYxTYz7z9VIDRaTGyLkI=
+github.com/ultraware/funlen v0.2.0/go.mod h1:ZE0q4TsJ8T1SQcjmkhN/w+MceuatI6pBFSxxyteHIJA=
+github.com/ultraware/whitespace v0.2.0 h1:TYowo2m9Nfj1baEQBjuHzvMRbp19i+RCcRYrSWoFa+g=
+github.com/ultraware/whitespace v0.2.0/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8=
+github.com/uudashr/gocognit v1.2.0 h1:3BU9aMr1xbhPlvJLSydKwdLN3tEUUrzPSSM8S4hDYRA=
+github.com/uudashr/gocognit v1.2.0/go.mod h1:k/DdKPI6XBZO1q7HgoV2juESI2/Ofj9AcHPZhBBdrTU=
+github.com/uudashr/iface v1.3.1 h1:bA51vmVx1UIhiIsQFSNq6GZ6VPTk3WNMZgRiCe9R29U=
+github.com/uudashr/iface v1.3.1/go.mod h1:4QvspiRd3JLPAEXBQ9AiZpLbJlrWWgRChOKDJEuQTdg=
github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk=
github.com/vmihailenco/msgpack v4.0.4+incompatible h1:dSLoQfGFAo3F6OoNhwUmLwVgaUXK79GlxNBwueZn0xI=
github.com/vmihailenco/msgpack v4.0.4+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk=
@@ -682,27 +699,39 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
-github.com/zclconf/go-cty v1.14.4 h1:uXXczd9QDGsgu0i/QFR/hzI5NYCHLf6NQw/atrbnhq8=
-github.com/zclconf/go-cty v1.14.4/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE=
-github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b h1:FosyBZYxY34Wul7O/MSKey3txpPYyCqVO5ZyceuQJEI=
-github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8=
+github.com/zclconf/go-cty v1.16.2 h1:LAJSwc3v81IRBZyUVQDUdZ7hs3SYs9jv0eZJDWHD/70=
+github.com/zclconf/go-cty v1.16.2/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE=
+github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo=
+github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM=
gitlab.com/bosi/decorder v0.4.2 h1:qbQaV3zgwnBZ4zPMhGLW4KZe7A7NwxEhJx39R3shffo=
gitlab.com/bosi/decorder v0.4.2/go.mod h1:muuhHoaJkA9QLcYHq4Mj8FJUwDZ+EirSHRiaTcTf6T8=
go-simpler.org/assert v0.9.0 h1:PfpmcSvL7yAnWyChSjOz6Sp6m9j5lyK8Ok9pEL31YkQ=
go-simpler.org/assert v0.9.0/go.mod h1:74Eqh5eI6vCK6Y5l3PI8ZYFXG4Sa+tkr70OIPJAUr28=
-go-simpler.org/musttag v0.12.2 h1:J7lRc2ysXOq7eM8rwaTYnNrHd5JwjppzB6mScysB2Cs=
-go-simpler.org/musttag v0.12.2/go.mod h1:uN1DVIasMTQKk6XSik7yrJoEysGtR2GRqvWnI9S7TYM=
-go-simpler.org/sloglint v0.7.1 h1:qlGLiqHbN5islOxjeLXoPtUdZXb669RW+BDQ+xOSNoU=
-go-simpler.org/sloglint v0.7.1/go.mod h1:OlaVDRh/FKKd4X4sIMbsz8st97vomydceL146Fthh/c=
+go-simpler.org/musttag v0.13.0 h1:Q/YAW0AHvaoaIbsPj3bvEI5/QFP7w696IMUpnKXQfCE=
+go-simpler.org/musttag v0.13.0/go.mod h1:FTzIGeK6OkKlUDVpj0iQUXZLUO1Js9+mvykDQy9C5yM=
+go-simpler.org/sloglint v0.9.0 h1:/40NQtjRx9txvsB/RN022KsUJU+zaaSb/9q9BSefSrE=
+go-simpler.org/sloglint v0.9.0/go.mod h1:G/OrAF6uxj48sHahCzrbarVMptL2kjWTaUeC8+fOGww=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
+go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
+go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
+go.opentelemetry.io/otel v1.34.0 h1:zRLXxLCgL1WyKsPVrgbSdMN4c0FMkDAskSTQP+0hdUY=
+go.opentelemetry.io/otel v1.34.0/go.mod h1:OWFPOQ+h4G8xpyjgqo4SxJYdDQ/qmRH+wivy7zzx9oI=
+go.opentelemetry.io/otel/metric v1.34.0 h1:+eTR3U0MyfWjRDhmFMxe2SsW64QrZ84AOhvqS7Y+PoQ=
+go.opentelemetry.io/otel/metric v1.34.0/go.mod h1:CEDrp0fy2D0MvkXE+dPV7cMi8tWZwX3dmaIhwPOaqHE=
+go.opentelemetry.io/otel/sdk v1.32.0 h1:RNxepc9vK59A8XsgZQouW8ue8Gkb4jpWtJm9ge5lEG4=
+go.opentelemetry.io/otel/sdk v1.32.0/go.mod h1:LqgegDBjKMmb2GC6/PrTnteJG39I8/vJCAP9LlJXEjU=
+go.opentelemetry.io/otel/sdk/metric v1.32.0 h1:rZvFnvmvawYb0alrYkjraqJq0Z4ZUJAiyYCU9snn1CU=
+go.opentelemetry.io/otel/sdk/metric v1.32.0/go.mod h1:PWeZlq0zt9YkYAp3gjKZ0eicRYvOh1Gd+X99x6GHpCQ=
+go.opentelemetry.io/otel/trace v1.34.0 h1:+ouXS2V8Rd4hp4580a8q23bg0azF2nI8cqLYnC8mh/k=
+go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE=
go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
-go.uber.org/automaxprocs v1.5.3 h1:kWazyxZUrS3Gs4qUpbwo5kEIMGe/DAvi5Z4tl2NW4j8=
-go.uber.org/automaxprocs v1.5.3/go.mod h1:eRbA25aqJrxAbsLO0xy5jVwPt7FQnRgjW+efnwa1WM0=
+go.uber.org/automaxprocs v1.6.0 h1:O3y2/QNTOdbF+e/dpXNNW7Rx2hZ4sTIPyybbxyNqTUs=
+go.uber.org/automaxprocs v1.6.0/go.mod h1:ifeIMSnPZuznNm6jmdzmU3/bfk01Fe2fotchwEFJ8r8=
go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI=
go.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ=
go.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4=
@@ -716,10 +745,11 @@ golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
-golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw=
+golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
+golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
-golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
-golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
+golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
+golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -730,12 +760,12 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
-golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc h1:ao2WRsKSzW6KuUY9IWPwWahcHCgR0s52IfwutMfEbdM=
-golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc/go.mod h1:iRJReGqOEeBhDZGkGbynYwcHlctCvnjTYIamk7uXpHI=
+golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk=
+golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY=
golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
-golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f h1:phY1HzDcf18Aq9A8KkmRtY9WvOFIxN8wgfvy6Zm1DV8=
-golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
+golang.org/x/exp/typeparams v0.0.0-20250210185358-939b2ce775ac h1:TSSpLIG4v+p0rPv1pNOQtl1I8knsO4S9trOxNMOLVP4=
+golang.org/x/exp/typeparams v0.0.0-20250210185358-939b2ce775ac/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@@ -758,14 +788,15 @@ golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
-golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
-golang.org/x/mod v0.6.0/go.mod h1:4mET923SAdbXp2ki8ey+zGs1SLqsuM2Y0uvdZR/fUNI=
golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
-golang.org/x/mod v0.18.0 h1:5+9lSbEzPSdWkH32vYPBwEpX8KwDbM52Ud9xBUvNlb0=
-golang.org/x/mod v0.18.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU=
+golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -800,21 +831,22 @@ golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96b
golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
-golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
-golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
+golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
-golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ=
-golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE=
+golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
+golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
+golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c=
+golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
-golang.org/x/oauth2 v0.22.0 h1:BzDx2FehcG7jJwgWLELCdmLuxk2i+x9UDpSiss2u0ZA=
-golang.org/x/oauth2 v0.22.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
+golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE=
+golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -828,8 +860,10 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
-golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
+golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
+golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=
+golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -864,7 +898,6 @@ golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -879,29 +912,28 @@ golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220702020025-31831981b65f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
-golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
+golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
-golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
-golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
+golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
+golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
+golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
-golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q=
-golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
+golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y=
+golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -911,12 +943,13 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
+golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
-golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
-golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
+golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
+golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -926,7 +959,6 @@ golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-golang.org/x/tools v0.0.0-20190321232350-e250d351ecad/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
@@ -934,10 +966,8 @@ golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgw
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
@@ -968,21 +998,19 @@ golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc
golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
-golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
golang.org/x/tools v0.1.1-0.20210205202024-ef80cdb6ec6d/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU=
golang.org/x/tools v0.1.1-0.20210302220138-2ac05c832e1a/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU=
golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
-golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
-golang.org/x/tools v0.1.11/go.mod h1:SgwaegtQh8clINPpECJMqnxLv9I09HLqnW3RMqW0CA4=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
-golang.org/x/tools v0.2.0/go.mod h1:y4OqIKeOV/fWJetJ8bXPU1sEVniLMIyDAZWeHdV+NTA=
golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k=
-golang.org/x/tools v0.5.0/go.mod h1:N+Kgy78s5I24c24dU8OfWNEotWjutIs8SnJvn5IDq+k=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
-golang.org/x/tools v0.22.0 h1:gqSGLZqv+AI9lIQzniJ0nZDRG5GBPsSi+DRNHWNz6yA=
-golang.org/x/tools v0.22.0/go.mod h1:aCwcsjqvq7Yqt6TNyX7QMU2enbQ/Gt0bo6krSeEri+c=
+golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s=
+golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
+golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg=
+golang.org/x/tools v0.31.0 h1:0EedkvKDbh+qistFTd0Bcwe/YLh4vHwWEkiI0toFIBU=
+golang.org/x/tools v0.31.0/go.mod h1:naFTU+Cev749tSJRXJlna0T3WxKvb1kWEx15xA4SdmQ=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -1040,8 +1068,8 @@ google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7Fc
google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240227224415-6ceb2ff114de h1:cZGRis4/ot9uVm639a+rHCUaG0JJHEsdyzSQTMX+suY=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:H4O17MA/PE9BsGx3w+a+W2VOLLD1Qf7oJneAoU6WktY=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20250219182151-9fdb1cabc7b2 h1:DMTIbak9GhdaSxEjvVzAeNZvyc03I61duqNbnm3SU0M=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20250219182151-9fdb1cabc7b2/go.mod h1:LuRYeWDFV6WOn90g357N17oMCaxpgCnbi/44qJvDn2I=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
@@ -1054,8 +1082,8 @@ google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKa
google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
-google.golang.org/grpc v1.63.2 h1:MUeiw1B2maTVZthpU5xvASfTh3LDbxHd6IJ6QQVU+xM=
-google.golang.org/grpc v1.63.2/go.mod h1:WAX/8DgncnokcFUldAxq7GeB5DXHDbMF+lLvDomNkRA=
+google.golang.org/grpc v1.70.0 h1:pWFv03aZoHzlRKHWicjsZytKAiYCtNS0dHbXnIdq7jQ=
+google.golang.org/grpc v1.70.0/go.mod h1:ofIJqVKDXx/JiXrwr2IG4/zwdH9txy3IlF40RmcJSQw=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
@@ -1068,14 +1096,14 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
-google.golang.org/protobuf v1.34.0 h1:Qo/qEd2RZPCf2nKuorzksSknv0d3ERwp1vFG38gSmH4=
-google.golang.org/protobuf v1.34.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
+google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM=
+google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
-gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
@@ -1098,10 +1126,10 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
-honnef.co/go/tools v0.4.7 h1:9MDAWxMoSnB6QoSqiVr7P5mtkT9pOc1kSxchzPCnqJs=
-honnef.co/go/tools v0.4.7/go.mod h1:+rnGS1THNh8zMwnd2oVOTL9QF6vmfyG6ZXBULae2uc0=
-mvdan.cc/gofumpt v0.6.0 h1:G3QvahNDmpD+Aek/bNOLrFR2XC6ZAdo62dZu65gmwGo=
-mvdan.cc/gofumpt v0.6.0/go.mod h1:4L0wf+kgIPZtcCWXynNS2e6bhmj73umwnuXSZarixzA=
+honnef.co/go/tools v0.6.1 h1:R094WgE8K4JirYjBaOpz/AvTyUu/3wbmAoskKN/pxTI=
+honnef.co/go/tools v0.6.1/go.mod h1:3puzxxljPCe8RGJX7BIy1plGbxEOZni5mR2aXe3/uk4=
+mvdan.cc/gofumpt v0.7.0 h1:bg91ttqXmi9y2xawvkuMXyvAA/1ZGJqYAEGjXuP0JXU=
+mvdan.cc/gofumpt v0.7.0/go.mod h1:txVFJy/Sc/mvaycET54pV8SW8gWxTlUuGHVEcncmNUo=
mvdan.cc/unparam v0.0.0-20240528143540-8a5130ca722f h1:lMpcwN6GxNbWtbpI1+xzFLSW8XzX0u72NttUGVFjO3U=
mvdan.cc/unparam v0.0.0-20240528143540-8a5130ca722f/go.mod h1:RSLa7mKKCNeTTMHBw5Hsy2rfJmd6O2ivt9Dw9ZqCQpQ=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
diff --git a/vendor/4d63.com/gocheckcompilerdirectives/checkcompilerdirectives/checkcompilerdirectives.go b/vendor/4d63.com/gocheckcompilerdirectives/checkcompilerdirectives/checkcompilerdirectives.go
index 19948c4547..e719155d96 100644
--- a/vendor/4d63.com/gocheckcompilerdirectives/checkcompilerdirectives/checkcompilerdirectives.go
+++ b/vendor/4d63.com/gocheckcompilerdirectives/checkcompilerdirectives/checkcompilerdirectives.go
@@ -72,12 +72,11 @@ func isKnown(directive string) bool {
return false
}
+// Found by running the following command on the source of go.
+// git grep -o -E -h '//go:[a-z_-]+' -- ':!**/*_test.go' ':!test/' ':!**/testdata/**' | sort -u
+// See https://pkg.go.dev/cmd/compile@go1.24#hdr-Compiler_Directives
var known = []string{
- // Found by running the following command on the source of go.
- // git grep -o -E -h '//go:[a-z_]+' -- ':!**/*_test.go' ':!test/' ':!**/testdata/**' | sort -u
- "binary",
"build",
- "buildsomethingelse",
"cgo_dynamic_linker",
"cgo_export_dynamic",
"cgo_export_static",
@@ -85,10 +84,10 @@ var known = []string{
"cgo_import_static",
"cgo_ldflag",
"cgo_unsafe_args",
+ "debug",
"embed",
"generate",
"linkname",
- "name",
"nocheckptr",
"noescape",
"noinline",
@@ -101,5 +100,7 @@ var known = []string{
"systemstack",
"uintptrescapes",
"uintptrkeepalive",
+ "wasmimport",
+ "wasmexport",
"yeswritebarrierrec",
}
diff --git a/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go b/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go
index edf9193ecb..c17c6acca6 100644
--- a/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go
+++ b/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go
@@ -1,7 +1,6 @@
package checknoglobals
import (
- "flag"
"fmt"
"go/ast"
"go/token"
@@ -37,18 +36,10 @@ func Analyzer() *analysis.Analyzer {
Name: "gochecknoglobals",
Doc: Doc,
Run: checkNoGlobals,
- Flags: flags(),
RunDespiteErrors: true,
}
}
-func flags() flag.FlagSet {
- flags := flag.NewFlagSet("", flag.ExitOnError)
- flags.Bool("t", false, "Include tests")
-
- return *flags
-}
-
func isAllowed(cm ast.CommentMap, v ast.Node, ti *types.Info) bool {
switch i := v.(type) {
case *ast.GenDecl:
@@ -138,16 +129,11 @@ func hasEmbedComment(cm ast.CommentMap, n ast.Node) bool {
}
func checkNoGlobals(pass *analysis.Pass) (interface{}, error) {
- includeTests := pass.Analyzer.Flags.Lookup("t").Value.(flag.Getter).Get().(bool)
-
for _, file := range pass.Files {
filename := pass.Fset.Position(file.Pos()).Filename
if !strings.HasSuffix(filename, ".go") {
continue
}
- if !includeTests && strings.HasSuffix(filename, "_test.go") {
- continue
- }
fileCommentMap := ast.NewCommentMap(pass.Fset, file, file.Comments)
diff --git a/vendor/github.com/4meepo/tagalign/.gitignore b/vendor/github.com/4meepo/tagalign/.gitignore
index e37bb52e49..1c6218ee29 100644
--- a/vendor/github.com/4meepo/tagalign/.gitignore
+++ b/vendor/github.com/4meepo/tagalign/.gitignore
@@ -17,6 +17,7 @@
*.test
.vscode
+.idea/
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
diff --git a/vendor/github.com/4meepo/tagalign/.goreleaser.yml b/vendor/github.com/4meepo/tagalign/.goreleaser.yml
index e7b6f6800e..37dfec7c88 100644
--- a/vendor/github.com/4meepo/tagalign/.goreleaser.yml
+++ b/vendor/github.com/4meepo/tagalign/.goreleaser.yml
@@ -1,4 +1,4 @@
----
+version: 2
project_name: tagalign
release:
@@ -29,4 +29,4 @@ builds:
goarch: 386
- goos: freebsd
goarch: arm64
- main: ./cmd/tagalign/
\ No newline at end of file
+ main: ./cmd/tagalign/
diff --git a/vendor/github.com/4meepo/tagalign/options.go b/vendor/github.com/4meepo/tagalign/options.go
index ddec98da73..2a78592465 100644
--- a/vendor/github.com/4meepo/tagalign/options.go
+++ b/vendor/github.com/4meepo/tagalign/options.go
@@ -2,13 +2,6 @@ package tagalign
type Option func(*Helper)
-// WithMode specify the mode of tagalign.
-func WithMode(mode Mode) Option {
- return func(h *Helper) {
- h.mode = mode
- }
-}
-
// WithSort enables tag sorting.
// fixedOrder specifies the order of tags; the remaining tags are sorted by name.
// Sorting is disabled by default.
diff --git a/vendor/github.com/4meepo/tagalign/tagalign.go b/vendor/github.com/4meepo/tagalign/tagalign.go
index 4734b56661..8161a0aa7f 100644
--- a/vendor/github.com/4meepo/tagalign/tagalign.go
+++ b/vendor/github.com/4meepo/tagalign/tagalign.go
@@ -1,27 +1,19 @@
package tagalign
import (
+ "cmp"
"fmt"
"go/ast"
"go/token"
- "log"
"reflect"
- "sort"
+ "slices"
"strconv"
"strings"
"github.com/fatih/structtag"
-
"golang.org/x/tools/go/analysis"
)
-type Mode int
-
-const (
- StandaloneMode Mode = iota
- GolangciLintMode
-)
-
type Style int
const (
@@ -44,11 +36,14 @@ func NewAnalyzer(options ...Option) *analysis.Analyzer {
}
}
-func Run(pass *analysis.Pass, options ...Option) []Issue {
- var issues []Issue
+func Run(pass *analysis.Pass, options ...Option) {
for _, f := range pass.Files {
+ filename := getFilename(pass.Fset, f)
+ if !strings.HasSuffix(filename, ".go") {
+ continue
+ }
+
h := &Helper{
- mode: StandaloneMode,
style: DefaultStyle,
align: true,
}
@@ -63,22 +58,19 @@ func Run(pass *analysis.Pass, options ...Option) []Issue {
if !h.align && !h.sort {
// do nothing
- return nil
+ return
}
ast.Inspect(f, func(n ast.Node) bool {
h.find(pass, n)
return true
})
+
h.Process(pass)
- issues = append(issues, h.issues...)
}
- return issues
}
type Helper struct {
- mode Mode
-
style Style
align bool // whether enable tags align.
@@ -87,19 +79,6 @@ type Helper struct {
singleFields []*ast.Field
consecutiveFieldsGroups [][]*ast.Field // fields in this group, must be consecutive in struct.
- issues []Issue
-}
-
-// Issue is used to integrate with golangci-lint's inline auto fix.
-type Issue struct {
- Pos token.Position
- Message string
- InlineFix InlineFix
-}
-type InlineFix struct {
- StartCol int // zero-based
- Length int
- NewString string
}
func (w *Helper) find(pass *analysis.Pass, n ast.Node) {
@@ -159,42 +138,28 @@ func (w *Helper) find(pass *analysis.Pass, n ast.Node) {
split()
}
-func (w *Helper) report(pass *analysis.Pass, field *ast.Field, startCol int, msg, replaceStr string) {
- if w.mode == GolangciLintMode {
- iss := Issue{
- Pos: pass.Fset.Position(field.Tag.Pos()),
- Message: msg,
- InlineFix: InlineFix{
- StartCol: startCol,
- Length: len(field.Tag.Value),
- NewString: replaceStr,
- },
- }
- w.issues = append(w.issues, iss)
- }
-
- if w.mode == StandaloneMode {
- pass.Report(analysis.Diagnostic{
- Pos: field.Tag.Pos(),
- End: field.Tag.End(),
- Message: msg,
- SuggestedFixes: []analysis.SuggestedFix{
- {
- Message: msg,
- TextEdits: []analysis.TextEdit{
- {
- Pos: field.Tag.Pos(),
- End: field.Tag.End(),
- NewText: []byte(replaceStr),
- },
+func (w *Helper) report(pass *analysis.Pass, field *ast.Field, msg, replaceStr string) {
+ pass.Report(analysis.Diagnostic{
+ Pos: field.Tag.Pos(),
+ End: field.Tag.End(),
+ Message: msg,
+ SuggestedFixes: []analysis.SuggestedFix{
+ {
+ Message: msg,
+ TextEdits: []analysis.TextEdit{
+ {
+ Pos: field.Tag.Pos(),
+ End: field.Tag.End(),
+ NewText: []byte(replaceStr),
},
},
},
- })
- }
+ },
+ })
}
-func (w *Helper) Process(pass *analysis.Pass) { //nolint:gocognit
+//nolint:gocognit,gocyclo,nestif
+func (w *Helper) Process(pass *analysis.Pass) {
// process grouped fields
for _, fields := range w.consecutiveFieldsGroups {
offsets := make([]int, len(fields))
@@ -220,7 +185,7 @@ func (w *Helper) Process(pass *analysis.Pass) { //nolint:gocognit
tag, err := strconv.Unquote(field.Tag.Value)
if err != nil {
// if tag value is not a valid string, report it directly
- w.report(pass, field, column, errTagValueSyntax, field.Tag.Value)
+ w.report(pass, field, errTagValueSyntax, field.Tag.Value)
fields = removeField(fields, i)
continue
}
@@ -228,7 +193,7 @@ func (w *Helper) Process(pass *analysis.Pass) { //nolint:gocognit
tags, err := structtag.Parse(tag)
if err != nil {
// if tag value is not a valid struct tag, report it directly
- w.report(pass, field, column, err.Error(), field.Tag.Value)
+ w.report(pass, field, err.Error(), field.Tag.Value)
fields = removeField(fields, i)
continue
}
@@ -241,7 +206,7 @@ func (w *Helper) Process(pass *analysis.Pass) { //nolint:gocognit
cp[i] = tag
}
notSortedTagsGroup = append(notSortedTagsGroup, cp)
- sortBy(w.fixedTagOrder, tags)
+ sortTags(w.fixedTagOrder, tags)
}
for _, t := range tags.Tags() {
addKey(t.Key)
@@ -252,7 +217,7 @@ func (w *Helper) Process(pass *analysis.Pass) { //nolint:gocognit
}
if w.sort && StrictStyle == w.style {
- sortAllKeys(w.fixedTagOrder, uniqueKeys)
+ sortKeys(w.fixedTagOrder, uniqueKeys)
maxTagNum = len(uniqueKeys)
}
@@ -340,27 +305,26 @@ func (w *Helper) Process(pass *analysis.Pass) { //nolint:gocognit
msg := "tag is not aligned, should be: " + unquoteTag
- w.report(pass, field, offsets[i], msg, newTagValue)
+ w.report(pass, field, msg, newTagValue)
}
}
// process single fields
for _, field := range w.singleFields {
- column := pass.Fset.Position(field.Tag.Pos()).Column - 1
tag, err := strconv.Unquote(field.Tag.Value)
if err != nil {
- w.report(pass, field, column, errTagValueSyntax, field.Tag.Value)
+ w.report(pass, field, errTagValueSyntax, field.Tag.Value)
continue
}
tags, err := structtag.Parse(tag)
if err != nil {
- w.report(pass, field, column, err.Error(), field.Tag.Value)
+ w.report(pass, field, err.Error(), field.Tag.Value)
continue
}
originalTags := append([]*structtag.Tag(nil), tags.Tags()...)
if w.sort {
- sortBy(w.fixedTagOrder, tags)
+ sortTags(w.fixedTagOrder, tags)
}
newTagValue := fmt.Sprintf("`%s`", tags.String())
@@ -371,85 +335,47 @@ func (w *Helper) Process(pass *analysis.Pass) { //nolint:gocognit
msg := "tag is not aligned , should be: " + tags.String()
- w.report(pass, field, column, msg, newTagValue)
+ w.report(pass, field, msg, newTagValue)
}
}
-// Issues returns all issues found by the analyzer.
-// It is used to integrate with golangci-lint.
-func (w *Helper) Issues() []Issue {
- log.Println("tagalign 's Issues() should only be called in golangci-lint mode")
- return w.issues
-}
-
-// sortBy sorts tags by fixed order.
+// sortTags sorts tags by fixed order.
// If a tag is not in the fixed order, it will be sorted by name.
-func sortBy(fixedOrder []string, tags *structtag.Tags) {
- // sort by fixed order
- sort.Slice(tags.Tags(), func(i, j int) bool {
- ti := tags.Tags()[i]
- tj := tags.Tags()[j]
-
- oi := findIndex(fixedOrder, ti.Key)
- oj := findIndex(fixedOrder, tj.Key)
-
- if oi == -1 && oj == -1 {
- return ti.Key < tj.Key
- }
-
- if oi == -1 {
- return false
- }
-
- if oj == -1 {
- return true
- }
-
- return oi < oj
+func sortTags(fixedOrder []string, tags *structtag.Tags) {
+ slices.SortFunc(tags.Tags(), func(a, b *structtag.Tag) int {
+ return compareByFixedOrder(fixedOrder)(a.Key, b.Key)
})
}
-func sortAllKeys(fixedOrder []string, keys []string) {
- sort.Slice(keys, func(i, j int) bool {
- oi := findIndex(fixedOrder, keys[i])
- oj := findIndex(fixedOrder, keys[j])
+func sortKeys(fixedOrder []string, keys []string) {
+ slices.SortFunc(keys, compareByFixedOrder(fixedOrder))
+}
+
+func compareByFixedOrder(fixedOrder []string) func(a, b string) int {
+ return func(a, b string) int {
+ oi := slices.Index(fixedOrder, a)
+ oj := slices.Index(fixedOrder, b)
if oi == -1 && oj == -1 {
- return keys[i] < keys[j]
+ return strings.Compare(a, b)
}
if oi == -1 {
- return false
+ return 1
}
if oj == -1 {
- return true
+ return -1
}
- return oi < oj
- })
-}
-
-func findIndex(s []string, e string) int {
- for i, a := range s {
- if a == e {
- return i
- }
+ return cmp.Compare(oi, oj)
}
- return -1
}
func alignFormat(length int) string {
return "%" + fmt.Sprintf("-%ds", length)
}
-func max(a, b int) int {
- if a > b {
- return a
- }
- return b
-}
-
func removeField(fields []*ast.Field, index int) []*ast.Field {
if index < 0 || index >= len(fields) {
return fields
@@ -457,3 +383,12 @@ func removeField(fields []*ast.Field, index int) []*ast.Field {
return append(fields[:index], fields[index+1:]...)
}
+
+func getFilename(fset *token.FileSet, file *ast.File) string {
+ filename := fset.PositionFor(file.Pos(), true).Filename
+ if !strings.HasSuffix(filename, ".go") {
+ return fset.PositionFor(file.Pos(), false).Filename
+ }
+
+ return filename
+}
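
The refactor above replaces sort.Slice/findIndex with a slices.SortFunc comparator. A minimal standalone sketch of that fixed-order comparison follows; the tag keys and fixed order are invented for illustration:

package main

import (
	"cmp"
	"fmt"
	"slices"
	"strings"
)

// compareByFixedOrder mirrors the comparator introduced above: keys present in
// fixedOrder sort by their position there; all other keys come after, sorted
// alphabetically.
func compareByFixedOrder(fixedOrder []string) func(a, b string) int {
	return func(a, b string) int {
		oi := slices.Index(fixedOrder, a)
		oj := slices.Index(fixedOrder, b)
		switch {
		case oi == -1 && oj == -1:
			return strings.Compare(a, b)
		case oi == -1:
			return 1
		case oj == -1:
			return -1
		default:
			return cmp.Compare(oi, oj)
		}
	}
}

func main() {
	keys := []string{"yaml", "json", "validate", "xml"} // hypothetical struct tag keys
	slices.SortFunc(keys, compareByFixedOrder([]string{"json", "yaml"}))
	fmt.Println(keys) // [json yaml validate xml]
}
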
diff --git a/vendor/github.com/Abirdcfly/dupword/dupword.go b/vendor/github.com/Abirdcfly/dupword/dupword.go
index 9a78fb6cca..6838d7e759 100644
--- a/vendor/github.com/Abirdcfly/dupword/dupword.go
+++ b/vendor/github.com/Abirdcfly/dupword/dupword.go
@@ -128,7 +128,12 @@ func (a *analyzer) run(pass *analysis.Pass) (interface{}, error) {
}
func (a *analyzer) fixDuplicateWordInComment(pass *analysis.Pass, f *ast.File) {
+ isTestFile := strings.HasSuffix(pass.Fset.File(f.FileStart).Name(), "_test.go")
for _, cg := range f.Comments {
+ // avoid checking example outputs for duplicate words
+ if isTestFile && isExampleOutputStart(cg.List[0].Text) {
+ continue
+ }
var preLine *ast.Comment
for _, c := range cg.List {
update, keyword, find := a.Check(c.Text)
@@ -329,3 +334,10 @@ func ExcludeWords(word string) (exclude bool) {
}
return false
}
+
+func isExampleOutputStart(comment string) bool {
+ return strings.HasPrefix(comment, "// Output:") ||
+ strings.HasPrefix(comment, "// output:") ||
+ strings.HasPrefix(comment, "// Unordered output:") ||
+ strings.HasPrefix(comment, "// unordered output:")
+}
diff --git a/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go b/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go
index aa85225108..2b8794dc2c 100644
--- a/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go
+++ b/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go
@@ -1,11 +1,9 @@
package analyzer
import (
- "fmt"
"go/ast"
"go/token"
- "strconv"
- "strings"
+ "go/types"
"unicode"
"golang.org/x/tools/go/analysis"
@@ -23,86 +21,61 @@ func New() *analysis.Analyzer {
}
}
-type stringSet = map[string]struct{}
-
-var (
- importNodes = []ast.Node{(*ast.ImportSpec)(nil)}
- typeNodes = []ast.Node{(*ast.TypeSpec)(nil)}
- funcNodes = []ast.Node{(*ast.FuncDecl)(nil)}
-)
-
func run(pass *analysis.Pass) (interface{}, error) {
insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
- pkgAliases := map[string]string{}
- insp.Preorder(importNodes, func(node ast.Node) {
- i := node.(*ast.ImportSpec)
- if n := i.Name; n != nil && i.Path != nil {
- if path, err := strconv.Unquote(i.Path.Value); err == nil {
- pkgAliases[n.Name] = getPkgFromPath(path)
- }
- }
- })
-
- allTypes := stringSet{}
- typesSpecs := map[string]*ast.TypeSpec{}
- insp.Preorder(typeNodes, func(node ast.Node) {
- t := node.(*ast.TypeSpec)
- allTypes[t.Name.Name] = struct{}{}
- typesSpecs[t.Name.Name] = t
- })
-
- errorTypes := stringSet{}
- insp.Preorder(funcNodes, func(node ast.Node) {
- f := node.(*ast.FuncDecl)
- t, ok := isMethodError(f)
- if !ok {
- return
- }
- errorTypes[t] = struct{}{}
-
- tSpec, ok := typesSpecs[t]
- if !ok {
- panic(fmt.Sprintf("no specification for type %q", t))
- }
-
- if _, ok := tSpec.Type.(*ast.ArrayType); ok {
- if !isValidErrorArrayTypeName(t) {
- reportAboutErrorType(pass, tSpec.Pos(), t, true)
- }
- } else if !isValidErrorTypeName(t) {
- reportAboutErrorType(pass, tSpec.Pos(), t, false)
- }
- })
-
- errorFuncs := stringSet{}
- insp.Preorder(funcNodes, func(node ast.Node) {
- f := node.(*ast.FuncDecl)
- if isFuncReturningErr(f.Type, allTypes, errorTypes) {
- errorFuncs[f.Name.Name] = struct{}{}
+ insp.Nodes([]ast.Node{
+ (*ast.TypeSpec)(nil),
+ (*ast.ValueSpec)(nil),
+ (*ast.FuncDecl)(nil),
+ }, func(node ast.Node, push bool) bool {
+ if !push {
+ return false
}
- })
- inspectPkgLevelVarsOnly := func(node ast.Node) bool {
switch v := node.(type) {
case *ast.FuncDecl:
return false
case *ast.ValueSpec:
- if name, ok := isSentinelError(v, pkgAliases, allTypes, errorTypes, errorFuncs); ok && !isValidErrorVarName(name) {
- reportAboutErrorVar(pass, v.Pos(), name)
+ if len(v.Names) != 1 {
+ return false
+ }
+ ident := v.Names[0]
+
+ if exprImplementsError(pass, ident) && !isValidErrorVarName(ident.Name) {
+ reportAboutSentinelError(pass, v.Pos(), ident.Name)
+ }
+ return false
+
+ case *ast.TypeSpec:
+ tt := pass.TypesInfo.TypeOf(v.Name)
+ if tt == nil {
+ return false
+ }
+ // NOTE(a.telyshev): Pointer is the hack against Error() method with pointer receiver.
+ if !typeImplementsError(types.NewPointer(tt)) {
+ return false
}
+
+ name := v.Name.Name
+ if _, ok := v.Type.(*ast.ArrayType); ok {
+ if !isValidErrorArrayTypeName(name) {
+ reportAboutArrayErrorType(pass, v.Pos(), name)
+ }
+ } else if !isValidErrorTypeName(name) {
+ reportAboutErrorType(pass, v.Pos(), name)
+ }
+ return false
}
+
return true
- }
- for _, f := range pass.Files {
- ast.Inspect(f, inspectPkgLevelVarsOnly)
- }
+ })
return nil, nil //nolint:nilnil
}
-func reportAboutErrorType(pass *analysis.Pass, typePos token.Pos, typeName string, isArrayType bool) {
+func reportAboutErrorType(pass *analysis.Pass, typePos token.Pos, typeName string) {
var form string
if unicode.IsLower([]rune(typeName)[0]) {
form = "xxxError"
@@ -110,26 +83,26 @@ func reportAboutErrorType(pass *analysis.Pass, typePos token.Pos, typeName strin
form = "XxxError"
}
- if isArrayType {
- form += "s"
+ pass.Reportf(typePos, "the error type name `%s` should conform to the `%s` format", typeName, form)
+}
+
+func reportAboutArrayErrorType(pass *analysis.Pass, typePos token.Pos, typeName string) {
+ var forms string
+ if unicode.IsLower([]rune(typeName)[0]) {
+ forms = "`xxxErrors` or `xxxError`"
+ } else {
+ forms = "`XxxErrors` or `XxxError`"
}
- pass.Reportf(typePos, "the type name `%s` should conform to the `%s` format", typeName, form)
+
+ pass.Reportf(typePos, "the error type name `%s` should conform to the %s format", typeName, forms)
}
-func reportAboutErrorVar(pass *analysis.Pass, pos token.Pos, varName string) {
+func reportAboutSentinelError(pass *analysis.Pass, pos token.Pos, varName string) {
var form string
if unicode.IsLower([]rune(varName)[0]) {
form = "errXxx"
} else {
form = "ErrXxx"
}
- pass.Reportf(pos, "the variable name `%s` should conform to the `%s` format", varName, form)
-}
-
-func getPkgFromPath(p string) string {
- idx := strings.LastIndex(p, "/")
- if idx == -1 {
- return p
- }
- return p[idx+1:]
+ pass.Reportf(pos, "the sentinel error name `%s` should conform to the `%s` format", varName, form)
}
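
For reference, a rough sketch of the naming patterns the rewritten errname analyzer reports on, based on the messages above; all identifiers are made up:

package example

import "errors"

var ErrNotFound = errors.New("not found") // ok: sentinel errors use the ErrXxx / errXxx format

var NotFoundErr = errors.New("not found") // reported: sentinel error name should follow ErrXxx (e.g. ErrNotFound)

type ValidationError struct{ Field string } // ok: error types use the XxxError format

func (e *ValidationError) Error() string { return "invalid field: " + e.Field }
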
diff --git a/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go b/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go
index 06f8d61d8e..04e14fb68d 100644
--- a/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go
+++ b/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go
@@ -1,58 +1,22 @@
package analyzer
import (
- "fmt"
"go/ast"
- "go/token"
"go/types"
"strings"
"unicode"
-)
-
-func isMethodError(f *ast.FuncDecl) (typeName string, ok bool) {
- if f.Recv == nil || len(f.Recv.List) != 1 {
- return "", false
- }
- if f.Name == nil || f.Name.Name != "Error" {
- return "", false
- }
- if f.Type == nil || f.Type.Results == nil || len(f.Type.Results.List) != 1 {
- return "", false
- }
-
- returnType, ok := f.Type.Results.List[0].Type.(*ast.Ident)
- if !ok {
- return "", false
- }
-
- var receiverType string
-
- unwrapIdentName := func(e ast.Expr) string {
- switch v := e.(type) {
- case *ast.Ident:
- return v.Name
- case *ast.IndexExpr:
- if i, ok := v.X.(*ast.Ident); ok {
- return i.Name
- }
- case *ast.IndexListExpr:
- if i, ok := v.X.(*ast.Ident); ok {
- return i.Name
- }
- }
- panic(fmt.Errorf("unsupported Error() receiver type %q", types.ExprString(e)))
- }
+ "golang.org/x/tools/go/analysis"
+)
- switch rt := f.Recv.List[0].Type; v := rt.(type) {
- case *ast.Ident, *ast.IndexExpr, *ast.IndexListExpr: // SomeError, SomeError[T], SomeError[T1, T2, ...]
- receiverType = unwrapIdentName(rt)
+var errorIface = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
- case *ast.StarExpr: // *SomeError, *SomeError[T], *SomeError[T1, T2, ...]
- receiverType = unwrapIdentName(v.X)
- }
+func exprImplementsError(pass *analysis.Pass, e ast.Expr) bool {
+ return typeImplementsError(pass.TypesInfo.TypeOf(e))
+}
- return receiverType, returnType.Name == "string"
+func typeImplementsError(t types.Type) bool {
+ return t != nil && types.Implements(t, errorIface)
}
func isValidErrorTypeName(s string) bool {
@@ -77,153 +41,12 @@ func isValidErrorArrayTypeName(s string) bool {
words := split(s)
wordsCnt := wordsCount(words)
- if wordsCnt["errors"] != 1 {
- return false
- }
- return words[len(words)-1] == "errors"
-}
-
-func isFuncReturningErr(fType *ast.FuncType, allTypes, errorTypes stringSet) bool {
- if fType == nil || fType.Results == nil || len(fType.Results.List) != 1 {
+ if wordsCnt["errors"] != 1 && wordsCnt["error"] != 1 {
return false
}
- var returnTypeName string
- switch rt := fType.Results.List[0].Type.(type) {
- case *ast.Ident:
- returnTypeName = rt.Name
- case *ast.StarExpr:
- if i, ok := rt.X.(*ast.Ident); ok {
- returnTypeName = i.Name
- }
- }
-
- return isErrorType(returnTypeName, allTypes, errorTypes)
-}
-
-func isErrorType(tName string, allTypes, errorTypes stringSet) bool {
- _, isUserType := allTypes[tName]
- _, isErrType := errorTypes[tName]
- return isErrType || (tName == "error" && !isUserType)
-}
-
-var knownErrConstructors = stringSet{
- "fmt.Errorf": {},
- "errors.Errorf": {},
- "errors.New": {},
- "errors.Newf": {},
- "errors.NewWithDepth": {},
- "errors.NewWithDepthf": {},
- "errors.NewAssertionErrorWithWrappedErrf": {},
-}
-
-func isSentinelError( //nolint:gocognit,gocyclo
- v *ast.ValueSpec,
- pkgAliases map[string]string,
- allTypes, errorTypes, errorFuncs stringSet,
-) (varName string, ok bool) {
- if len(v.Names) != 1 {
- return "", false
- }
- varName = v.Names[0].Name
-
- switch vv := v.Type.(type) {
- // var ErrEndOfFile error
- // var ErrEndOfFile SomeErrType
- case *ast.Ident:
- if isErrorType(vv.Name, allTypes, errorTypes) {
- return varName, true
- }
-
- // var ErrEndOfFile *SomeErrType
- case *ast.StarExpr:
- if i, ok := vv.X.(*ast.Ident); ok && isErrorType(i.Name, allTypes, errorTypes) {
- return varName, true
- }
- }
-
- if len(v.Values) != 1 {
- return "", false
- }
-
- switch vv := v.Values[0].(type) {
- case *ast.CallExpr:
- switch fun := vv.Fun.(type) {
- // var ErrEndOfFile = errors.New("end of file")
- case *ast.SelectorExpr:
- pkg, ok := fun.X.(*ast.Ident)
- if !ok {
- return "", false
- }
- pkgFun := fun.Sel
-
- pkgName := pkg.Name
- if a, ok := pkgAliases[pkgName]; ok {
- pkgName = a
- }
-
- _, ok = knownErrConstructors[pkgName+"."+pkgFun.Name]
- return varName, ok
-
- // var ErrEndOfFile = newErrEndOfFile()
- // var ErrEndOfFile = new(EndOfFileError)
- // const ErrEndOfFile = constError("end of file")
- // var statusCodeError = new(SomePtrError[string])
- case *ast.Ident:
- if isErrorType(fun.Name, allTypes, errorTypes) {
- return varName, true
- }
-
- if _, ok := errorFuncs[fun.Name]; ok {
- return varName, true
- }
-
- if fun.Name == "new" && len(vv.Args) == 1 {
- switch i := vv.Args[0].(type) {
- case *ast.Ident:
- return varName, isErrorType(i.Name, allTypes, errorTypes)
- case *ast.IndexExpr:
- if ii, ok := i.X.(*ast.Ident); ok {
- return varName, isErrorType(ii.Name, allTypes, errorTypes)
- }
- }
- }
-
- // var ErrEndOfFile = func() error { ... }
- case *ast.FuncLit:
- return varName, isFuncReturningErr(fun.Type, allTypes, errorTypes)
- }
-
- // var ErrEndOfFile = &EndOfFileError{}
- // var ErrOK = &SomePtrError[string]{Code: "200 OK"}
- case *ast.UnaryExpr:
- if vv.Op == token.AND { // &
- if lit, ok := vv.X.(*ast.CompositeLit); ok {
- switch i := lit.Type.(type) {
- case *ast.Ident:
- return varName, isErrorType(i.Name, allTypes, errorTypes)
- case *ast.IndexExpr:
- if ii, ok := i.X.(*ast.Ident); ok {
- return varName, isErrorType(ii.Name, allTypes, errorTypes)
- }
- }
- }
- }
-
- // var ErrEndOfFile = EndOfFileError{}
- // var ErrNotFound = SomeError[string]{Code: "Not Found"}
- case *ast.CompositeLit:
- switch i := vv.Type.(type) {
- case *ast.Ident:
- return varName, isErrorType(i.Name, allTypes, errorTypes)
- case *ast.IndexExpr:
- if ii, ok := i.X.(*ast.Ident); ok {
- return varName, isErrorType(ii.Name, allTypes, errorTypes)
- }
- }
- }
-
- return "", false
+ lastWord := words[len(words)-1]
+ return lastWord == "errors" || lastWord == "error"
}
func isValidErrorVarName(s string) bool {
diff --git a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go
index 5646ee9094..703cc1c39f 100644
--- a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go
+++ b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go
@@ -15,7 +15,8 @@ const (
name = "nilnil"
doc = "Checks that there is no simultaneous return of `nil` error and an invalid value."
- reportMsg = "return both the `nil` error and invalid value: use a sentinel error instead"
+ nilNilReportMsg = "return both a `nil` error and an invalid value: use a sentinel error instead"
+ notNilNotNilReportMsg = "return both a non-nil error and a valid value: use separate returns instead"
)
// New returns new nilnil analyzer.
@@ -28,18 +29,22 @@ func New() *analysis.Analyzer {
Run: n.run,
Requires: []*analysis.Analyzer{inspect.Analyzer},
}
- a.Flags.Var(&n.checkedTypes, "checked-types", "coma separated list")
+ a.Flags.Var(&n.checkedTypes, "checked-types", "comma separated list of return types to check")
+ a.Flags.BoolVar(&n.detectOpposite, "detect-opposite", false,
+ "in addition, detect opposite situation (simultaneous return of non-nil error and valid value)")
return a
}
type nilNil struct {
- checkedTypes checkedTypes
+ checkedTypes checkedTypes
+ detectOpposite bool
}
func newNilNil() *nilNil {
return &nilNil{
- checkedTypes: newDefaultCheckedTypes(),
+ checkedTypes: newDefaultCheckedTypes(),
+ detectOpposite: false,
}
}
@@ -87,22 +92,22 @@ func (n *nilNil) run(pass *analysis.Pass) (interface{}, error) {
}
ok, zv := n.isDangerNilType(fRes1Type)
- if !(ok && isErrorType(fRes2Type)) {
+ if !(ok && implementsError(fRes2Type)) {
return false
}
retVal, retErr := v.Results[0], v.Results[1]
- var needWarn bool
- switch zv {
- case zeroValueNil:
- needWarn = isNil(pass, retVal) && isNil(pass, retErr)
- case zeroValueZero:
- needWarn = isZero(retVal) && isNil(pass, retErr)
+ if ((zv == zeroValueNil) && isNil(pass, retVal) && isNil(pass, retErr)) ||
+ ((zv == zeroValueZero) && isZero(retVal) && isNil(pass, retErr)) {
+ pass.Reportf(v.Pos(), nilNilReportMsg)
+ return false
}
- if needWarn {
- pass.Reportf(v.Pos(), reportMsg)
+ if n.detectOpposite && (((zv == zeroValueNil) && !isNil(pass, retVal) && !isNil(pass, retErr)) ||
+ ((zv == zeroValueZero) && !isZero(retVal) && !isNil(pass, retErr))) {
+ pass.Reportf(v.Pos(), notNilNotNilReportMsg)
+ return false
}
}
@@ -120,7 +125,7 @@ const (
)
func (n *nilNil) isDangerNilType(t types.Type) (bool, zeroValue) {
- switch v := t.(type) {
+ switch v := types.Unalias(t).(type) {
case *types.Pointer:
return n.checkedTypes.Contains(ptrType), zeroValueNil
@@ -152,7 +157,7 @@ func (n *nilNil) isDangerNilType(t types.Type) (bool, zeroValue) {
var errorIface = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
-func isErrorType(t types.Type) bool {
+func implementsError(t types.Type) bool {
_, ok := t.Underlying().(*types.Interface)
return ok && types.Implements(t, errorIface)
}
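
A rough illustration of what the updated nilnil checker flags, including the case enabled by the new detect-opposite flag; the User type and functions are hypothetical:

package repo

import "errors"

type User struct{ Name string }

var errNotFound = errors.New("user not found")

// findUser shows the default nilnil report: a nil error returned together
// with an invalid (nil) value.
func findUser(id int) (*User, error) {
	if id <= 0 {
		return nil, nil // reported: use a sentinel error instead
	}
	return &User{Name: "anon"}, nil
}

// findUserOpposite shows the case reported only when detect-opposite is
// enabled: a non-nil error returned together with a valid value.
func findUserOpposite(id int) (*User, error) {
	if id <= 0 {
		return &User{Name: "anon"}, errNotFound // reported with detect-opposite
	}
	return &User{Name: "anon"}, nil
}
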
diff --git a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go
index c9b8e3eedc..90ae548f30 100644
--- a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go
+++ b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go
@@ -8,11 +8,11 @@ import (
func newDefaultCheckedTypes() checkedTypes {
return checkedTypes{
- ptrType: {},
+ chanType: {},
funcType: {},
ifaceType: {},
mapType: {},
- chanType: {},
+ ptrType: {},
uintptrType: {},
unsafeptrType: {},
}
diff --git a/vendor/github.com/Antonboom/testifylint/analyzer/checkers_factory.go b/vendor/github.com/Antonboom/testifylint/analyzer/checkers_factory.go
index fcab8e3117..df04dfdc5f 100644
--- a/vendor/github.com/Antonboom/testifylint/analyzer/checkers_factory.go
+++ b/vendor/github.com/Antonboom/testifylint/analyzer/checkers_factory.go
@@ -55,6 +55,10 @@ func newCheckers(cfg config.Config) ([]checkers.RegularChecker, []checkers.Advan
case *checkers.ExpectedActual:
c.SetExpVarPattern(cfg.ExpectedActual.ExpVarPattern.Regexp)
+ case *checkers.Formatter:
+ c.SetCheckFormatString(cfg.Formatter.CheckFormatString)
+ c.SetRequireFFuncs(cfg.Formatter.RequireFFuncs)
+
case *checkers.GoRequire:
c.SetIgnoreHTTPHandlers(cfg.GoRequire.IgnoreHTTPHandlers)
diff --git a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/encoded.go b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/encoded.go
new file mode 100644
index 0000000000..cafc283e6f
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/encoded.go
@@ -0,0 +1,46 @@
+package analysisutil
+
+import "strings"
+
+var whitespaceRemover = strings.NewReplacer("\n", "", "\\n", "", "\t", "", "\\t", "", " ", "")
+
+// IsJSONLike returns true if the string has JSON format features.
+// A positive result can be returned for invalid JSON as well.
+func IsJSONLike(s string) bool {
+ s = whitespaceRemover.Replace(unescape(s))
+
+ var startMatch bool
+ for _, prefix := range []string{
+ `{{`, `{[`, `{"`,
+ `[{{`, `[{[`, `[{"`,
+ } {
+ if strings.HasPrefix(s, prefix) {
+ startMatch = true
+ break
+ }
+ }
+ if !startMatch {
+ return false
+ }
+
+ for _, keyValue := range []string{`":{`, `":[`, `":"`} {
+ if strings.Contains(s, keyValue) {
+ return true
+ }
+ }
+ return false
+
+ // NOTE(a.telyshev): We do not check the end of the string, because this is usually a field for typos.
+ // And one of the reasons for using JSON-specific assertions is to catch typos like this.
+}
+
+func unescape(s string) string {
+ s = strings.ReplaceAll(s, `\"`, `"`)
+ s = unquote(s, `"`)
+ s = unquote(s, "`")
+ return s
+}
+
+func unquote(s string, q string) string {
+ return strings.TrimLeft(strings.TrimRight(s, q), q)
+}
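
A condensed, standalone restatement of the IsJSONLike heuristic above, for illustration only; the real helper additionally unescapes quotes and trims backticks:

package main

import (
	"fmt"
	"strings"
)

// isJSONLike condenses the heuristic above: after dropping whitespace, the
// string must start like a JSON object/array and contain a `"key": value`
// shape somewhere.
func isJSONLike(s string) bool {
	s = strings.NewReplacer("\n", "", "\t", "", " ", "").Replace(s)

	start := false
	for _, prefix := range []string{`{{`, `{[`, `{"`, `[{{`, `[{[`, `[{"`} {
		if strings.HasPrefix(s, prefix) {
			start = true
			break
		}
	}
	if !start {
		return false
	}

	for _, keyValue := range []string{`":{`, `":[`, `":"`} {
		if strings.Contains(s, keyValue) {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(isJSONLike(`{"foo": "bar"}`)) // true
	fmt.Println(isJSONLike("plain text"))     // false
}
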
diff --git a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go
index 3fc1f42b86..d552609182 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go
@@ -2,6 +2,7 @@ package analysisutil
import (
"go/ast"
+ "slices"
"strconv"
)
@@ -17,11 +18,8 @@ func Imports(file *ast.File, pkgs ...string) bool {
if err != nil {
continue
}
- // NOTE(a.telyshev): Don't use `slices.Contains` to keep the minimum module version 1.20.
- for _, pkg := range pkgs { // Small O(n).
- if pkg == path {
- return true
- }
+ if slices.Contains(pkgs, path) { // Small O(n).
+ return true
}
}
return false
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go
index 403691e270..56cd64e078 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go
@@ -53,7 +53,7 @@ func (checker BlankImport) Check(pass *analysis.Pass, _ *inspector.Inspector) (d
}
msg := fmt.Sprintf("avoid blank import of %s as it does nothing", pkg)
- diagnostics = append(diagnostics, *newDiagnostic(checker.Name(), imp, msg, nil))
+ diagnostics = append(diagnostics, *newDiagnostic(checker.Name(), imp, msg))
}
}
return diagnostics
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go
index d125c43f92..67959b633b 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go
@@ -49,13 +49,11 @@ func (checker BoolCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.
}
survivingArg = newBoolCast(survivingArg)
}
- return newUseFunctionDiagnostic(checker.Name(), call, proposed,
- newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
- Pos: replaceStart,
- End: replaceEnd,
- NewText: analysisutil.NodeBytes(pass.Fset, survivingArg),
- }),
- )
+ return newUseFunctionDiagnostic(checker.Name(), call, proposed, analysis.TextEdit{
+ Pos: replaceStart,
+ End: replaceEnd,
+ NewText: analysisutil.NodeBytes(pass.Fset, survivingArg),
+ })
}
newUseTrueDiagnostic := func(survivingArg ast.Expr, replaceStart, replaceEnd token.Pos) *analysis.Diagnostic {
@@ -74,7 +72,7 @@ func (checker BoolCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.
survivingArg = newBoolCast(survivingArg)
}
return newDiagnostic(checker.Name(), call, "need to simplify the assertion",
- &analysis.SuggestedFix{
+ analysis.SuggestedFix{
Message: "Simplify the assertion",
TextEdits: []analysis.TextEdit{{
Pos: replaceStart,
@@ -106,7 +104,7 @@ func (checker BoolCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.
case xor(t1, t2):
survivingArg, _ := anyVal([]bool{t1, t2}, arg2, arg1)
if call.Fn.NameFTrimmed == "Exactly" && !isBuiltinBool(pass, survivingArg) {
- // NOTE(a.telyshev): `Exactly` assumes no type casting.
+ // NOTE(a.telyshev): `Exactly` assumes no type conversion.
return nil
}
return newUseTrueDiagnostic(survivingArg, arg1.Pos(), arg2.End())
@@ -114,7 +112,7 @@ func (checker BoolCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.
case xor(f1, f2):
survivingArg, _ := anyVal([]bool{f1, f2}, arg2, arg1)
if call.Fn.NameFTrimmed == "Exactly" && !isBuiltinBool(pass, survivingArg) {
- // NOTE(a.telyshev): `Exactly` assumes no type casting.
+ // NOTE(a.telyshev): `Exactly` assumes no type conversion.
return nil
}
return newUseFalseDiagnostic(survivingArg, arg1.Pos(), arg2.End())
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/call_meta.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/call_meta.go
index 94623da150..96b5b19b09 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/call_meta.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/call_meta.go
@@ -6,6 +6,7 @@ import (
"strings"
"golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/types/typeutil"
"github.com/Antonboom/testifylint/internal/analysisutil"
"github.com/Antonboom/testifylint/internal/testify"
@@ -15,6 +16,8 @@ import (
//
// assert.Equal(t, 42, result, "helpful comment")
type CallMeta struct {
+ // Call stores the original AST call expression.
+ Call *ast.CallExpr
// Range contains start and end position of assertion call.
analysis.Range
// IsPkg true if this is package (not object) call.
@@ -49,6 +52,8 @@ type FnMeta struct {
NameFTrimmed string
// IsFmt is true if function is formatted, e.g. "Equalf".
IsFmt bool
+ // Signature represents assertion signature.
+ Signature *types.Signature
}
// NewCallMeta returns meta information about testify assertion call.
@@ -66,16 +71,16 @@ func NewCallMeta(pass *analysis.Pass, ce *ast.CallExpr) *CallMeta {
// s.Assert().Equal -> method of *assert.Assertions -> package assert ("vendor/github.com/stretchr/testify/assert")
// s.Equal -> method of *assert.Assertions -> package assert ("vendor/github.com/stretchr/testify/assert")
// reqObj.Falsef -> method of *require.Assertions -> package require ("vendor/github.com/stretchr/testify/require")
- if sel, ok := pass.TypesInfo.Selections[se]; ok {
+ if sel, isSel := pass.TypesInfo.Selections[se]; isSel {
return sel.Obj().Pkg(), false
}
// Examples:
// assert.False -> assert -> package assert ("vendor/github.com/stretchr/testify/assert")
// require.NotEqualf -> require -> package require ("vendor/github.com/stretchr/testify/require")
- if id, ok := se.X.(*ast.Ident); ok {
+ if id, isIdent := se.X.(*ast.Ident); isIdent {
if selObj := pass.TypesInfo.ObjectOf(id); selObj != nil {
- if pkg, ok := selObj.(*types.PkgName); ok {
+ if pkg, isPkgName := selObj.(*types.PkgName); isPkgName {
return pkg.Imported(), true
}
}
@@ -92,7 +97,13 @@ func NewCallMeta(pass *analysis.Pass, ce *ast.CallExpr) *CallMeta {
return nil
}
+ funcObj, ok := typeutil.Callee(pass.TypesInfo, ce).(*types.Func)
+ if !ok {
+ return nil
+ }
+
return &CallMeta{
+ Call: ce,
Range: ce,
IsPkg: isPkgCall,
IsAssert: isAssert,
@@ -103,6 +114,7 @@ func NewCallMeta(pass *analysis.Pass, ce *ast.CallExpr) *CallMeta {
Name: fnName,
NameFTrimmed: strings.TrimSuffix(fnName, "f"),
IsFmt: strings.HasSuffix(fnName, "f"),
+ Signature: funcObj.Type().(*types.Signature), // NOTE(a.telyshev): Func's Type() is always a *Signature.
},
Args: trimTArg(pass, ce.Args),
ArgsRaw: ce.Args,
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go
index 84b702b871..f881be4f20 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go
@@ -13,17 +13,23 @@ var registry = checkersRegistry{
{factory: asCheckerFactory(NewLen), enabledByDefault: true},
{factory: asCheckerFactory(NewNegativePositive), enabledByDefault: true},
{factory: asCheckerFactory(NewCompares), enabledByDefault: true},
+ {factory: asCheckerFactory(NewContains), enabledByDefault: true},
{factory: asCheckerFactory(NewErrorNil), enabledByDefault: true},
{factory: asCheckerFactory(NewNilCompare), enabledByDefault: true},
{factory: asCheckerFactory(NewErrorIsAs), enabledByDefault: true},
+ {factory: asCheckerFactory(NewEncodedCompare), enabledByDefault: true},
{factory: asCheckerFactory(NewExpectedActual), enabledByDefault: true},
+ {factory: asCheckerFactory(NewRegexp), enabledByDefault: true},
{factory: asCheckerFactory(NewSuiteExtraAssertCall), enabledByDefault: true},
{factory: asCheckerFactory(NewSuiteDontUsePkg), enabledByDefault: true},
{factory: asCheckerFactory(NewUselessAssert), enabledByDefault: true},
+ {factory: asCheckerFactory(NewFormatter), enabledByDefault: true},
// Advanced checkers.
{factory: asCheckerFactory(NewBlankImport), enabledByDefault: true},
{factory: asCheckerFactory(NewGoRequire), enabledByDefault: true},
{factory: asCheckerFactory(NewRequireError), enabledByDefault: true},
+ {factory: asCheckerFactory(NewSuiteBrokenParallel), enabledByDefault: true},
+ {factory: asCheckerFactory(NewSuiteSubtestRun), enabledByDefault: true},
{factory: asCheckerFactory(NewSuiteTHelper), enabledByDefault: false},
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go
index bdde03d95e..f0c4013f16 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go
@@ -61,7 +61,9 @@ func (checker Compares) Check(pass *analysis.Pass, call *CallMeta) *analysis.Dia
return nil
}
- if isPointer(pass, be.X) && isPointer(pass, be.Y) {
+ _, xp := isPointer(pass, be.X)
+ _, yp := isPointer(pass, be.Y)
+ if xp && yp {
switch proposedFn {
case "Equal":
proposedFn = "Same"
@@ -72,12 +74,11 @@ func (checker Compares) Check(pass *analysis.Pass, call *CallMeta) *analysis.Dia
a, b := be.X, be.Y
return newUseFunctionDiagnostic(checker.Name(), call, proposedFn,
- newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{
+ analysis.TextEdit{
Pos: be.X.Pos(),
End: be.Y.End(),
NewText: formatAsCallArgs(pass, a, b),
- }),
- )
+ })
}
var tokenToProposedFnInsteadOfTrue = map[token.Token]string{
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/contains.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/contains.go
new file mode 100644
index 0000000000..07f76c6e4f
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/contains.go
@@ -0,0 +1,71 @@
+package checkers
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+// Contains detects situations like
+//
+// assert.True(t, strings.Contains(a, "abc123"))
+// assert.False(t, !strings.Contains(a, "abc123"))
+//
+// assert.False(t, strings.Contains(a, "abc123"))
+// assert.True(t, !strings.Contains(a, "abc123"))
+//
+// and requires
+//
+// assert.Contains(t, a, "abc123")
+// assert.NotContains(t, a, "abc123")
+type Contains struct{}
+
+// NewContains constructs Contains checker.
+func NewContains() Contains { return Contains{} }
+func (Contains) Name() string { return "contains" }
+
+func (checker Contains) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+ if len(call.Args) < 1 {
+ return nil
+ }
+
+ expr := call.Args[0]
+ unpacked, isNeg := isNegation(expr)
+ if isNeg {
+ expr = unpacked
+ }
+
+ ce, ok := expr.(*ast.CallExpr)
+ if !ok || len(ce.Args) != 2 {
+ return nil
+ }
+
+ if !isStringsContainsCall(pass, ce) {
+ return nil
+ }
+
+ var proposed string
+ switch call.Fn.NameFTrimmed {
+ default:
+ return nil
+
+ case "True":
+ proposed = "Contains"
+ if isNeg {
+ proposed = "NotContains"
+ }
+
+ case "False":
+ proposed = "NotContains"
+ if isNeg {
+ proposed = "Contains"
+ }
+ }
+
+ return newUseFunctionDiagnostic(checker.Name(), call, proposed,
+ analysis.TextEdit{
+ Pos: call.Args[0].Pos(),
+ End: call.Args[0].End(),
+ NewText: formatAsCallArgs(pass, ce.Args[0], ce.Args[1]),
+ })
+}
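
Illustrative before/after for the new contains checker, assuming a regular testify test file; the test name and string values are invented:

package example_test

import (
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestBody(t *testing.T) {
	body := "hello abc123 world"

	assert.True(t, strings.Contains(body, "abc123")) // reported by the contains checker
	assert.Contains(t, body, "abc123")               // suggested replacement

	assert.False(t, strings.Contains(body, "zzz")) // reported
	assert.NotContains(t, body, "zzz")             // suggested replacement
}
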
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go
index ca7ff41dbb..71657fe117 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go
@@ -21,11 +21,16 @@ import (
// assert.Greater(t, 0, len(arr))
// assert.Less(t, len(arr), 1)
// assert.Greater(t, 1, len(arr))
+// assert.Zero(t, len(arr))
+// assert.Empty(t, len(arr))
//
// assert.NotEqual(t, 0, len(arr))
// assert.NotEqualValues(t, 0, len(arr))
// assert.Less(t, 0, len(arr))
// assert.Greater(t, len(arr), 0)
+// assert.Positive(t, len(arr))
+// assert.NotZero(t, len(arr))
+// assert.NotEmpty(t, len(arr))
//
// and requires
//
@@ -48,18 +53,34 @@ func (checker Empty) checkEmpty(pass *analysis.Pass, call *CallMeta) *analysis.D
newUseEmptyDiagnostic := func(replaceStart, replaceEnd token.Pos, replaceWith ast.Expr) *analysis.Diagnostic {
const proposed = "Empty"
return newUseFunctionDiagnostic(checker.Name(), call, proposed,
- newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+ analysis.TextEdit{
Pos: replaceStart,
End: replaceEnd,
NewText: analysisutil.NodeBytes(pass.Fset, replaceWith),
- }),
- )
+ })
+ }
+
+ if len(call.Args) == 0 {
+ return nil
+ }
+ a := call.Args[0]
+
+ switch call.Fn.NameFTrimmed {
+ case "Zero":
+ if lenArg, ok := isBuiltinLenCall(pass, a); ok {
+ return newUseEmptyDiagnostic(a.Pos(), a.End(), lenArg)
+ }
+
+ case "Empty":
+ if lenArg, ok := isBuiltinLenCall(pass, a); ok {
+ return newRemoveLenDiagnostic(pass, checker.Name(), call, a, lenArg)
+ }
}
if len(call.Args) < 2 {
return nil
}
- a, b := call.Args[0], call.Args[1]
+ b := call.Args[1]
switch call.Fn.NameFTrimmed {
case "Len":
@@ -102,18 +123,34 @@ func (checker Empty) checkNotEmpty(pass *analysis.Pass, call *CallMeta) *analysi
newUseNotEmptyDiagnostic := func(replaceStart, replaceEnd token.Pos, replaceWith ast.Expr) *analysis.Diagnostic {
const proposed = "NotEmpty"
return newUseFunctionDiagnostic(checker.Name(), call, proposed,
- newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+ analysis.TextEdit{
Pos: replaceStart,
End: replaceEnd,
NewText: analysisutil.NodeBytes(pass.Fset, replaceWith),
- }),
- )
+ })
+ }
+
+ if len(call.Args) == 0 {
+ return nil
+ }
+ a := call.Args[0]
+
+ switch call.Fn.NameFTrimmed {
+ case "NotZero", "Positive":
+ if lenArg, ok := isBuiltinLenCall(pass, a); ok {
+ return newUseNotEmptyDiagnostic(a.Pos(), a.End(), lenArg)
+ }
+
+ case "NotEmpty":
+ if lenArg, ok := isBuiltinLenCall(pass, a); ok {
+ return newRemoveLenDiagnostic(pass, checker.Name(), call, a, lenArg)
+ }
}
if len(call.Args) < 2 {
return nil
}
- a, b := call.Args[0], call.Args[1]
+ b := call.Args[1]
switch call.Fn.NameFTrimmed {
case "NotEqual", "NotEqualValues":
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/encoded_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/encoded_compare.go
new file mode 100644
index 0000000000..1464fd640b
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/encoded_compare.go
@@ -0,0 +1,101 @@
+package checkers
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+// EncodedCompare detects situations like
+//
+// assert.Equal(t, `{"foo": "bar"}`, body)
+// assert.EqualValues(t, `{"foo": "bar"}`, body)
+// assert.Exactly(t, `{"foo": "bar"}`, body)
+// assert.Equal(t, expectedJSON, resultJSON)
+// assert.Equal(t, expBodyConst, w.Body.String())
+// assert.Equal(t, fmt.Sprintf(`{"value":"%s"}`, hexString), result)
+// assert.Equal(t, "{}", json.RawMessage(resp))
+// assert.Equal(t, expJSON, strings.Trim(string(resultJSONBytes), "\n")) // + Replace, ReplaceAll, TrimSpace
+//
+// assert.Equal(t, expectedYML, conf)
+//
+// and requires
+//
+// assert.JSONEq(t, `{"foo": "bar"}`, body)
+// assert.YAMLEq(t, expectedYML, conf)
+type EncodedCompare struct{}
+
+// NewEncodedCompare constructs EncodedCompare checker.
+func NewEncodedCompare() EncodedCompare { return EncodedCompare{} }
+func (EncodedCompare) Name() string { return "encoded-compare" }
+
+func (checker EncodedCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+ switch call.Fn.NameFTrimmed {
+ case "Equal", "EqualValues", "Exactly":
+ default:
+ return nil
+ }
+
+ if len(call.Args) < 2 {
+ return nil
+ }
+ lhs, rhs := call.Args[0], call.Args[1]
+
+ a, aIsExplicitJSON := checker.unwrap(pass, call.Args[0])
+ b, bIsExplicitJSON := checker.unwrap(pass, call.Args[1])
+
+ var proposed string
+ switch {
+ case aIsExplicitJSON, bIsExplicitJSON, isJSONStyleExpr(pass, a), isJSONStyleExpr(pass, b):
+ proposed = "JSONEq"
+ case isYAMLStyleExpr(pass, a), isYAMLStyleExpr(pass, b):
+ proposed = "YAMLEq"
+ }
+
+ if proposed != "" {
+ return newUseFunctionDiagnostic(checker.Name(), call, proposed,
+ analysis.TextEdit{
+ Pos: lhs.Pos(),
+ End: lhs.End(),
+ NewText: formatWithStringCastForBytes(pass, a),
+ },
+ analysis.TextEdit{
+ Pos: rhs.Pos(),
+ End: rhs.End(),
+ NewText: formatWithStringCastForBytes(pass, b),
+ },
+ )
+ }
+ return nil
+}
+
+// unwrap unwraps expression from string, []byte, strings.Replace(All), strings.Trim(Space) and json.RawMessage conversions.
+// Returns true in the second argument, if json.RawMessage was in the chain.
+func (checker EncodedCompare) unwrap(pass *analysis.Pass, e ast.Expr) (ast.Expr, bool) {
+ ce, ok := e.(*ast.CallExpr)
+ if !ok {
+ return e, false
+ }
+ if len(ce.Args) == 0 {
+ return e, false
+ }
+
+ if isJSONRawMessageCast(pass, ce) {
+ if isNil(ce.Args[0]) { // NOTE(a.telyshev): Ignore json.RawMessage(nil) case.
+ return checker.unwrap(pass, ce.Args[0])
+ }
+
+ v, _ := checker.unwrap(pass, ce.Args[0])
+ return v, true
+ }
+
+ if isIdentWithName("string", ce.Fun) ||
+ isByteArray(ce.Fun) ||
+ isStringsReplaceCall(pass, ce) ||
+ isStringsReplaceAllCall(pass, ce) ||
+ isStringsTrimCall(pass, ce) ||
+ isStringsTrimSpaceCall(pass, ce) {
+ return checker.unwrap(pass, ce.Args[0])
+ }
+ return e, false
+}
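
Illustrative before/after for the new encoded-compare checker, assuming a testify test file; the JSON payload is invented:

package example_test

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestResponse(t *testing.T) {
	body := `{"foo": "bar"}`

	assert.Equal(t, `{"foo": "bar"}`, body)  // reported: byte-for-byte JSON comparison is brittle
	assert.JSONEq(t, `{"foo": "bar"}`, body) // suggested replacement; ignores formatting and key order
}
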
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go
index ab92c2ec0b..f2812c9393 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go
@@ -67,12 +67,11 @@ func (checker ErrorIsAs) Check(pass *analysis.Pass, call *CallMeta) *analysis.Di
}
if proposed != "" {
return newUseFunctionDiagnostic(checker.Name(), call, proposed,
- newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+ analysis.TextEdit{
Pos: ce.Pos(),
End: ce.End(),
NewText: formatAsCallArgs(pass, ce.Args[0], ce.Args[1]),
- }),
- )
+ })
}
case "False":
@@ -91,12 +90,11 @@ func (checker ErrorIsAs) Check(pass *analysis.Pass, call *CallMeta) *analysis.Di
if isErrorsIsCall(pass, ce) {
const proposed = "NotErrorIs"
return newUseFunctionDiagnostic(checker.Name(), call, proposed,
- newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+ analysis.TextEdit{
Pos: ce.Pos(),
End: ce.End(),
NewText: formatAsCallArgs(pass, ce.Args[0], ce.Args[1]),
- }),
- )
+ })
}
case "ErrorAs":
@@ -127,15 +125,15 @@ func (checker ErrorIsAs) Check(pass *analysis.Pass, call *CallMeta) *analysis.Di
pt, ok := tv.Type.Underlying().(*types.Pointer)
if !ok {
- return newDiagnostic(checker.Name(), call, defaultReport, nil)
+ return newDiagnostic(checker.Name(), call, defaultReport)
}
if pt.Elem() == errorType {
- return newDiagnostic(checker.Name(), call, errorPtrReport, nil)
+ return newDiagnostic(checker.Name(), call, errorPtrReport)
}
_, isInterface := pt.Elem().Underlying().(*types.Interface)
if !isInterface && !types.Implements(pt.Elem(), errorIface) {
- return newDiagnostic(checker.Name(), call, defaultReport, nil)
+ return newDiagnostic(checker.Name(), call, defaultReport)
}
}
return nil
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go
index 1e56d222ab..b9f28df218 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go
@@ -12,12 +12,16 @@ import (
// ErrorNil detects situations like
//
// assert.Nil(t, err)
-// assert.NotNil(t, err)
+// assert.Empty(t, err)
+// assert.Zero(t, err)
// assert.Equal(t, nil, err)
// assert.EqualValues(t, nil, err)
// assert.Exactly(t, nil, err)
// assert.ErrorIs(t, err, nil)
//
+// assert.NotNil(t, err)
+// assert.NotEmpty(t, err)
+// assert.NotZero(t, err)
// assert.NotEqual(t, nil, err)
// assert.NotEqualValues(t, nil, err)
// assert.NotErrorIs(t, err, nil)
@@ -40,12 +44,12 @@ func (checker ErrorNil) Check(pass *analysis.Pass, call *CallMeta) *analysis.Dia
proposedFn, survivingArg, replacementEndPos := func() (string, ast.Expr, token.Pos) {
switch call.Fn.NameFTrimmed {
- case "Nil":
+ case "Nil", "Empty", "Zero":
if len(call.Args) >= 1 && isError(pass, call.Args[0]) {
return noErrorFn, call.Args[0], call.Args[0].End()
}
- case "NotNil":
+ case "NotNil", "NotEmpty", "NotZero":
if len(call.Args) >= 1 && isError(pass, call.Args[0]) {
return errorFn, call.Args[0], call.Args[0].End()
}
@@ -81,12 +85,11 @@ func (checker ErrorNil) Check(pass *analysis.Pass, call *CallMeta) *analysis.Dia
if proposedFn != "" {
return newUseFunctionDiagnostic(checker.Name(), call, proposedFn,
- newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{
+ analysis.TextEdit{
Pos: call.Args[0].Pos(),
End: replacementEndPos,
NewText: analysisutil.NodeBytes(pass.Fset, survivingArg),
- }),
- )
+ })
}
return nil
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go
index bcf89bd201..351d675cef 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go
@@ -14,7 +14,7 @@ import (
var DefaultExpectedVarPattern = regexp.MustCompile(
`(^(exp(ected)?|want(ed)?)([A-Z]\w*)?$)|(^(\w*[a-z])?(Exp(ected)?|Want(ed)?)$)`)
-// ExpectedActual detects situation like
+// ExpectedActual detects situations like
//
// assert.Equal(t, result, expected)
// assert.EqualExportedValues(t, resultObj, User{Name: "Anton"})
@@ -87,7 +87,7 @@ func (checker ExpectedActual) Check(pass *analysis.Pass, call *CallMeta) *analys
first, second := call.Args[0], call.Args[1]
if checker.isWrongExpectedActualOrder(pass, first, second) {
- return newDiagnostic(checker.Name(), call, "need to reverse actual and expected values", &analysis.SuggestedFix{
+ return newDiagnostic(checker.Name(), call, "need to reverse actual and expected values", analysis.SuggestedFix{
Message: "Reverse actual and expected values",
TextEdits: []analysis.TextEdit{
{
@@ -130,9 +130,9 @@ func (checker ExpectedActual) isExpectedValueCandidate(pass *analysis.Pass, expr
return isBasicLit(expr) ||
isUntypedConst(pass, expr) ||
isTypedConst(pass, expr) ||
- isIdentNamedAsExpected(checker.expVarPattern, expr) ||
- isStructVarNamedAsExpected(checker.expVarPattern, expr) ||
- isStructFieldNamedAsExpected(checker.expVarPattern, expr)
+ isIdentNamedAfterPattern(checker.expVarPattern, expr) ||
+ isStructVarNamedAfterPattern(checker.expVarPattern, expr) ||
+ isStructFieldNamedAfterPattern(checker.expVarPattern, expr)
}
func isParenExpr(ce *ast.CallExpr) bool {
@@ -158,7 +158,7 @@ func isCastedBasicLitOrExpectedValue(ce *ast.CallExpr, pattern *regexp.Regexp) b
"int", "int8", "int16", "int32", "int64",
"float32", "float64",
"rune", "string":
- return isBasicLit(ce.Args[0]) || isIdentNamedAsExpected(pattern, ce.Args[0])
+ return isBasicLit(ce.Args[0]) || isIdentNamedAfterPattern(pattern, ce.Args[0])
}
return false
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go
index df35674207..6bc22cd021 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go
@@ -7,7 +7,7 @@ import (
"golang.org/x/tools/go/analysis"
)
-// FloatCompare detects situation like
+// FloatCompare detects situations like
//
// assert.Equal(t, 42.42, result)
// assert.EqualValues(t, 42.42, result)
@@ -44,7 +44,7 @@ func (checker FloatCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis
if call.Fn.IsFmt {
format = "use %s.InEpsilonf (or InDeltaf)"
}
- return newDiagnostic(checker.Name(), call, fmt.Sprintf(format, call.SelectorXStr), nil)
+ return newDiagnostic(checker.Name(), call, fmt.Sprintf(format, call.SelectorXStr))
}
return nil
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/formatter.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/formatter.go
new file mode 100644
index 0000000000..7ff4de470a
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/formatter.go
@@ -0,0 +1,190 @@
+package checkers
+
+import (
+ "go/types"
+ "strconv"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/Antonboom/testifylint/internal/analysisutil"
+ "github.com/Antonboom/testifylint/internal/checkers/printf"
+ "github.com/Antonboom/testifylint/internal/testify"
+)
+
+// Formatter detects situations like
+//
+// assert.ElementsMatch(t, certConfig.Org, csr.Subject.Org, "organizations not equal")
+// assert.Error(t, err, fmt.Sprintf("Profile %s should not be valid", test.profile))
+// assert.Errorf(t, err, fmt.Sprintf("test %s", test.testName))
+// assert.Truef(t, targetTs.Equal(ts), "the timestamp should be as expected (%s) but was %s", targetTs)
+// ...
+//
+// and requires
+//
+// assert.ElementsMatchf(t, certConfig.Org, csr.Subject.Org, "organizations not equal")
+// assert.Errorf(t, err, "Profile %s should not be valid", test.profile)
+// assert.Errorf(t, err, "test %s", test.testName)
+// assert.Truef(t, targetTs.Equal(ts), "the timestamp should be as expected (%s) but was %s", targetTs, ts)
+type Formatter struct {
+ checkFormatString bool
+ requireFFuncs bool
+}
+
+// NewFormatter constructs Formatter checker.
+func NewFormatter() *Formatter {
+ return &Formatter{
+ checkFormatString: true,
+ requireFFuncs: false,
+ }
+}
+
+func (Formatter) Name() string { return "formatter" }
+
+func (checker *Formatter) SetCheckFormatString(v bool) *Formatter {
+ checker.checkFormatString = v
+ return checker
+}
+
+func (checker *Formatter) SetRequireFFuncs(v bool) *Formatter {
+ checker.requireFFuncs = v
+ return checker
+}
+
+func (checker Formatter) Check(pass *analysis.Pass, call *CallMeta) (result *analysis.Diagnostic) {
+ if call.Fn.IsFmt {
+ return checker.checkFmtAssertion(pass, call)
+ }
+ return checker.checkNotFmtAssertion(pass, call)
+}
+
+func (checker Formatter) checkNotFmtAssertion(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+ msgAndArgsPos, ok := isPrintfLikeCall(pass, call)
+ if !ok {
+ return nil
+ }
+
+ fFunc := call.Fn.Name + "f"
+
+ if msgAndArgsPos == len(call.ArgsRaw)-1 {
+ msgAndArgs := call.ArgsRaw[msgAndArgsPos]
+ if args, ok := isFmtSprintfCall(pass, msgAndArgs); ok {
+ if checker.requireFFuncs {
+ return newRemoveFnAndUseDiagnostic(pass, checker.Name(), call, fFunc,
+ "fmt.Sprintf", msgAndArgs, args...)
+ }
+ return newRemoveSprintfDiagnostic(pass, checker.Name(), call, msgAndArgs, args)
+ }
+ }
+
+ if checker.requireFFuncs {
+ return newUseFunctionDiagnostic(checker.Name(), call, fFunc)
+ }
+ return nil
+}
+
+func (checker Formatter) checkFmtAssertion(pass *analysis.Pass, call *CallMeta) (result *analysis.Diagnostic) {
+ formatPos := getMsgPosition(call.Fn.Signature)
+ if formatPos < 0 {
+ return nil
+ }
+
+ msg := call.ArgsRaw[formatPos]
+
+ if formatPos == len(call.ArgsRaw)-1 {
+ if args, ok := isFmtSprintfCall(pass, msg); ok {
+ return newRemoveSprintfDiagnostic(pass, checker.Name(), call, msg, args)
+ }
+ }
+
+ if checker.checkFormatString {
+ report := pass.Report
+ defer func() { pass.Report = report }()
+
+ pass.Report = func(d analysis.Diagnostic) {
+ result = newDiagnostic(checker.Name(), call, d.Message)
+ }
+
+ format, err := strconv.Unquote(analysisutil.NodeString(pass.Fset, msg))
+ if err != nil {
+ return nil
+ }
+ printf.CheckPrintf(pass, call.Call, call.String(), format, formatPos)
+ }
+ return result
+}
+
+func isPrintfLikeCall(pass *analysis.Pass, call *CallMeta) (int, bool) {
+ msgAndArgsPos := getMsgAndArgsPosition(call.Fn.Signature)
+ if msgAndArgsPos <= 0 {
+ return -1, false
+ }
+
+ if !(len(call.ArgsRaw) > msgAndArgsPos && hasStringType(pass, call.ArgsRaw[msgAndArgsPos])) {
+ return -1, false
+ }
+
+ if !assertHasFormattedAnalogue(pass, call) {
+ return -1, false
+ }
+
+ return msgAndArgsPos, true
+}
+
+func assertHasFormattedAnalogue(pass *analysis.Pass, call *CallMeta) bool {
+ if fn := analysisutil.ObjectOf(pass.Pkg, testify.AssertPkgPath, call.Fn.Name+"f"); fn != nil {
+ return true
+ }
+
+ if fn := analysisutil.ObjectOf(pass.Pkg, testify.RequirePkgPath, call.Fn.Name+"f"); fn != nil {
+ return true
+ }
+
+ recv := call.Fn.Signature.Recv()
+ if recv == nil {
+ return false
+ }
+
+ recvT := recv.Type()
+ if ptr, ok := recv.Type().(*types.Pointer); ok {
+ recvT = ptr.Elem()
+ }
+
+ suite, ok := recvT.(*types.Named)
+ if !ok {
+ return false
+ }
+ for i := 0; i < suite.NumMethods(); i++ {
+ if suite.Method(i).Name() == call.Fn.Name+"f" {
+ return true
+ }
+ }
+
+ return false
+}
+
+func getMsgAndArgsPosition(sig *types.Signature) int {
+ params := sig.Params()
+ if params.Len() < 1 {
+ return -1
+ }
+
+ lastIdx := params.Len() - 1
+ lastParam := params.At(lastIdx)
+
+ _, isSlice := lastParam.Type().(*types.Slice)
+ if lastParam.Name() == "msgAndArgs" && isSlice {
+ return lastIdx
+ }
+ return -1
+}
+
+func getMsgPosition(sig *types.Signature) int {
+ for i := 0; i < sig.Params().Len(); i++ {
+ param := sig.Params().At(i)
+
+ if b, ok := param.Type().(*types.Basic); ok && b.Kind() == types.String && param.Name() == "msg" {
+ return i
+ }
+ }
+ return -1
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go
index 060c960330..8b0d39999e 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go
@@ -142,11 +142,11 @@ func (checker GoRequire) Check(pass *analysis.Pass, inspector *inspector.Inspect
if testifyCall != nil {
switch checker.checkCall(testifyCall) {
case goRequireVerdictRequire:
- d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireCallReportFormat, "require"), nil)
+ d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireCallReportFormat, "require"))
diagnostics = append(diagnostics, *d)
case goRequireVerdictAssertFailNow:
- d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireCallReportFormat, testifyCall), nil)
+ d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireCallReportFormat, testifyCall))
diagnostics = append(diagnostics, *d)
case goRequireVerdictNoExit:
@@ -163,7 +163,7 @@ func (checker GoRequire) Check(pass *analysis.Pass, inspector *inspector.Inspect
if v := checker.checkFunc(pass, calledFd, testsDecls, processedFuncs); v != goRequireVerdictNoExit {
caller := analysisutil.NodeString(pass.Fset, ce.Fun)
- d := newDiagnostic(checker.Name(), ce, fmt.Sprintf(goRequireFnReportFormat, caller), nil)
+ d := newDiagnostic(checker.Name(), ce, fmt.Sprintf(goRequireFnReportFormat, caller))
diagnostics = append(diagnostics, *d)
}
}
@@ -198,11 +198,11 @@ func (checker GoRequire) checkHTTPHandlers(pass *analysis.Pass, insp *inspector.
switch checker.checkCall(testifyCall) {
case goRequireVerdictRequire:
- d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireHTTPHandlerReportFormat, "require"), nil)
+ d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireHTTPHandlerReportFormat, "require"))
diagnostics = append(diagnostics, *d)
case goRequireVerdictAssertFailNow:
- d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireHTTPHandlerReportFormat, testifyCall), nil)
+ d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireHTTPHandlerReportFormat, testifyCall))
diagnostics = append(diagnostics, *d)
case goRequireVerdictNoExit:
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_basic_type.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_basic_type.go
index 34dcb475f3..b4bb563219 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_basic_type.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_basic_type.go
@@ -1,23 +1,84 @@
package checkers
import (
- "fmt"
"go/ast"
"go/token"
"go/types"
+ "strconv"
"golang.org/x/tools/go/analysis"
)
func isZero(e ast.Expr) bool { return isIntNumber(e, 0) }
-func isNotZero(e ast.Expr) bool { return !isZero(e) }
-
func isOne(e ast.Expr) bool { return isIntNumber(e, 1) }
-func isIntNumber(e ast.Expr, v int) bool {
+func isAnyZero(e ast.Expr) bool {
+ return isIntNumber(e, 0) || isTypedSignedIntNumber(e, 0) || isTypedUnsignedIntNumber(e, 0)
+}
+
+func isNotAnyZero(e ast.Expr) bool {
+ return !isAnyZero(e)
+}
+
+func isZeroOrSignedZero(e ast.Expr) bool {
+ return isIntNumber(e, 0) || isTypedSignedIntNumber(e, 0)
+}
+
+func isSignedNotZero(pass *analysis.Pass, e ast.Expr) bool {
+ return !isUnsigned(pass, e) && !isZeroOrSignedZero(e)
+}
+
+func isTypedSignedIntNumber(e ast.Expr, v int) bool {
+ return isTypedIntNumber(e, v, "int", "int8", "int16", "int32", "int64")
+}
+
+func isTypedUnsignedIntNumber(e ast.Expr, v int) bool {
+ return isTypedIntNumber(e, v, "uint", "uint8", "uint16", "uint32", "uint64")
+}
+
+func isTypedIntNumber(e ast.Expr, v int, types ...string) bool {
+ ce, ok := e.(*ast.CallExpr)
+ if !ok || len(ce.Args) != 1 {
+ return false
+ }
+
+ fn, ok := ce.Fun.(*ast.Ident)
+ if !ok {
+ return false
+ }
+
+ for _, t := range types {
+ if fn.Name == t {
+ return isIntNumber(ce.Args[0], v)
+ }
+ }
+ return false
+}
+
+func isIntNumber(e ast.Expr, rhs int) bool {
+ lhs, ok := isIntBasicLit(e)
+ return ok && (lhs == rhs)
+}
+
+func isNegativeIntNumber(e ast.Expr) bool {
+ v, ok := isIntBasicLit(e)
+ return ok && v < 0
+}
+
+func isPositiveIntNumber(e ast.Expr) bool {
+ v, ok := isIntBasicLit(e)
+ return ok && v > 0
+}
+
+func isEmptyStringLit(e ast.Expr) bool {
bl, ok := e.(*ast.BasicLit)
- return ok && bl.Kind == token.INT && bl.Value == fmt.Sprintf("%d", v)
+ return ok && bl.Kind == token.STRING && bl.Value == `""`
+}
+
+func isNotEmptyStringLit(e ast.Expr) bool {
+ bl, ok := e.(*ast.BasicLit)
+ return ok && bl.Kind == token.STRING && bl.Value != `""`
}
func isBasicLit(e ast.Expr) bool {
@@ -25,37 +86,97 @@ func isBasicLit(e ast.Expr) bool {
return ok
}
-func isIntBasicLit(e ast.Expr) bool {
+func isIntBasicLit(e ast.Expr) (int, bool) {
+ if un, ok := e.(*ast.UnaryExpr); ok {
+ if un.Op == token.SUB {
+ v, ok := isIntBasicLit(un.X)
+ return -1 * v, ok
+ }
+ }
+
bl, ok := e.(*ast.BasicLit)
- return ok && bl.Kind == token.INT
+ if !ok {
+ return 0, false
+ }
+ if bl.Kind != token.INT {
+ return 0, false
+ }
+
+ v, err := strconv.Atoi(bl.Value)
+ if err != nil {
+ return 0, false
+ }
+ return v, true
}
-func isUntypedConst(p *analysis.Pass, e ast.Expr) bool {
- t := p.TypesInfo.TypeOf(e)
+func isUntypedConst(pass *analysis.Pass, e ast.Expr) bool {
+ return isUnderlying(pass, e, types.IsUntyped)
+}
+
+func isTypedConst(pass *analysis.Pass, e ast.Expr) bool {
+ tt, ok := pass.TypesInfo.Types[e]
+ return ok && tt.IsValue() && tt.Value != nil
+}
+
+func isFloat(pass *analysis.Pass, e ast.Expr) bool {
+ return isUnderlying(pass, e, types.IsFloat)
+}
+
+func isUnsigned(pass *analysis.Pass, e ast.Expr) bool {
+ return isUnderlying(pass, e, types.IsUnsigned)
+}
+
+func isUnderlying(pass *analysis.Pass, e ast.Expr, flag types.BasicInfo) bool {
+ t := pass.TypesInfo.TypeOf(e)
if t == nil {
return false
}
- b, ok := t.(*types.Basic)
- return ok && b.Info()&types.IsUntyped > 0
+ bt, ok := t.Underlying().(*types.Basic)
+ return ok && (bt.Info()&flag > 0)
}
-func isTypedConst(p *analysis.Pass, e ast.Expr) bool {
- tt, ok := p.TypesInfo.Types[e]
- return ok && tt.IsValue() && tt.Value != nil
+func isPointer(pass *analysis.Pass, e ast.Expr) (types.Type, bool) {
+ ptr, ok := pass.TypesInfo.TypeOf(e).(*types.Pointer)
+ if !ok {
+ return nil, false
+ }
+ return ptr.Elem(), true
}
-func isFloat(pass *analysis.Pass, expr ast.Expr) bool {
- t := pass.TypesInfo.TypeOf(expr)
+// isByteArray returns true if the expression is the `[]byte` type itself.
+func isByteArray(e ast.Expr) bool {
+ at, ok := e.(*ast.ArrayType)
+ return ok && isIdentWithName("byte", at.Elt)
+}
+
+// hasBytesType returns true if the expression is of `[]byte` type.
+func hasBytesType(pass *analysis.Pass, e ast.Expr) bool {
+ t := pass.TypesInfo.TypeOf(e)
if t == nil {
return false
}
- bt, ok := t.Underlying().(*types.Basic)
- return ok && (bt.Info()&types.IsFloat > 0)
+ sl, ok := t.(*types.Slice)
+ if !ok {
+ return false
+ }
+
+ el, ok := sl.Elem().(*types.Basic)
+ return ok && el.Kind() == types.Uint8
}
-func isPointer(pass *analysis.Pass, expr ast.Expr) bool {
- _, ok := pass.TypesInfo.TypeOf(expr).(*types.Pointer)
- return ok
+// hasStringType returns true if the expression is of `string` type.
+func hasStringType(pass *analysis.Pass, e ast.Expr) bool {
+ basicType, ok := pass.TypesInfo.TypeOf(e).(*types.Basic)
+ return ok && basicType.Kind() == types.String
+}
+
+// untype returns v from a type(v) expression, or v itself if there is no type conversion.
+func untype(e ast.Expr) ast.Expr {
+ ce, ok := e.(*ast.CallExpr)
+ if !ok || len(ce.Args) != 1 {
+ return e
+ }
+ return ce.Args[0]
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_diagnostic.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_diagnostic.go
index 4ab69c69bb..f12d87aa35 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_diagnostic.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_diagnostic.go
@@ -2,15 +2,37 @@ package checkers
import (
"fmt"
+ "go/ast"
"golang.org/x/tools/go/analysis"
)
+func newRemoveFnAndUseDiagnostic(
+ pass *analysis.Pass,
+ checker string,
+ call *CallMeta,
+ proposedFn string,
+ removedFn string,
+ removedFnPos analysis.Range,
+ removedFnArgs ...ast.Expr,
+) *analysis.Diagnostic {
+ f := proposedFn
+ if call.Fn.IsFmt {
+ f += "f"
+ }
+ msg := fmt.Sprintf("remove unnecessary %s and use %s.%s", removedFn, call.SelectorXStr, f)
+
+ return newDiagnostic(checker, call, msg,
+ newSuggestedFuncRemoving(pass, removedFn, removedFnPos, removedFnArgs...),
+ newSuggestedFuncReplacement(call, proposedFn),
+ )
+}
+
func newUseFunctionDiagnostic(
checker string,
call *CallMeta,
proposedFn string,
- fix *analysis.SuggestedFix,
+ additionalEdits ...analysis.TextEdit,
) *analysis.Diagnostic {
f := proposedFn
if call.Fn.IsFmt {
@@ -18,14 +40,57 @@ func newUseFunctionDiagnostic(
}
msg := fmt.Sprintf("use %s.%s", call.SelectorXStr, f)
- return newDiagnostic(checker, call, msg, fix)
+ return newDiagnostic(checker, call, msg,
+ newSuggestedFuncReplacement(call, proposedFn, additionalEdits...))
+}
+
+func newRemoveLenDiagnostic(
+ pass *analysis.Pass,
+ checker string,
+ call *CallMeta,
+ fnPos analysis.Range,
+ fnArg ast.Expr,
+) *analysis.Diagnostic {
+ return newRemoveFnDiagnostic(pass, checker, call, "len", fnPos, fnArg)
+}
+
+func newRemoveMustCompileDiagnostic(
+ pass *analysis.Pass,
+ checker string,
+ call *CallMeta,
+ fnPos analysis.Range,
+ fnArg ast.Expr,
+) *analysis.Diagnostic {
+ return newRemoveFnDiagnostic(pass, checker, call, "regexp.MustCompile", fnPos, fnArg)
+}
+
+func newRemoveSprintfDiagnostic(
+ pass *analysis.Pass,
+ checker string,
+ call *CallMeta,
+ fnPos analysis.Range,
+ fnArgs []ast.Expr,
+) *analysis.Diagnostic {
+ return newRemoveFnDiagnostic(pass, checker, call, "fmt.Sprintf", fnPos, fnArgs...)
+}
+
+func newRemoveFnDiagnostic(
+ pass *analysis.Pass,
+ checker string,
+ call *CallMeta,
+ fnName string,
+ fnPos analysis.Range,
+ fnArgs ...ast.Expr,
+) *analysis.Diagnostic {
+ return newDiagnostic(checker, call, "remove unnecessary "+fnName,
+ newSuggestedFuncRemoving(pass, fnName, fnPos, fnArgs...))
}
func newDiagnostic(
checker string,
rng analysis.Range,
msg string,
- fix *analysis.SuggestedFix,
+ fixes ...analysis.SuggestedFix,
) *analysis.Diagnostic {
d := analysis.Diagnostic{
Pos: rng.Pos(),
@@ -33,21 +98,39 @@ func newDiagnostic(
Category: checker,
Message: checker + ": " + msg,
}
- if fix != nil {
- d.SuggestedFixes = []analysis.SuggestedFix{*fix}
+ if len(fixes) != 0 {
+ d.SuggestedFixes = fixes
}
return &d
}
+func newSuggestedFuncRemoving(
+ pass *analysis.Pass,
+ fnName string,
+ fnPos analysis.Range,
+ fnArgs ...ast.Expr,
+) analysis.SuggestedFix {
+ return analysis.SuggestedFix{
+ Message: fmt.Sprintf("Remove `%s`", fnName),
+ TextEdits: []analysis.TextEdit{
+ {
+ Pos: fnPos.Pos(),
+ End: fnPos.End(),
+ NewText: formatAsCallArgs(pass, fnArgs...),
+ },
+ },
+ }
+}
+
func newSuggestedFuncReplacement(
call *CallMeta,
proposedFn string,
additionalEdits ...analysis.TextEdit,
-) *analysis.SuggestedFix {
+) analysis.SuggestedFix {
if call.Fn.IsFmt {
proposedFn += "f"
}
- return &analysis.SuggestedFix{
+ return analysis.SuggestedFix{
Message: fmt.Sprintf("Replace `%s` with `%s`", call.Fn.Name, proposedFn),
TextEdits: append([]analysis.TextEdit{
{
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_encoded.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_encoded.go
new file mode 100644
index 0000000000..c366f85635
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_encoded.go
@@ -0,0 +1,56 @@
+package checkers
+
+import (
+ "go/ast"
+ "go/token"
+ "regexp"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+var (
+ wordsRe = regexp.MustCompile(`[A-Z]+(?:[a-z]*|$)|[a-z]+`) // NOTE(a.telyshev): ChatGPT.
+
+ jsonIdentRe = regexp.MustCompile(`json|JSON|Json`)
+ yamlWordRe = regexp.MustCompile(`yaml|YAML|Yaml|^(yml|YML|Yml)$`)
+)
+
+func isJSONStyleExpr(pass *analysis.Pass, e ast.Expr) bool {
+ if isIdentNamedAfterPattern(jsonIdentRe, e) {
+ return hasBytesType(pass, e) || hasStringType(pass, e)
+ }
+
+ if t, ok := pass.TypesInfo.Types[e]; ok && t.Value != nil {
+ return analysisutil.IsJSONLike(t.Value.String())
+ }
+
+ if bl, ok := e.(*ast.BasicLit); ok {
+ return bl.Kind == token.STRING && analysisutil.IsJSONLike(bl.Value)
+ }
+
+ if args, ok := isFmtSprintfCall(pass, e); ok {
+ return isJSONStyleExpr(pass, args[0])
+ }
+
+ return false
+}
+
+func isYAMLStyleExpr(pass *analysis.Pass, e ast.Expr) bool {
+ id, ok := e.(*ast.Ident)
+ return ok && (hasBytesType(pass, e) || hasStringType(pass, e)) && hasWordAfterPattern(id.Name, yamlWordRe)
+}
+
+func hasWordAfterPattern(s string, re *regexp.Regexp) bool {
+ for _, w := range splitIntoWords(s) {
+ if re.MatchString(w) {
+ return true
+ }
+ }
+ return false
+}
+
+func splitIntoWords(s string) []string {
+ return wordsRe.FindAllString(s, -1)
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_error.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_error.go
index 55cd5fd05a..859a39ee87 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_error.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_error.go
@@ -5,8 +5,6 @@ import (
"go/types"
"golang.org/x/tools/go/analysis"
-
- "github.com/Antonboom/testifylint/internal/analysisutil"
)
var (
@@ -20,23 +18,9 @@ func isError(pass *analysis.Pass, expr ast.Expr) bool {
}
func isErrorsIsCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
- return isErrorsPkgFnCall(pass, ce, "Is")
+ return isPkgFnCall(pass, ce, "errors", "Is")
}
func isErrorsAsCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
- return isErrorsPkgFnCall(pass, ce, "As")
-}
-
-func isErrorsPkgFnCall(pass *analysis.Pass, ce *ast.CallExpr, fn string) bool {
- se, ok := ce.Fun.(*ast.SelectorExpr)
- if !ok {
- return false
- }
-
- errorsIsObj := analysisutil.ObjectOf(pass.Pkg, "errors", fn)
- if errorsIsObj == nil {
- return false
- }
-
- return analysisutil.IsObj(pass.TypesInfo, se.Sel, errorsIsObj)
+ return isPkgFnCall(pass, ce, "errors", "As")
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_format.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_format.go
index c8719551c2..d69c42860f 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_format.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_format.go
@@ -3,16 +3,59 @@ package checkers
import (
"bytes"
"go/ast"
+ "strings"
"golang.org/x/tools/go/analysis"
"github.com/Antonboom/testifylint/internal/analysisutil"
)
-// formatAsCallArgs joins a and b and return bytes like `a, b`.
-func formatAsCallArgs(pass *analysis.Pass, a, b ast.Node) []byte {
- return bytes.Join([][]byte{
- analysisutil.NodeBytes(pass.Fset, a),
- analysisutil.NodeBytes(pass.Fset, b),
- }, []byte(", "))
+// formatAsCallArgs joins a, b and c and returns bytes like `a, b, c`.
+func formatAsCallArgs(pass *analysis.Pass, args ...ast.Expr) []byte {
+ if len(args) == 0 {
+ return []byte("")
+ }
+
+ var buf bytes.Buffer
+ for i, arg := range args {
+ buf.Write(analysisutil.NodeBytes(pass.Fset, arg))
+ if i != len(args)-1 {
+ buf.WriteString(", ")
+ }
+ }
+ return buf.Bytes()
+}
+
+func formatWithStringCastForBytes(pass *analysis.Pass, e ast.Expr) []byte {
+ if !hasBytesType(pass, e) {
+ return analysisutil.NodeBytes(pass.Fset, e)
+ }
+
+ if se, ok := isBufferBytesCall(pass, e); ok {
+ return []byte(analysisutil.NodeString(pass.Fset, se) + ".String()")
+ }
+ return []byte("string(" + analysisutil.NodeString(pass.Fset, e) + ")")
+}
+
+func isBufferBytesCall(pass *analysis.Pass, e ast.Expr) (ast.Node, bool) {
+ ce, ok := e.(*ast.CallExpr)
+ if !ok {
+ return nil, false
+ }
+
+ se, ok := ce.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return nil, false
+ }
+
+ if !isIdentWithName("Bytes", se.Sel) {
+ return nil, false
+ }
+ if t := pass.TypesInfo.TypeOf(se.X); t != nil {
+ // NOTE(a.telyshev): This is a hack, because the `bytes` package may not be imported,
+ // and we cannot do a "true" comparison with the `Buffer` object.
+ return se.X, strings.TrimPrefix(t.String(), "*") == "bytes.Buffer"
+ }
+
+ return nil, false
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_interface.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_interface.go
index 0b7f405762..ad39c72d74 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_interface.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_interface.go
@@ -15,18 +15,21 @@ func isEmptyInterface(pass *analysis.Pass, expr ast.Expr) bool {
if !ok {
return false
}
+ return isEmptyInterfaceType(t.Type)
+}
- iface, ok := t.Type.Underlying().(*types.Interface)
+func isEmptyInterfaceType(t types.Type) bool {
+ iface, ok := t.Underlying().(*types.Interface)
return ok && iface.NumMethods() == 0
}
-func implementsTestifySuite(pass *analysis.Pass, rcv ast.Expr) bool {
+func implementsTestifySuite(pass *analysis.Pass, e ast.Expr) bool {
suiteIfaceObj := analysisutil.ObjectOf(pass.Pkg, testify.SuitePkgPath, "TestingSuite")
- return (suiteIfaceObj != nil) && implements(pass, rcv, suiteIfaceObj)
+ return (suiteIfaceObj != nil) && implements(pass, e, suiteIfaceObj)
}
-func implementsTestingT(pass *analysis.Pass, arg ast.Expr) bool {
- return implementsAssertTestingT(pass, arg) || implementsRequireTestingT(pass, arg)
+func implementsTestingT(pass *analysis.Pass, e ast.Expr) bool {
+ return implementsAssertTestingT(pass, e) || implementsRequireTestingT(pass, e)
}
func implementsAssertTestingT(pass *analysis.Pass, e ast.Expr) bool {
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_naming.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_naming.go
index e97c5117b8..1d92e3e810 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_naming.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_naming.go
@@ -5,17 +5,22 @@ import (
"regexp"
)
-func isStructVarNamedAsExpected(pattern *regexp.Regexp, e ast.Expr) bool {
+func isStructVarNamedAfterPattern(pattern *regexp.Regexp, e ast.Expr) bool {
s, ok := e.(*ast.SelectorExpr)
- return ok && isIdentNamedAsExpected(pattern, s.X)
+ return ok && isIdentNamedAfterPattern(pattern, s.X)
}
-func isStructFieldNamedAsExpected(pattern *regexp.Regexp, e ast.Expr) bool {
+func isStructFieldNamedAfterPattern(pattern *regexp.Regexp, e ast.Expr) bool {
s, ok := e.(*ast.SelectorExpr)
- return ok && isIdentNamedAsExpected(pattern, s.Sel)
+ return ok && isIdentNamedAfterPattern(pattern, s.Sel)
}
-func isIdentNamedAsExpected(pattern *regexp.Regexp, e ast.Expr) bool {
+func isIdentNamedAfterPattern(pattern *regexp.Regexp, e ast.Expr) bool {
id, ok := e.(*ast.Ident)
return ok && pattern.MatchString(id.Name)
}
+
+func isIdentWithName(name string, e ast.Expr) bool {
+ id, ok := e.(*ast.Ident)
+ return ok && id.Name == name
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_nil.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_nil.go
index 2707adb465..112fca38e7 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_nil.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_nil.go
@@ -14,6 +14,5 @@ func xorNil(first, second ast.Expr) (ast.Expr, bool) {
}
func isNil(expr ast.Expr) bool {
- ident, ok := expr.(*ast.Ident)
- return ok && ident.Name == "nil"
+ return isIdentWithName("nil", expr)
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_pkg_func.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_pkg_func.go
new file mode 100644
index 0000000000..daf309339c
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_pkg_func.go
@@ -0,0 +1,59 @@
+package checkers
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+func isFmtSprintfCall(pass *analysis.Pass, e ast.Expr) ([]ast.Expr, bool) {
+ ce, ok := e.(*ast.CallExpr)
+ if !ok {
+ return nil, false
+ }
+ return ce.Args, isPkgFnCall(pass, ce, "fmt", "Sprintf")
+}
+
+func isJSONRawMessageCast(pass *analysis.Pass, ce *ast.CallExpr) bool {
+ return isPkgFnCall(pass, ce, "encoding/json", "RawMessage")
+}
+
+func isRegexpMustCompileCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
+ return isPkgFnCall(pass, ce, "regexp", "MustCompile")
+}
+
+func isStringsContainsCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
+ return isPkgFnCall(pass, ce, "strings", "Contains")
+}
+
+func isStringsReplaceCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
+ return isPkgFnCall(pass, ce, "strings", "Replace")
+}
+
+func isStringsReplaceAllCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
+ return isPkgFnCall(pass, ce, "strings", "ReplaceAll")
+}
+
+func isStringsTrimCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
+ return isPkgFnCall(pass, ce, "strings", "Trim")
+}
+
+func isStringsTrimSpaceCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
+ return isPkgFnCall(pass, ce, "strings", "TrimSpace")
+}
+
+func isPkgFnCall(pass *analysis.Pass, ce *ast.CallExpr, pkg, fn string) bool {
+ se, ok := ce.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return false
+ }
+
+ fnObj := analysisutil.ObjectOf(pass.Pkg, pkg, fn)
+ if fnObj == nil {
+ return false
+ }
+
+ return analysisutil.IsObj(pass.TypesInfo, se.Sel, fnObj)
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go
index 47330568c6..c240a61744 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go
@@ -31,17 +31,16 @@ func (checker Len) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnost
a, b := call.Args[0], call.Args[1]
if lenArg, expectedLen, ok := xorLenCall(pass, a, b); ok {
- if expectedLen == b && !isIntBasicLit(expectedLen) {
+ if _, ok := isIntBasicLit(expectedLen); (expectedLen == b) && !ok {
// https://github.com/Antonboom/testifylint/issues/9
return nil
}
return newUseFunctionDiagnostic(checker.Name(), call, proposedFn,
- newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{
+ analysis.TextEdit{
Pos: a.Pos(),
End: b.End(),
NewText: formatAsCallArgs(pass, lenArg, expectedLen),
- }),
- )
+ })
}
case "True":
@@ -50,14 +49,16 @@ func (checker Len) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnost
}
expr := call.Args[0]
- if lenArg, expectedLen, ok := isLenEquality(pass, expr); ok && isIntBasicLit(expectedLen) {
+ if lenArg, expectedLen, ok := isLenEquality(pass, expr); ok {
+ if _, ok := isIntBasicLit(expectedLen); !ok {
+ return nil
+ }
return newUseFunctionDiagnostic(checker.Name(), call, proposedFn,
- newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{
+ analysis.TextEdit{
Pos: expr.Pos(),
End: expr.End(),
NewText: formatAsCallArgs(pass, lenArg, expectedLen),
- }),
- )
+ })
}
}
return nil
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/negative_postive.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/negative_positive.go
similarity index 62%
rename from vendor/github.com/Antonboom/testifylint/internal/checkers/negative_postive.go
rename to vendor/github.com/Antonboom/testifylint/internal/checkers/negative_positive.go
index c3f7175c2f..a61bbdfcb8 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/negative_postive.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/negative_positive.go
@@ -29,6 +29,8 @@ import (
//
// assert.Negative(t, value)
// assert.Positive(t, value)
+//
+// Typed zeros (like `int8(0)`, ..., `uint64(0)`) are also supported.
type NegativePositive struct{}
// NewNegativePositive constructs NegativePositive checker.
@@ -46,14 +48,15 @@ func (checker NegativePositive) checkNegative(pass *analysis.Pass, call *CallMet
newUseNegativeDiagnostic := func(replaceStart, replaceEnd token.Pos, replaceWith ast.Expr) *analysis.Diagnostic {
const proposed = "Negative"
return newUseFunctionDiagnostic(checker.Name(), call, proposed,
- newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+ analysis.TextEdit{
Pos: replaceStart,
End: replaceEnd,
NewText: analysisutil.NodeBytes(pass.Fset, replaceWith),
- }),
- )
+ })
}
+ // NOTE(a.telyshev): We ignore uint asserts, since they make no sense for assert.Negative.
+
switch call.Fn.NameFTrimmed {
case "Less":
if len(call.Args) < 2 {
@@ -61,8 +64,8 @@ func (checker NegativePositive) checkNegative(pass *analysis.Pass, call *CallMet
}
a, b := call.Args[0], call.Args[1]
- if isNotZero(a) && isZero(b) {
- return newUseNegativeDiagnostic(a.Pos(), b.End(), a)
+ if isSignedNotZero(pass, a) && isZeroOrSignedZero(b) {
+ return newUseNegativeDiagnostic(a.Pos(), b.End(), untype(a))
}
case "Greater":
@@ -71,8 +74,8 @@ func (checker NegativePositive) checkNegative(pass *analysis.Pass, call *CallMet
}
a, b := call.Args[0], call.Args[1]
- if isZero(a) && isNotZero(b) {
- return newUseNegativeDiagnostic(a.Pos(), b.End(), b)
+ if isZeroOrSignedZero(a) && isSignedNotZero(pass, b) {
+ return newUseNegativeDiagnostic(a.Pos(), b.End(), untype(b))
}
case "True":
@@ -81,12 +84,12 @@ func (checker NegativePositive) checkNegative(pass *analysis.Pass, call *CallMet
}
expr := call.Args[0]
- a, _, ok1 := isStrictComparisonWith(pass, expr, p(isNotZero), token.LSS, p(isZero)) // a < 0
- _, b, ok2 := isStrictComparisonWith(pass, expr, p(isZero), token.GTR, p(isNotZero)) // 0 > a
+ a, _, ok1 := isStrictComparisonWith(pass, expr, isSignedNotZero, token.LSS, p(isZeroOrSignedZero)) // a < 0
+ _, b, ok2 := isStrictComparisonWith(pass, expr, p(isZeroOrSignedZero), token.GTR, isSignedNotZero) // 0 > a
survivingArg, ok := anyVal([]bool{ok1, ok2}, a, b)
if ok {
- return newUseNegativeDiagnostic(expr.Pos(), expr.End(), survivingArg)
+ return newUseNegativeDiagnostic(expr.Pos(), expr.End(), untype(survivingArg))
}
case "False":
@@ -95,12 +98,12 @@ func (checker NegativePositive) checkNegative(pass *analysis.Pass, call *CallMet
}
expr := call.Args[0]
- a, _, ok1 := isStrictComparisonWith(pass, expr, p(isNotZero), token.GEQ, p(isZero)) // a >= 0
- _, b, ok2 := isStrictComparisonWith(pass, expr, p(isZero), token.LEQ, p(isNotZero)) // 0 <= a
+ a, _, ok1 := isStrictComparisonWith(pass, expr, isSignedNotZero, token.GEQ, p(isZeroOrSignedZero)) // a >= 0
+ _, b, ok2 := isStrictComparisonWith(pass, expr, p(isZeroOrSignedZero), token.LEQ, isSignedNotZero) // 0 <= a
survivingArg, ok := anyVal([]bool{ok1, ok2}, a, b)
if ok {
- return newUseNegativeDiagnostic(expr.Pos(), expr.End(), survivingArg)
+ return newUseNegativeDiagnostic(expr.Pos(), expr.End(), untype(survivingArg))
}
}
return nil
@@ -110,12 +113,11 @@ func (checker NegativePositive) checkPositive(pass *analysis.Pass, call *CallMet
newUsePositiveDiagnostic := func(replaceStart, replaceEnd token.Pos, replaceWith ast.Expr) *analysis.Diagnostic {
const proposed = "Positive"
return newUseFunctionDiagnostic(checker.Name(), call, proposed,
- newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+ analysis.TextEdit{
Pos: replaceStart,
End: replaceEnd,
NewText: analysisutil.NodeBytes(pass.Fset, replaceWith),
- }),
- )
+ })
}
switch call.Fn.NameFTrimmed {
@@ -125,8 +127,8 @@ func (checker NegativePositive) checkPositive(pass *analysis.Pass, call *CallMet
}
a, b := call.Args[0], call.Args[1]
- if isNotZero(a) && isZero(b) {
- return newUsePositiveDiagnostic(a.Pos(), b.End(), a)
+ if isNotAnyZero(a) && isAnyZero(b) {
+ return newUsePositiveDiagnostic(a.Pos(), b.End(), untype(a))
}
case "Less":
@@ -135,8 +137,8 @@ func (checker NegativePositive) checkPositive(pass *analysis.Pass, call *CallMet
}
a, b := call.Args[0], call.Args[1]
- if isZero(a) && isNotZero(b) {
- return newUsePositiveDiagnostic(a.Pos(), b.End(), b)
+ if isAnyZero(a) && isNotAnyZero(b) {
+ return newUsePositiveDiagnostic(a.Pos(), b.End(), untype(b))
}
case "True":
@@ -145,12 +147,12 @@ func (checker NegativePositive) checkPositive(pass *analysis.Pass, call *CallMet
}
expr := call.Args[0]
- a, _, ok1 := isStrictComparisonWith(pass, expr, p(isNotZero), token.GTR, p(isZero)) // a > 0
- _, b, ok2 := isStrictComparisonWith(pass, expr, p(isZero), token.LSS, p(isNotZero)) // 0 < a
+ a, _, ok1 := isStrictComparisonWith(pass, expr, p(isNotAnyZero), token.GTR, p(isAnyZero)) // a > 0
+ _, b, ok2 := isStrictComparisonWith(pass, expr, p(isAnyZero), token.LSS, p(isNotAnyZero)) // 0 < a
survivingArg, ok := anyVal([]bool{ok1, ok2}, a, b)
if ok {
- return newUsePositiveDiagnostic(expr.Pos(), expr.End(), survivingArg)
+ return newUsePositiveDiagnostic(expr.Pos(), expr.End(), untype(survivingArg))
}
case "False":
@@ -159,12 +161,12 @@ func (checker NegativePositive) checkPositive(pass *analysis.Pass, call *CallMet
}
expr := call.Args[0]
- a, _, ok1 := isStrictComparisonWith(pass, expr, p(isNotZero), token.LEQ, p(isZero)) // a <= 0
- _, b, ok2 := isStrictComparisonWith(pass, expr, p(isZero), token.GEQ, p(isNotZero)) // 0 >= a
+ a, _, ok1 := isStrictComparisonWith(pass, expr, p(isNotAnyZero), token.LEQ, p(isAnyZero)) // a <= 0
+ _, b, ok2 := isStrictComparisonWith(pass, expr, p(isAnyZero), token.GEQ, p(isNotAnyZero)) // 0 >= a
survivingArg, ok := anyVal([]bool{ok1, ok2}, a, b)
if ok {
- return newUsePositiveDiagnostic(expr.Pos(), expr.End(), survivingArg)
+ return newUsePositiveDiagnostic(expr.Pos(), expr.End(), untype(survivingArg))
}
}
return nil
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go
index 47c4a7383f..fc1adb7ead 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go
@@ -47,10 +47,9 @@ func (checker NilCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.D
}
return newUseFunctionDiagnostic(checker.Name(), call, proposedFn,
- newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{
+ analysis.TextEdit{
Pos: call.Args[0].Pos(),
End: call.Args[1].End(),
NewText: analysisutil.NodeBytes(pass.Fset, survivingArg),
- }),
- )
+ })
}
diff --git a/vendor/github.com/golangci/gofmt/goimports/LICENSE b/vendor/github.com/Antonboom/testifylint/internal/checkers/printf/LICENSE
similarity index 100%
rename from vendor/github.com/golangci/gofmt/goimports/LICENSE
rename to vendor/github.com/Antonboom/testifylint/internal/checkers/printf/LICENSE
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/printf/doc.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/printf/doc.go
new file mode 100644
index 0000000000..09cd239937
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/printf/doc.go
@@ -0,0 +1,6 @@
+// Package printf is a patched fork of
+// https://github.com/golang/tools/blob/b6235391adb3b7f8bcfc4df81055e8f023de2688/go/analysis/passes/printf/printf.go#L538
+//
+// Initial discussion:
+// https://go-review.googlesource.com/c/tools/+/580555/comments/dfe3ef96_b1b815d5
+package printf
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/printf/printf.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/printf/printf.go
new file mode 100644
index 0000000000..4f6e3f9c44
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/printf/printf.go
@@ -0,0 +1,559 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package printf
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "strconv"
+ "strings"
+ "unicode/utf8"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+// CheckPrintf checks a call to a formatted print routine such as Printf.
+func CheckPrintf(
+ pass *analysis.Pass,
+ call *ast.CallExpr,
+ fnName string,
+ format string,
+ formatIdx int,
+) {
+ firstArg := formatIdx + 1 // Arguments are immediately after format string.
+ if !strings.Contains(format, "%") {
+ if len(call.Args) > firstArg {
+ pass.Reportf(call.Lparen, "%s call has arguments but no formatting directives", fnName)
+ }
+ return
+ }
+ // Hard part: check formats against args.
+ argNum := firstArg
+ maxArgNum := firstArg
+ anyIndex := false
+ for i, w := 0, 0; i < len(format); i += w {
+ w = 1
+ if format[i] != '%' {
+ continue
+ }
+ state := parsePrintfVerb(pass, call, fnName, format[i:], firstArg, argNum)
+ if state == nil {
+ return
+ }
+ w = len(state.format)
+ if !okPrintfArg(pass, call, state) { // One error per format is enough.
+ return
+ }
+ if state.hasIndex {
+ anyIndex = true
+ }
+ if state.verb == 'w' {
+ pass.Reportf(call.Pos(), "%s does not support error-wrapping directive %%w", state.name)
+ return
+ }
+ if len(state.argNums) > 0 {
+ // Continue with the next sequential argument.
+ argNum = state.argNums[len(state.argNums)-1] + 1
+ }
+ for _, n := range state.argNums {
+ if n >= maxArgNum {
+ maxArgNum = n + 1
+ }
+ }
+ }
+ // Dotdotdot is hard.
+ if call.Ellipsis.IsValid() && maxArgNum >= len(call.Args)-1 {
+ return
+ }
+ // If any formats are indexed, extra arguments are ignored.
+ if anyIndex {
+ return
+ }
+ // There should be no leftover arguments.
+ if maxArgNum != len(call.Args) {
+ expect := maxArgNum - firstArg
+ numArgs := len(call.Args) - firstArg
+ pass.ReportRangef(call, "%s call needs %v but has %v", fnName, count(expect, "arg"), count(numArgs, "arg"))
+ }
+}
+
+// formatState holds the parsed representation of a printf directive such as "%3.*[4]d".
+// It is constructed by parsePrintfVerb.
+type formatState struct {
+ verb rune // the format verb: 'd' for "%d"
+ format string // the full format directive from % through verb, "%.3d".
+ name string // Printf, Sprintf etc.
+ flags []byte // the list of # + etc.
+ argNums []int // the successive argument numbers that are consumed, adjusted to refer to actual arg in call
+ firstArg int // Index of first argument after the format in the Printf call.
+ // Used only during parse.
+ pass *analysis.Pass
+ call *ast.CallExpr
+ argNum int // Which argument we're expecting to format now.
+ hasIndex bool // Whether the argument is indexed.
+ indexPending bool // Whether we have an indexed argument that has not resolved.
+ nbytes int // number of bytes of the format string consumed.
+}
+
+// parseFlags accepts any printf flags.
+func (s *formatState) parseFlags() {
+ for s.nbytes < len(s.format) {
+ switch c := s.format[s.nbytes]; c {
+ case '#', '0', '+', '-', ' ':
+ s.flags = append(s.flags, c)
+ s.nbytes++
+ default:
+ return
+ }
+ }
+}
+
+// scanNum advances through a decimal number if present.
+func (s *formatState) scanNum() {
+ for ; s.nbytes < len(s.format); s.nbytes++ {
+ c := s.format[s.nbytes]
+ if c < '0' || '9' < c {
+ return
+ }
+ }
+}
+
+// parseIndex scans an index expression. It returns false if there is a syntax error.
+func (s *formatState) parseIndex() bool {
+ if s.nbytes == len(s.format) || s.format[s.nbytes] != '[' {
+ return true
+ }
+ // Argument index present.
+ s.nbytes++ // skip '['
+ start := s.nbytes
+ s.scanNum()
+ ok := true
+ if s.nbytes == len(s.format) || s.nbytes == start || s.format[s.nbytes] != ']' {
+ ok = false // syntax error is either missing "]" or invalid index.
+ s.nbytes = strings.Index(s.format[start:], "]")
+ if s.nbytes < 0 {
+ s.pass.ReportRangef(s.call, "%s format %s is missing closing ]", s.name, s.format)
+ return false
+ }
+ s.nbytes += start
+ }
+ arg32, err := strconv.ParseInt(s.format[start:s.nbytes], 10, 32)
+ if err != nil || !ok || arg32 <= 0 || arg32 > int64(len(s.call.Args)-s.firstArg) {
+ s.pass.ReportRangef(s.call, "%s format has invalid argument index [%s]", s.name, s.format[start:s.nbytes])
+ return false
+ }
+ s.nbytes++ // skip ']'
+ arg := int(arg32)
+ arg += s.firstArg - 1 // We want to zero-index the actual arguments.
+ s.argNum = arg
+ s.hasIndex = true
+ s.indexPending = true
+ return true
+}
+
+// parseNum scans a width or precision (or *). It returns false if there's a bad index expression.
+func (s *formatState) parseNum() bool {
+ if s.nbytes < len(s.format) && s.format[s.nbytes] == '*' {
+ if s.indexPending { // Absorb it.
+ s.indexPending = false
+ }
+ s.nbytes++
+ s.argNums = append(s.argNums, s.argNum)
+ s.argNum++
+ } else {
+ s.scanNum()
+ }
+ return true
+}
+
+// parsePrecision scans for a precision. It returns false if there's a bad index expression.
+func (s *formatState) parsePrecision() bool {
+ // If there's a period, there may be a precision.
+ if s.nbytes < len(s.format) && s.format[s.nbytes] == '.' {
+ s.flags = append(s.flags, '.') // Treat precision as a flag.
+ s.nbytes++
+ if !s.parseIndex() {
+ return false
+ }
+ if !s.parseNum() {
+ return false
+ }
+ }
+ return true
+}
+
+// isFormatter reports whether t could satisfy fmt.Formatter.
+// The only interface method to look for is "Format(State, rune)".
+func isFormatter(typ types.Type) bool {
+ // If the type is an interface, the value it holds might satisfy fmt.Formatter.
+ if _, ok := typ.Underlying().(*types.Interface); ok {
+ // Don't assume type parameters could be formatters. With the greater
+ // expressiveness of constraint interface syntax we expect more type safety
+ // when using type parameters.
+ if !isTypeParam(typ) {
+ return true
+ }
+ }
+ obj, _, _ := types.LookupFieldOrMethod(typ, false, nil, "Format")
+ fn, ok := obj.(*types.Func)
+ if !ok {
+ return false
+ }
+ sig := fn.Type().(*types.Signature)
+ return sig.Params().Len() == 2 &&
+ sig.Results().Len() == 0 &&
+ isNamedType(sig.Params().At(0).Type(), "fmt", "State") &&
+ types.Identical(sig.Params().At(1).Type(), types.Typ[types.Rune])
+}
+
+// isTypeParam reports whether t is a type parameter (or an alias of one).
+func isTypeParam(t types.Type) bool {
+ _, ok := types.Unalias(t).(*types.TypeParam)
+ return ok
+}
+
+// isNamedType reports whether t is the named type with the given package path
+// and one of the given names.
+// This function avoids allocating the concatenation of "pkg.Name",
+// which is important for the performance of syntax matching.
+func isNamedType(t types.Type, pkgPath string, names ...string) bool {
+ n, ok := types.Unalias(t).(*types.Named)
+ if !ok {
+ return false
+ }
+ obj := n.Obj()
+ if obj == nil || obj.Pkg() == nil || obj.Pkg().Path() != pkgPath {
+ return false
+ }
+ name := obj.Name()
+ for _, n := range names {
+ if name == n {
+ return true
+ }
+ }
+ return false
+}
+
+// parsePrintfVerb looks at the formatting directive that begins the format string
+// and returns a formatState that encodes what the directive wants, without looking
+// at the actual arguments present in the call. The result is nil if there is an error.
+func parsePrintfVerb(pass *analysis.Pass, call *ast.CallExpr, name, format string, firstArg, argNum int) *formatState {
+ state := &formatState{
+ format: format,
+ name: name,
+ flags: make([]byte, 0, 5),
+ argNum: argNum,
+ argNums: make([]int, 0, 1),
+ nbytes: 1, // There's guaranteed to be a percent sign.
+ firstArg: firstArg,
+ pass: pass,
+ call: call,
+ }
+ // There may be flags.
+ state.parseFlags()
+ // There may be an index.
+ if !state.parseIndex() {
+ return nil
+ }
+ // There may be a width.
+ if !state.parseNum() {
+ return nil
+ }
+ // There may be a precision.
+ if !state.parsePrecision() {
+ return nil
+ }
+ // Now a verb, possibly prefixed by an index (which we may already have).
+ if !state.indexPending && !state.parseIndex() {
+ return nil
+ }
+ if state.nbytes == len(state.format) {
+ pass.ReportRangef(call.Fun, "%s format %s is missing verb at end of string", name, state.format)
+ return nil
+ }
+ verb, w := utf8.DecodeRuneInString(state.format[state.nbytes:])
+ state.verb = verb
+ state.nbytes += w
+ if verb != '%' {
+ state.argNums = append(state.argNums, state.argNum)
+ }
+ state.format = state.format[:state.nbytes]
+ return state
+}
+
+// printfArgType encodes the types of expressions a printf verb accepts. It is a bitmask.
+type printfArgType int
+
+const (
+ argBool printfArgType = 1 << iota
+ argInt
+ argRune
+ argString
+ argFloat
+ argComplex
+ argPointer
+ argError
+ anyType printfArgType = ^0
+)
+
+type printVerb struct {
+ verb rune // User may provide verb through Formatter; could be a rune.
+ flags string // known flags are all ASCII
+ typ printfArgType
+}
+
+// Common flag sets for printf verbs.
+const (
+ noFlag = ""
+ numFlag = " -+.0"
+ sharpNumFlag = " -+.0#"
+ allFlags = " -+.0#"
+)
+
+// printVerbs identifies which flags are known to printf for each verb.
+var printVerbs = []printVerb{
+ // '-' is a width modifier, always valid.
+ // '.' is a precision for float, max width for strings.
+ // '+' is required sign for numbers, Go format for %v.
+ // '#' is alternate format for several verbs.
+ // ' ' is spacer for numbers
+ {'%', noFlag, 0},
+ {'b', sharpNumFlag, argInt | argFloat | argComplex | argPointer},
+ {'c', "-", argRune | argInt},
+ {'d', numFlag, argInt | argPointer},
+ {'e', sharpNumFlag, argFloat | argComplex},
+ {'E', sharpNumFlag, argFloat | argComplex},
+ {'f', sharpNumFlag, argFloat | argComplex},
+ {'F', sharpNumFlag, argFloat | argComplex},
+ {'g', sharpNumFlag, argFloat | argComplex},
+ {'G', sharpNumFlag, argFloat | argComplex},
+ {'o', sharpNumFlag, argInt | argPointer},
+ {'O', sharpNumFlag, argInt | argPointer},
+ {'p', "-#", argPointer},
+ {'q', " -+.0#", argRune | argInt | argString},
+ {'s', " -+.0", argString},
+ {'t', "-", argBool},
+ {'T', "-", anyType},
+ {'U', "-#", argRune | argInt},
+ {'v', allFlags, anyType},
+ {'w', allFlags, argError},
+ {'x', sharpNumFlag, argRune | argInt | argString | argPointer | argFloat | argComplex},
+ {'X', sharpNumFlag, argRune | argInt | argString | argPointer | argFloat | argComplex},
+}
+
+// okPrintfArg compares the formatState to the arguments actually present,
+// reporting any discrepancies it can discern. If the final argument is ellipsissed,
+// there's little it can do for that.
+func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, state *formatState) (ok bool) {
+ var v printVerb
+ found := false
+ // Linear scan is fast enough for a small list.
+ for _, v = range printVerbs {
+ if v.verb == state.verb {
+ found = true
+ break
+ }
+ }
+
+ // Could current arg implement fmt.Formatter?
+ // Skip check for the %w verb, which requires an error.
+ formatter := false
+ if v.typ != argError && state.argNum < len(call.Args) {
+ if tv, ok := pass.TypesInfo.Types[call.Args[state.argNum]]; ok {
+ formatter = isFormatter(tv.Type)
+ }
+ }
+
+ if !formatter {
+ if !found {
+ pass.ReportRangef(call, "%s format %s has unknown verb %c", state.name, state.format, state.verb)
+ return false
+ }
+ for _, flag := range state.flags {
+ // TODO: Disable complaint about '0' for Go 1.10. To be fixed properly in 1.11.
+ // See issues 23598 and 23605.
+ if flag == '0' {
+ continue
+ }
+ if !strings.ContainsRune(v.flags, rune(flag)) {
+ pass.ReportRangef(call, "%s format %s has unrecognized flag %c", state.name, state.format, flag)
+ return false
+ }
+ }
+ }
+ // Verb is good. If len(state.argNums)>trueArgs, we have something like %.*s and all
+ // but the final arg must be an integer.
+ trueArgs := 1
+ if state.verb == '%' {
+ trueArgs = 0
+ }
+ nargs := len(state.argNums)
+ for i := 0; i < nargs-trueArgs; i++ {
+ if !argCanBeChecked(pass, call, i, state) {
+ return
+ }
+ // NOTE(a.telyshev): `matchArgType` leads to a lot of "golang.org/x/tools/internal" code.
+ /*
+ argNum := state.argNums[i]
+ arg := call.Args[argNum]
+
+ if reason, ok := matchArgType(pass, argInt, arg); !ok {
+ details := ""
+ if reason != "" {
+ details = " (" + reason + ")"
+ }
+ pass.ReportRangef(call, "%s format %s uses non-int %s%s as argument of *", state.name, state.format, analysisutil.Format(pass.Fset, arg), details)
+ return false
+ }
+ */
+ }
+
+ if state.verb == '%' || formatter {
+ return true
+ }
+ argNum := state.argNums[len(state.argNums)-1]
+ if !argCanBeChecked(pass, call, len(state.argNums)-1, state) {
+ return false
+ }
+ arg := call.Args[argNum]
+ if isFunctionValue(pass, arg) && state.verb != 'p' && state.verb != 'T' {
+ pass.ReportRangef(call, "%s format %s arg %s is a func value, not called", state.name, state.format, analysisutil.NodeString(pass.Fset, arg))
+ return false
+ }
+ // NOTE(a.telyshev): `matchArgType` leads to a lot of "golang.org/x/tools/internal" code.
+ /*
+ if reason, ok := matchArgType(pass, v.typ, arg); !ok {
+ typeString := ""
+ if typ := pass.TypesInfo.Types[arg].Type; typ != nil {
+ typeString = typ.String()
+ }
+ details := ""
+ if reason != "" {
+ details = " (" + reason + ")"
+ }
+ pass.ReportRangef(call, "%s format %s has arg %s of wrong type %s%s", state.name, state.format, analysisutil.Format(pass.Fset, arg), typeString, details)
+ return false
+ }
+ */
+ if v.typ&argString != 0 && v.verb != 'T' && !bytes.Contains(state.flags, []byte{'#'}) {
+ if methodName, ok := recursiveStringer(pass, arg); ok {
+ pass.ReportRangef(call, "%s format %s with arg %s causes recursive %s method call", state.name, state.format, analysisutil.NodeString(pass.Fset, arg), methodName)
+ return false
+ }
+ }
+ return true
+}
+
+// recursiveStringer reports whether the argument e is a potential
+// recursive call to stringer or is an error, such as t and &t in these examples:
+//
+// func (t *T) String() string { printf("%s", t) }
+// func (t T) Error() string { printf("%s", t) }
+// func (t T) String() string { printf("%s", &t) }
+func recursiveStringer(pass *analysis.Pass, e ast.Expr) (string, bool) {
+ typ := pass.TypesInfo.Types[e].Type
+
+ // It's unlikely to be a recursive stringer if it has a Format method.
+ if isFormatter(typ) {
+ return "", false
+ }
+
+ // Does e allow e.String() or e.Error()?
+ strObj, _, _ := types.LookupFieldOrMethod(typ, false, pass.Pkg, "String")
+ strMethod, strOk := strObj.(*types.Func)
+ errObj, _, _ := types.LookupFieldOrMethod(typ, false, pass.Pkg, "Error")
+ errMethod, errOk := errObj.(*types.Func)
+ if !strOk && !errOk {
+ return "", false
+ }
+
+ // inScope returns true if e is in the scope of f.
+ inScope := func(e ast.Expr, f *types.Func) bool {
+ return f.Scope() != nil && f.Scope().Contains(e.Pos())
+ }
+
+ // Is the expression e within the body of that String or Error method?
+ var method *types.Func
+ if strOk && strMethod.Pkg() == pass.Pkg && inScope(e, strMethod) {
+ method = strMethod
+ } else if errOk && errMethod.Pkg() == pass.Pkg && inScope(e, errMethod) {
+ method = errMethod
+ } else {
+ return "", false
+ }
+
+ sig := method.Type().(*types.Signature)
+ if !isStringer(sig) {
+ return "", false
+ }
+
+ // Is it the receiver r, or &r?
+ if u, ok := e.(*ast.UnaryExpr); ok && u.Op == token.AND {
+ e = u.X // strip off & from &r
+ }
+ if id, ok := e.(*ast.Ident); ok {
+ if pass.TypesInfo.Uses[id] == sig.Recv() {
+ return method.FullName(), true
+ }
+ }
+ return "", false
+}
+
+// isStringer reports whether the method signature matches the String() definition in fmt.Stringer.
+func isStringer(sig *types.Signature) bool {
+ return sig.Params().Len() == 0 &&
+ sig.Results().Len() == 1 &&
+ sig.Results().At(0).Type() == types.Typ[types.String]
+}
+
+// isFunctionValue reports whether the expression is a function as opposed to a function call.
+// It is almost always a mistake to print a function value.
+func isFunctionValue(pass *analysis.Pass, e ast.Expr) bool {
+ if typ := pass.TypesInfo.Types[e].Type; typ != nil {
+ // Don't call Underlying: a named func type with a String method is ok.
+ // TODO(adonovan): it would be more precise to check isStringer.
+ _, ok := typ.(*types.Signature)
+ return ok
+ }
+ return false
+}
+
+// argCanBeChecked reports whether the specified argument is statically present;
+// it may be beyond the list of arguments or in a terminal slice... argument, which
+// means we can't see it.
+func argCanBeChecked(pass *analysis.Pass, call *ast.CallExpr, formatArg int, state *formatState) bool {
+ argNum := state.argNums[formatArg]
+ if argNum <= 0 {
+ return false
+ }
+ if argNum < len(call.Args)-1 {
+ return true // Always OK.
+ }
+ if call.Ellipsis.IsValid() {
+ return false // We just can't tell; there could be many more arguments.
+ }
+ if argNum < len(call.Args) {
+ return true
+ }
+ // There are bad indexes in the format or there are fewer arguments than the format needs.
+ // This is the argument number relative to the format: Printf("%s", "hi") will give 1 for the "hi".
+ arg := argNum - state.firstArg + 1 // People think of arguments as 1-indexed.
+ pass.ReportRangef(call, "%s format %s reads arg #%d, but call has %v", state.name, state.format, arg, count(len(call.Args)-state.firstArg, "arg"))
+ return false
+}
+
+// count(n, what) returns "1 what" or "N whats"
+// (assuming the plural of what is whats).
+func count(n int, what string) string {
+ if n == 1 {
+ return "1 " + what
+ }
+ return fmt.Sprintf("%d %ss", n, what)
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/regexp.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/regexp.go
new file mode 100644
index 0000000000..d634b74bd8
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/regexp.go
@@ -0,0 +1,44 @@
+package checkers
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+// Regexp detects situations like
+//
+// assert.Regexp(t, regexp.MustCompile(`\[.*\] DEBUG \(.*TestNew.*\): message`), out)
+// assert.NotRegexp(t, regexp.MustCompile(`\[.*\] TRACE message`), out)
+//
+// and requires
+//
+// assert.Regexp(t, `\[.*\] DEBUG \(.*TestNew.*\): message`, out)
+// assert.NotRegexp(t, `\[.*\] TRACE message`, out)
+type Regexp struct{}
+
+// NewRegexp constructs Regexp checker.
+func NewRegexp() Regexp { return Regexp{} }
+func (Regexp) Name() string { return "regexp" }
+
+func (checker Regexp) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+ switch call.Fn.NameFTrimmed {
+ default:
+ return nil
+ case "Regexp", "NotRegexp":
+ }
+
+ if len(call.Args) < 1 {
+ return nil
+ }
+
+ ce, ok := call.Args[0].(*ast.CallExpr)
+ if !ok || len(ce.Args) != 1 {
+ return nil
+ }
+
+ if isRegexpMustCompileCall(pass, ce) {
+ return newRemoveMustCompileDiagnostic(pass, checker.Name(), call, ce, ce.Args[0])
+ }
+ return nil
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go
index 4303828fd9..e4e30aaf4a 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go
@@ -134,7 +134,7 @@ func (checker RequireError) Check(pass *analysis.Pass, inspector *inspector.Insp
}
diagnostics = append(diagnostics,
- *newDiagnostic(checker.Name(), c.testifyCall, requireErrorReport, nil))
+ *newDiagnostic(checker.Name(), c.testifyCall, requireErrorReport))
}
}
@@ -197,11 +197,10 @@ func findRootIf(stack []ast.Node) *ast.IfStmt {
nearestIf, i := findNearestNodeWithIdx[*ast.IfStmt](stack)
for ; i > 0; i-- {
parent, ok := stack[i-1].(*ast.IfStmt)
- if ok {
- nearestIf = parent
- } else {
+ if !ok {
break
}
+ nearestIf = parent
}
return nearestIf
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_broken_parallel.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_broken_parallel.go
new file mode 100644
index 0000000000..4374c9359b
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_broken_parallel.go
@@ -0,0 +1,89 @@
+package checkers
+
+import (
+ "fmt"
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/ast/inspector"
+
+ "github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+// SuiteBrokenParallel detects unsupported t.Parallel() call in suite tests
+//
+// func (s *MySuite) SetupTest() {
+// s.T().Parallel()
+// }
+//
+// // And other hooks...
+//
+// func (s *MySuite) TestSomething() {
+// s.T().Parallel()
+//
+// for _, tt := range cases {
+// s.Run(tt.name, func() {
+// s.T().Parallel()
+// })
+//
+// s.T().Run(tt.name, func(t *testing.T) {
+// t.Parallel()
+// })
+// }
+// }
+type SuiteBrokenParallel struct{}
+
+// NewSuiteBrokenParallel constructs SuiteBrokenParallel checker.
+func NewSuiteBrokenParallel() SuiteBrokenParallel { return SuiteBrokenParallel{} }
+func (SuiteBrokenParallel) Name() string { return "suite-broken-parallel" }
+
+func (checker SuiteBrokenParallel) Check(pass *analysis.Pass, insp *inspector.Inspector) (diagnostics []analysis.Diagnostic) {
+ const report = "testify v1 does not support suite's parallel tests and subtests"
+
+ insp.WithStack([]ast.Node{(*ast.CallExpr)(nil)}, func(node ast.Node, push bool, stack []ast.Node) bool {
+ if !push {
+ return false
+ }
+ ce := node.(*ast.CallExpr)
+
+ se, ok := ce.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return true
+ }
+ if !isIdentWithName("Parallel", se.Sel) {
+ return true
+ }
+ if !implementsTestingT(pass, se.X) {
+ return true
+ }
+
+ for i := len(stack) - 2; i >= 0; i-- {
+ fd, ok := stack[i].(*ast.FuncDecl)
+ if !ok {
+ continue
+ }
+
+ if !isSuiteMethod(pass, fd) {
+ continue
+ }
+
+ nextLine := pass.Fset.Position(ce.Pos()).Line + 1
+ d := newDiagnostic(checker.Name(), ce, report, analysis.SuggestedFix{
+ Message: fmt.Sprintf("Remove `%s` call", analysisutil.NodeString(pass.Fset, ce)),
+ TextEdits: []analysis.TextEdit{
+ {
+ Pos: ce.Pos(),
+ End: pass.Fset.File(ce.Pos()).LineStart(nextLine),
+ NewText: []byte(""),
+ },
+ },
+ })
+
+ diagnostics = append(diagnostics, *d)
+ return false
+ }
+
+ return true
+ })
+ return diagnostics
+}
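
A minimal suite that would trigger this checker, assuming testify v1's `suite` package; `MySuite` and the assertion are illustrative. The suggested fix removes the `s.T().Parallel()` line up to the start of the next line, as the TextEdit above encodes:

```go
package example_test

import (
	"testing"

	"github.com/stretchr/testify/suite"
)

type MySuite struct {
	suite.Suite
}

func TestMySuite(t *testing.T) {
	suite.Run(t, new(MySuite))
}

func (s *MySuite) TestSomething() {
	// Flagged: testify v1 suites do not support parallel tests or subtests,
	// so the suggested fix deletes this entire line.
	s.T().Parallel()

	s.Equal(42, 42)
}
```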
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go
index 7f3e9c7c81..4fbfbe7e09 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go
@@ -7,7 +7,7 @@ import (
"golang.org/x/tools/go/analysis"
)
-// SuiteDontUsePkg detects situation like
+// SuiteDontUsePkg detects situations like
//
// func (s *MySuite) TestSomething() {
// assert.Equal(s.T(), 42, value)
@@ -60,7 +60,7 @@ func (checker SuiteDontUsePkg) Check(pass *analysis.Pass, call *CallMeta) *analy
}
msg := fmt.Sprintf("use %s.%s", newSelector, call.Fn.Name)
- return newDiagnostic(checker.Name(), call, msg, &analysis.SuggestedFix{
+ return newDiagnostic(checker.Name(), call, msg, analysis.SuggestedFix{
Message: fmt.Sprintf("Replace `%s` with `%s`", call.SelectorXStr, newSelector),
TextEdits: []analysis.TextEdit{
// Replace package function with suite method.
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go
index c16f1ea63c..fdea324fd1 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go
@@ -19,7 +19,7 @@ const (
const DefaultSuiteExtraAssertCallMode = SuiteExtraAssertCallModeRemove
-// SuiteExtraAssertCall detects situation like
+// SuiteExtraAssertCall detects situations like
//
// func (s *MySuite) TestSomething() {
// s.Assert().Equal(42, value)
@@ -61,7 +61,7 @@ func (checker SuiteExtraAssertCall) Check(pass *analysis.Pass, call *CallMeta) *
}
msg := fmt.Sprintf("use an explicit %s.Assert().%s", analysisutil.NodeString(pass.Fset, x), call.Fn.Name)
- return newDiagnostic(checker.Name(), call, msg, &analysis.SuggestedFix{
+ return newDiagnostic(checker.Name(), call, msg, analysis.SuggestedFix{
Message: "Add `Assert()` call",
TextEdits: []analysis.TextEdit{{
Pos: x.End(),
@@ -85,7 +85,7 @@ func (checker SuiteExtraAssertCall) Check(pass *analysis.Pass, call *CallMeta) *
}
msg := fmt.Sprintf("need to simplify the assertion to %s.%s", analysisutil.NodeString(pass.Fset, se.X), call.Fn.Name)
- return newDiagnostic(checker.Name(), call, msg, &analysis.SuggestedFix{
+ return newDiagnostic(checker.Name(), call, msg, analysis.SuggestedFix{
Message: "Remove `Assert()` call",
TextEdits: []analysis.TextEdit{{
Pos: se.Sel.Pos(),
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_subtest_run.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_subtest_run.go
new file mode 100644
index 0000000000..525d5ffd86
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_subtest_run.go
@@ -0,0 +1,60 @@
+package checkers
+
+import (
+ "fmt"
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/ast/inspector"
+
+ "github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+// SuiteSubtestRun detects situations like
+//
+// s.T().Run("subtest", func(t *testing.T) {
+// assert.Equal(t, 42, result)
+// })
+//
+// and requires
+//
+// s.Run("subtest", func() {
+// s.Equal(42, result)
+// })
+type SuiteSubtestRun struct{}
+
+// NewSuiteSubtestRun constructs SuiteSubtestRun checker.
+func NewSuiteSubtestRun() SuiteSubtestRun { return SuiteSubtestRun{} }
+func (SuiteSubtestRun) Name() string { return "suite-subtest-run" }
+
+func (checker SuiteSubtestRun) Check(pass *analysis.Pass, insp *inspector.Inspector) (diagnostics []analysis.Diagnostic) {
+ insp.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, func(node ast.Node) {
+ ce := node.(*ast.CallExpr) // s.T().Run
+
+ se, ok := ce.Fun.(*ast.SelectorExpr) // s.T() + .Run
+ if !ok {
+ return
+ }
+ if !isIdentWithName("Run", se.Sel) {
+ return
+ }
+
+ tCall, ok := se.X.(*ast.CallExpr) // s.T()
+ if !ok {
+ return
+ }
+ tCallSel, ok := tCall.Fun.(*ast.SelectorExpr) // s + .T()
+ if !ok {
+ return
+ }
+ if !isIdentWithName("T", tCallSel.Sel) {
+ return
+ }
+
+ if implementsTestifySuite(pass, tCallSel.X) && implementsTestingT(pass, tCall) {
+ msg := fmt.Sprintf("use %s.Run to run subtest", analysisutil.NodeString(pass.Fset, tCallSel.X))
+ diagnostics = append(diagnostics, *newDiagnostic(checker.Name(), ce, msg))
+ }
+ })
+ return diagnostics
+}
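
A short illustrative suite showing the two subtest styles this checker distinguishes; `DemoSuite` and the values are invented:

```go
package example_test

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/suite"
)

type DemoSuite struct {
	suite.Suite
}

func TestDemoSuite(t *testing.T) {
	suite.Run(t, new(DemoSuite))
}

func (s *DemoSuite) TestSubtests() {
	// Flagged: running the subtest through the raw *testing.T bypasses the
	// suite machinery (SetupSubTest, TearDownSubTest, and friends).
	s.T().Run("raw", func(t *testing.T) {
		assert.Equal(t, 42, 42)
	})

	// Preferred: the checker reports "use s.Run to run subtest".
	s.Run("wrapped", func() {
		s.Equal(42, 42)
	})
}
```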
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go
index 59455290d4..ef8d821321 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go
@@ -50,8 +50,8 @@ func (checker SuiteTHelper) Check(pass *analysis.Pass, inspector *inspector.Insp
return
}
- msg := fmt.Sprintf("suite helper method must start with " + helperCallStr)
- d := newDiagnostic(checker.Name(), fd, msg, &analysis.SuggestedFix{
+ msg := "suite helper method must start with " + helperCallStr
+ d := newDiagnostic(checker.Name(), fd, msg, analysis.SuggestedFix{
Message: fmt.Sprintf("Insert `%s`", helperCallStr),
TextEdits: []analysis.TextEdit{
{
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go
index 6f206d0958..045706e5dc 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go
@@ -10,15 +10,40 @@ import (
// UselessAssert detects useless asserts like
//
-// 1) Asserting of the same variable
-//
+// assert.Contains(t, tt.value, tt.value)
+// assert.ElementsMatch(t, tt.value, tt.value)
// assert.Equal(t, tt.value, tt.value)
-// assert.ElementsMatch(t, users, users)
+// assert.EqualExportedValues(t, tt.value, tt.value)
// ...
+//
// assert.True(t, num > num)
+// assert.True(t, num < num)
+// assert.True(t, num >= num)
+// assert.True(t, num <= num)
+// assert.True(t, num == num)
+// assert.True(t, num != num)
+//
+// assert.False(t, num > num)
+// assert.False(t, num < num)
+// assert.False(t, num >= num)
+// assert.False(t, num <= num)
// assert.False(t, num == num)
+// assert.False(t, num != num)
//
-// 2) Open for contribution...
+// assert.Empty(t, "")
+// assert.False(t, false)
+// assert.Implements(t, (*any)(nil), new(Conn))
+// assert.Negative(t, -42)
+// assert.Nil(t, nil)
+// assert.NoError(t, nil)
+// assert.NotEmpty(t, "value")
+// assert.NotZero(t, 42)
+// assert.NotZero(t, "value")
+// assert.Positive(t, 42)
+// assert.True(t, true)
+// assert.Zero(t, 0)
+// assert.Zero(t, "")
+// assert.Zero(t, nil)
type UselessAssert struct{}
// NewUselessAssert constructs UselessAssert checker.
@@ -26,6 +51,58 @@ func NewUselessAssert() UselessAssert { return UselessAssert{} }
func (UselessAssert) Name() string { return "useless-assert" }
func (checker UselessAssert) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+ if d := checker.checkSameVars(pass, call); d != nil {
+ return d
+ }
+
+ var isMeaningless bool
+ switch call.Fn.NameFTrimmed {
+ case "Empty":
+ isMeaningless = (len(call.Args) >= 1) && isEmptyStringLit(call.Args[0])
+
+ case "False":
+ isMeaningless = (len(call.Args) >= 1) && isUntypedFalse(pass, call.Args[0])
+
+ case "Implements":
+ if len(call.Args) < 2 {
+ return nil
+ }
+
+ elem, ok := isPointer(pass, call.Args[0])
+ isMeaningless = ok && isEmptyInterfaceType(elem)
+
+ case "Negative":
+ isMeaningless = (len(call.Args) >= 1) && isNegativeIntNumber(call.Args[0])
+
+ case "Nil", "NoError":
+ isMeaningless = (len(call.Args) >= 1) && isNil(call.Args[0])
+
+ case "NotEmpty":
+ isMeaningless = (len(call.Args) >= 1) && isNotEmptyStringLit(call.Args[0])
+
+ case "NotZero":
+ isMeaningless = (len(call.Args) >= 1) &&
+ (isNotEmptyStringLit(call.Args[0]) ||
+ isNegativeIntNumber(call.Args[0]) || isPositiveIntNumber(call.Args[0]))
+
+ case "Positive":
+ isMeaningless = (len(call.Args) >= 1) && isPositiveIntNumber(call.Args[0])
+
+ case "True":
+ isMeaningless = (len(call.Args) >= 1) && isUntypedTrue(pass, call.Args[0])
+
+ case "Zero":
+ isMeaningless = (len(call.Args) >= 1) &&
+ (isZero(call.Args[0]) || isEmptyStringLit(call.Args[0]) || isNil(call.Args[0]))
+ }
+
+ if isMeaningless {
+ return newDiagnostic(checker.Name(), call, "meaningless assertion")
+ }
+ return nil
+}
+
+func (checker UselessAssert) checkSameVars(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
var first, second ast.Node
switch call.Fn.NameFTrimmed {
@@ -82,7 +159,7 @@ func (checker UselessAssert) Check(pass *analysis.Pass, call *CallMeta) *analysi
}
if analysisutil.NodeString(pass.Fset, first) == analysisutil.NodeString(pass.Fset, second) {
- return newDiagnostic(checker.Name(), call, "asserting of the same variable", nil)
+ return newDiagnostic(checker.Name(), call, "asserting of the same variable")
}
return nil
}
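
A hedged sketch of assertions the extended checker now labels, alongside the pre-existing same-variable case; the test name and values are illustrative:

```go
package example_test

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestUseless(t *testing.T) {
	value := "hi"

	// Pre-existing case: both sides are the same expression.
	assert.Equal(t, value, value) // "asserting of the same variable"

	// Newly covered cases: constant arguments that can never fail.
	assert.True(t, true)    // "meaningless assertion"
	assert.NoError(t, nil)  // "meaningless assertion"
	assert.NotEmpty(t, "x") // "meaningless assertion"
	assert.Zero(t, 0)       // "meaningless assertion"
}
```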
diff --git a/vendor/github.com/Antonboom/testifylint/internal/config/config.go b/vendor/github.com/Antonboom/testifylint/internal/config/config.go
index dd2eb9598e..23b673428e 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/config/config.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/config/config.go
@@ -21,6 +21,10 @@ func NewDefault() Config {
ExpectedActual: ExpectedActualConfig{
ExpVarPattern: RegexpValue{checkers.DefaultExpectedVarPattern},
},
+ Formatter: FormatterConfig{
+ CheckFormatString: true,
+ RequireFFuncs: false,
+ },
GoRequire: GoRequireConfig{
IgnoreHTTPHandlers: false,
},
@@ -42,6 +46,7 @@ type Config struct {
BoolCompare BoolCompareConfig
ExpectedActual ExpectedActualConfig
+ Formatter FormatterConfig
GoRequire GoRequireConfig
RequireError RequireErrorConfig
SuiteExtraAssertCall SuiteExtraAssertCallConfig
@@ -57,6 +62,12 @@ type ExpectedActualConfig struct {
ExpVarPattern RegexpValue
}
+// FormatterConfig implements configuration of checkers.Formatter.
+type FormatterConfig struct {
+ CheckFormatString bool
+ RequireFFuncs bool
+}
+
// GoRequireConfig implements configuration of checkers.GoRequire.
type GoRequireConfig struct {
IgnoreHTTPHandlers bool
@@ -109,14 +120,32 @@ func BindToFlags(cfg *Config, fs *flag.FlagSet) {
fs.BoolVar(&cfg.DisableAll, "disable-all", false, "disable all checkers")
fs.Var(&cfg.EnabledCheckers, "enable", "comma separated list of enabled checkers (in addition to enabled by default)")
- fs.BoolVar(&cfg.BoolCompare.IgnoreCustomTypes, "bool-compare.ignore-custom-types", false,
+ fs.BoolVar(&cfg.BoolCompare.IgnoreCustomTypes,
+ "bool-compare.ignore-custom-types", false,
"to ignore user defined types (over builtin bool)")
- fs.Var(&cfg.ExpectedActual.ExpVarPattern, "expected-actual.pattern", "regexp for expected variable name")
- fs.BoolVar(&cfg.GoRequire.IgnoreHTTPHandlers, "go-require.ignore-http-handlers", false,
+
+ fs.Var(&cfg.ExpectedActual.ExpVarPattern,
+ "expected-actual.pattern",
+ "regexp for expected variable name")
+
+ fs.BoolVar(&cfg.Formatter.CheckFormatString,
+ "formatter.check-format-string", true,
+ "to enable go vet's printf checks")
+ fs.BoolVar(&cfg.Formatter.RequireFFuncs,
+ "formatter.require-f-funcs", false,
+ "to require f-assertions (e.g. assert.Equalf) if format string is used, even if there are no variable-length variables.")
+
+ fs.BoolVar(&cfg.GoRequire.IgnoreHTTPHandlers,
+ "go-require.ignore-http-handlers", false,
"to ignore HTTP handlers (like http.HandlerFunc)")
- fs.Var(&cfg.RequireError.FnPattern, "require-error.fn-pattern", "regexp for error assertions that should only be analyzed")
+
+ fs.Var(&cfg.RequireError.FnPattern,
+ "require-error.fn-pattern",
+ "regexp for error assertions that should only be analyzed")
+
fs.Var(NewEnumValue(suiteExtraAssertCallModeAsString, &cfg.SuiteExtraAssertCall.Mode),
- "suite-extra-assert-call.mode", "to require or remove extra Assert() call")
+ "suite-extra-assert-call.mode",
+ "to require or remove extra Assert() call")
}
var suiteExtraAssertCallModeAsString = map[string]checkers.SuiteExtraAssertCallMode{
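
The reformatted `BindToFlags` above introduces two formatter flags. As a rough, self-contained sketch of the same flag-binding pattern (not the linter's real entry point, since its config package is internal), using only the standard `flag` package:

```go
package main

import (
	"flag"
	"fmt"
	"os"
)

// formatterConfig mirrors the shape of the new FormatterConfig: two booleans
// exposed under dotted flag names, defaulting to true and false respectively.
type formatterConfig struct {
	CheckFormatString bool
	RequireFFuncs     bool
}

func main() {
	var cfg formatterConfig

	fs := flag.NewFlagSet("testifylint", flag.ExitOnError)
	fs.BoolVar(&cfg.CheckFormatString, "formatter.check-format-string", true,
		"to enable go vet's printf checks")
	fs.BoolVar(&cfg.RequireFFuncs, "formatter.require-f-funcs", false,
		"to require f-assertions (e.g. assert.Equalf) if a format string is used")

	_ = fs.Parse(os.Args[1:])
	fmt.Printf("check-format-string=%v require-f-funcs=%v\n",
		cfg.CheckFormatString, cfg.RequireFFuncs)
}
```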
diff --git a/vendor/github.com/BurntSushi/toml/decode.go b/vendor/github.com/BurntSushi/toml/decode.go
index 7aaf462c94..c05a0b7e56 100644
--- a/vendor/github.com/BurntSushi/toml/decode.go
+++ b/vendor/github.com/BurntSushi/toml/decode.go
@@ -540,12 +540,14 @@ func (md *MetaData) badtype(dst string, data any) error {
func (md *MetaData) parseErr(err error) error {
k := md.context.String()
+ d := string(md.data)
return ParseError{
+ Message: err.Error(),
+ err: err,
LastKey: k,
- Position: md.keyInfo[k].pos,
+ Position: md.keyInfo[k].pos.withCol(d),
Line: md.keyInfo[k].pos.Line,
- err: err,
- input: string(md.data),
+ input: d,
}
}
diff --git a/vendor/github.com/BurntSushi/toml/error.go b/vendor/github.com/BurntSushi/toml/error.go
index b45a3f45f6..1dd5232111 100644
--- a/vendor/github.com/BurntSushi/toml/error.go
+++ b/vendor/github.com/BurntSushi/toml/error.go
@@ -67,21 +67,36 @@ type ParseError struct {
// Position of an error.
type Position struct {
Line int // Line number, starting at 1.
+ Col int // Error column, starting at 1.
Start int // Start of error, as byte offset starting at 0.
- Len int // Lenght in bytes.
+ Len int // Length of the error in bytes.

}
-func (pe ParseError) Error() string {
- msg := pe.Message
- if msg == "" { // Error from errorf()
- msg = pe.err.Error()
+func (p Position) withCol(tomlFile string) Position {
+ var (
+ pos int
+ lines = strings.Split(tomlFile, "\n")
+ )
+ for i := range lines {
+ ll := len(lines[i]) + 1 // +1 for the removed newline
+ if pos+ll >= p.Start {
+ p.Col = p.Start - pos + 1
+ if p.Col < 1 { // Should never happen, but just in case.
+ p.Col = 1
+ }
+ break
+ }
+ pos += ll
}
+ return p
+}
+func (pe ParseError) Error() string {
if pe.LastKey == "" {
- return fmt.Sprintf("toml: line %d: %s", pe.Position.Line, msg)
+ return fmt.Sprintf("toml: line %d: %s", pe.Position.Line, pe.Message)
}
return fmt.Sprintf("toml: line %d (last key %q): %s",
- pe.Position.Line, pe.LastKey, msg)
+ pe.Position.Line, pe.LastKey, pe.Message)
}
// ErrorWithPosition returns the error with detailed location context.
@@ -92,26 +107,19 @@ func (pe ParseError) ErrorWithPosition() string {
return pe.Error()
}
+ // TODO: don't show control characters as literals? This may not show up
+ // well everywhere.
+
var (
lines = strings.Split(pe.input, "\n")
- col = pe.column(lines)
b = new(strings.Builder)
)
-
- msg := pe.Message
- if msg == "" {
- msg = pe.err.Error()
- }
-
- // TODO: don't show control characters as literals? This may not show up
- // well everywhere.
-
if pe.Position.Len == 1 {
fmt.Fprintf(b, "toml: error: %s\n\nAt line %d, column %d:\n\n",
- msg, pe.Position.Line, col+1)
+ pe.Message, pe.Position.Line, pe.Position.Col)
} else {
fmt.Fprintf(b, "toml: error: %s\n\nAt line %d, column %d-%d:\n\n",
- msg, pe.Position.Line, col, col+pe.Position.Len)
+ pe.Message, pe.Position.Line, pe.Position.Col, pe.Position.Col+pe.Position.Len-1)
}
if pe.Position.Line > 2 {
fmt.Fprintf(b, "% 7d | %s\n", pe.Position.Line-2, expandTab(lines[pe.Position.Line-3]))
@@ -129,7 +137,7 @@ func (pe ParseError) ErrorWithPosition() string {
diff := len(expanded) - len(lines[pe.Position.Line-1])
fmt.Fprintf(b, "% 7d | %s\n", pe.Position.Line, expanded)
- fmt.Fprintf(b, "% 10s%s%s\n", "", strings.Repeat(" ", col+diff), strings.Repeat("^", pe.Position.Len))
+ fmt.Fprintf(b, "% 10s%s%s\n", "", strings.Repeat(" ", pe.Position.Col-1+diff), strings.Repeat("^", pe.Position.Len))
return b.String()
}
@@ -151,23 +159,6 @@ func (pe ParseError) ErrorWithUsage() string {
return m
}
-func (pe ParseError) column(lines []string) int {
- var pos, col int
- for i := range lines {
- ll := len(lines[i]) + 1 // +1 for the removed newline
- if pos+ll >= pe.Position.Start {
- col = pe.Position.Start - pos
- if col < 0 { // Should never happen, but just in case.
- col = 0
- }
- break
- }
- pos += ll
- }
-
- return col
-}
-
func expandTab(s string) string {
var (
b strings.Builder
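
The new `Position.withCol` walks the input line by line until the error's byte offset falls inside a line, converting it to a 1-based column. A standalone sketch of that walk, with an invented input document:

```go
package main

import (
	"fmt"
	"strings"
)

// colOf mirrors the withCol walk: given the full input and a 0-based byte
// offset, it returns the 1-based column of that offset on its line.
func colOf(input string, start int) int {
	pos := 0
	for _, line := range strings.Split(input, "\n") {
		ll := len(line) + 1 // +1 for the newline removed by Split
		if pos+ll >= start {
			col := start - pos + 1
			if col < 1 { // should never happen, but mirror the guard
				col = 1
			}
			return col
		}
		pos += ll
	}
	return 1
}

func main() {
	doc := "a = 1\nbb == 2\n"
	// Byte offset 9 is the first '=' of "==" on line 2, i.e. column 4.
	fmt.Println(colOf(doc, 9)) // 4
}
```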
diff --git a/vendor/github.com/BurntSushi/toml/lex.go b/vendor/github.com/BurntSushi/toml/lex.go
index a1016d98a8..6878d9d698 100644
--- a/vendor/github.com/BurntSushi/toml/lex.go
+++ b/vendor/github.com/BurntSushi/toml/lex.go
@@ -492,6 +492,9 @@ func lexKeyEnd(lx *lexer) stateFn {
lx.emit(itemKeyEnd)
return lexSkip(lx, lexValue)
default:
+ if r == '\n' {
+ return lx.errorPrevLine(fmt.Errorf("expected '.' or '=', but got %q instead", r))
+ }
return lx.errorf("expected '.' or '=', but got %q instead", r)
}
}
@@ -560,6 +563,9 @@ func lexValue(lx *lexer) stateFn {
if r == eof {
return lx.errorf("unexpected EOF; expected value")
}
+ if r == '\n' {
+ return lx.errorPrevLine(fmt.Errorf("expected value but found %q instead", r))
+ }
return lx.errorf("expected value but found %q instead", r)
}
diff --git a/vendor/github.com/BurntSushi/toml/parse.go b/vendor/github.com/BurntSushi/toml/parse.go
index 11ac3108be..3f2c090c86 100644
--- a/vendor/github.com/BurntSushi/toml/parse.go
+++ b/vendor/github.com/BurntSushi/toml/parse.go
@@ -65,7 +65,7 @@ func parse(data string) (p *parser, err error) {
if i := strings.IndexRune(data[:ex], 0); i > -1 {
return nil, ParseError{
Message: "files cannot contain NULL bytes; probably using UTF-16; TOML files must be UTF-8",
- Position: Position{Line: 1, Start: i, Len: 1},
+ Position: Position{Line: 1, Col: 1, Start: i, Len: 1},
Line: 1,
input: data,
}
@@ -92,8 +92,9 @@ func parse(data string) (p *parser, err error) {
func (p *parser) panicErr(it item, err error) {
panic(ParseError{
+ Message: err.Error(),
err: err,
- Position: it.pos,
+ Position: it.pos.withCol(p.lx.input),
Line: it.pos.Len,
LastKey: p.current(),
})
@@ -102,7 +103,7 @@ func (p *parser) panicErr(it item, err error) {
func (p *parser) panicItemf(it item, format string, v ...any) {
panic(ParseError{
Message: fmt.Sprintf(format, v...),
- Position: it.pos,
+ Position: it.pos.withCol(p.lx.input),
Line: it.pos.Len,
LastKey: p.current(),
})
@@ -111,7 +112,7 @@ func (p *parser) panicItemf(it item, format string, v ...any) {
func (p *parser) panicf(format string, v ...any) {
panic(ParseError{
Message: fmt.Sprintf(format, v...),
- Position: p.pos,
+ Position: p.pos.withCol(p.lx.input),
Line: p.pos.Line,
LastKey: p.current(),
})
@@ -123,10 +124,11 @@ func (p *parser) next() item {
if it.typ == itemError {
if it.err != nil {
panic(ParseError{
- Position: it.pos,
+ Message: it.err.Error(),
+ err: it.err,
+ Position: it.pos.withCol(p.lx.input),
Line: it.pos.Line,
LastKey: p.current(),
- err: it.err,
})
}
diff --git a/vendor/github.com/Crocmagnon/fatcontext/pkg/analyzer/analyzer.go b/vendor/github.com/Crocmagnon/fatcontext/pkg/analyzer/analyzer.go
index 7bb8fd4282..42c06f4e78 100644
--- a/vendor/github.com/Crocmagnon/fatcontext/pkg/analyzer/analyzer.go
+++ b/vendor/github.com/Crocmagnon/fatcontext/pkg/analyzer/analyzer.go
@@ -3,30 +3,57 @@ package analyzer
import (
"bytes"
"errors"
+ "flag"
"fmt"
"go/ast"
"go/printer"
"go/token"
+ "slices"
+
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
)
-var Analyzer = &analysis.Analyzer{
- Name: "fatcontext",
- Doc: "detects nested contexts in loops",
- Run: run,
- Requires: []*analysis.Analyzer{inspect.Analyzer},
+const FlagCheckStructPointers = "check-struct-pointers"
+
+func NewAnalyzer() *analysis.Analyzer {
+ r := &runner{}
+
+ flags := flag.NewFlagSet("fatcontext", flag.ExitOnError)
+ flags.BoolVar(&r.DetectInStructPointers, FlagCheckStructPointers, false,
+ "set to true to detect potential fat contexts in struct pointers")
+
+ return &analysis.Analyzer{
+ Name: "fatcontext",
+ Doc: "detects nested contexts in loops and function literals",
+ Run: r.run,
+ Flags: *flags,
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ }
}
var errUnknown = errors.New("unknown node type")
-func run(pass *analysis.Pass) (interface{}, error) {
+const (
+ categoryInLoop = "nested context in loop"
+ categoryInFuncLit = "nested context in function literal"
+ categoryInStructPointer = "potential nested context in struct pointer"
+ categoryUnsupported = "unsupported nested context type"
+)
+
+type runner struct {
+ DetectInStructPointers bool
+}
+
+func (r *runner) run(pass *analysis.Pass) (interface{}, error) {
inspctr := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
nodeFilter := []ast.Node{
(*ast.ForStmt)(nil),
(*ast.RangeStmt)(nil),
+ (*ast.FuncLit)(nil),
+ (*ast.FuncDecl)(nil),
}
inspctr.Preorder(nodeFilter, func(node ast.Node) {
@@ -35,79 +62,241 @@ func run(pass *analysis.Pass) (interface{}, error) {
return
}
- for _, stmt := range body.List {
- assignStmt, ok := stmt.(*ast.AssignStmt)
- if !ok {
- continue
- }
-
- t := pass.TypesInfo.TypeOf(assignStmt.Lhs[0])
- if t == nil {
- continue
- }
+ if body == nil {
+ return
+ }
- if t.String() != "context.Context" {
- continue
- }
+ assignStmt := findNestedContext(pass, node, body.List)
+ if assignStmt == nil {
+ return
+ }
- if assignStmt.Tok == token.DEFINE {
- break
- }
+ category := getCategory(pass, node, assignStmt)
- suggestedStmt := ast.AssignStmt{
- Lhs: assignStmt.Lhs,
- TokPos: assignStmt.TokPos,
- Tok: token.DEFINE,
- Rhs: assignStmt.Rhs,
- }
- suggested, err := render(pass.Fset, &suggestedStmt)
-
- var fixes []analysis.SuggestedFix
- if err == nil {
- fixes = append(fixes, analysis.SuggestedFix{
- Message: "replace `=` with `:=`",
- TextEdits: []analysis.TextEdit{
- {
- Pos: assignStmt.Pos(),
- End: assignStmt.End(),
- NewText: []byte(suggested),
- },
- },
- })
- }
+ if r.shouldIgnoreReport(category) {
+ return
+ }
- pass.Report(analysis.Diagnostic{
- Pos: assignStmt.Pos(),
- Message: "nested context in loop",
- SuggestedFixes: fixes,
- })
+ fixes := r.getSuggestedFixes(pass, assignStmt, category)
- break
- }
+ pass.Report(analysis.Diagnostic{
+ Pos: assignStmt.Pos(),
+ Message: category,
+ SuggestedFixes: fixes,
+ })
})
return nil, nil
}
-func getBody(node ast.Node) (*ast.BlockStmt, error) {
- forStmt, ok := node.(*ast.ForStmt)
- if ok {
- return forStmt.Body, nil
+func (r *runner) shouldIgnoreReport(category string) bool {
+ return category == categoryInStructPointer && !r.DetectInStructPointers
+}
+
+func (r *runner) getSuggestedFixes(pass *analysis.Pass, assignStmt *ast.AssignStmt, category string) []analysis.SuggestedFix {
+ switch category {
+ case categoryInStructPointer, categoryUnsupported:
+ return nil
+ }
+
+ suggestedStmt := ast.AssignStmt{
+ Lhs: assignStmt.Lhs,
+ TokPos: assignStmt.TokPos,
+ Tok: token.DEFINE,
+ Rhs: assignStmt.Rhs,
+ }
+ suggested, err := render(pass.Fset, &suggestedStmt)
+
+ var fixes []analysis.SuggestedFix
+ if err == nil {
+ fixes = append(fixes, analysis.SuggestedFix{
+ Message: "replace `=` with `:=`",
+ TextEdits: []analysis.TextEdit{
+ {
+ Pos: assignStmt.Pos(),
+ End: assignStmt.End(),
+ NewText: suggested,
+ },
+ },
+ })
+ }
+
+ return fixes
+}
+
+func getCategory(pass *analysis.Pass, node ast.Node, assignStmt *ast.AssignStmt) string {
+ switch node.(type) {
+ case *ast.ForStmt, *ast.RangeStmt:
+ return categoryInLoop
}
- rangeStmt, ok := node.(*ast.RangeStmt)
- if ok {
- return rangeStmt.Body, nil
+ if isPointer(pass, assignStmt.Lhs[0]) {
+ return categoryInStructPointer
+ }
+
+ switch node.(type) {
+ case *ast.FuncLit, *ast.FuncDecl:
+ return categoryInFuncLit
+ default:
+ return categoryUnsupported
+ }
+}
+
+func getBody(node ast.Node) (*ast.BlockStmt, error) {
+ switch typedNode := node.(type) {
+ case *ast.ForStmt:
+ return typedNode.Body, nil
+ case *ast.RangeStmt:
+ return typedNode.Body, nil
+ case *ast.FuncLit:
+ return typedNode.Body, nil
+ case *ast.FuncDecl:
+ return typedNode.Body, nil
}
return nil, errUnknown
}
+func findNestedContext(pass *analysis.Pass, node ast.Node, stmts []ast.Stmt) *ast.AssignStmt {
+ for _, stmt := range stmts {
+ // Recurse if necessary
+ switch typedStmt := stmt.(type) {
+ case *ast.BlockStmt:
+ if found := findNestedContext(pass, node, typedStmt.List); found != nil {
+ return found
+ }
+ case *ast.IfStmt:
+ if found := findNestedContext(pass, node, typedStmt.Body.List); found != nil {
+ return found
+ }
+ case *ast.SwitchStmt:
+ if found := findNestedContext(pass, node, typedStmt.Body.List); found != nil {
+ return found
+ }
+ case *ast.CaseClause:
+ if found := findNestedContext(pass, node, typedStmt.Body); found != nil {
+ return found
+ }
+ case *ast.SelectStmt:
+ if found := findNestedContext(pass, node, typedStmt.Body.List); found != nil {
+ return found
+ }
+ case *ast.CommClause:
+ if found := findNestedContext(pass, node, typedStmt.Body); found != nil {
+ return found
+ }
+ }
+
+ // Actually check for nested context
+ assignStmt, ok := stmt.(*ast.AssignStmt)
+ if !ok {
+ continue
+ }
+
+ t := pass.TypesInfo.TypeOf(assignStmt.Lhs[0])
+ if t == nil {
+ continue
+ }
+
+ if t.String() != "context.Context" {
+ continue
+ }
+
+ if assignStmt.Tok == token.DEFINE {
+ continue
+ }
+
+ // Ignore [context.Background] & [context.TODO].
+ if isContextFunction(assignStmt.Rhs[0], "Background", "TODO") {
+ continue
+ }
+
+ if isPointer(pass, assignStmt.Lhs[0]) {
+ return assignStmt
+ }
+
+ // allow assignment to non-pointer children of values defined within the loop
+ if isWithinLoop(assignStmt.Lhs[0], node, pass) {
+ continue
+ }
+
+ return assignStmt
+ }
+
+ return nil
+}
+
// render returns the pretty-print of the given node
-func render(fset *token.FileSet, x interface{}) (string, error) {
+func render(fset *token.FileSet, x interface{}) ([]byte, error) {
var buf bytes.Buffer
if err := printer.Fprint(&buf, fset, x); err != nil {
- return "", fmt.Errorf("printing node: %w", err)
+ return nil, fmt.Errorf("printing node: %w", err)
+ }
+ return buf.Bytes(), nil
+}
+
+func isContextFunction(exp ast.Expr, fnName ...string) bool {
+ call, ok := exp.(*ast.CallExpr)
+ if !ok {
+ return false
}
- return buf.String(), nil
+
+ selector, ok := call.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return false
+ }
+
+ ident, ok := selector.X.(*ast.Ident)
+ if !ok {
+ return false
+ }
+
+ return ident.Name == "context" && slices.Contains(fnName, selector.Sel.Name)
+}
+
+func isWithinLoop(exp ast.Expr, node ast.Node, pass *analysis.Pass) bool {
+ lhs := getRootIdent(pass, exp)
+ if lhs == nil {
+ return false
+ }
+
+ obj := pass.TypesInfo.ObjectOf(lhs)
+ if obj == nil {
+ return false
+ }
+
+ scope := obj.Parent()
+ if scope == nil {
+ return false
+ }
+
+ return scope.Pos() >= node.Pos() && scope.End() <= node.End()
+}
+
+func getRootIdent(pass *analysis.Pass, node ast.Node) *ast.Ident {
+ for {
+ switch n := node.(type) {
+ case *ast.Ident:
+ return n
+ case *ast.IndexExpr:
+ node = n.X
+ case *ast.SelectorExpr:
+ if sel, ok := pass.TypesInfo.Selections[n]; ok && sel.Indirect() {
+ return nil // indirected (pointer) roots don't imply a (safe) copy
+ }
+ node = n.X
+ default:
+ return nil
+ }
+ }
+}
+
+func isPointer(pass *analysis.Pass, exp ast.Node) bool {
+ switch n := exp.(type) {
+ case *ast.SelectorExpr:
+ sel, ok := pass.TypesInfo.Selections[n]
+ return ok && sel.Indirect()
+ }
+
+ return false
}
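
For reference, a hedged example of the core pattern the rewritten analyzer reports and the shape of its suggested fix; the loop bodies and the `ctxKey` type are invented:

```go
package main

import (
	"context"
	"fmt"
)

type ctxKey string

func main() {
	ctx := context.Background()

	// Flagged ("nested context in loop"): each iteration wraps the context
	// produced by the previous one, so the chain keeps growing.
	for i := 0; i < 3; i++ {
		ctx = context.WithValue(ctx, ctxKey("i"), i)
	}

	// Shape of the suggested fix: := shadows the outer context, so every
	// iteration derives from the same parent instead of the previous pass.
	for i := 0; i < 3; i++ {
		ctx := context.WithValue(ctx, ctxKey("i"), i)
		fmt.Println(ctx.Value(ctxKey("i")))
	}
}
```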
diff --git a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer/analyzer.go b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer/analyzer.go
index b490f1c640..5be31eb686 100644
--- a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer/analyzer.go
+++ b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v3/analyzer/analyzer.go
@@ -100,12 +100,9 @@ func (a *analyzer) newVisitor(pass *analysis.Pass) func(n ast.Node, push bool, s
if len(lit.Elts) == 0 {
if ret, ok := stackParentIsReturn(stack); ok {
- if returnContainsNonNilError(pass, ret) {
+ if returnContainsNonNilError(pass, ret, n) {
// it is okay to return uninitialized structure in case struct's direct parent is
// a return statement containing non-nil error
- //
- // we're unable to check if returned error is custom, but at least we're able to
- // cover str [error] type.
return true
}
}
@@ -153,7 +150,7 @@ func getCompositeLitRelatedComments(stack []ast.Node, cm ast.CommentMap) []*ast.
}
func getStructType(pass *analysis.Pass, lit *ast.CompositeLit) (*types.Struct, *TypeInfo, bool) {
- switch typ := pass.TypesInfo.TypeOf(lit).(type) {
+ switch typ := types.Unalias(pass.TypesInfo.TypeOf(lit)).(type) {
case *types.Named: // named type
if structTyp, ok := typ.Underlying().(*types.Struct); ok {
pkg := typ.Obj().Pkg()
@@ -184,17 +181,47 @@ func getStructType(pass *analysis.Pass, lit *ast.CompositeLit) (*types.Struct, *
func stackParentIsReturn(stack []ast.Node) (*ast.ReturnStmt, bool) {
// it is safe to skip boundary check, since stack always has at least one element
- // - whole file.
- ret, ok := stack[len(stack)-2].(*ast.ReturnStmt)
+ // we also have no reason to check the first element, since it is always a file
+ for i := len(stack) - 2; i > 0; i-- {
+ switch st := stack[i].(type) {
+ case *ast.ReturnStmt:
+ return st, true
- return ret, ok
+ case *ast.UnaryExpr:
+ // in case we're dealing with pointers - it is still viable to check pointer's
+ // parent for return statement
+ continue
+
+ default:
+ return nil, false
+ }
+ }
+
+ return nil, false
}
-func returnContainsNonNilError(pass *analysis.Pass, ret *ast.ReturnStmt) bool {
+// errorIface is a type that represents [error] interface and all types will be
+// compared against.
+var errorIface = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
+
+func returnContainsNonNilError(pass *analysis.Pass, ret *ast.ReturnStmt, except ast.Node) bool {
// errors are mostly located at the end of return statement, so we're starting
// from the end.
for i := len(ret.Results) - 1; i >= 0; i-- {
- if pass.TypesInfo.TypeOf(ret.Results[i]).String() == "error" {
+ ri := ret.Results[i]
+
+ // skip current node
+ if ri == except {
+ continue
+ }
+
+ if un, ok := ri.(*ast.UnaryExpr); ok {
+ if un.X == except {
+ continue
+ }
+ }
+
+ if types.Implements(pass.TypesInfo.TypeOf(ri), errorIface) {
return true
}
}
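
A small sketch of the return-statement exemption these hunks refine: an empty composite literal (here behind `&`, i.e. under a `*ast.UnaryExpr`) is tolerated when the same `return` also yields a value implementing `error`. `Config` and `load` are illustrative names:

```go
package example

import "fmt"

type Config struct {
	Host string
	Port int
}

// load returns an empty Config alongside a non-nil error; the analyzer
// accepts the zero-value literal here because the error explains it.
func load(path string) (*Config, error) {
	if path == "" {
		// &Config{} sits under a *ast.UnaryExpr, the case the updated
		// stackParentIsReturn now walks through to reach the return.
		return &Config{}, fmt.Errorf("no config path given")
	}
	return &Config{Host: "localhost", Port: 8080}, nil
}
```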
diff --git a/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md b/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md
index f12626423a..f95a504fe7 100644
--- a/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md
+++ b/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md
@@ -1,5 +1,33 @@
# Changelog
+## 3.3.0 (2024-08-27)
+
+### Added
+
+- #238: Add LessThanEqual and GreaterThanEqual functions (thanks @grosser)
+- #213: nil version equality checking (thanks @KnutZuidema)
+
+### Changed
+
+- #241: Simplify StrictNewVersion parsing (thanks @grosser)
+- Testing support up through Go 1.23
+- Minimum version set to 1.21 as this is what's tested now
+- Fuzz testing now supports caching
+
+## 3.2.1 (2023-04-10)
+
+### Changed
+
+- #198: Improved testing around pre-release names
+- #200: Improved code scanning with addition of CodeQL
+- #201: Testing now includes Go 1.20. Go 1.17 has been dropped
+- #202: Migrated Fuzz testing to Go built-in Fuzzing. CI runs daily
+- #203: Docs updated for security details
+
+### Fixed
+
+- #199: Fixed issue with range transformations
+
## 3.2.0 (2022-11-28)
### Added
diff --git a/vendor/github.com/Masterminds/semver/v3/Makefile b/vendor/github.com/Masterminds/semver/v3/Makefile
index 0e7b5c7138..9ca87a2c79 100644
--- a/vendor/github.com/Masterminds/semver/v3/Makefile
+++ b/vendor/github.com/Masterminds/semver/v3/Makefile
@@ -19,6 +19,7 @@ test-cover:
.PHONY: fuzz
fuzz:
@echo "==> Running Fuzz Tests"
+ go env GOCACHE
go test -fuzz=FuzzNewVersion -fuzztime=15s .
go test -fuzz=FuzzStrictNewVersion -fuzztime=15s .
go test -fuzz=FuzzNewConstraint -fuzztime=15s .
@@ -27,4 +28,4 @@ $(GOLANGCI_LINT):
# Install golangci-lint. The configuration for it is in the .golangci.yml
# file in the root of the repository
echo ${GOPATH}
- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s -- -b $(GOPATH)/bin v1.17.1
+ curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s -- -b $(GOPATH)/bin v1.56.2
diff --git a/vendor/github.com/Masterminds/semver/v3/README.md b/vendor/github.com/Masterminds/semver/v3/README.md
index eab8cac3b7..ed56936084 100644
--- a/vendor/github.com/Masterminds/semver/v3/README.md
+++ b/vendor/github.com/Masterminds/semver/v3/README.md
@@ -13,12 +13,9 @@ Active](https://masterminds.github.io/stability/active.svg)](https://masterminds
[](https://pkg.go.dev/github.com/Masterminds/semver/v3)
[](https://goreportcard.com/report/github.com/Masterminds/semver)
-If you are looking for a command line tool for version comparisons please see
-[vert](https://github.com/Masterminds/vert) which uses this library.
-
## Package Versions
-Note, import `github.com/github.com/Masterminds/semver/v3` to use the latest version.
+Note, import `github.com/Masterminds/semver/v3` to use the latest version.
There are three major versions of the `semver` package.
@@ -80,12 +77,12 @@ There are two methods for comparing versions. One uses comparison methods on
differences to notes between these two methods of comparison.
1. When two versions are compared using functions such as `Compare`, `LessThan`,
- and others it will follow the specification and always include prereleases
+ and others it will follow the specification and always include pre-releases
within the comparison. It will provide an answer that is valid with the
comparison section of the spec at https://semver.org/#spec-item-11
2. When constraint checking is used for checks or validation it will follow a
different set of rules that are common for ranges with tools like npm/js
- and Rust/Cargo. This includes considering prereleases to be invalid if the
+ and Rust/Cargo. This includes considering pre-releases to be invalid if the
ranges does not include one. If you want to have it include pre-releases a
simple solution is to include `-0` in your range.
3. Constraint ranges can have some complex rules including the shorthand use of
@@ -113,7 +110,7 @@ v, err := semver.NewVersion("1.3")
if err != nil {
// Handle version not being parsable.
}
-// Check if the version meets the constraints. The a variable will be true.
+// Check if the version meets the constraints. The variable a will be true.
a := c.Check(v)
```
@@ -137,20 +134,20 @@ The basic comparisons are:
### Working With Prerelease Versions
Pre-releases, for those not familiar with them, are used for software releases
-prior to stable or generally available releases. Examples of prereleases include
-development, alpha, beta, and release candidate releases. A prerelease may be
+prior to stable or generally available releases. Examples of pre-releases include
+development, alpha, beta, and release candidate releases. A pre-release may be
a version such as `1.2.3-beta.1` while the stable release would be `1.2.3`. In the
-order of precedence, prereleases come before their associated releases. In this
+order of precedence, pre-releases come before their associated releases. In this
example `1.2.3-beta.1 < 1.2.3`.
-According to the Semantic Version specification prereleases may not be
+According to the Semantic Version specification, pre-releases may not be
API compliant with their release counterpart. It says,
> A pre-release version indicates that the version is unstable and might not satisfy the intended compatibility requirements as denoted by its associated normal version.
-SemVer comparisons using constraints without a prerelease comparator will skip
-prerelease versions. For example, `>=1.2.3` will skip prereleases when looking
-at a list of releases while `>=1.2.3-0` will evaluate and find prereleases.
+SemVer's comparisons using constraints without a pre-release comparator will skip
+pre-release versions. For example, `>=1.2.3` will skip pre-releases when looking
+at a list of releases while `>=1.2.3-0` will evaluate and find pre-releases.
The reason for the `0` as a pre-release version in the example comparison is
because pre-releases can only contain ASCII alphanumerics and hyphens (along with
@@ -171,6 +168,9 @@ These look like:
* `1.2 - 1.4.5` which is equivalent to `>= 1.2 <= 1.4.5`
* `2.3.4 - 4.5` which is equivalent to `>= 2.3.4 <= 4.5`
+Note that `1.2-1.4.5` without whitespace is parsed completely differently; it's
+parsed as a single constraint `1.2.0` with _prerelease_ `1.4.5`.
+
### Wildcards In Comparisons
The `x`, `X`, and `*` characters can be used as a wildcard character. This works
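
A brief usage sketch of the pre-release constraint behavior described above, using this module's public API; the version and range values are arbitrary:

```go
package main

import (
	"fmt"

	"github.com/Masterminds/semver/v3"
)

func main() {
	v := semver.MustParse("1.3.0-beta.1")

	// Without a pre-release comparator, pre-release versions are skipped.
	plain, _ := semver.NewConstraint(">=1.2.3")
	fmt.Println(plain.Check(v)) // false

	// Appending -0 opts pre-releases into the range.
	withPre, _ := semver.NewConstraint(">=1.2.3-0")
	fmt.Println(withPre.Check(v)) // true
}
```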
diff --git a/vendor/github.com/Masterminds/semver/v3/version.go b/vendor/github.com/Masterminds/semver/v3/version.go
index 7c4bed3347..ff499fb664 100644
--- a/vendor/github.com/Masterminds/semver/v3/version.go
+++ b/vendor/github.com/Masterminds/semver/v3/version.go
@@ -83,22 +83,23 @@ func StrictNewVersion(v string) (*Version, error) {
original: v,
}
- // check for prerelease or build metadata
- var extra []string
- if strings.ContainsAny(parts[2], "-+") {
- // Start with the build metadata first as it needs to be on the right
- extra = strings.SplitN(parts[2], "+", 2)
- if len(extra) > 1 {
- // build metadata found
- sv.metadata = extra[1]
- parts[2] = extra[0]
+ // Extract build metadata
+ if strings.Contains(parts[2], "+") {
+ extra := strings.SplitN(parts[2], "+", 2)
+ sv.metadata = extra[1]
+ parts[2] = extra[0]
+ if err := validateMetadata(sv.metadata); err != nil {
+ return nil, err
}
+ }
- extra = strings.SplitN(parts[2], "-", 2)
- if len(extra) > 1 {
- // prerelease found
- sv.pre = extra[1]
- parts[2] = extra[0]
+ // Extract build prerelease
+ if strings.Contains(parts[2], "-") {
+ extra := strings.SplitN(parts[2], "-", 2)
+ sv.pre = extra[1]
+ parts[2] = extra[0]
+ if err := validatePrerelease(sv.pre); err != nil {
+ return nil, err
}
}
@@ -114,7 +115,7 @@ func StrictNewVersion(v string) (*Version, error) {
}
}
- // Extract the major, minor, and patch elements onto the returned Version
+ // Extract major, minor, and patch
var err error
sv.major, err = strconv.ParseUint(parts[0], 10, 64)
if err != nil {
@@ -131,23 +132,6 @@ func StrictNewVersion(v string) (*Version, error) {
return nil, err
}
- // No prerelease or build metadata found so returning now as a fastpath.
- if sv.pre == "" && sv.metadata == "" {
- return sv, nil
- }
-
- if sv.pre != "" {
- if err = validatePrerelease(sv.pre); err != nil {
- return nil, err
- }
- }
-
- if sv.metadata != "" {
- if err = validateMetadata(sv.metadata); err != nil {
- return nil, err
- }
- }
-
return sv, nil
}
@@ -381,15 +365,31 @@ func (v *Version) LessThan(o *Version) bool {
return v.Compare(o) < 0
}
+// LessThanEqual tests if one version is less or equal than another one.
+func (v *Version) LessThanEqual(o *Version) bool {
+ return v.Compare(o) <= 0
+}
+
// GreaterThan tests if one version is greater than another one.
func (v *Version) GreaterThan(o *Version) bool {
return v.Compare(o) > 0
}
+// GreaterThanEqual tests if one version is greater or equal than another one.
+func (v *Version) GreaterThanEqual(o *Version) bool {
+ return v.Compare(o) >= 0
+}
+
// Equal tests if two versions are equal to each other.
// Note, versions can be equal with different metadata since metadata
// is not considered part of the comparable version.
func (v *Version) Equal(o *Version) bool {
+ if v == o {
+ return true
+ }
+ if v == nil || o == nil {
+ return false
+ }
return v.Compare(o) == 0
}
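
A short sketch exercising the helpers added here (`LessThanEqual`, `GreaterThanEqual`) and the nil-aware `Equal`; the versions are arbitrary:

```go
package main

import (
	"fmt"

	"github.com/Masterminds/semver/v3"
)

func main() {
	a := semver.MustParse("1.2.3")
	b := semver.MustParse("1.3.0")

	fmt.Println(a.LessThanEqual(b))    // true
	fmt.Println(b.GreaterThanEqual(a)) // true
	fmt.Println(a.LessThanEqual(a))    // true

	// Equal is now nil-safe: two nil versions compare equal,
	// nil against non-nil does not.
	var none *semver.Version
	fmt.Println(none.Equal(nil)) // true
	fmt.Println(a.Equal(none))   // false
}
```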
diff --git a/vendor/github.com/OpenPeeDeeP/depguard/v2/README.md b/vendor/github.com/OpenPeeDeeP/depguard/v2/README.md
index 2ccfa22c59..0bf603b2bc 100644
--- a/vendor/github.com/OpenPeeDeeP/depguard/v2/README.md
+++ b/vendor/github.com/OpenPeeDeeP/depguard/v2/README.md
@@ -7,7 +7,7 @@ allow specific packages within a repository.
## Install
```bash
-go install github.com/OpenPeeDeeP/depguard@latest
+go install github.com/OpenPeeDeeP/depguard/cmd/depguard@latest
```
## Config
@@ -49,7 +49,7 @@ the linter's output.
- `files` - list of file globs that will match this list of settings to compare against
- `allow` - list of allowed packages
- `deny` - map of packages that are not allowed where the value is a suggestion
-= `listMode` - the mode to use for package matching
+- `listMode` - the mode to use for package matching
Files are matched using [Globs](https://github.com/gobwas/glob). If the files
list is empty, then all files will match that list. Prefixing a file
@@ -153,11 +153,29 @@ would be allowed.
```yaml
Main:
deny:
- - github.com/OpenPeeDeeP/depguard$
+ github.com/OpenPeeDeeP/depguard$: Please use v2
```
-## Golangci-lint
+## golangci-lint
This linter was built with
-[Golangci-lint](https://github.com/golangci/golangci-lint) in mind. It is compatible
-and read their docs to see how to implement all their linters, including this one.
+[golangci-lint](https://github.com/golangci/golangci-lint) in mind; read the [linters docs](https://golangci-lint.run/usage/linters/#depguard) to see how to configure all their linters, including this one.
+
+The config is similar to the YAML depguard config documented above, however due to [golangci-lint limitation](https://github.com/golangci/golangci-lint/pull/4227) the `deny` value must be provided as a list, with `pkg` and `desc` keys (otherwise a [panic](https://github.com/OpenPeeDeeP/depguard/issues/74) may occur):
+
+```yaml
+# golangci-lint config
+linters-settings:
+ depguard:
+ rules:
+ prevent_unmaintained_packages:
+ list-mode: lax # allow unless explicitly denied
+ files:
+ - $all
+ - "!$test"
+ allow:
+ - $gostd
+ deny:
+ - pkg: io/ioutil
+ desc: "replaced by io and os packages since Go 1.16: https://tip.golang.org/doc/go1.16#ioutil"
+```
diff --git a/vendor/github.com/OpenPeeDeeP/depguard/v2/depguard.go b/vendor/github.com/OpenPeeDeeP/depguard/v2/depguard.go
index 2729091e8a..af07b9bb6f 100644
--- a/vendor/github.com/OpenPeeDeeP/depguard/v2/depguard.go
+++ b/vendor/github.com/OpenPeeDeeP/depguard/v2/depguard.go
@@ -47,12 +47,12 @@ func (ua *UncompiledAnalyzer) Compile() error {
return nil
}
-func (settings LinterSettings) run(pass *analysis.Pass) (interface{}, error) {
- s, err := settings.compile()
+func (s LinterSettings) run(pass *analysis.Pass) (interface{}, error) {
+ settings, err := s.compile()
if err != nil {
return nil, err
}
- return s.run(pass)
+ return settings.run(pass)
}
func newAnalyzer(run func(*analysis.Pass) (interface{}, error)) *analysis.Analyzer {
diff --git a/vendor/github.com/OpenPeeDeeP/depguard/v2/settings.go b/vendor/github.com/OpenPeeDeeP/depguard/v2/settings.go
index 311cacc889..5bc74f8d07 100644
--- a/vendor/github.com/OpenPeeDeeP/depguard/v2/settings.go
+++ b/vendor/github.com/OpenPeeDeeP/depguard/v2/settings.go
@@ -202,9 +202,9 @@ func (l LinterSettings) compile() (linterSettings, error) {
return li, nil
}
-func (ls linterSettings) whichLists(fileName string) []*list {
+func (s linterSettings) whichLists(fileName string) []*list {
var matches []*list
- for _, l := range ls {
+ for _, l := range s {
if l.fileMatch(fileName) {
matches = append(matches, l)
}
@@ -236,5 +236,13 @@ func strInPrefixList(str string, prefixList []string) (bool, int) {
if ioc[len(ioc)-1] == '$' {
return str == ioc[:len(ioc)-1], idx
}
- return strings.HasPrefix(str, prefixList[idx]), idx
+
+ // There are no sep chars in ioc so it is a GOROOT import that is being matched to the import (str) (see $gostd expander)
+ // AND the import contains a period which GOROOT cannot have. This eliminates the go.evil.me/pkg scenario
+ // BUT should still allow /os/exec and ./os/exec imports which are very uncommon
+ if !strings.ContainsAny(ioc, "./") && strings.ContainsRune(str, '.') {
+ return false, idx
+ }
+
+ return strings.HasPrefix(str, ioc), idx
}
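
A standalone approximation of the refined prefix rule in `strInPrefixList`: an entry without `.` or `/` separators is treated as a GOROOT package, and since GOROOT import paths never contain a dot, dotted module paths can no longer match it by prefix. `matchesPrefix` and the sample paths are illustrative:

```go
package main

import (
	"fmt"
	"strings"
)

// matchesPrefix mirrors the refined rule: a "$"-terminated entry requires an
// exact match, a separator-free entry is treated as a GOROOT package, and
// GOROOT packages never contain a dot, so dotted module paths cannot match.
func matchesPrefix(imp, entry string) bool {
	if strings.HasSuffix(entry, "$") {
		return imp == strings.TrimSuffix(entry, "$")
	}
	if !strings.ContainsAny(entry, "./") && strings.ContainsRune(imp, '.') {
		return false
	}
	return strings.HasPrefix(imp, entry)
}

func main() {
	fmt.Println(matchesPrefix("os/exec", "os"))          // true
	fmt.Println(matchesPrefix("go.evil.me/pkg", "go"))   // false (new guard)
	fmt.Println(matchesPrefix("go.evil.me/pkg", "go.e")) // true (dotted entry)
}
```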
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/ecdh/ecdh.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/ecdh/ecdh.go
index c895bad6bb..db8fb163b6 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/ecdh/ecdh.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/ecdh/ecdh.go
@@ -163,13 +163,9 @@ func buildKey(pub *PublicKey, zb []byte, curveOID, fingerprint []byte, stripLead
if _, err := param.Write([]byte("Anonymous Sender ")); err != nil {
return nil, err
}
- // For v5 keys, the 20 leftmost octets of the fingerprint are used.
- if _, err := param.Write(fingerprint[:20]); err != nil {
+ if _, err := param.Write(fingerprint[:]); err != nil {
return nil, err
}
- if param.Len()-len(curveOID) != 45 {
- return nil, errors.New("ecdh: malformed KDF Param")
- }
// MB = Hash ( 00 || 00 || 00 || 01 || ZB || Param );
h := pub.KDF.Hash.New()
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/errors/errors.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/errors/errors.go
index 8d6969c0bf..0eb3937b39 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/errors/errors.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/errors/errors.go
@@ -3,12 +3,24 @@
// license that can be found in the LICENSE file.
// Package errors contains common error types for the OpenPGP packages.
-package errors // import "github.com/ProtonMail/go-crypto/v2/openpgp/errors"
+package errors // import "github.com/ProtonMail/go-crypto/openpgp/errors"
import (
"strconv"
)
+var (
+ // ErrDecryptSessionKeyParsing is a generic error message for parsing errors in decrypted data
+ // to reduce the risk of oracle attacks.
+ ErrDecryptSessionKeyParsing = DecryptWithSessionKeyError("parsing error")
+ // ErrAEADTagVerification is returned if one of the tag verifications in SEIPDv2 fails
+ ErrAEADTagVerification error = DecryptWithSessionKeyError("AEAD tag verification failed")
+ // ErrMDCHashMismatch is returned when the MDC hash of decrypted data does not match.
+ ErrMDCHashMismatch error = SignatureError("MDC hash mismatch")
+ // ErrMDCMissing is returned when the expected MDC packet is missing.
+ ErrMDCMissing error = SignatureError("MDC packet not found")
+)
+
// A StructuralError is returned when OpenPGP data is found to be syntactically
// invalid.
type StructuralError string
@@ -17,6 +29,34 @@ func (s StructuralError) Error() string {
return "openpgp: invalid data: " + string(s)
}
+// A DecryptWithSessionKeyError is returned when a failure occurs when reading from symmetrically decrypted data or
+// an authentication tag verification fails.
+// Such an error indicates that the supplied session key is likely wrong or the data got corrupted.
+type DecryptWithSessionKeyError string
+
+func (s DecryptWithSessionKeyError) Error() string {
+ return "openpgp: decryption with session key failed: " + string(s)
+}
+
+// HandleSensitiveParsingError handles parsing errors when reading data from potentially decrypted data.
+// The function makes parsing errors generic to reduce the risk of oracle attacks in SEIPDv1.
+func HandleSensitiveParsingError(err error, decrypted bool) error {
+ if !decrypted {
+ // Data was not encrypted so we return the inner error.
+ return err
+ }
+ // The data is read from a stream that decrypts using a session key;
+ // therefore, we need to handle parsing errors appropriately.
+ // This is essential to mitigate the risk of oracle attacks.
+ if decError, ok := err.(*DecryptWithSessionKeyError); ok {
+ return decError
+ }
+ if decError, ok := err.(DecryptWithSessionKeyError); ok {
+ return decError
+ }
+ return ErrDecryptSessionKeyParsing
+}
+
// UnsupportedError indicates that, although the OpenPGP data is valid, it
// makes use of currently unimplemented features.
type UnsupportedError string
@@ -41,9 +81,6 @@ func (b SignatureError) Error() string {
return "openpgp: invalid signature: " + string(b)
}
-var ErrMDCHashMismatch error = SignatureError("MDC hash mismatch")
-var ErrMDCMissing error = SignatureError("MDC packet not found")
-
type signatureExpiredError int
func (se signatureExpiredError) Error() string {
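
A hedged sketch of how a caller might route parse failures through the new `HandleSensitiveParsingError`, so errors originating from session-key-decrypted data collapse into the generic error; `parseBody` and its arguments are invented stand-ins, not this library's actual reader code:

```go
package example

import (
	"io"

	"github.com/ProtonMail/go-crypto/openpgp/errors"
)

// parseBody is a stand-in for a packet parser reading from r. When r is a
// stream produced by session-key decryption, any parsing failure must be
// made generic so it cannot act as a decryption oracle.
func parseBody(r io.Reader, fromDecryptedStream bool) error {
	buf := make([]byte, 16)
	if _, err := io.ReadFull(r, buf); err != nil {
		// For decrypted input this returns errors.ErrDecryptSessionKeyParsing
		// (or passes through a DecryptWithSessionKeyError); otherwise the
		// original error is returned unchanged.
		return errors.HandleSensitiveParsingError(err, fromDecryptedStream)
	}
	return nil
}
```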
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/internal/ecc/curve_info.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/internal/ecc/curve_info.go
index 97f891ffc0..0da2d0d852 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/internal/ecc/curve_info.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/internal/ecc/curve_info.go
@@ -10,6 +10,8 @@ import (
"github.com/ProtonMail/go-crypto/openpgp/internal/encoding"
)
+const Curve25519GenName = "Curve25519"
+
type CurveInfo struct {
GenName string
Oid *encoding.OID
@@ -43,7 +45,7 @@ var Curves = []CurveInfo{
},
{
// Curve25519
- GenName: "Curve25519",
+ GenName: Curve25519GenName,
Oid: encoding.NewOID([]byte{0x2B, 0x06, 0x01, 0x04, 0x01, 0x97, 0x55, 0x01, 0x05, 0x01}),
Curve: NewCurve25519(),
},
@@ -55,7 +57,7 @@ var Curves = []CurveInfo{
},
{
// Ed25519
- GenName: "Curve25519",
+ GenName: Curve25519GenName,
Oid: encoding.NewOID([]byte{0x2B, 0x06, 0x01, 0x04, 0x01, 0xDA, 0x47, 0x0F, 0x01}),
Curve: NewEd25519(),
},
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/internal/ecc/ed25519.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/internal/ecc/ed25519.go
index 54a08a8a38..5a4c3a8596 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/internal/ecc/ed25519.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/internal/ecc/ed25519.go
@@ -2,6 +2,7 @@
package ecc
import (
+ "bytes"
"crypto/subtle"
"io"
@@ -90,7 +91,14 @@ func (c *ed25519) GenerateEdDSA(rand io.Reader) (pub, priv []byte, err error) {
}
func getEd25519Sk(publicKey, privateKey []byte) ed25519lib.PrivateKey {
- return append(privateKey, publicKey...)
+ privateKeyCap, privateKeyLen, publicKeyLen := cap(privateKey), len(privateKey), len(publicKey)
+
+ if privateKeyCap >= privateKeyLen+publicKeyLen &&
+ bytes.Equal(privateKey[privateKeyLen:privateKeyLen+publicKeyLen], publicKey) {
+ return privateKey[:privateKeyLen+publicKeyLen]
+ }
+
+ return append(privateKey[:privateKeyLen:privateKeyLen], publicKey...)
}
func (c *ed25519) Sign(publicKey, privateKey, message []byte) (sig []byte, err error) {
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/internal/ecc/ed448.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/internal/ecc/ed448.go
index 18cd80434b..b6edda7480 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/internal/ecc/ed448.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/internal/ecc/ed448.go
@@ -2,6 +2,7 @@
package ecc
import (
+ "bytes"
"crypto/subtle"
"io"
@@ -84,7 +85,14 @@ func (c *ed448) GenerateEdDSA(rand io.Reader) (pub, priv []byte, err error) {
}
func getEd448Sk(publicKey, privateKey []byte) ed448lib.PrivateKey {
- return append(privateKey, publicKey...)
+ privateKeyCap, privateKeyLen, publicKeyLen := cap(privateKey), len(privateKey), len(publicKey)
+
+ if privateKeyCap >= privateKeyLen+publicKeyLen &&
+ bytes.Equal(privateKey[privateKeyLen:privateKeyLen+publicKeyLen], publicKey) {
+ return privateKey[:privateKeyLen+publicKeyLen]
+ }
+
+ return append(privateKey[:privateKeyLen:privateKeyLen], publicKey...)
}
func (c *ed448) Sign(publicKey, privateKey, message []byte) (sig []byte, err error) {
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/key_generation.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/key_generation.go
index a40e45beee..77213f66be 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/key_generation.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/key_generation.go
@@ -41,7 +41,9 @@ func NewEntity(name, comment, email string, config *packet.Config) (*Entity, err
}
primary := packet.NewSignerPrivateKey(creationTime, primaryPrivRaw)
if config.V6() {
- primary.UpgradeToV6()
+ if err := primary.UpgradeToV6(); err != nil {
+ return nil, err
+ }
}
e := &Entity{
@@ -89,13 +91,15 @@ func (t *Entity) AddUserId(name, comment, email string, config *packet.Config) e
}
func writeKeyProperties(selfSignature *packet.Signature, creationTime time.Time, keyLifetimeSecs uint32, config *packet.Config) error {
+ advertiseAead := config.AEAD() != nil
+
selfSignature.CreationTime = creationTime
selfSignature.KeyLifetimeSecs = &keyLifetimeSecs
selfSignature.FlagsValid = true
selfSignature.FlagSign = true
selfSignature.FlagCertify = true
selfSignature.SEIPDv1 = true // true by default, see 5.8 vs. 5.14
- selfSignature.SEIPDv2 = config.AEAD() != nil
+ selfSignature.SEIPDv2 = advertiseAead
// Set the PreferredHash for the SelfSignature from the packet.Config.
// If it is not the must-implement algorithm from rfc4880bis, append that.
@@ -124,16 +128,19 @@ func writeKeyProperties(selfSignature *packet.Signature, creationTime time.Time,
selfSignature.PreferredCompression = append(selfSignature.PreferredCompression, uint8(config.Compression()))
}
- // And for DefaultMode.
- modes := []uint8{uint8(config.AEAD().Mode())}
- if config.AEAD().Mode() != packet.AEADModeOCB {
- modes = append(modes, uint8(packet.AEADModeOCB))
- }
+ if advertiseAead {
+ // Get the preferred AEAD mode from the packet.Config.
+ // If it is not the must-implement algorithm from rfc9580, append that.
+ modes := []uint8{uint8(config.AEAD().Mode())}
+ if config.AEAD().Mode() != packet.AEADModeOCB {
+ modes = append(modes, uint8(packet.AEADModeOCB))
+ }
- // For preferred (AES256, GCM), we'll generate (AES256, GCM), (AES256, OCB), (AES128, GCM), (AES128, OCB)
- for _, cipher := range selfSignature.PreferredSymmetric {
- for _, mode := range modes {
- selfSignature.PreferredCipherSuites = append(selfSignature.PreferredCipherSuites, [2]uint8{cipher, mode})
+ // For preferred (AES256, GCM), we'll generate (AES256, GCM), (AES256, OCB), (AES128, GCM), (AES128, OCB)
+ for _, cipher := range selfSignature.PreferredSymmetric {
+ for _, mode := range modes {
+ selfSignature.PreferredCipherSuites = append(selfSignature.PreferredCipherSuites, [2]uint8{cipher, mode})
+ }
}
}
return nil
@@ -187,7 +194,9 @@ func (e *Entity) AddSigningSubkey(config *packet.Config) error {
sub := packet.NewSignerPrivateKey(creationTime, subPrivRaw)
sub.IsSubkey = true
if config.V6() {
- sub.UpgradeToV6()
+ if err := sub.UpgradeToV6(); err != nil {
+ return err
+ }
}
subkey := Subkey{
@@ -232,7 +241,9 @@ func (e *Entity) addEncryptionSubkey(config *packet.Config, creationTime time.Ti
sub := packet.NewDecrypterPrivateKey(creationTime, subPrivRaw)
sub.IsSubkey = true
if config.V6() {
- sub.UpgradeToV6()
+ if err := sub.UpgradeToV6(); err != nil {
+ return err
+ }
}
subkey := Subkey{
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/aead_crypter.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/aead_crypter.go
index 2d1aeed65c..2eecd062f5 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/aead_crypter.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/aead_crypter.go
@@ -147,7 +147,7 @@ func (ar *aeadDecrypter) openChunk(data []byte) ([]byte, error) {
nonce := ar.computeNextNonce()
plainChunk, err := ar.aead.Open(nil, nonce, chunk, adata)
if err != nil {
- return nil, err
+ return nil, errors.ErrAEADTagVerification
}
ar.bytesProcessed += len(plainChunk)
if err = ar.aeadCrypter.incrementIndex(); err != nil {
@@ -172,9 +172,8 @@ func (ar *aeadDecrypter) validateFinalTag(tag []byte) error {
// ... and total number of encrypted octets
adata = append(adata, amountBytes...)
nonce := ar.computeNextNonce()
- _, err := ar.aead.Open(nil, nonce, tag, adata)
- if err != nil {
- return err
+ if _, err := ar.aead.Open(nil, nonce, tag, adata); err != nil {
+ return errors.ErrAEADTagVerification
}
return nil
}
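
A note on the error change above: callers that previously only saw a generic AEAD open failure can now detect authentication failures explicitly. A minimal sketch, assuming ErrAEADTagVerification is exported as a comparable sentinel by the openpgp/errors package (as this hunk suggests); the helper name is hypothetical.

```go
package main

import (
	stderrors "errors"
	"fmt"

	pgperrors "github.com/ProtonMail/go-crypto/openpgp/errors"
)

// isAEADAuthFailure reports whether err stems from a failed AEAD tag check
// (a tampered or truncated chunk), rather than some other parsing problem.
func isAEADAuthFailure(err error) bool {
	return stderrors.Is(err, pgperrors.ErrAEADTagVerification)
}

func main() {
	// Simulate the error the decrypter now returns on a bad chunk tag.
	fmt.Println(isAEADAuthFailure(pgperrors.ErrAEADTagVerification)) // true
	fmt.Println(isAEADAuthFailure(stderrors.New("unrelated")))       // false
}
```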
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/compressed.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/compressed.go
index 334de286b3..0bcb38caca 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/compressed.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/compressed.go
@@ -9,7 +9,6 @@ import (
"compress/flate"
"compress/zlib"
"io"
- "io/ioutil"
"strconv"
"github.com/ProtonMail/go-crypto/openpgp/errors"
@@ -91,7 +90,7 @@ func (c *Compressed) parse(r io.Reader) error {
}
c.Body = newDecompressionReader(r, decompressor)
case 3:
- c.Body = newDecompressionReader(r, ioutil.NopCloser(bzip2.NewReader(r)))
+ c.Body = newDecompressionReader(r, io.NopCloser(bzip2.NewReader(r)))
default:
err = errors.UnsupportedError("unknown compression algorithm: " + strconv.Itoa(int(buf[0])))
}
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/config.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/config.go
index 181d5d344e..8bf8e6e51f 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/config.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/config.go
@@ -19,6 +19,10 @@ var (
PubKeyAlgoElGamal: true,
PubKeyAlgoDSA: true,
}
+ defaultRejectHashAlgorithms = map[crypto.Hash]bool{
+ crypto.MD5: true,
+ crypto.RIPEMD160: true,
+ }
defaultRejectMessageHashAlgorithms = map[crypto.Hash]bool{
crypto.SHA1: true,
crypto.MD5: true,
@@ -29,6 +33,15 @@ var (
}
)
+// A global feature flag to indicate v5 support.
+// Can be set via a build tag, e.g.: `go build -tags v5 ./...`
+// If the build tag is missing, config_v5.go will set it to true.
+//
+// When set to true, parsing of v5 keys and v5 signatures is disabled.
+// These are non-standard entities, which in the crypto-refresh have been superseded
+// by v6 keys, v6 signatures and SEIPDv2 encrypted data, respectively.
+var V5Disabled = false
+
// Config collects a number of parameters along with sensible defaults.
// A nil *Config is valid and results in all default values.
type Config struct {
@@ -95,6 +108,7 @@ type Config struct {
MinRSABits uint16
// Reject insecure algorithms, only works with v2 api
RejectPublicKeyAlgorithms map[PublicKeyAlgorithm]bool
+ RejectHashAlgorithms map[crypto.Hash]bool
RejectMessageHashAlgorithms map[crypto.Hash]bool
RejectCurves map[Curve]bool
// "The validity period of the key. This is the number of seconds after
@@ -125,6 +139,11 @@ type Config struct {
might be no other way than to tolerate the missing MDC. Setting this flag allows this
// mode of operation. It should be considered a measure of last resort.
InsecureAllowUnauthenticatedMessages bool
+ // InsecureAllowDecryptionWithSigningKeys allows decryption with keys marked as signing keys in the v2 API.
+ // This setting is potentially insecure, but it is needed as some libraries
+ // ignored key flags when selecting a key for encryption.
+ // Not relevant for the v1 API, as all keys were allowed in decryption.
+ InsecureAllowDecryptionWithSigningKeys bool
// KnownNotations is a map of Notation Data names to bools, which controls
// the notation names that are allowed to be present in critical Notation Data
// signature subpackets.
@@ -146,6 +165,14 @@ type Config struct {
// that the packet sequence conforms with the grammar mandated by rfc4880.
// The default behavior, when the config or flag is nil, is to check the packet sequence.
CheckPacketSequence *bool
+ // NonDeterministicSignaturesViaNotation is a flag to enable randomization of signatures.
+ // If true, a salt notation is used to randomize signatures generated by v4 and v5 keys
+ // (v6 signatures are always non-deterministic, by design).
+ // This protects EdDSA signatures from potentially leaking the secret key in case of faults (e.g., bit flips) which, in principle, could occur
+ // during the signing computation. It is added to signatures of any algorithm for simplicity, and it may also serve as protection against
+ // weaknesses in the hash algorithm, potentially hindering e.g. some chosen-prefix attacks.
+ // The default behavior, when the config or flag is nil, is to enable the feature.
+ NonDeterministicSignaturesViaNotation *bool
}
func (c *Config) Random() io.Reader {
@@ -233,7 +260,7 @@ func (c *Config) S2K() *s2k.Config {
return nil
}
// for backwards compatibility
- if c != nil && c.S2KCount > 0 && c.S2KConfig == nil {
+ if c.S2KCount > 0 && c.S2KConfig == nil {
return &s2k.Config{
S2KCount: c.S2KCount,
}
@@ -269,6 +296,13 @@ func (c *Config) AllowUnauthenticatedMessages() bool {
return c.InsecureAllowUnauthenticatedMessages
}
+func (c *Config) AllowDecryptionWithSigningKeys() bool {
+ if c == nil {
+ return false
+ }
+ return c.InsecureAllowDecryptionWithSigningKeys
+}
+
func (c *Config) KnownNotation(notationName string) bool {
if c == nil {
return false
@@ -322,6 +356,17 @@ func (c *Config) RejectPublicKeyAlgorithm(alg PublicKeyAlgorithm) bool {
return rejectedAlgorithms[alg]
}
+func (c *Config) RejectHashAlgorithm(hash crypto.Hash) bool {
+ var rejectedAlgorithms map[crypto.Hash]bool
+ if c == nil || c.RejectHashAlgorithms == nil {
+ // Default
+ rejectedAlgorithms = defaultRejectHashAlgorithms
+ } else {
+ rejectedAlgorithms = c.RejectHashAlgorithms
+ }
+ return rejectedAlgorithms[hash]
+}
+
func (c *Config) RejectMessageHashAlgorithm(hash crypto.Hash) bool {
var rejectedAlgorithms map[crypto.Hash]bool
if c == nil || c.RejectMessageHashAlgorithms == nil {
@@ -350,3 +395,16 @@ func (c *Config) StrictPacketSequence() bool {
}
return *c.CheckPacketSequence
}
+
+func (c *Config) RandomizeSignaturesViaNotation() bool {
+ if c == nil || c.NonDeterministicSignaturesViaNotation == nil {
+ return true
+ }
+ return *c.NonDeterministicSignaturesViaNotation
+}
+
+// BoolPointer is a helper function to set a boolean pointer in the Config.
+// e.g., config.CheckPacketSequence = BoolPointer(true)
+func BoolPointer(value bool) *bool {
+ return &value
+}
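
The new Config fields and helpers added above can be exercised together. A small sketch using only identifiers introduced in this hunk; the concrete settings are illustrative, not recommendations.

```go
package main

import (
	"crypto"
	"fmt"

	"github.com/ProtonMail/go-crypto/openpgp/packet"
)

func main() {
	cfg := &packet.Config{
		// Mirror the new default reject list explicitly.
		RejectHashAlgorithms: map[crypto.Hash]bool{
			crypto.MD5:       true,
			crypto.RIPEMD160: true,
		},
		// Tolerate decryption with keys flagged as signing-only (v2 API).
		InsecureAllowDecryptionWithSigningKeys: true,
		// Opt out of the salt-notation randomization of v4/v5 signatures.
		NonDeterministicSignaturesViaNotation: packet.BoolPointer(false),
	}

	fmt.Println(cfg.RejectHashAlgorithm(crypto.MD5))  // true
	fmt.Println(cfg.AllowDecryptionWithSigningKeys()) // true
	fmt.Println(cfg.RandomizeSignaturesViaNotation()) // false
}
```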
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/config_v5.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/config_v5.go
new file mode 100644
index 0000000000..f2415906b9
--- /dev/null
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/config_v5.go
@@ -0,0 +1,7 @@
+//go:build !v5
+
+package packet
+
+func init() {
+ V5Disabled = true
+}
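
Because config_v5.go carries the `!v5` build constraint, a default build compiles it and disables v5 parsing, while `go build -tags v5` excludes the file and leaves the flag at its zero value. A quick way to check which mode a binary was built in:

```go
package main

import (
	"fmt"

	"github.com/ProtonMail/go-crypto/openpgp/packet"
)

func main() {
	// Prints true for a plain `go build`, false when built with `-tags v5`.
	fmt.Println("v5 parsing disabled:", packet.V5Disabled)
}
```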
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/encrypted_key.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/encrypted_key.go
index e70f9d9411..b90bb28911 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/encrypted_key.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/encrypted_key.go
@@ -181,7 +181,12 @@ func (e *EncryptedKey) Decrypt(priv *PrivateKey, config *Config) error {
vsG := e.encryptedMPI1.Bytes()
m := e.encryptedMPI2.Bytes()
oid := priv.PublicKey.oid.EncodedBytes()
- b, err = ecdh.Decrypt(priv.PrivateKey.(*ecdh.PrivateKey), vsG, m, oid, priv.PublicKey.Fingerprint[:])
+ fp := priv.PublicKey.Fingerprint[:]
+ if priv.PublicKey.Version == 5 {
+ // For v5 keys, the fingerprint must be restricted to 20 bytes
+ fp = fp[:20]
+ }
+ b, err = ecdh.Decrypt(priv.PrivateKey.(*ecdh.PrivateKey), vsG, m, oid, fp)
case PubKeyAlgoX25519:
b, err = x25519.Decrypt(priv.PrivateKey.(*x25519.PrivateKey), e.ephemeralPublicX25519, e.encryptedSession)
case PubKeyAlgoX448:
@@ -316,7 +321,8 @@ func (e *EncryptedKey) Serialize(w io.Writer) error {
// SerializeEncryptedKeyAEAD serializes an encrypted key packet to w that contains
// key, encrypted to pub.
-// If aeadSupported is set, PKESK v6 is used else v4.
+// If aeadSupported is set, PKESK v6 is used, otherwise v3.
+// Note: aeadSupported MUST match the value passed to SerializeSymmetricallyEncrypted.
// If config is nil, sensible defaults will be used.
func SerializeEncryptedKeyAEAD(w io.Writer, pub *PublicKey, cipherFunc CipherFunction, aeadSupported bool, key []byte, config *Config) error {
return SerializeEncryptedKeyAEADwithHiddenOption(w, pub, cipherFunc, aeadSupported, key, false, config)
@@ -325,7 +331,8 @@ func SerializeEncryptedKeyAEAD(w io.Writer, pub *PublicKey, cipherFunc CipherFun
// SerializeEncryptedKeyAEADwithHiddenOption serializes an encrypted key packet to w that contains
// key, encrypted to pub.
// Offers the hidden flag option to indicate if the PKESK packet should include a wildcard KeyID.
-// If aeadSupported is set, PKESK v6 is used else v4.
+// If aeadSupported is set, PKESK v6 is used, otherwise v3.
+// Note: aeadSupported MUST match the value passed to SerializeSymmetricallyEncrypted.
// If config is nil, sensible defaults will be used.
func SerializeEncryptedKeyAEADwithHiddenOption(w io.Writer, pub *PublicKey, cipherFunc CipherFunction, aeadSupported bool, key []byte, hidden bool, config *Config) error {
var buf [36]byte // max possible header size is v6
@@ -421,6 +428,7 @@ func SerializeEncryptedKeyAEADwithHiddenOption(w io.Writer, pub *PublicKey, ciph
// key, encrypted to pub.
// PKESKv6 is used if config.AEAD() is not nil.
// If config is nil, sensible defaults will be used.
+// Deprecated: Use SerializeEncryptedKeyAEAD instead.
func SerializeEncryptedKey(w io.Writer, pub *PublicKey, cipherFunc CipherFunction, key []byte, config *Config) error {
return SerializeEncryptedKeyAEAD(w, pub, cipherFunc, config.AEAD() != nil, key, config)
}
@@ -429,6 +437,7 @@ func SerializeEncryptedKey(w io.Writer, pub *PublicKey, cipherFunc CipherFunctio
// key, encrypted to pub. PKESKv6 is used if config.AEAD() is not nil.
// The hidden option controls if the packet should be anonymous, i.e., omit key metadata.
// If config is nil, sensible defaults will be used.
+// Deprecated: Use SerializeEncryptedKeyAEADwithHiddenOption instead.
func SerializeEncryptedKeyWithHiddenOption(w io.Writer, pub *PublicKey, cipherFunc CipherFunction, key []byte, hidden bool, config *Config) error {
return SerializeEncryptedKeyAEADwithHiddenOption(w, pub, cipherFunc, config.AEAD() != nil, key, hidden, config)
}
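
The "MUST match" notes above are easy to get wrong. Below is a sketch of keeping the two calls consistent, assuming the caller has already prepared pub and the session key; config.Cipher() and the packet.CipherSuite literal are assumptions about the surrounding API, not taken from this diff.

```go
package sketch

import (
	"io"

	"github.com/ProtonMail/go-crypto/openpgp/packet"
)

// writeEncryptedTo serializes a PKESK followed by the SEIPD writer, feeding
// the same aeadSupported decision into both, as the comments above require.
func writeEncryptedTo(w io.Writer, pub *packet.PublicKey, sessionKey []byte, config *packet.Config) (io.WriteCloser, error) {
	aeadSupported := config.AEAD() != nil // decide once ...

	// ... use it for the PKESK packet (v6 when true, v3 otherwise) ...
	if err := packet.SerializeEncryptedKeyAEAD(w, pub, config.Cipher(), aeadSupported, sessionKey, config); err != nil {
		return nil, err
	}

	// ... and reuse the exact same value for the SEIPD packet.
	var suite packet.CipherSuite
	if aeadSupported {
		suite = packet.CipherSuite{Cipher: config.Cipher(), Mode: config.AEAD().Mode()}
	}
	return packet.SerializeSymmetricallyEncrypted(w, config.Cipher(), aeadSupported, suite, sessionKey, config)
}
```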
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/packet.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/packet.go
index da12fbce06..1e92e22c97 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/packet.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/packet.go
@@ -4,7 +4,7 @@
// Package packet implements parsing and serialization of OpenPGP packets, as
// specified in RFC 4880.
-package packet // import "github.com/ProtonMail/go-crypto/v2/openpgp/packet"
+package packet // import "github.com/ProtonMail/go-crypto/openpgp/packet"
import (
"bytes"
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/padding.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/padding.go
index 06fa83740d..3b6a7045d1 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/padding.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/padding.go
@@ -2,7 +2,6 @@ package packet
import (
"io"
- "io/ioutil"
)
// Padding type represents a Padding Packet (Tag 21).
@@ -12,7 +11,7 @@ type Padding int
// parse just ignores the padding content.
func (pad Padding) parse(reader io.Reader) error {
- _, err := io.CopyN(ioutil.Discard, reader, int64(pad))
+ _, err := io.CopyN(io.Discard, reader, int64(pad))
return err
}
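
The two ioutil removals above (io.NopCloser in compressed.go, io.Discard here) follow the standard Go 1.16+ migration. A minimal standalone illustration:

```go
package main

import (
	"fmt"
	"io"
	"strings"
)

func main() {
	// io.NopCloser and io.Discard replace their io/ioutil counterparts,
	// so the vendored packages can drop the deprecated import entirely.
	rc := io.NopCloser(strings.NewReader("discard me"))
	n, _ := io.Copy(io.Discard, rc)
	fmt.Println(n, rc.Close()) // 10 <nil>
}
```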
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/private_key.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/private_key.go
index 099b4d9ba0..f04e6c6b87 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/private_key.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/private_key.go
@@ -202,6 +202,10 @@ func (pk *PrivateKey) parse(r io.Reader) (err error) {
v5 := pk.PublicKey.Version == 5
v6 := pk.PublicKey.Version == 6
+ if V5Disabled && v5 {
+ return errors.UnsupportedError("support for parsing v5 entities is disabled; build with `-tags v5` if needed")
+ }
+
var buf [1]byte
_, err = readFull(r, buf[:])
if err != nil {
@@ -261,6 +265,12 @@ func (pk *PrivateKey) parse(r io.Reader) (err error) {
if pk.s2kParams.Dummy() {
return
}
+ if pk.s2kParams.Mode() == s2k.Argon2S2K && pk.s2kType != S2KAEAD {
+ return errors.StructuralError("using Argon2 S2K without AEAD is not allowed")
+ }
+ if pk.s2kParams.Mode() == s2k.SimpleS2K && pk.Version == 6 {
+ return errors.StructuralError("using Simple S2K with version 6 keys is not allowed")
+ }
pk.s2k, err = pk.s2kParams.Function()
if err != nil {
return
@@ -655,6 +665,14 @@ func (pk *PrivateKey) encrypt(key []byte, params *s2k.Params, s2kType S2KType, c
return errors.InvalidArgumentError("supplied encryption key has the wrong size")
}
+ if params.Mode() == s2k.Argon2S2K && s2kType != S2KAEAD {
+ return errors.InvalidArgumentError("using Argon2 S2K without AEAD is not allowed")
+ }
+ if params.Mode() != s2k.Argon2S2K && params.Mode() != s2k.IteratedSaltedS2K &&
+ params.Mode() != s2k.SaltedS2K { // only allowed for high-entropy passphrases
+ return errors.InvalidArgumentError("insecure S2K mode")
+ }
+
priv := bytes.NewBuffer(nil)
err := pk.serializePrivateKey(priv)
if err != nil {
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/public_key.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/public_key.go
index dd93c98702..f8da781bbe 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/public_key.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/public_key.go
@@ -63,9 +63,10 @@ func (pk *PublicKey) UpgradeToV5() {
// UpgradeToV6 updates the version of the key to v6, and updates all necessary
// fields.
-func (pk *PublicKey) UpgradeToV6() {
+func (pk *PublicKey) UpgradeToV6() error {
pk.Version = 6
pk.setFingerprintAndKeyId()
+ return pk.checkV6Compatibility()
}
// signingKey provides a convenient abstraction over signature verification
@@ -236,11 +237,16 @@ func (pk *PublicKey) parse(r io.Reader) (err error) {
if err != nil {
return
}
- if buf[0] != 4 && buf[0] != 5 && buf[0] != 6 {
+
+ pk.Version = int(buf[0])
+ if pk.Version != 4 && pk.Version != 5 && pk.Version != 6 {
return errors.UnsupportedError("public key version " + strconv.Itoa(int(buf[0])))
}
- pk.Version = int(buf[0])
+ if V5Disabled && pk.Version == 5 {
+ return errors.UnsupportedError("support for parsing v5 entities is disabled; build with `-tags v5` if needed")
+ }
+
if pk.Version >= 5 {
// Read the four-octet scalar octet count
// The count is not used in this implementation
@@ -308,6 +314,23 @@ func (pk *PublicKey) setFingerprintAndKeyId() {
}
}
+func (pk *PublicKey) checkV6Compatibility() error {
+ // Implementations MUST NOT accept or generate version 6 key material using the deprecated OIDs.
+ switch pk.PubKeyAlgo {
+ case PubKeyAlgoECDH:
+ curveInfo := ecc.FindByOid(pk.oid)
+ if curveInfo == nil {
+ return errors.UnsupportedError(fmt.Sprintf("unknown oid: %x", pk.oid))
+ }
+ if curveInfo.GenName == ecc.Curve25519GenName {
+ return errors.StructuralError("cannot generate v6 key with deprecated OID: Curve25519Legacy")
+ }
+ case PubKeyAlgoEdDSA:
+ return errors.StructuralError("cannot generate v6 key with deprecated algorithm: EdDSALegacy")
+ }
+ return nil
+}
+
// parseRSA parses RSA public key material from the given Reader. See RFC 4880,
// section 5.5.2.
func (pk *PublicKey) parseRSA(r io.Reader) (err error) {
@@ -432,6 +455,11 @@ func (pk *PublicKey) parseECDH(r io.Reader) (err error) {
return errors.UnsupportedError(fmt.Sprintf("unknown oid: %x", pk.oid))
}
+ if pk.Version == 6 && curveInfo.GenName == ecc.Curve25519GenName {
+ // Implementations MUST NOT accept or generate version 6 key material using the deprecated OIDs.
+ return errors.StructuralError("cannot read v6 key with deprecated OID: Curve25519Legacy")
+ }
+
pk.p = new(encoding.MPI)
if _, err = pk.p.ReadFrom(r); err != nil {
return
@@ -469,6 +497,11 @@ func (pk *PublicKey) parseECDH(r io.Reader) (err error) {
}
func (pk *PublicKey) parseEdDSA(r io.Reader) (err error) {
+ if pk.Version == 6 {
+ // Implementations MUST NOT accept or generate version 6 key material using the deprecated OIDs.
+ return errors.StructuralError("cannot generate v6 key with deprecated algorithm: EdDSALegacy")
+ }
+
pk.oid = new(encoding.OID)
if _, err = pk.oid.ReadFrom(r); err != nil {
return
@@ -590,10 +623,7 @@ func (pk *PublicKey) SerializeSignaturePrefix(w io.Writer) error {
byte(pLength >> 8),
byte(pLength),
})
- if err != nil {
- return err
- }
- return nil
+ return err
}
if _, err := w.Write([]byte{0x99, byte(pLength >> 8), byte(pLength)}); err != nil {
return err
@@ -752,6 +782,20 @@ func (pk *PublicKey) CanSign() bool {
return pk.PubKeyAlgo != PubKeyAlgoRSAEncryptOnly && pk.PubKeyAlgo != PubKeyAlgoElGamal && pk.PubKeyAlgo != PubKeyAlgoECDH
}
+// VerifyHashTag returns nil iff sig appears to be a plausible signature of the data
+// hashed into signed, based solely on its HashTag. signed is mutated by this call.
+func VerifyHashTag(signed hash.Hash, sig *Signature) (err error) {
+ if sig.Version == 5 && (sig.SigType == 0x00 || sig.SigType == 0x01) {
+ sig.AddMetadataToHashSuffix()
+ }
+ signed.Write(sig.HashSuffix)
+ hashBytes := signed.Sum(nil)
+ if hashBytes[0] != sig.HashTag[0] || hashBytes[1] != sig.HashTag[1] {
+ return errors.SignatureError("hash tag doesn't match")
+ }
+ return nil
+}
+
// VerifySignature returns nil iff sig is a valid signature, made by this
// public key, of the data hashed into signed. signed is mutated by this call.
func (pk *PublicKey) VerifySignature(signed hash.Hash, sig *Signature) (err error) {
@@ -835,6 +879,20 @@ func keySignatureHash(pk, signed signingKey, hashFunc hash.Hash) (h hash.Hash, e
return
}
+// VerifyKeyHashTag returns nil iff sig appears to be a plausible signature over this
+// primary key and subkey, based solely on its HashTag.
+func (pk *PublicKey) VerifyKeyHashTag(signed *PublicKey, sig *Signature) error {
+ preparedHash, err := sig.PrepareVerify()
+ if err != nil {
+ return err
+ }
+ h, err := keySignatureHash(pk, signed, preparedHash)
+ if err != nil {
+ return err
+ }
+ return VerifyHashTag(h, sig)
+}
+
// VerifyKeySignature returns nil iff sig is a valid signature, made by this
// public key, of signed.
func (pk *PublicKey) VerifyKeySignature(signed *PublicKey, sig *Signature) error {
@@ -878,6 +936,19 @@ func keyRevocationHash(pk signingKey, hashFunc hash.Hash) (err error) {
return pk.SerializeForHash(hashFunc)
}
+// VerifyRevocationHashTag returns nil iff sig appears to be a plausible signature
+// over this public key, based solely on its HashTag.
+func (pk *PublicKey) VerifyRevocationHashTag(sig *Signature) (err error) {
+ preparedHash, err := sig.PrepareVerify()
+ if err != nil {
+ return err
+ }
+ if err = keyRevocationHash(pk, preparedHash); err != nil {
+ return err
+ }
+ return VerifyHashTag(preparedHash, sig)
+}
+
// VerifyRevocationSignature returns nil iff sig is a valid signature, made by this
// public key.
func (pk *PublicKey) VerifyRevocationSignature(sig *Signature) (err error) {
@@ -885,7 +956,7 @@ func (pk *PublicKey) VerifyRevocationSignature(sig *Signature) (err error) {
if err != nil {
return err
}
- if keyRevocationHash(pk, preparedHash); err != nil {
+ if err = keyRevocationHash(pk, preparedHash); err != nil {
return err
}
return pk.VerifySignature(preparedHash, sig)
@@ -934,6 +1005,20 @@ func directKeySignatureHash(pk *PublicKey, h hash.Hash) (err error) {
return pk.SerializeForHash(h)
}
+// VerifyUserIdHashTag returns nil iff sig appears to be a plausible signature over this
+// public key and UserId, based solely on its HashTag
+func (pk *PublicKey) VerifyUserIdHashTag(id string, sig *Signature) (err error) {
+ preparedHash, err := sig.PrepareVerify()
+ if err != nil {
+ return err
+ }
+ err = userIdSignatureHash(id, pk, preparedHash)
+ if err != nil {
+ return err
+ }
+ return VerifyHashTag(preparedHash, sig)
+}
+
// VerifyUserIdSignature returns nil iff sig is a valid signature, made by this
// public key, that id is the identity of pub.
func (pk *PublicKey) VerifyUserIdSignature(id string, pub *PublicKey, sig *Signature) (err error) {
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/signature.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/signature.go
index 420625386b..3a4b366d87 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/signature.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/signature.go
@@ -8,9 +8,11 @@ import (
"bytes"
"crypto"
"crypto/dsa"
+ "encoding/asn1"
"encoding/binary"
"hash"
"io"
+ "math/big"
"strconv"
"time"
@@ -24,7 +26,8 @@ import (
)
const (
- // See RFC 4880, section 5.2.3.21 for details.
+ // First octet of key flags.
+ // See RFC 9580, section 5.2.3.29 for details.
KeyFlagCertify = 1 << iota
KeyFlagSign
KeyFlagEncryptCommunications
@@ -35,14 +38,29 @@ const (
KeyFlagGroupKey
)
-// Signature represents a signature. See RFC 4880, section 5.2.
+const (
+ // First octet of keyserver preference flags.
+ // See RFC 9580, section 5.2.3.25 for details.
+ _ = 1 << iota
+ _
+ _
+ _
+ _
+ _
+ _
+ KeyserverPrefNoModify
+)
+
+const SaltNotationName = "salt@notations.openpgpjs.org"
+
+// Signature represents a signature. See RFC 9580, section 5.2.
type Signature struct {
Version int
SigType SignatureType
PubKeyAlgo PublicKeyAlgorithm
Hash crypto.Hash
// salt contains a random salt value for v6 signatures
- // See RFC the crypto refresh Section 5.2.3.
+ // See RFC 9580 Section 5.2.4.
salt []byte
// HashSuffix is extra data that is hashed in after the signed data.
@@ -83,27 +101,37 @@ type Signature struct {
// TrustLevel and TrustAmount can be set by the signer to assert that
// the key is not only valid but also trustworthy at the specified
// level.
- // See RFC 4880, section 5.2.3.13 for details.
+ // See RFC 9580, section 5.2.3.21 for details.
TrustLevel TrustLevel
TrustAmount TrustAmount
// TrustRegularExpression can be used in conjunction with trust Signature
// packets to limit the scope of the trust that is extended.
- // See RFC 4880, section 5.2.3.14 for details.
+ // See RFC 9580, section 5.2.3.22 for details.
TrustRegularExpression *string
+ // KeyserverPrefsValid is set if any keyserver preferences were given. See RFC 9580, section
+ // 5.2.3.25 for details.
+ KeyserverPrefsValid bool
+ KeyserverPrefNoModify bool
+
+ // PreferredKeyserver can be set to a URI where the latest version of the
+ // key that this signature is made over can be found. See RFC 9580, section
+ // 5.2.3.26 for details.
+ PreferredKeyserver string
+
// PolicyURI can be set to the URI of a document that describes the
- // policy under which the signature was issued. See RFC 4880, section
- // 5.2.3.20 for details.
+ // policy under which the signature was issued. See RFC 9580, section
+ // 5.2.3.28 for details.
PolicyURI string
- // FlagsValid is set if any flags were given. See RFC 4880, section
- // 5.2.3.21 for details.
+ // FlagsValid is set if any flags were given. See RFC 9580, section
+ // 5.2.3.29 for details.
FlagsValid bool
FlagCertify, FlagSign, FlagEncryptCommunications, FlagEncryptStorage, FlagSplitKey, FlagAuthenticate, FlagGroupKey bool
// RevocationReason is set if this signature has been revoked.
- // See RFC 4880, section 5.2.3.23 for details.
+ // See RFC 9580, section 5.2.3.31 for details.
RevocationReason *ReasonForRevocation
RevocationReasonText string
@@ -143,17 +171,22 @@ func (sig *Signature) Salt() []byte {
}
func (sig *Signature) parse(r io.Reader) (err error) {
- // RFC 4880, section 5.2.3
+ // RFC 9580, section 5.2.3
var buf [7]byte
_, err = readFull(r, buf[:1])
if err != nil {
return
}
- if buf[0] != 4 && buf[0] != 5 && buf[0] != 6 {
+ sig.Version = int(buf[0])
+ if sig.Version != 4 && sig.Version != 5 && sig.Version != 6 {
err = errors.UnsupportedError("signature packet version " + strconv.Itoa(int(buf[0])))
return
}
- sig.Version = int(buf[0])
+
+ if V5Disabled && sig.Version == 5 {
+ return errors.UnsupportedError("support for parsing v5 entities is disabled; build with `-tags v5` if needed")
+ }
+
if sig.Version == 6 {
_, err = readFull(r, buf[:7])
} else {
@@ -310,7 +343,7 @@ func (sig *Signature) parse(r io.Reader) (err error) {
}
// parseSignatureSubpackets parses subpackets of the main signature packet. See
-// RFC 4880, section 5.2.3.1.
+// RFC 9580, section 5.2.3.1.
func parseSignatureSubpackets(sig *Signature, subpackets []byte, isHashed bool) (err error) {
for len(subpackets) > 0 {
subpackets, err = parseSignatureSubpacket(sig, subpackets, isHashed)
@@ -331,6 +364,7 @@ type signatureSubpacketType uint8
const (
creationTimeSubpacket signatureSubpacketType = 2
signatureExpirationSubpacket signatureSubpacketType = 3
+ exportableCertSubpacket signatureSubpacketType = 4
trustSubpacket signatureSubpacketType = 5
regularExpressionSubpacket signatureSubpacketType = 6
keyExpirationSubpacket signatureSubpacketType = 9
@@ -339,6 +373,8 @@ const (
notationDataSubpacket signatureSubpacketType = 20
prefHashAlgosSubpacket signatureSubpacketType = 21
prefCompressionSubpacket signatureSubpacketType = 22
+ keyserverPrefsSubpacket signatureSubpacketType = 23
+ prefKeyserverSubpacket signatureSubpacketType = 24
primaryUserIdSubpacket signatureSubpacketType = 25
policyUriSubpacket signatureSubpacketType = 26
keyFlagsSubpacket signatureSubpacketType = 27
@@ -353,7 +389,7 @@ const (
// parseSignatureSubpacket parses a single subpacket. len(subpacket) is >= 1.
func parseSignatureSubpacket(sig *Signature, subpacket []byte, isHashed bool) (rest []byte, err error) {
- // RFC 4880, section 5.2.3.1
+ // RFC 9580, section 5.2.3.7
var (
length uint32
packetType signatureSubpacketType
@@ -411,19 +447,24 @@ func parseSignatureSubpacket(sig *Signature, subpacket []byte, isHashed bool) (r
t := binary.BigEndian.Uint32(subpacket)
sig.CreationTime = time.Unix(int64(t), 0)
case signatureExpirationSubpacket:
- // Signature expiration time, section 5.2.3.10
+ // Signature expiration time, section 5.2.3.18
if len(subpacket) != 4 {
err = errors.StructuralError("expiration subpacket with bad length")
return
}
sig.SigLifetimeSecs = new(uint32)
*sig.SigLifetimeSecs = binary.BigEndian.Uint32(subpacket)
+ case exportableCertSubpacket:
+ if subpacket[0] == 0 {
+ err = errors.UnsupportedError("signature with non-exportable certification")
+ return
+ }
case trustSubpacket:
if len(subpacket) != 2 {
err = errors.StructuralError("trust subpacket with bad length")
return
}
- // Trust level and amount, section 5.2.3.13
+ // Trust level and amount, section 5.2.3.21
sig.TrustLevel = TrustLevel(subpacket[0])
sig.TrustAmount = TrustAmount(subpacket[1])
case regularExpressionSubpacket:
@@ -431,7 +472,7 @@ func parseSignatureSubpacket(sig *Signature, subpacket []byte, isHashed bool) (r
err = errors.StructuralError("regexp subpacket with bad length")
return
}
- // Trust regular expression, section 5.2.3.14
+ // Trust regular expression, section 5.2.3.22
// RFC specifies the string should be null-terminated; remove a null byte from the end
if subpacket[len(subpacket)-1] != 0x00 {
err = errors.StructuralError("expected regular expression to be null-terminated")
@@ -440,7 +481,7 @@ func parseSignatureSubpacket(sig *Signature, subpacket []byte, isHashed bool) (r
trustRegularExpression := string(subpacket[:len(subpacket)-1])
sig.TrustRegularExpression = &trustRegularExpression
case keyExpirationSubpacket:
- // Key expiration time, section 5.2.3.6
+ // Key expiration time, section 5.2.3.13
if len(subpacket) != 4 {
err = errors.StructuralError("key expiration subpacket with bad length")
return
@@ -448,11 +489,11 @@ func parseSignatureSubpacket(sig *Signature, subpacket []byte, isHashed bool) (r
sig.KeyLifetimeSecs = new(uint32)
*sig.KeyLifetimeSecs = binary.BigEndian.Uint32(subpacket)
case prefSymmetricAlgosSubpacket:
- // Preferred symmetric algorithms, section 5.2.3.7
+ // Preferred symmetric algorithms, section 5.2.3.14
sig.PreferredSymmetric = make([]byte, len(subpacket))
copy(sig.PreferredSymmetric, subpacket)
case issuerSubpacket:
- // Issuer, section 5.2.3.5
+ // Issuer, section 5.2.3.12
if sig.Version > 4 && isHashed {
err = errors.StructuralError("issuer subpacket found in v6 key")
return
@@ -466,7 +507,7 @@ func parseSignatureSubpacket(sig *Signature, subpacket []byte, isHashed bool) (r
*sig.IssuerKeyId = binary.BigEndian.Uint64(subpacket)
}
case notationDataSubpacket:
- // Notation data, section 5.2.3.16
+ // Notation data, section 5.2.3.24
if len(subpacket) < 8 {
err = errors.StructuralError("notation data subpacket with bad length")
return
@@ -488,15 +529,27 @@ func parseSignatureSubpacket(sig *Signature, subpacket []byte, isHashed bool) (r
sig.Notations = append(sig.Notations, &notation)
case prefHashAlgosSubpacket:
- // Preferred hash algorithms, section 5.2.3.8
+ // Preferred hash algorithms, section 5.2.3.16
sig.PreferredHash = make([]byte, len(subpacket))
copy(sig.PreferredHash, subpacket)
case prefCompressionSubpacket:
- // Preferred compression algorithms, section 5.2.3.9
+ // Preferred compression algorithms, section 5.2.3.17
sig.PreferredCompression = make([]byte, len(subpacket))
copy(sig.PreferredCompression, subpacket)
+ case keyserverPrefsSubpacket:
+ // Keyserver preferences, section 5.2.3.25
+ sig.KeyserverPrefsValid = true
+ if len(subpacket) == 0 {
+ return
+ }
+ if subpacket[0]&KeyserverPrefNoModify != 0 {
+ sig.KeyserverPrefNoModify = true
+ }
+ case prefKeyserverSubpacket:
+ // Preferred keyserver, section 5.2.3.26
+ sig.PreferredKeyserver = string(subpacket)
case primaryUserIdSubpacket:
- // Primary User ID, section 5.2.3.19
+ // Primary User ID, section 5.2.3.27
if len(subpacket) != 1 {
err = errors.StructuralError("primary user id subpacket with bad length")
return
@@ -506,12 +559,11 @@ func parseSignatureSubpacket(sig *Signature, subpacket []byte, isHashed bool) (r
*sig.IsPrimaryId = true
}
case keyFlagsSubpacket:
- // Key flags, section 5.2.3.21
+ // Key flags, section 5.2.3.29
+ sig.FlagsValid = true
if len(subpacket) == 0 {
- err = errors.StructuralError("empty key flags subpacket")
return
}
- sig.FlagsValid = true
if subpacket[0]&KeyFlagCertify != 0 {
sig.FlagCertify = true
}
@@ -537,7 +589,7 @@ func parseSignatureSubpacket(sig *Signature, subpacket []byte, isHashed bool) (r
userId := string(subpacket)
sig.SignerUserId = &userId
case reasonForRevocationSubpacket:
- // Reason For Revocation, section 5.2.3.23
+ // Reason For Revocation, section 5.2.3.31
if len(subpacket) == 0 {
err = errors.StructuralError("empty revocation reason subpacket")
return
@@ -546,7 +598,7 @@ func parseSignatureSubpacket(sig *Signature, subpacket []byte, isHashed bool) (r
*sig.RevocationReason = NewReasonForRevocation(subpacket[0])
sig.RevocationReasonText = string(subpacket[1:])
case featuresSubpacket:
- // Features subpacket, section 5.2.3.24 specifies a very general
+ // Features subpacket, section 5.2.3.32 specifies a very general
// mechanism for OpenPGP implementations to signal support for new
// features.
if len(subpacket) > 0 {
@@ -560,16 +612,13 @@ func parseSignatureSubpacket(sig *Signature, subpacket []byte, isHashed bool) (r
}
case embeddedSignatureSubpacket:
// Only usage is in signatures that cross-certify
- // signing subkeys. section 5.2.3.26 describes the
+ // signing subkeys. section 5.2.3.34 describes the
// format, with its usage described in section 11.1
if sig.EmbeddedSignature != nil {
err = errors.StructuralError("Cannot have multiple embedded signatures")
return
}
sig.EmbeddedSignature = new(Signature)
- // Embedded signatures are required to be v4 signatures see
- // section 12.1. However, we only parse v4 signatures in this
- // file anyway.
if err := sig.EmbeddedSignature.parse(bytes.NewBuffer(subpacket)); err != nil {
return nil, err
}
@@ -577,7 +626,7 @@ func parseSignatureSubpacket(sig *Signature, subpacket []byte, isHashed bool) (r
return nil, errors.StructuralError("cross-signature has unexpected type " + strconv.Itoa(int(sigType)))
}
case policyUriSubpacket:
- // Policy URI, section 5.2.3.20
+ // Policy URI, section 5.2.3.28
sig.PolicyURI = string(subpacket)
case issuerFingerprintSubpacket:
if len(subpacket) == 0 {
@@ -597,8 +646,7 @@ func parseSignatureSubpacket(sig *Signature, subpacket []byte, isHashed bool) (r
*sig.IssuerKeyId = binary.BigEndian.Uint64(subpacket[13:21])
}
case intendedRecipientSubpacket:
- // Intended Recipient Fingerprint
- // https://datatracker.ietf.org/doc/html/draft-ietf-openpgp-crypto-refresh#name-intended-recipient-fingerpr
+ // Intended Recipient Fingerprint, section 5.2.3.36
if len(subpacket) < 1 {
return nil, errors.StructuralError("invalid intended recipient fingerprint length")
}
@@ -610,8 +658,7 @@ func parseSignatureSubpacket(sig *Signature, subpacket []byte, isHashed bool) (r
copy(fingerprint, subpacket[1:])
sig.IntendedRecipients = append(sig.IntendedRecipients, &Recipient{int(version), fingerprint})
case prefCipherSuitesSubpacket:
- // Preferred AEAD cipher suites
- // See https://www.ietf.org/archive/id/draft-ietf-openpgp-crypto-refresh-07.html#name-preferred-aead-ciphersuites
+ // Preferred AEAD cipher suites, section 5.2.3.15
if len(subpacket)%2 != 0 {
err = errors.StructuralError("invalid aead cipher suite length")
return
@@ -662,7 +709,7 @@ func (sig *Signature) CheckKeyIdOrFingerprintExplicit(fingerprint []byte, keyId
// serializeSubpacketLength marshals the given length into to.
func serializeSubpacketLength(to []byte, length int) int {
- // RFC 4880, Section 4.2.2.
+ // RFC 9580, Section 4.2.1.
if length < 192 {
to[0] = byte(length)
return 1
@@ -805,7 +852,7 @@ func (sig *Signature) signPrepareHash(h hash.Hash) (digest []byte, err error) {
// The created hash object initially hashes a randomly generated salt
// as required by v6 signatures. The generated salt is stored in sig. If the signature is not v6,
// the method returns an empty hash object.
-// See RFC the crypto refresh Section 3.2.4.
+// See RFC 9580 Section 5.2.4.
func (sig *Signature) PrepareSign(config *Config) (hash.Hash, error) {
if !sig.Hash.Available() {
return nil, errors.UnsupportedError("hash function")
@@ -829,7 +876,7 @@ func (sig *Signature) PrepareSign(config *Config) (hash.Hash, error) {
// If the signature is not v6, the method ignores the salt.
// Use PrepareSign whenever possible instead of generating and
// hashing the salt externally.
-// See RFC the crypto refresh Section 3.2.4.
+// See RFC 9580 Section 5.2.4.
func (sig *Signature) SetSalt(salt []byte) error {
if sig.Version == 6 {
expectedSaltLength, err := SaltLengthForHash(sig.Hash)
@@ -847,7 +894,7 @@ func (sig *Signature) SetSalt(salt []byte) error {
// PrepareVerify must be called to create a hash object before verifying v6 signatures.
// The created hash object initially hashes the internally stored salt.
// If the signature is not v6, the method returns an empty hash object.
-// See crypto refresh Section 3.2.4.
+// See RFC 9580 Section 5.2.4.
func (sig *Signature) PrepareVerify() (hash.Hash, error) {
if !sig.Hash.Available() {
return nil, errors.UnsupportedError("hash function")
@@ -872,6 +919,20 @@ func (sig *Signature) Sign(h hash.Hash, priv *PrivateKey, config *Config) (err e
}
sig.Version = priv.PublicKey.Version
sig.IssuerFingerprint = priv.PublicKey.Fingerprint
+ if sig.Version < 6 && config.RandomizeSignaturesViaNotation() {
+ sig.removeNotationsWithName(SaltNotationName)
+ salt, err := SignatureSaltForHash(sig.Hash, config.Random())
+ if err != nil {
+ return err
+ }
+ notation := Notation{
+ Name: SaltNotationName,
+ Value: salt,
+ IsCritical: false,
+ IsHumanReadable: false,
+ }
+ sig.Notations = append(sig.Notations, &notation)
+ }
sig.outSubpackets, err = sig.buildSubpackets(priv.PublicKey)
if err != nil {
return err
@@ -901,8 +962,16 @@ func (sig *Signature) Sign(h hash.Hash, priv *PrivateKey, config *Config) (err e
sig.DSASigS = new(encoding.MPI).SetBig(s)
}
case PubKeyAlgoECDSA:
- sk := priv.PrivateKey.(*ecdsa.PrivateKey)
- r, s, err := ecdsa.Sign(config.Random(), sk, digest)
+ var r, s *big.Int
+ if sk, ok := priv.PrivateKey.(*ecdsa.PrivateKey); ok {
+ r, s, err = ecdsa.Sign(config.Random(), sk, digest)
+ } else {
+ var b []byte
+ b, err = priv.PrivateKey.(crypto.Signer).Sign(config.Random(), digest, sig.Hash)
+ if err == nil {
+ r, s, err = unwrapECDSASig(b)
+ }
+ }
if err == nil {
sig.ECDSASigR = new(encoding.MPI).SetBig(r)
@@ -934,6 +1003,18 @@ func (sig *Signature) Sign(h hash.Hash, priv *PrivateKey, config *Config) (err e
return
}
+// unwrapECDSASig parses the two integer components of an ASN.1-encoded ECDSA signature.
+func unwrapECDSASig(b []byte) (r, s *big.Int, err error) {
+ var ecdsaSig struct {
+ R, S *big.Int
+ }
+ _, err = asn1.Unmarshal(b, &ecdsaSig)
+ if err != nil {
+ return
+ }
+ return ecdsaSig.R, ecdsaSig.S, nil
+}
+
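
For context on unwrapECDSASig: an opaque crypto.Signer (for example an HSM-backed key) returns an ASN.1 DER SEQUENCE of r and s, which is exactly what the asn1.Unmarshal above pulls apart. A standalone illustration using only the standard library:

```go
package main

import (
	"crypto/ecdsa"
	"crypto/elliptic"
	"crypto/rand"
	"crypto/sha256"
	"encoding/asn1"
	"fmt"
	"math/big"
)

func main() {
	key, _ := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	digest := sha256.Sum256([]byte("message"))

	// SignASN1 emits the same DER SEQUENCE{r, s} a generic crypto.Signer returns.
	der, _ := ecdsa.SignASN1(rand.Reader, key, digest[:])

	var sig struct{ R, S *big.Int }
	if _, err := asn1.Unmarshal(der, &sig); err != nil {
		panic(err)
	}
	fmt.Println(ecdsa.Verify(&key.PublicKey, digest[:], sig.R, sig.S)) // true
}
```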
// SignUserId computes a signature from priv, asserting that pub is a valid
// key for the identity id. On success, the signature is stored in sig. Call
// Serialize to write it out.
@@ -1176,28 +1257,81 @@ type outputSubpacket struct {
func (sig *Signature) buildSubpackets(issuer PublicKey) (subpackets []outputSubpacket, err error) {
creationTime := make([]byte, 4)
binary.BigEndian.PutUint32(creationTime, uint32(sig.CreationTime.Unix()))
- subpackets = append(subpackets, outputSubpacket{true, creationTimeSubpacket, false, creationTime})
-
+ // Signature Creation Time
+ subpackets = append(subpackets, outputSubpacket{true, creationTimeSubpacket, true, creationTime})
+ // Signature Expiration Time
+ if sig.SigLifetimeSecs != nil && *sig.SigLifetimeSecs != 0 {
+ sigLifetime := make([]byte, 4)
+ binary.BigEndian.PutUint32(sigLifetime, *sig.SigLifetimeSecs)
+ subpackets = append(subpackets, outputSubpacket{true, signatureExpirationSubpacket, true, sigLifetime})
+ }
+ // Trust Signature
+ if sig.TrustLevel != 0 {
+ subpackets = append(subpackets, outputSubpacket{true, trustSubpacket, true, []byte{byte(sig.TrustLevel), byte(sig.TrustAmount)}})
+ }
+ // Regular Expression
+ if sig.TrustRegularExpression != nil {
+ // RFC specifies the string should be null-terminated; add a null byte to the end
+ subpackets = append(subpackets, outputSubpacket{true, regularExpressionSubpacket, true, []byte(*sig.TrustRegularExpression + "\000")})
+ }
+ // Key Expiration Time
+ if sig.KeyLifetimeSecs != nil && *sig.KeyLifetimeSecs != 0 {
+ keyLifetime := make([]byte, 4)
+ binary.BigEndian.PutUint32(keyLifetime, *sig.KeyLifetimeSecs)
+ subpackets = append(subpackets, outputSubpacket{true, keyExpirationSubpacket, true, keyLifetime})
+ }
+ // Preferred Symmetric Ciphers for v1 SEIPD
+ if len(sig.PreferredSymmetric) > 0 {
+ subpackets = append(subpackets, outputSubpacket{true, prefSymmetricAlgosSubpacket, false, sig.PreferredSymmetric})
+ }
+ // Issuer Key ID
if sig.IssuerKeyId != nil && sig.Version == 4 {
keyId := make([]byte, 8)
binary.BigEndian.PutUint64(keyId, *sig.IssuerKeyId)
- subpackets = append(subpackets, outputSubpacket{true, issuerSubpacket, false, keyId})
+ subpackets = append(subpackets, outputSubpacket{true, issuerSubpacket, true, keyId})
}
- if sig.IssuerFingerprint != nil {
- contents := append([]uint8{uint8(issuer.Version)}, sig.IssuerFingerprint...)
- subpackets = append(subpackets, outputSubpacket{true, issuerFingerprintSubpacket, sig.Version >= 5, contents})
+ // Notation Data
+ for _, notation := range sig.Notations {
+ subpackets = append(
+ subpackets,
+ outputSubpacket{
+ true,
+ notationDataSubpacket,
+ notation.IsCritical,
+ notation.getData(),
+ })
}
- if sig.SignerUserId != nil {
- subpackets = append(subpackets, outputSubpacket{true, signerUserIdSubpacket, false, []byte(*sig.SignerUserId)})
+ // Preferred Hash Algorithms
+ if len(sig.PreferredHash) > 0 {
+ subpackets = append(subpackets, outputSubpacket{true, prefHashAlgosSubpacket, false, sig.PreferredHash})
}
- if sig.SigLifetimeSecs != nil && *sig.SigLifetimeSecs != 0 {
- sigLifetime := make([]byte, 4)
- binary.BigEndian.PutUint32(sigLifetime, *sig.SigLifetimeSecs)
- subpackets = append(subpackets, outputSubpacket{true, signatureExpirationSubpacket, true, sigLifetime})
+ // Preferred Compression Algorithms
+ if len(sig.PreferredCompression) > 0 {
+ subpackets = append(subpackets, outputSubpacket{true, prefCompressionSubpacket, false, sig.PreferredCompression})
}
-
+ // Keyserver Preferences
+ // Keyserver preferences may only appear in self-signatures or certification signatures.
+ if sig.KeyserverPrefsValid {
+ var prefs byte
+ if sig.KeyserverPrefNoModify {
+ prefs |= KeyserverPrefNoModify
+ }
+ subpackets = append(subpackets, outputSubpacket{true, keyserverPrefsSubpacket, false, []byte{prefs}})
+ }
+ // Preferred Keyserver
+ if len(sig.PreferredKeyserver) > 0 {
+ subpackets = append(subpackets, outputSubpacket{true, prefKeyserverSubpacket, false, []uint8(sig.PreferredKeyserver)})
+ }
+ // Primary User ID
+ if sig.IsPrimaryId != nil && *sig.IsPrimaryId {
+ subpackets = append(subpackets, outputSubpacket{true, primaryUserIdSubpacket, false, []byte{1}})
+ }
+ // Policy URI
+ if len(sig.PolicyURI) > 0 {
+ subpackets = append(subpackets, outputSubpacket{true, policyUriSubpacket, false, []uint8(sig.PolicyURI)})
+ }
+ // Key Flags
// Key flags may only appear in self-signatures or certification signatures.
-
if sig.FlagsValid {
var flags byte
if sig.FlagCertify {
@@ -1221,33 +1355,19 @@ func (sig *Signature) buildSubpackets(issuer PublicKey) (subpackets []outputSubp
if sig.FlagGroupKey {
flags |= KeyFlagGroupKey
}
- subpackets = append(subpackets, outputSubpacket{true, keyFlagsSubpacket, false, []byte{flags}})
+ subpackets = append(subpackets, outputSubpacket{true, keyFlagsSubpacket, true, []byte{flags}})
}
-
- for _, notation := range sig.Notations {
- subpackets = append(
- subpackets,
- outputSubpacket{
- true,
- notationDataSubpacket,
- notation.IsCritical,
- notation.getData(),
- })
+ // Signer's User ID
+ if sig.SignerUserId != nil {
+ subpackets = append(subpackets, outputSubpacket{true, signerUserIdSubpacket, false, []byte(*sig.SignerUserId)})
}
-
- for _, recipient := range sig.IntendedRecipients {
- subpackets = append(
- subpackets,
- outputSubpacket{
- true,
- intendedRecipientSubpacket,
- false,
- recipient.Serialize(),
- })
+ // Reason for Revocation
+ // Revocation reason appears only in revocation signatures and is serialized as per section 5.2.3.31.
+ if sig.RevocationReason != nil {
+ subpackets = append(subpackets, outputSubpacket{true, reasonForRevocationSubpacket, true,
+ append([]uint8{uint8(*sig.RevocationReason)}, []uint8(sig.RevocationReasonText)...)})
}
-
- // The following subpackets may only appear in self-signatures.
-
+ // Features
var features = byte(0x00)
if sig.SEIPDv1 {
features |= 0x01
@@ -1255,46 +1375,36 @@ func (sig *Signature) buildSubpackets(issuer PublicKey) (subpackets []outputSubp
if sig.SEIPDv2 {
features |= 0x08
}
-
if features != 0x00 {
subpackets = append(subpackets, outputSubpacket{true, featuresSubpacket, false, []byte{features}})
}
-
- if sig.TrustLevel != 0 {
- subpackets = append(subpackets, outputSubpacket{true, trustSubpacket, true, []byte{byte(sig.TrustLevel), byte(sig.TrustAmount)}})
- }
-
- if sig.TrustRegularExpression != nil {
- // RFC specifies the string should be null-terminated; add a null byte to the end
- subpackets = append(subpackets, outputSubpacket{true, regularExpressionSubpacket, true, []byte(*sig.TrustRegularExpression + "\000")})
- }
-
- if sig.KeyLifetimeSecs != nil && *sig.KeyLifetimeSecs != 0 {
- keyLifetime := make([]byte, 4)
- binary.BigEndian.PutUint32(keyLifetime, *sig.KeyLifetimeSecs)
- subpackets = append(subpackets, outputSubpacket{true, keyExpirationSubpacket, true, keyLifetime})
- }
-
- if sig.IsPrimaryId != nil && *sig.IsPrimaryId {
- subpackets = append(subpackets, outputSubpacket{true, primaryUserIdSubpacket, false, []byte{1}})
- }
-
- if len(sig.PreferredSymmetric) > 0 {
- subpackets = append(subpackets, outputSubpacket{true, prefSymmetricAlgosSubpacket, false, sig.PreferredSymmetric})
- }
-
- if len(sig.PreferredHash) > 0 {
- subpackets = append(subpackets, outputSubpacket{true, prefHashAlgosSubpacket, false, sig.PreferredHash})
+ // Embedded Signature
+ // EmbeddedSignature appears only in subkeys capable of signing and is serialized as per section 5.2.3.34.
+ if sig.EmbeddedSignature != nil {
+ var buf bytes.Buffer
+ err = sig.EmbeddedSignature.serializeBody(&buf)
+ if err != nil {
+ return
+ }
+ subpackets = append(subpackets, outputSubpacket{true, embeddedSignatureSubpacket, true, buf.Bytes()})
}
-
- if len(sig.PreferredCompression) > 0 {
- subpackets = append(subpackets, outputSubpacket{true, prefCompressionSubpacket, false, sig.PreferredCompression})
+ // Issuer Fingerprint
+ if sig.IssuerFingerprint != nil {
+ contents := append([]uint8{uint8(issuer.Version)}, sig.IssuerFingerprint...)
+ subpackets = append(subpackets, outputSubpacket{true, issuerFingerprintSubpacket, sig.Version >= 5, contents})
}
-
- if len(sig.PolicyURI) > 0 {
- subpackets = append(subpackets, outputSubpacket{true, policyUriSubpacket, false, []uint8(sig.PolicyURI)})
+ // Intended Recipient Fingerprint
+ for _, recipient := range sig.IntendedRecipients {
+ subpackets = append(
+ subpackets,
+ outputSubpacket{
+ true,
+ intendedRecipientSubpacket,
+ false,
+ recipient.Serialize(),
+ })
}
-
+ // Preferred AEAD Ciphersuites
if len(sig.PreferredCipherSuites) > 0 {
serialized := make([]byte, len(sig.PreferredCipherSuites)*2)
for i, cipherSuite := range sig.PreferredCipherSuites {
@@ -1303,23 +1413,6 @@ func (sig *Signature) buildSubpackets(issuer PublicKey) (subpackets []outputSubp
}
subpackets = append(subpackets, outputSubpacket{true, prefCipherSuitesSubpacket, false, serialized})
}
-
- // Revocation reason appears only in revocation signatures and is serialized as per section 5.2.3.23.
- if sig.RevocationReason != nil {
- subpackets = append(subpackets, outputSubpacket{true, reasonForRevocationSubpacket, true,
- append([]uint8{uint8(*sig.RevocationReason)}, []uint8(sig.RevocationReasonText)...)})
- }
-
- // EmbeddedSignature appears only in subkeys capable of signing and is serialized as per section 5.2.3.26.
- if sig.EmbeddedSignature != nil {
- var buf bytes.Buffer
- err = sig.EmbeddedSignature.serializeBody(&buf)
- if err != nil {
- return
- }
- subpackets = append(subpackets, outputSubpacket{true, embeddedSignatureSubpacket, true, buf.Bytes()})
- }
-
return
}
@@ -1371,7 +1464,7 @@ func (sig *Signature) AddMetadataToHashSuffix() {
// SaltLengthForHash selects the required salt length for the given hash algorithm,
// as per Table 23 (Hash algorithm registry) of the crypto refresh.
-// See https://datatracker.ietf.org/doc/html/draft-ietf-openpgp-crypto-refresh#section-9.5|Crypto Refresh Section 9.5.
+// See RFC 9580 Section 9.5.
func SaltLengthForHash(hash crypto.Hash) (int, error) {
switch hash {
case crypto.SHA256, crypto.SHA224, crypto.SHA3_256:
@@ -1387,7 +1480,7 @@ func SaltLengthForHash(hash crypto.Hash) (int, error) {
// SignatureSaltForHash generates a random signature salt
// with the length for the given hash algorithm.
-// See https://datatracker.ietf.org/doc/html/draft-ietf-openpgp-crypto-refresh#section-9.5|Crypto Refresh Section 9.5.
+// See RFC 9580 Section 9.5.
func SignatureSaltForHash(hash crypto.Hash, randReader io.Reader) ([]byte, error) {
saltLength, err := SaltLengthForHash(hash)
if err != nil {
@@ -1400,3 +1493,17 @@ func SignatureSaltForHash(hash crypto.Hash, randReader io.Reader) ([]byte, error
}
return salt, nil
}
+
+// removeNotationsWithName removes all notations in this signature with the given name.
+func (sig *Signature) removeNotationsWithName(name string) {
+ if sig == nil || sig.Notations == nil {
+ return
+ }
+ updatedNotations := make([]*Notation, 0, len(sig.Notations))
+ for _, notation := range sig.Notations {
+ if notation.Name != name {
+ updatedNotations = append(updatedNotations, notation)
+ }
+ }
+ sig.Notations = updatedNotations
+}
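
Downstream code can detect whether a parsed v4/v5 signature was randomized this way by looking for the salt notation. A minimal sketch using only identifiers introduced above:

```go
package sketch

import "github.com/ProtonMail/go-crypto/openpgp/packet"

// hasSaltNotation reports whether sig carries the randomization notation that
// Sign now injects for v4/v5 signatures, unless disabled via
// Config.NonDeterministicSignaturesViaNotation.
func hasSaltNotation(sig *packet.Signature) bool {
	for _, n := range sig.Notations {
		if n.Name == packet.SaltNotationName {
			return true
		}
	}
	return false
}
```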
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetric_key_encrypted.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetric_key_encrypted.go
index c97b98b930..2812a1db88 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetric_key_encrypted.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetric_key_encrypted.go
@@ -45,6 +45,10 @@ func (ske *SymmetricKeyEncrypted) parse(r io.Reader) error {
return errors.UnsupportedError("unknown SymmetricKeyEncrypted version")
}
+ if V5Disabled && ske.Version == 5 {
+ return errors.UnsupportedError("support for parsing v5 entities is disabled; build with `-tags v5` if needed")
+ }
+
if ske.Version > 5 {
// Scalar octet count
if _, err := readFull(r, buf[:]); err != nil {
@@ -191,9 +195,21 @@ func SerializeSymmetricKeyEncrypted(w io.Writer, passphrase []byte, config *Conf
// the given passphrase. The returned session key must be passed to
// SerializeSymmetricallyEncrypted.
// If config is nil, sensible defaults will be used.
+// Deprecated: Use SerializeSymmetricKeyEncryptedAEADReuseKey instead.
func SerializeSymmetricKeyEncryptedReuseKey(w io.Writer, sessionKey []byte, passphrase []byte, config *Config) (err error) {
+ return SerializeSymmetricKeyEncryptedAEADReuseKey(w, sessionKey, passphrase, config.AEAD() != nil, config)
+}
+
+// SerializeSymmetricKeyEncryptedAEADReuseKey serializes a symmetric key packet to w.
+// The packet contains the given session key, encrypted by a key derived from
+// the given passphrase. The returned session key must be passed to
+// SerializeSymmetricallyEncrypted.
+// If aeadSupported is set, SKESK v6 is used, otherwise v4.
+// Note: aeadSupported MUST match the value passed to SerializeSymmetricallyEncrypted.
+// If config is nil, sensible defaults will be used.
+func SerializeSymmetricKeyEncryptedAEADReuseKey(w io.Writer, sessionKey []byte, passphrase []byte, aeadSupported bool, config *Config) (err error) {
var version int
- if config.AEAD() != nil {
+ if aeadSupported {
version = 6
} else {
version = 4
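
The same pairing applies to password-based encryption. A sketch assuming the same packet.CipherSuite literal and config.Cipher() helper as in the earlier PKESK example:

```go
package sketch

import (
	"io"

	"github.com/ProtonMail/go-crypto/openpgp/packet"
)

// writePasswordEncryptedTo pairs the new SKESK helper with the SEIPD writer,
// passing one aeadSupported decision into both calls.
func writePasswordEncryptedTo(w io.Writer, passphrase []byte, config *packet.Config) (io.WriteCloser, error) {
	aeadSupported := config.AEAD() != nil

	// Caller-chosen session key, reused for the SEIPD payload below.
	sessionKey := make([]byte, config.Cipher().KeySize())
	if _, err := io.ReadFull(config.Random(), sessionKey); err != nil {
		return nil, err
	}

	if err := packet.SerializeSymmetricKeyEncryptedAEADReuseKey(w, sessionKey, passphrase, aeadSupported, config); err != nil {
		return nil, err
	}

	var suite packet.CipherSuite
	if aeadSupported {
		suite = packet.CipherSuite{Cipher: config.Cipher(), Mode: config.AEAD().Mode()}
	}
	return packet.SerializeSymmetricallyEncrypted(w, config.Cipher(), aeadSupported, suite, sessionKey, config)
}
```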
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted.go
index e9bbf0327e..0e898742cf 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted.go
@@ -74,6 +74,10 @@ func (se *SymmetricallyEncrypted) Decrypt(c CipherFunction, key []byte) (io.Read
// SerializeSymmetricallyEncrypted serializes a symmetrically encrypted packet
// to w and returns a WriteCloser to which the to-be-encrypted packets can be
// written.
+// If aeadSupported is set to true, SEIPDv2 is used with the indicated CipherSuite.
+// Otherwise, SEIPDv1 is used with the indicated CipherFunction.
+// Note: aeadSupported MUST match the value passed to SerializeEncryptedKeyAEAD
+// and/or SerializeSymmetricKeyEncryptedAEADReuseKey.
// If config is nil, sensible defaults will be used.
func SerializeSymmetricallyEncrypted(w io.Writer, c CipherFunction, aeadSupported bool, cipherSuite CipherSuite, key []byte, config *Config) (Contents io.WriteCloser, err error) {
writeCloser := noOpCloser{w}
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted_aead.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted_aead.go
index a8ef0bbbec..3957b2d53e 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted_aead.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted_aead.go
@@ -7,7 +7,9 @@ package packet
import (
"crypto/cipher"
"crypto/sha256"
+ "fmt"
"io"
+ "strconv"
"github.com/ProtonMail/go-crypto/openpgp/errors"
"golang.org/x/crypto/hkdf"
@@ -25,19 +27,19 @@ func (se *SymmetricallyEncrypted) parseAead(r io.Reader) error {
se.Cipher = CipherFunction(headerData[0])
// cipherFunc must have block size 16 to use AEAD
if se.Cipher.blockSize() != 16 {
- return errors.UnsupportedError("invalid aead cipher: " + string(se.Cipher))
+ return errors.UnsupportedError("invalid aead cipher: " + strconv.Itoa(int(se.Cipher)))
}
// Mode
se.Mode = AEADMode(headerData[1])
if se.Mode.TagLength() == 0 {
- return errors.UnsupportedError("unknown aead mode: " + string(se.Mode))
+ return errors.UnsupportedError("unknown aead mode: " + strconv.Itoa(int(se.Mode)))
}
// Chunk size
se.ChunkSizeByte = headerData[2]
if se.ChunkSizeByte > 16 {
- return errors.UnsupportedError("invalid aead chunk size byte: " + string(se.ChunkSizeByte))
+ return errors.UnsupportedError("invalid aead chunk size byte: " + strconv.Itoa(int(se.ChunkSizeByte)))
}
// Salt
@@ -62,8 +64,11 @@ func (se *SymmetricallyEncrypted) associatedData() []byte {
// decryptAead decrypts a V2 SEIPD packet (AEAD) as specified in
// https://www.ietf.org/archive/id/draft-ietf-openpgp-crypto-refresh-07.html#section-5.13.2
func (se *SymmetricallyEncrypted) decryptAead(inputKey []byte) (io.ReadCloser, error) {
- aead, nonce := getSymmetricallyEncryptedAeadInstance(se.Cipher, se.Mode, inputKey, se.Salt[:], se.associatedData())
+ if se.Cipher.KeySize() != len(inputKey) {
+ return nil, errors.StructuralError(fmt.Sprintf("invalid session key length for cipher: got %d bytes, but expected %d bytes", len(inputKey), se.Cipher.KeySize()))
+ }
+ aead, nonce := getSymmetricallyEncryptedAeadInstance(se.Cipher, se.Mode, inputKey, se.Salt[:], se.associatedData())
// Carry the first tagLen bytes
tagLen := se.Mode.TagLength()
peekedBytes := make([]byte, tagLen)
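
The strconv change above fixes a classic Go pitfall: converting an integer to string yields the rune with that code point, not its decimal representation. A two-line demonstration:

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	var cipher uint8 = 9
	// string(rune(cipher)) is a tab character; strconv.Itoa prints "9".
	fmt.Printf("%q vs %q\n", string(rune(cipher)), strconv.Itoa(int(cipher))) // "\t" vs "9"
}
```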
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted_mdc.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted_mdc.go
index 645963fa78..8b18623684 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted_mdc.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/packet/symmetrically_encrypted_mdc.go
@@ -148,7 +148,7 @@ const mdcPacketTagByte = byte(0x80) | 0x40 | 19
func (ser *seMDCReader) Close() error {
if ser.error {
- return errors.ErrMDCMissing
+ return errors.ErrMDCHashMismatch
}
for !ser.eof {
@@ -159,7 +159,7 @@ func (ser *seMDCReader) Close() error {
break
}
if err != nil {
- return errors.ErrMDCMissing
+ return errors.ErrMDCHashMismatch
}
}
@@ -172,7 +172,7 @@ func (ser *seMDCReader) Close() error {
// The hash already includes the MDC header, but we still check its value
// to confirm encryption correctness
if ser.trailer[0] != mdcPacketTagByte || ser.trailer[1] != sha1.Size {
- return errors.ErrMDCMissing
+ return errors.ErrMDCHashMismatch
}
return nil
}
@@ -241,9 +241,6 @@ func serializeSymmetricallyEncryptedMdc(ciphertext io.WriteCloser, c CipherFunct
if err != nil {
return nil, err
}
- if err != nil {
- return
- }
s, prefix := NewOCFBEncrypter(block, iv, OCFBNoResync)
_, err = ciphertext.Write(prefix)
if err != nil {
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/read.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/read.go
index 408506592f..e6dd9b5fd3 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/read.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/read.go
@@ -233,7 +233,7 @@ FindKey:
}
mdFinal, sensitiveParsingErr := readSignedMessage(packets, md, keyring, config)
if sensitiveParsingErr != nil {
- return nil, errors.StructuralError("parsing error")
+ return nil, errors.HandleSensitiveParsingError(sensitiveParsingErr, md.decrypted != nil)
}
return mdFinal, nil
}
@@ -368,7 +368,7 @@ func (cr *checkReader) Read(buf []byte) (int, error) {
}
if sensitiveParsingError != nil {
- return n, errors.StructuralError("parsing error")
+ return n, errors.HandleSensitiveParsingError(sensitiveParsingError, true)
}
return n, nil
@@ -392,6 +392,7 @@ func (scr *signatureCheckReader) Read(buf []byte) (int, error) {
scr.wrappedHash.Write(buf[:n])
}
+ readsDecryptedData := scr.md.decrypted != nil
if sensitiveParsingError == io.EOF {
var p packet.Packet
var readError error
@@ -410,7 +411,7 @@ func (scr *signatureCheckReader) Read(buf []byte) (int, error) {
key := scr.md.SignedBy
signatureError := key.PublicKey.VerifySignature(scr.h, sig)
if signatureError == nil {
- signatureError = checkSignatureDetails(key, sig, scr.config)
+ signatureError = checkMessageSignatureDetails(key, sig, scr.config)
}
scr.md.Signature = sig
scr.md.SignatureError = signatureError
@@ -434,16 +435,15 @@ func (scr *signatureCheckReader) Read(buf []byte) (int, error) {
// unsigned hash of its own. In order to check this we need to
// close that Reader.
if scr.md.decrypted != nil {
- mdcErr := scr.md.decrypted.Close()
- if mdcErr != nil {
- return n, mdcErr
+ if sensitiveParsingError := scr.md.decrypted.Close(); sensitiveParsingError != nil {
+ return n, errors.HandleSensitiveParsingError(sensitiveParsingError, true)
}
}
return n, io.EOF
}
if sensitiveParsingError != nil {
- return n, errors.StructuralError("parsing error")
+ return n, errors.HandleSensitiveParsingError(sensitiveParsingError, readsDecryptedData)
}
return n, nil
@@ -546,7 +546,7 @@ func verifyDetachedSignature(keyring KeyRing, signed, signature io.Reader, expec
for _, key := range keys {
err = key.PublicKey.VerifySignature(h, sig)
if err == nil {
- return sig, key.Entity, checkSignatureDetails(&key, sig, config)
+ return sig, key.Entity, checkMessageSignatureDetails(&key, sig, config)
}
}
@@ -564,7 +564,7 @@ func CheckArmoredDetachedSignature(keyring KeyRing, signed, signature io.Reader,
return CheckDetachedSignature(keyring, signed, body, config)
}
-// checkSignatureDetails returns an error if:
+// checkMessageSignatureDetails returns an error if:
// - The signature (or one of the binding signatures mentioned below)
// has a unknown critical notation data subpacket
// - The primary key of the signing entity is revoked
@@ -582,7 +582,7 @@ func CheckArmoredDetachedSignature(keyring KeyRing, signed, signature io.Reader,
// NOTE: The order of these checks is important, as the caller may choose to
// ignore ErrSignatureExpired or ErrKeyExpired errors, but should never
// ignore any other errors.
-func checkSignatureDetails(key *Key, signature *packet.Signature, config *packet.Config) error {
+func checkMessageSignatureDetails(key *Key, signature *packet.Signature, config *packet.Config) error {
now := config.Now()
primarySelfSignature, primaryIdentity := key.Entity.PrimarySelfSignature()
signedBySubKey := key.PublicKey != key.Entity.PrimaryKey
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/s2k/s2k.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/s2k/s2k.go
index f4f5c7832d..6871b84fc9 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/s2k/s2k.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/s2k/s2k.go
@@ -199,8 +199,8 @@ func Generate(rand io.Reader, c *Config) (*Params, error) {
}
params = &Params{
- mode: SaltedS2K,
- hashId: hashId,
+ mode: SaltedS2K,
+ hashId: hashId,
}
} else { // Enforce IteratedSaltedS2K method otherwise
hashId, ok := algorithm.HashToHashId(c.hash())
@@ -283,6 +283,9 @@ func ParseIntoParams(r io.Reader) (params *Params, err error) {
params.passes = buf[Argon2SaltSize]
params.parallelism = buf[Argon2SaltSize+1]
params.memoryExp = buf[Argon2SaltSize+2]
+ if err := validateArgon2Params(params); err != nil {
+ return nil, err
+ }
return params, nil
case GnuS2K:
// This is a GNU extension. See
@@ -300,6 +303,10 @@ func ParseIntoParams(r io.Reader) (params *Params, err error) {
return nil, errors.UnsupportedError("S2K function")
}
+func (params *Params) Mode() Mode {
+ return params.mode
+}
+
func (params *Params) Dummy() bool {
return params != nil && params.mode == GnuS2K
}
@@ -408,3 +415,22 @@ func Serialize(w io.Writer, key []byte, rand io.Reader, passphrase []byte, c *Co
f(key, passphrase)
return nil
}
+
+// validateArgon2Params checks that the argon2 parameters are valid according to RFC9580.
+func validateArgon2Params(params *Params) error {
+ // The number of passes t and the degree of parallelism p MUST be non-zero.
+ if params.parallelism == 0 {
+ return errors.StructuralError("invalid argon2 params: parallelism is 0")
+ }
+ if params.passes == 0 {
+ return errors.StructuralError("invalid argon2 params: iterations is 0")
+ }
+
+ // The encoded memory size MUST be a value from 3+ceil(log2(p)) to 31,
+ // such that the decoded memory size m is a value from 8*p to 2^31.
+ if params.memoryExp > 31 || decodeMemory(params.memoryExp) < 8*uint32(params.parallelism) {
+ return errors.StructuralError("invalid argon2 params: memory is out of bounds")
+ }
+
+ return nil
+}
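
As a side note on the new `validateArgon2Params` check: below is a minimal standalone sketch of the same RFC 9580 bounds, assuming (as in this package) that the encoded memory size decodes as m = 2^memoryExp; the helper names here are illustrative and not part of the patch.

```go
package main

import "fmt"

// decodeMemory mirrors the assumed behaviour of the package's decodeMemory:
// the one-octet encoded exponent e decodes to a memory size of 2^e.
func decodeMemory(memoryExp uint8) uint32 {
	return uint32(1) << memoryExp
}

// validArgon2 applies the constraints quoted in the hunk above:
// passes t > 0, parallelism p > 0, and 8*p <= 2^memoryExp <= 2^31.
func validArgon2(passes, parallelism, memoryExp uint8) bool {
	if passes == 0 || parallelism == 0 {
		return false
	}
	return memoryExp <= 31 && decodeMemory(memoryExp) >= 8*uint32(parallelism)
}

func main() {
	// For p = 4 the minimum exponent is 3+ceil(log2(4)) = 5, since 2^5 = 32 = 8*4.
	fmt.Println(validArgon2(3, 4, 5))  // true
	fmt.Println(validArgon2(3, 4, 4))  // false: 2^4 = 16 < 8*4
	fmt.Println(validArgon2(0, 4, 10)) // false: zero passes
}
```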
diff --git a/vendor/github.com/ProtonMail/go-crypto/openpgp/write.go b/vendor/github.com/ProtonMail/go-crypto/openpgp/write.go
index 0db5526ce0..b0f6ef7b09 100644
--- a/vendor/github.com/ProtonMail/go-crypto/openpgp/write.go
+++ b/vendor/github.com/ProtonMail/go-crypto/openpgp/write.go
@@ -444,7 +444,13 @@ func encrypt(keyWriter io.Writer, dataWriter io.Writer, to []*Entity, signed *En
}
}
- symKey := make([]byte, cipher.KeySize())
+ var symKey []byte
+ if aeadSupported {
+ symKey = make([]byte, aeadCipherSuite.Cipher.KeySize())
+ } else {
+ symKey = make([]byte, cipher.KeySize())
+ }
+
if _, err := io.ReadFull(config.Random(), symKey); err != nil {
return nil, err
}
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/.golangci.yml b/vendor/github.com/alecthomas/go-check-sumtype/.golangci.yml
new file mode 100644
index 0000000000..758ae1a9e4
--- /dev/null
+++ b/vendor/github.com/alecthomas/go-check-sumtype/.golangci.yml
@@ -0,0 +1,92 @@
+run:
+ tests: true
+
+output:
+ print-issued-lines: false
+
+linters:
+ enable-all: true
+ disable:
+ - cyclop
+ - depguard
+ - dupl
+ - dupword
+ - err113
+ - errorlint
+ - exhaustive
+ - exhaustruct
+ - exportloopref
+ - forcetypeassert
+ - funlen
+ - gci
+ - gochecknoglobals
+ - gocognit
+ - goconst
+ - gocyclo
+ - godot
+ - godox
+ - gofumpt
+ - govet
+ - ireturn
+ - lll
+ - maintidx
+ - mnd
+ - musttag
+ - nestif
+ - nilnil
+ - nlreturn
+ - nolintlint
+ - nonamedreturns
+ - paralleltest
+ - perfsprint
+ - predeclared
+ - revive
+ - stylecheck
+ - testableexamples
+ - testpackage
+ - thelper
+ - varnamelen
+ - wrapcheck
+ - wsl
+
+linters-settings:
+ govet:
+ enable:
+ - shadow
+ gocyclo:
+ min-complexity: 10
+ dupl:
+ threshold: 100
+ goconst:
+ min-len: 8
+ min-occurrences: 3
+ forbidigo:
+ exclude-godoc-examples: false
+ #forbid:
+ # - (Must)?NewLexer$
+
+issues:
+ max-issues-per-linter: 0
+ max-same-issues: 0
+ exclude-use-default: false
+ exclude-dirs:
+ - _examples
+ exclude:
+ # Captured by errcheck.
+ - "^(G104|G204):"
+ # Very commonly not checked.
+ - 'Error return value of .(.*\.Help|.*\.MarkFlagRequired|(os\.)?std(out|err)\..*|.*Close|.*Flush|os\.Remove(All)?|.*printf?|os\.(Un)?Setenv). is not checked'
+ - 'exported method (.*\.MarshalJSON|.*\.UnmarshalJSON|.*\.EntityURN|.*\.GoString|.*\.Pos) should have comment or be unexported'
+ - "composite literal uses unkeyed fields"
+ - 'declaration of "err" shadows declaration'
+ - "should not use dot imports"
+ - "Potential file inclusion via variable"
+ - "should have comment or be unexported"
+ - "comment on exported var .* should be of the form"
+ - "at least one file in a package should have a package comment"
+ - "string literal contains the Unicode"
+ - "methods on the same type should have the same receiver name"
+ - "_TokenType_name should be _TokenTypeName"
+ - "`_TokenType_map` should be `_TokenTypeMap`"
+ - "rewrite if-else to switch statement"
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/README.md b/vendor/github.com/alecthomas/go-check-sumtype/README.md
index 36614ef400..287aa68b7f 100644
--- a/vendor/github.com/alecthomas/go-check-sumtype/README.md
+++ b/vendor/github.com/alecthomas/go-check-sumtype/README.md
@@ -86,11 +86,18 @@ mysumtype.go:18:2: exhaustiveness check failed for sum type 'MySumType': missing
```
Adding either a `default` clause or a clause to handle `*VariantB` will cause
-exhaustive checks to pass.
+exhaustive checks to pass. To prevent `default` clauses from automatically
+passing checks, set the `-default-signifies-exhaustive=false` flag.
As a special case, if the type switch statement contains a `default` clause
that always panics, then exhaustiveness checks are still performed.
+By default, `go-check-sumtype` will not include shared interfaces in the exhaustiveness check.
+This can be changed by setting the `-include-shared-interfaces=true` flag.
+When this flag is set, `go-check-sumtype` will not require that all concrete structs
+are listed in the switch statement, as long as the switch statement is exhaustive
+with respect to interfaces the structs implement.
+
## Details and motivation
Sum types are otherwise known as discriminated unions. That is, a sum type is
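
To make the README's exhaustiveness example concrete, here is a hedged sketch (type names hypothetical, not from the patch) of a switch the linter would flag with the default configuration:

```go
package main

//sumtype:decl
type MySumType interface{ sealed() }

type VariantA struct{}

func (*VariantA) sealed() {}

type VariantB struct{}

func (*VariantB) sealed() {}

func handle(v MySumType) {
	// go-check-sumtype reports this switch as inexhaustive: *VariantB is
	// missing. Adding a *VariantB case, or a default clause (unless
	// -default-signifies-exhaustive=false is set), makes the check pass.
	switch v.(type) {
	case *VariantA:
	}
}

func main() {
	handle(&VariantA{})
}
```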
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/check.go b/vendor/github.com/alecthomas/go-check-sumtype/check.go
index 21d751af42..ff7fec728a 100644
--- a/vendor/github.com/alecthomas/go-check-sumtype/check.go
+++ b/vendor/github.com/alecthomas/go-check-sumtype/check.go
@@ -29,7 +29,7 @@ func (e inexhaustiveError) Error() string {
// Names returns a sorted list of names corresponding to the missing variant
// cases.
func (e inexhaustiveError) Names() []string {
- var list []string
+ list := make([]string, 0, len(e.Missing))
for _, o := range e.Missing {
list = append(list, o.Name())
}
@@ -39,7 +39,7 @@ func (e inexhaustiveError) Names() []string {
// check does exhaustiveness checking for the given sum type definitions in the
// given package. Every instance of inexhaustive case analysis is returned.
-func check(pkg *packages.Package, defs []sumTypeDef) []error {
+func check(pkg *packages.Package, defs []sumTypeDef, config Config) []error {
var errs []error
for _, astfile := range pkg.Syntax {
ast.Inspect(astfile, func(n ast.Node) bool {
@@ -47,7 +47,7 @@ func check(pkg *packages.Package, defs []sumTypeDef) []error {
if !ok {
return true
}
- if err := checkSwitch(pkg, defs, swtch); err != nil {
+ if err := checkSwitch(pkg, defs, swtch, config); err != nil {
errs = append(errs, err)
}
return true
@@ -67,8 +67,9 @@ func checkSwitch(
pkg *packages.Package,
defs []sumTypeDef,
swtch *ast.TypeSwitchStmt,
+ config Config,
) error {
- def, missing := missingVariantsInSwitch(pkg, defs, swtch)
+ def, missing := missingVariantsInSwitch(pkg, defs, swtch, config)
if len(missing) > 0 {
return inexhaustiveError{
Position: pkg.Fset.Position(swtch.Pos()),
@@ -87,9 +88,14 @@ func missingVariantsInSwitch(
pkg *packages.Package,
defs []sumTypeDef,
swtch *ast.TypeSwitchStmt,
+ config Config,
) (*sumTypeDef, []types.Object) {
asserted := findTypeAssertExpr(swtch)
ty := pkg.TypesInfo.TypeOf(asserted)
+ if ty == nil {
+ panic(fmt.Sprintf("no type found for asserted expression: %v", asserted))
+ }
+
def := findDef(defs, ty)
if def == nil {
// We couldn't find a corresponding sum type, so there's
@@ -97,15 +103,15 @@ func missingVariantsInSwitch(
return nil, nil
}
variantExprs, hasDefault := switchVariants(swtch)
- if hasDefault && !defaultClauseAlwaysPanics(swtch) {
+ if config.DefaultSignifiesExhaustive && hasDefault && !defaultClauseAlwaysPanics(swtch) {
// A catch-all case defeats all exhaustiveness checks.
return def, nil
}
- var variantTypes []types.Type
+ variantTypes := make([]types.Type, 0, len(variantExprs))
for _, expr := range variantExprs {
variantTypes = append(variantTypes, pkg.TypesInfo.TypeOf(expr))
}
- return def, def.missing(variantTypes)
+ return def, def.missing(variantTypes, config.IncludeSharedInterfaces)
}
// switchVariants returns all case expressions found in a type switch. This
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/config.go b/vendor/github.com/alecthomas/go-check-sumtype/config.go
new file mode 100644
index 0000000000..5c722b75c4
--- /dev/null
+++ b/vendor/github.com/alecthomas/go-check-sumtype/config.go
@@ -0,0 +1,8 @@
+package gochecksumtype
+
+type Config struct {
+ DefaultSignifiesExhaustive bool
+ // IncludeSharedInterfaces in the exhaustiveness check. If true, we do not need to list all concrete structs, as long
+ // as the switch statement is exhaustive with respect to interfaces the structs implement.
+ IncludeSharedInterfaces bool
+}
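
A hedged sketch of how a caller might pass the new `Config` to `Run` (whose updated signature appears in run.go later in this patch); the package-loading boilerplate is an assumption, not something the patch prescribes:

```go
package main

import (
	"fmt"
	"log"

	gochecksumtype "github.com/alecthomas/go-check-sumtype"
	"golang.org/x/tools/go/packages"
)

func main() {
	cfg := &packages.Config{
		Mode: packages.NeedName | packages.NeedFiles | packages.NeedSyntax |
			packages.NeedTypes | packages.NeedTypesInfo,
	}
	pkgs, err := packages.Load(cfg, "./...")
	if err != nil {
		log.Fatal(err)
	}
	// Both options default to false for a zero-valued Config.
	errs := gochecksumtype.Run(pkgs, gochecksumtype.Config{
		DefaultSignifiesExhaustive: true,
		IncludeSharedInterfaces:    false,
	})
	for _, e := range errs {
		fmt.Println(e)
	}
}
```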
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/def.go b/vendor/github.com/alecthomas/go-check-sumtype/def.go
index 24729ac01b..71bdf2f72d 100644
--- a/vendor/github.com/alecthomas/go-check-sumtype/def.go
+++ b/vendor/github.com/alecthomas/go-check-sumtype/def.go
@@ -71,7 +71,7 @@ type sumTypeDef struct {
// sum type declarations. If no such sum type definition could be found for
// any of the given declarations, then an error is returned.
func findSumTypeDefs(decls []sumTypeDecl) ([]sumTypeDef, []error) {
- var defs []sumTypeDef
+ defs := make([]sumTypeDef, 0, len(decls))
var errs []error
for _, decl := range decls {
def, err := newSumTypeDef(decl.Package.Types, decl)
@@ -104,7 +104,7 @@ func newSumTypeDef(pkg *types.Package, decl sumTypeDecl) (*sumTypeDef, error) {
return nil, notInterfaceError{decl}
}
hasUnexported := false
- for i := 0; i < iface.NumMethods(); i++ {
+ for i := range iface.NumMethods() {
if !iface.Method(i).Exported() {
hasUnexported = true
break
@@ -145,7 +145,7 @@ func (def *sumTypeDef) String() string {
// missing returns a list of variants in this sum type that are not in the
// given list of types.
-func (def *sumTypeDef) missing(tys []types.Type) []types.Object {
+func (def *sumTypeDef) missing(tys []types.Type, includeSharedInterfaces bool) []types.Object {
// TODO(ag): This is O(n^2). Fix that. /shrug
var missing []types.Object
for _, v := range def.Variants {
@@ -155,15 +155,29 @@ func (def *sumTypeDef) missing(tys []types.Type) []types.Object {
ty = indirect(ty)
if types.Identical(varty, ty) {
found = true
+ break
+ }
+ if includeSharedInterfaces && implements(varty, ty) {
+ found = true
+ break
}
}
- if !found {
+ if !found && !isInterface(varty) {
+ // we do not include interfaces extending the sumtype, as all
+ // implementations of those interfaces are already covered
+ // by the sumtype.
missing = append(missing, v)
}
}
return missing
}
+func isInterface(ty types.Type) bool {
+ underlying := indirect(ty).Underlying()
+ _, ok := underlying.(*types.Interface)
+ return ok
+}
+
// indirect dereferences through an arbitrary number of pointer types.
func indirect(ty types.Type) types.Type {
if ty, ok := ty.(*types.Pointer); ok {
@@ -171,3 +185,11 @@ func indirect(ty types.Type) types.Type {
}
return ty
}
+
+func implements(varty, interfaceType types.Type) bool {
+ underlying := interfaceType.Underlying()
+ if interf, ok := underlying.(*types.Interface); ok {
+ return types.Implements(varty, interf) || types.Implements(types.NewPointer(varty), interf)
+ }
+ return false
+}
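
To illustrate the shared-interface handling added above, a hypothetical layout (not from the patch): with `IncludeSharedInterfaces` enabled, a case on an interface that the variants implement counts toward exhaustiveness.

```go
package example

//sumtype:decl
type Event interface{ isEvent() }

// Named is a helper interface shared by both variants.
type Named interface{ Name() string }

type Created struct{ name string }

func (*Created) isEvent()       {}
func (c *Created) Name() string { return c.name }

type Deleted struct{ name string }

func (*Deleted) isEvent()       {}
func (d *Deleted) Name() string { return d.name }

func describe(e Event) string {
	// With IncludeSharedInterfaces (-include-shared-interfaces=true), this
	// switch is treated as exhaustive: the Named case covers *Created and
	// *Deleted. Without it, both concrete variants must be listed.
	switch v := e.(type) {
	case Named:
		return v.Name()
	}
	return ""
}
```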
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/renovate.json5 b/vendor/github.com/alecthomas/go-check-sumtype/renovate.json5
new file mode 100644
index 0000000000..77c7b016cc
--- /dev/null
+++ b/vendor/github.com/alecthomas/go-check-sumtype/renovate.json5
@@ -0,0 +1,18 @@
+{
+ $schema: "https://docs.renovatebot.com/renovate-schema.json",
+ extends: [
+ "config:recommended",
+ ":semanticCommits",
+ ":semanticCommitTypeAll(chore)",
+ ":semanticCommitScope(deps)",
+ "group:allNonMajor",
+ "schedule:earlyMondays", // Run once a week.
+ ],
+ packageRules: [
+ {
+ matchPackageNames: ["golangci-lint"],
+ matchManagers: ["hermit"],
+ enabled: false,
+ },
+ ],
+}
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/run.go b/vendor/github.com/alecthomas/go-check-sumtype/run.go
index fdcb643c5d..f32942d7a0 100644
--- a/vendor/github.com/alecthomas/go-check-sumtype/run.go
+++ b/vendor/github.com/alecthomas/go-check-sumtype/run.go
@@ -3,7 +3,7 @@ package gochecksumtype
import "golang.org/x/tools/go/packages"
// Run sumtype checking on the given packages.
-func Run(pkgs []*packages.Package) []error {
+func Run(pkgs []*packages.Package, config Config) []error {
var errs []error
decls, err := findSumTypeDecls(pkgs)
@@ -18,7 +18,7 @@ func Run(pkgs []*packages.Package) []error {
}
for _, pkg := range pkgs {
- if pkgErrs := check(pkg, defs); pkgErrs != nil {
+ if pkgErrs := check(pkg, defs, config); pkgErrs != nil {
errs = append(errs, pkgErrs...)
}
}
diff --git a/vendor/github.com/alexkohler/nakedret/v2/nakedret.go b/vendor/github.com/alexkohler/nakedret/v2/nakedret.go
index f78bb8cb6c..a557359288 100644
--- a/vendor/github.com/alexkohler/nakedret/v2/nakedret.go
+++ b/vendor/github.com/alexkohler/nakedret/v2/nakedret.go
@@ -22,10 +22,11 @@ import (
const pwd = "./"
-func NakedReturnAnalyzer(defaultLines uint) *analysis.Analyzer {
+func NakedReturnAnalyzer(defaultLines uint, skipTestFiles bool) *analysis.Analyzer {
nakedRet := &NakedReturnRunner{}
flags := flag.NewFlagSet("nakedret", flag.ExitOnError)
flags.UintVar(&nakedRet.MaxLength, "l", defaultLines, "maximum number of lines for a naked return function")
+ flags.BoolVar(&nakedRet.SkipTestFiles, "skip-test-files", skipTestFiles, "set to true to skip test files")
var analyzer = &analysis.Analyzer{
Name: "nakedret",
Doc: "Checks that functions with naked returns are not longer than a maximum size (can be zero).",
@@ -37,7 +38,8 @@ func NakedReturnAnalyzer(defaultLines uint) *analysis.Analyzer {
}
type NakedReturnRunner struct {
- MaxLength uint
+ MaxLength uint
+ SkipTestFiles bool
}
func (n *NakedReturnRunner) run(pass *analysis.Pass) (any, error) {
@@ -49,18 +51,20 @@ func (n *NakedReturnRunner) run(pass *analysis.Pass) (any, error) {
(*ast.ReturnStmt)(nil),
}
retVis := &returnsVisitor{
- pass: pass,
- f: pass.Fset,
- maxLength: n.MaxLength,
+ pass: pass,
+ f: pass.Fset,
+ maxLength: n.MaxLength,
+ skipTestFiles: n.SkipTestFiles,
}
inspector.Nodes(nodeFilter, retVis.NodesVisit)
return nil, nil
}
type returnsVisitor struct {
- pass *analysis.Pass
- f *token.FileSet
- maxLength uint
+ pass *analysis.Pass
+ f *token.FileSet
+ maxLength uint
+ skipTestFiles bool
// functions contains funcInfo for each nested function definition encountered while visiting the AST.
functions []funcInfo
@@ -74,7 +78,7 @@ type funcInfo struct {
reportNaked bool
}
-func checkNakedReturns(args []string, maxLength *uint, setExitStatus bool) error {
+func checkNakedReturns(args []string, maxLength *uint, skipTestFiles bool, setExitStatus bool) error {
fset := token.NewFileSet()
@@ -87,7 +91,7 @@ func checkNakedReturns(args []string, maxLength *uint, setExitStatus bool) error
return errors.New("max length nil")
}
- analyzer := NakedReturnAnalyzer(*maxLength)
+ analyzer := NakedReturnAnalyzer(*maxLength, skipTestFiles)
pass := &analysis.Pass{
Analyzer: analyzer,
Fset: fset,
@@ -292,6 +296,9 @@ func (v *returnsVisitor) NodesVisit(node ast.Node, push bool) bool {
if push && funcType != nil {
// Push function info to track returns for this function
file := v.f.File(node.Pos())
+ if v.skipTestFiles && strings.HasSuffix(file.Name(), "_test.go") {
+ return false
+ }
length := file.Position(node.End()).Line - file.Position(node.Pos()).Line
if length == 0 {
// consider functions that finish on the same line as they start as single line functions, not zero lines!
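
A small sketch of wiring the updated constructor into a standalone vet-style tool (an assumed usage pattern, not part of the patch; it presumes the vendored package is imported as `nakedret`):

```go
package main

import (
	"github.com/alexkohler/nakedret/v2"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	// Allow naked returns only in functions of up to 5 lines, and skip
	// _test.go files via the new second argument.
	singlechecker.Main(nakedret.NakedReturnAnalyzer(5, true))
}
```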
diff --git a/vendor/github.com/alingse/nilnesserr/.gitignore b/vendor/github.com/alingse/nilnesserr/.gitignore
new file mode 100644
index 0000000000..6f72f89261
--- /dev/null
+++ b/vendor/github.com/alingse/nilnesserr/.gitignore
@@ -0,0 +1,25 @@
+# If you prefer the allow list template instead of the deny list, see community template:
+# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore
+#
+# Binaries for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+
+# Test binary, built with `go test -c`
+*.test
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
+# Dependency directories (remove the comment below to include it)
+# vendor/
+
+# Go workspace file
+go.work
+go.work.sum
+
+# env file
+.env
diff --git a/vendor/github.com/alingse/nilnesserr/.golangci.yaml b/vendor/github.com/alingse/nilnesserr/.golangci.yaml
new file mode 100644
index 0000000000..1a2a270a66
--- /dev/null
+++ b/vendor/github.com/alingse/nilnesserr/.golangci.yaml
@@ -0,0 +1,66 @@
+linters:
+ enable-all: true
+ disable:
+ - wsl
+ - varnamelen
+ - nilnil
+ - ireturn
+ - gochecknoglobals
+ - nolintlint
+
+linters-settings:
+ depguard:
+ rules:
+ main:
+ list-mode: lax
+ files:
+ - $all
+ allow:
+ - $gostd
+ - github.com/alingse/nilnesserr
+
+issues:
+ exclude-rules:
+ - path: internal/typeparams
+ linters:
+ - nonamedreturns
+ - nlreturn
+ - intrange
+ - mnd
+ - forcetypeassert
+ - exhaustruct
+ - exhaustive
+ - err113
+ - gofumpt
+ - prealloc
+ - funclen
+ - gocritic
+ - funlen
+ - cyclop
+ - gocognit
+
+ - path: nilness.go
+ linters:
+ - nonamedreturns
+ - nlreturn
+ - nilnil
+ - mnd
+ - forcetypeassert
+ - gochecknoglobals
+ - nestif
+ - funlen
+ - godox
+ - gocognit
+ - gofumpt
+ - exhaustive
+ - cyclop
+ - unparam
+ - gocyclo
+
+ - text: "analysis."
+ linters:
+ - exhaustruct
+
+ - text: "newAnalyzer"
+ linters:
+ - unparam
diff --git a/vendor/github.com/lufeee/execinquery/LICENSE b/vendor/github.com/alingse/nilnesserr/LICENSE
similarity index 97%
rename from vendor/github.com/lufeee/execinquery/LICENSE
rename to vendor/github.com/alingse/nilnesserr/LICENSE
index b6ab14aec3..6caf1ea1c6 100644
--- a/vendor/github.com/lufeee/execinquery/LICENSE
+++ b/vendor/github.com/alingse/nilnesserr/LICENSE
@@ -1,6 +1,6 @@
MIT License
-Copyright (c) 2022 lufe
+Copyright (c) 2024 alingse
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/vendor/github.com/alingse/nilnesserr/README.md b/vendor/github.com/alingse/nilnesserr/README.md
new file mode 100644
index 0000000000..6b199b6c53
--- /dev/null
+++ b/vendor/github.com/alingse/nilnesserr/README.md
@@ -0,0 +1,74 @@
+# nilnesserr
+
+nilnesserr = nilness + nilerr
+
+`nilnesserr` is a linter that reports returning a nil error in Go. It combines the features of [nilness](https://cs.opensource.google/go/x/tools/+/refs/tags/v0.28.0:go/analysis/passes/nilness/nilness.go) and [nilerr](https://github.com/gostaticanalysis/nilerr), providing a concise way to detect returning an unrelated or nil-valued error.
+
+## Case
+
+case 1
+```go
+err := do()
+if err != nil {
+ return err
+}
+err2 := do2()
+if err2 != nil {
+ return err // should return err2 after checking `err2 != nil`, but instead returns a nil-valued err
+}
+```
+
+
+## Some Real Bugs
+
+- https://github.com/alingse/sundrylint/issues/4
+- https://github.com/alingse/nilnesserr/issues/1
+
+We use https://github.com/alingse/go-linter-runner to run the linter on GitHub Actions against public Go repositories.
+
+## Install
+
+```bash
+go install github.com/alingse/nilnesserr/cmd/nilnesserr@latest
+```
+
+
+## TODO
+
+case 2
+
+```go
+err := do()
+if err != nil {
+ return err
+}
+_, ok := do2()
+if !ok {
+ return err
+}
+
+```
+
+case 3
+
+```go
+err := do()
+if err != nil {
+ return err
+}
+_, ok := do2()
+if !ok {
+ return errors.Wrap(err)
+}
+```
+
+This may also be a bug: a non-nil error should be returned after the `if`.
+
+## License
+
+This project is licensed under the MIT License. See the LICENSE file for details.
+
+This project incorporates source code from two different libraries:
+
+1. [nilness](https://cs.opensource.google/go/x/tools/+/refs/tags/v0.28.0:go/analysis/passes/nilness/nilness.go) licensed under the BSD license.
+2. [nilerr](https://github.com/gostaticanalysis/nilerr) licensed under the MIT license.
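
For contrast with case 1 above, a sketch of the corrected shape the linter steers you toward (the `do`/`do2` helpers are hypothetical):

```go
package example

import "errors"

func do() error  { return nil }
func do2() error { return errors.New("do2 failed") }

func run() error {
	err := do()
	if err != nil {
		return err
	}
	err2 := do2()
	if err2 != nil {
		// Return the error that was actually checked, not the earlier,
		// now-nil err that nilnesserr flags in case 1.
		return err2
	}
	return nil
}
```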
diff --git a/vendor/github.com/alingse/nilnesserr/internal/typeparams/coretype.go b/vendor/github.com/alingse/nilnesserr/internal/typeparams/coretype.go
new file mode 100644
index 0000000000..7a744d123b
--- /dev/null
+++ b/vendor/github.com/alingse/nilnesserr/internal/typeparams/coretype.go
@@ -0,0 +1,122 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import (
+ "go/types"
+)
+
+// CoreType returns the core type of T or nil if T does not have a core type.
+//
+// See https://go.dev/ref/spec#Core_types for the definition of a core type.
+func CoreType(T types.Type) types.Type {
+ U := T.Underlying()
+ if _, ok := U.(*types.Interface); !ok {
+ return U // for non-interface types,
+ }
+
+ terms, err := NormalTerms(U)
+ if len(terms) == 0 || err != nil {
+ // len(terms) -> empty type set of interface.
+ // err != nil => U is invalid, exceeds complexity bounds, or has an empty type set.
+ return nil // no core type.
+ }
+
+ U = terms[0].Type().Underlying()
+ var identical int // i in [0,identical) => Identical(U, terms[i].Type().Underlying())
+ for identical = 1; identical < len(terms); identical++ {
+ if !types.Identical(U, terms[identical].Type().Underlying()) {
+ break
+ }
+ }
+
+ if identical == len(terms) {
+ // https://go.dev/ref/spec#Core_types
+ // "There is a single type U which is the underlying type of all types in the type set of T"
+ return U
+ }
+ ch, ok := U.(*types.Chan)
+ if !ok {
+ return nil // no core type as identical < len(terms) and U is not a channel.
+ }
+ // https://go.dev/ref/spec#Core_types
+ // "the type chan E if T contains only bidirectional channels, or the type chan<- E or
+ // <-chan E depending on the direction of the directional channels present."
+ for chans := identical; chans < len(terms); chans++ {
+ curr, ok := terms[chans].Type().Underlying().(*types.Chan)
+ if !ok {
+ return nil
+ }
+ if !types.Identical(ch.Elem(), curr.Elem()) {
+ return nil // channel elements are not identical.
+ }
+ if ch.Dir() == types.SendRecv {
+ // ch is bidirectional. We can safely always use curr's direction.
+ ch = curr
+ } else if curr.Dir() != types.SendRecv && ch.Dir() != curr.Dir() {
+ // ch and curr are not bidirectional and not the same direction.
+ return nil
+ }
+ }
+ return ch
+}
+
+// NormalTerms returns a slice of terms representing the normalized structural
+// type restrictions of a type, if any.
+//
+// For all types other than *types.TypeParam, *types.Interface, and
+// *types.Union, this is just a single term with Tilde() == false and
+// Type() == typ. For *types.TypeParam, *types.Interface, and *types.Union, see
+// below.
+//
+// Structural type restrictions of a type parameter are created via
+// non-interface types embedded in its constraint interface (directly, or via a
+// chain of interface embeddings). For example, in the declaration type
+// T[P interface{~int; m()}] int the structural restriction of the type
+// parameter P is ~int.
+//
+// With interface embedding and unions, the specification of structural type
+// restrictions may be arbitrarily complex. For example, consider the
+// following:
+//
+// type A interface{ ~string|~[]byte }
+//
+// type B interface{ int|string }
+//
+// type C interface { ~string|~int }
+//
+// type T[P interface{ A|B; C }] int
+//
+// In this example, the structural type restriction of P is ~string|int: A|B
+// expands to ~string|~[]byte|int|string, which reduces to ~string|~[]byte|int,
+// which when intersected with C (~string|~int) yields ~string|int.
+//
+// NormalTerms computes these expansions and reductions, producing a
+// "normalized" form of the embeddings. A structural restriction is normalized
+// if it is a single union containing no interface terms, and is minimal in the
+// sense that removing any term changes the set of types satisfying the
+// constraint. It is left as a proof for the reader that, modulo sorting, there
+// is exactly one such normalized form.
+//
+// Because the minimal representation always takes this form, NormalTerms
+// returns a slice of tilde terms corresponding to the terms of the union in
+// the normalized structural restriction. An error is returned if the type is
+// invalid, exceeds complexity bounds, or has an empty type set. In the latter
+// case, NormalTerms returns ErrEmptyTypeSet.
+//
+// NormalTerms makes no guarantees about the order of terms, except that it
+// is deterministic.
+func NormalTerms(typ types.Type) ([]*types.Term, error) {
+ switch typ := typ.Underlying().(type) {
+ case *types.TypeParam:
+ return StructuralTerms(typ)
+ case *types.Union:
+ return UnionTermSet(typ)
+ case *types.Interface:
+ return InterfaceTermSet(typ)
+ default:
+ return []*types.Term{types.NewTerm(false, typ)}, nil
+ }
+}
diff --git a/vendor/github.com/alingse/nilnesserr/internal/typeparams/normalize.go b/vendor/github.com/alingse/nilnesserr/internal/typeparams/normalize.go
new file mode 100644
index 0000000000..0302872f47
--- /dev/null
+++ b/vendor/github.com/alingse/nilnesserr/internal/typeparams/normalize.go
@@ -0,0 +1,200 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package typeparams
+
+import (
+ "errors"
+ "fmt"
+ "go/types"
+)
+
+var ErrEmptyTypeSet = errors.New("empty type set")
+
+// StructuralTerms returns a slice of terms representing the normalized
+// structural type restrictions of a type parameter, if any.
+//
+// Structural type restrictions of a type parameter are created via
+// non-interface types embedded in its constraint interface (directly, or via a
+// chain of interface embeddings). For example, in the declaration
+//
+// type T[P interface{~int; m()}] int
+//
+// the structural restriction of the type parameter P is ~int.
+//
+// With interface embedding and unions, the specification of structural type
+// restrictions may be arbitrarily complex. For example, consider the
+// following:
+//
+// type A interface{ ~string|~[]byte }
+//
+// type B interface{ int|string }
+//
+// type C interface { ~string|~int }
+//
+// type T[P interface{ A|B; C }] int
+//
+// In this example, the structural type restriction of P is ~string|int: A|B
+// expands to ~string|~[]byte|int|string, which reduces to ~string|~[]byte|int,
+// which when intersected with C (~string|~int) yields ~string|int.
+//
+// StructuralTerms computes these expansions and reductions, producing a
+// "normalized" form of the embeddings. A structural restriction is normalized
+// if it is a single union containing no interface terms, and is minimal in the
+// sense that removing any term changes the set of types satisfying the
+// constraint. It is left as a proof for the reader that, modulo sorting, there
+// is exactly one such normalized form.
+//
+// Because the minimal representation always takes this form, StructuralTerms
+// returns a slice of tilde terms corresponding to the terms of the union in
+// the normalized structural restriction. An error is returned if the
+// constraint interface is invalid, exceeds complexity bounds, or has an empty
+// type set. In the latter case, StructuralTerms returns ErrEmptyTypeSet.
+//
+// StructuralTerms makes no guarantees about the order of terms, except that it
+// is deterministic.
+func StructuralTerms(tparam *types.TypeParam) ([]*types.Term, error) {
+ constraint := tparam.Constraint()
+ if constraint == nil {
+ return nil, fmt.Errorf("%s has nil constraint", tparam)
+ }
+ iface, _ := constraint.Underlying().(*types.Interface)
+ if iface == nil {
+ return nil, fmt.Errorf("constraint is %T, not *types.Interface", constraint.Underlying())
+ }
+ return InterfaceTermSet(iface)
+}
+
+// InterfaceTermSet computes the normalized terms for a constraint interface,
+// returning an error if the term set cannot be computed or is empty. In the
+// latter case, the error will be ErrEmptyTypeSet.
+//
+// See the documentation of StructuralTerms for more information on
+// normalization.
+func InterfaceTermSet(iface *types.Interface) ([]*types.Term, error) {
+ return computeTermSet(iface)
+}
+
+// UnionTermSet computes the normalized terms for a union, returning an error
+// if the term set cannot be computed or is empty. In the latter case, the
+// error will be ErrEmptyTypeSet.
+//
+// See the documentation of StructuralTerms for more information on
+// normalization.
+func UnionTermSet(union *types.Union) ([]*types.Term, error) {
+ return computeTermSet(union)
+}
+
+func computeTermSet(typ types.Type) ([]*types.Term, error) {
+ tset, err := computeTermSetInternal(typ, make(map[types.Type]*termSet), 0)
+ if err != nil {
+ return nil, err
+ }
+ if tset.terms.isEmpty() {
+ return nil, ErrEmptyTypeSet
+ }
+ if tset.terms.isAll() {
+ return nil, nil
+ }
+ var terms []*types.Term
+ for _, term := range tset.terms {
+ terms = append(terms, types.NewTerm(term.tilde, term.typ))
+ }
+ return terms, nil
+}
+
+// A termSet holds the normalized set of terms for a given type.
+//
+// The name termSet is intentionally distinct from 'type set': a type set is
+// all types that implement a type (and includes method restrictions), whereas
+// a term set just represents the structural restrictions on a type.
+type termSet struct {
+ complete bool
+ terms termlist
+}
+
+var ErrNilType = errors.New("nil type")
+var ErrUnreachable = errors.New("unreachable")
+
+func computeTermSetInternal(t types.Type, seen map[types.Type]*termSet, depth int) (res *termSet, err error) {
+ if t == nil {
+ return nil, ErrNilType
+ }
+
+ const maxTermCount = 100
+ if tset, ok := seen[t]; ok {
+ if !tset.complete {
+ return nil, fmt.Errorf("cycle detected in the declaration of %s", t)
+ }
+ return tset, nil
+ }
+
+ // Mark the current type as seen to avoid infinite recursion.
+ tset := new(termSet)
+ defer func() {
+ tset.complete = true
+ }()
+ seen[t] = tset
+
+ switch u := t.Underlying().(type) {
+ case *types.Interface:
+ // The term set of an interface is the intersection of the term sets of its
+ // embedded types.
+ tset.terms = allTermlist
+ for i := 0; i < u.NumEmbeddeds(); i++ {
+ embedded := u.EmbeddedType(i)
+ if _, ok := embedded.Underlying().(*types.TypeParam); ok {
+ return nil, fmt.Errorf("invalid embedded type %T", embedded)
+ }
+ tset2, err := computeTermSetInternal(embedded, seen, depth+1)
+ if err != nil {
+ return nil, err
+ }
+ tset.terms = tset.terms.intersect(tset2.terms)
+ }
+ case *types.Union:
+ // The term set of a union is the union of term sets of its terms.
+ tset.terms = nil
+ for i := 0; i < u.Len(); i++ {
+ t := u.Term(i)
+ var terms termlist
+ switch t.Type().Underlying().(type) {
+ case *types.Interface:
+ tset2, err := computeTermSetInternal(t.Type(), seen, depth+1)
+ if err != nil {
+ return nil, err
+ }
+ terms = tset2.terms
+ case *types.TypeParam, *types.Union:
+ // A stand-alone type parameter or union is not permitted as union
+ // term.
+ return nil, fmt.Errorf("invalid union term %T", t)
+ default:
+ if t.Type() == types.Typ[types.Invalid] {
+ continue
+ }
+ terms = termlist{{t.Tilde(), t.Type()}}
+ }
+ tset.terms = tset.terms.union(terms)
+ if len(tset.terms) > maxTermCount {
+ return nil, fmt.Errorf("exceeded max term count %d", maxTermCount)
+ }
+ }
+ case *types.TypeParam:
+ return nil, ErrUnreachable
+ default:
+ // For all other types, the term set is just a single non-tilde term
+ // holding the type itself.
+ if u != types.Typ[types.Invalid] {
+ tset.terms = termlist{{false, t}}
+ }
+ }
+ return tset, nil
+}
+
+// under is a facade for the go/types internal function of the same name. It is
+// used by typeterm.go.
+func under(t types.Type) types.Type {
+ return t.Underlying()
+}
diff --git a/vendor/github.com/alingse/nilnesserr/internal/typeparams/termlist.go b/vendor/github.com/alingse/nilnesserr/internal/typeparams/termlist.go
new file mode 100644
index 0000000000..cbd12f8013
--- /dev/null
+++ b/vendor/github.com/alingse/nilnesserr/internal/typeparams/termlist.go
@@ -0,0 +1,163 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Code generated by copytermlist.go DO NOT EDIT.
+
+package typeparams
+
+import (
+ "bytes"
+ "go/types"
+)
+
+// A termlist represents the type set represented by the union
+// t1 ∪ t2 ∪ ... tn of the type sets of the terms t1 to tn.
+// A termlist is in normal form if all terms are disjoint.
+// termlist operations don't require the operands to be in
+// normal form.
+type termlist []*term
+
+// allTermlist represents the set of all types.
+// It is in normal form.
+var allTermlist = termlist{new(term)}
+
+// String prints the termlist exactly (without normalization).
+func (xl termlist) String() string {
+ if len(xl) == 0 {
+ return "∅"
+ }
+ var buf bytes.Buffer
+ for i, x := range xl {
+ if i > 0 {
+ buf.WriteString(" | ")
+ }
+ buf.WriteString(x.String())
+ }
+ return buf.String()
+}
+
+// isEmpty reports whether the termlist xl represents the empty set of types.
+func (xl termlist) isEmpty() bool {
+ // If there's a non-nil term, the entire list is not empty.
+ // If the termlist is in normal form, this requires at most
+ // one iteration.
+ for _, x := range xl {
+ if x != nil {
+ return false
+ }
+ }
+ return true
+}
+
+// isAll reports whether the termlist xl represents the set of all types.
+func (xl termlist) isAll() bool {
+ // If there's a 𝓤 term, the entire list is 𝓤.
+ // If the termlist is in normal form, this requires at most
+ // one iteration.
+ for _, x := range xl {
+ if x != nil && x.typ == nil {
+ return true
+ }
+ }
+ return false
+}
+
+// norm returns the normal form of xl.
+func (xl termlist) norm() termlist {
+ // Quadratic algorithm, but good enough for now.
+ // TODO(gri) fix asymptotic performance
+ used := make([]bool, len(xl))
+ var rl termlist
+ for i, xi := range xl {
+ if xi == nil || used[i] {
+ continue
+ }
+ for j := i + 1; j < len(xl); j++ {
+ xj := xl[j]
+ if xj == nil || used[j] {
+ continue
+ }
+ if u1, u2 := xi.union(xj); u2 == nil {
+ // If we encounter a 𝓤 term, the entire list is 𝓤.
+ // Exit early.
+ // (Note that this is not just an optimization;
+ // if we continue, we may end up with a 𝓤 term
+ // and other terms and the result would not be
+ // in normal form.)
+ if u1.typ == nil {
+ return allTermlist
+ }
+ xi = u1
+ used[j] = true // xj is now unioned into xi - ignore it in future iterations
+ }
+ }
+ rl = append(rl, xi)
+ }
+ return rl
+}
+
+// union returns the union xl ∪ yl.
+func (xl termlist) union(yl termlist) termlist {
+ return append(xl, yl...).norm()
+}
+
+// intersect returns the intersection xl ∩ yl.
+func (xl termlist) intersect(yl termlist) termlist {
+ if xl.isEmpty() || yl.isEmpty() {
+ return nil
+ }
+
+ // Quadratic algorithm, but good enough for now.
+ // TODO(gri) fix asymptotic performance
+ var rl termlist
+ for _, x := range xl {
+ for _, y := range yl {
+ if r := x.intersect(y); r != nil {
+ rl = append(rl, r)
+ }
+ }
+ }
+ return rl.norm()
+}
+
+// equal reports whether xl and yl represent the same type set.
+func (xl termlist) equal(yl termlist) bool {
+ // TODO(gri) this should be more efficient
+ return xl.subsetOf(yl) && yl.subsetOf(xl)
+}
+
+// includes reports whether t ∈ xl.
+func (xl termlist) includes(t types.Type) bool {
+ for _, x := range xl {
+ if x.includes(t) {
+ return true
+ }
+ }
+ return false
+}
+
+// supersetOf reports whether y ⊆ xl.
+func (xl termlist) supersetOf(y *term) bool {
+ for _, x := range xl {
+ if y.subsetOf(x) {
+ return true
+ }
+ }
+ return false
+}
+
+// subsetOf reports whether xl ⊆ yl.
+func (xl termlist) subsetOf(yl termlist) bool {
+ if yl.isEmpty() {
+ return xl.isEmpty()
+ }
+
+ // each term x of xl must be a subset of yl
+ for _, x := range xl {
+ if !yl.supersetOf(x) {
+ return false // x is not a subset of yl
+ }
+ }
+ return true
+}
diff --git a/vendor/github.com/alingse/nilnesserr/internal/typeparams/typeterm.go b/vendor/github.com/alingse/nilnesserr/internal/typeparams/typeterm.go
new file mode 100644
index 0000000000..35c66003d6
--- /dev/null
+++ b/vendor/github.com/alingse/nilnesserr/internal/typeparams/typeterm.go
@@ -0,0 +1,166 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Code generated by copytermlist.go DO NOT EDIT.
+
+package typeparams
+
+import "go/types"
+
+// A term describes elementary type sets:
+//
+// ∅: (*term)(nil) == ∅ // set of no types (empty set)
+// 𝓤: &term{} == 𝓤 // set of all types (𝓤niverse)
+// T: &term{false, T} == {T} // set of type T
+// ~t: &term{true, t} == {t' | under(t') == t} // set of types with underlying type t
+type term struct {
+ tilde bool // valid if typ != nil
+ typ types.Type
+}
+
+func (x *term) String() string {
+ switch {
+ case x == nil:
+ return "∅"
+ case x.typ == nil:
+ return "𝓤"
+ case x.tilde:
+ return "~" + x.typ.String()
+ default:
+ return x.typ.String()
+ }
+}
+
+// equal reports whether x and y represent the same type set.
+func (x *term) equal(y *term) bool {
+ // easy cases
+ switch {
+ case x == nil || y == nil:
+ return x == y
+ case x.typ == nil || y.typ == nil:
+ return x.typ == y.typ
+ }
+ // ∅ ⊂ x, y ⊂ 𝓤
+
+ return x.tilde == y.tilde && types.Identical(x.typ, y.typ)
+}
+
+// union returns the union x ∪ y: zero, one, or two non-nil terms.
+func (x *term) union(y *term) (_, _ *term) {
+ // easy cases
+ switch {
+ case x == nil && y == nil:
+ return nil, nil // ∅ ∪ ∅ == ∅
+ case x == nil:
+ return y, nil // ∅ ∪ y == y
+ case y == nil:
+ return x, nil // x ∪ ∅ == x
+ case x.typ == nil:
+ return x, nil // 𝓤 ∪ y == 𝓤
+ case y.typ == nil:
+ return y, nil // x ∪ 𝓤 == 𝓤
+ }
+ // ∅ ⊂ x, y ⊂ 𝓤
+
+ if x.disjoint(y) {
+ return x, y // x ∪ y == (x, y) if x ∩ y == ∅
+ }
+ // x.typ == y.typ
+
+ // ~t ∪ ~t == ~t
+ // ~t ∪ T == ~t
+ // T ∪ ~t == ~t
+ // T ∪ T == T
+ if x.tilde || !y.tilde {
+ return x, nil
+ }
+ return y, nil
+}
+
+// intersect returns the intersection x ∩ y.
+func (x *term) intersect(y *term) *term {
+ // easy cases
+ switch {
+ case x == nil || y == nil:
+ return nil // ∅ ∩ y == ∅ and x ∩ ∅ == ∅
+ case x.typ == nil:
+ return y // 𝓤 ∩ y == y
+ case y.typ == nil:
+ return x // x ∩ 𝓤 == x
+ }
+ // ∅ ⊂ x, y ⊂ 𝓤
+
+ if x.disjoint(y) {
+ return nil // x ∩ y == ∅ if x ∩ y == ∅
+ }
+ // x.typ == y.typ
+
+ // ~t ∩ ~t == ~t
+ // ~t ∩ T == T
+ // T ∩ ~t == T
+ // T ∩ T == T
+ if !x.tilde || y.tilde {
+ return x
+ }
+ return y
+}
+
+// includes reports whether t ∈ x.
+func (x *term) includes(t types.Type) bool {
+ // easy cases
+ switch {
+ case x == nil:
+ return false // t ∈ ∅ == false
+ case x.typ == nil:
+ return true // t ∈ 𝓤 == true
+ }
+ // ∅ ⊂ x ⊂ 𝓤
+
+ u := t
+ if x.tilde {
+ u = under(u)
+ }
+ return types.Identical(x.typ, u)
+}
+
+// subsetOf reports whether x ⊆ y.
+func (x *term) subsetOf(y *term) bool {
+ // easy cases
+ switch {
+ case x == nil:
+ return true // ∅ ⊆ y == true
+ case y == nil:
+ return false // x ⊆ ∅ == false since x != ∅
+ case y.typ == nil:
+ return true // x ⊆ 𝓤 == true
+ case x.typ == nil:
+ return false // 𝓤 ⊆ y == false since y != 𝓤
+ }
+ // ∅ ⊂ x, y ⊂ 𝓤
+
+ if x.disjoint(y) {
+ return false // x ⊆ y == false if x ∩ y == ∅
+ }
+ // x.typ == y.typ
+
+ // ~t ⊆ ~t == true
+ // ~t ⊆ T == false
+ // T ⊆ ~t == true
+ // T ⊆ T == true
+ return !x.tilde || y.tilde
+}
+
+// disjoint reports whether x ∩ y == ∅.
+// x.typ and y.typ must not be nil.
+func (x *term) disjoint(y *term) bool {
+ ux := x.typ
+ if y.tilde {
+ ux = under(ux)
+ }
+ uy := y.typ
+ if x.tilde {
+ uy = under(uy)
+ }
+ return !types.Identical(ux, uy)
+}
diff --git a/vendor/github.com/alingse/nilnesserr/linter.go b/vendor/github.com/alingse/nilnesserr/linter.go
new file mode 100644
index 0000000000..704e347ef3
--- /dev/null
+++ b/vendor/github.com/alingse/nilnesserr/linter.go
@@ -0,0 +1,48 @@
+package nilnesserr
+
+import (
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/buildssa"
+)
+
+const (
+ linterName = "nilnesserr"
+ linterDoc = `Reports constructs that check for err != nil but return a different nil value error.
+Powered by nilness and nilerr.`
+
+ linterMessage = "return a nil value error after check error"
+)
+
+type LinterSetting struct{}
+
+func NewAnalyzer(setting LinterSetting) (*analysis.Analyzer, error) {
+ a, err := newAnalyzer(setting)
+ if err != nil {
+ return nil, err
+ }
+
+ return &analysis.Analyzer{
+ Name: linterName,
+ Doc: linterDoc,
+ Run: a.run,
+ Requires: []*analysis.Analyzer{
+ buildssa.Analyzer,
+ },
+ }, nil
+}
+
+type analyzer struct {
+ setting LinterSetting
+}
+
+func newAnalyzer(setting LinterSetting) (*analyzer, error) {
+ a := &analyzer{setting: setting}
+
+ return a, nil
+}
+
+func (a *analyzer) run(pass *analysis.Pass) (interface{}, error) {
+ _, _ = a.checkNilnesserr(pass)
+
+ return nil, nil
+}
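
A hedged sketch of running the analyzer on its own via `singlechecker` (the `cmd/nilnesserr` entry point mentioned in the README presumably does something similar; it is not shown in this patch):

```go
package main

import (
	"log"

	"github.com/alingse/nilnesserr"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	analyzer, err := nilnesserr.NewAnalyzer(nilnesserr.LinterSetting{})
	if err != nil {
		log.Fatal(err)
	}
	singlechecker.Main(analyzer)
}
```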
diff --git a/vendor/github.com/alingse/nilnesserr/nilerr.go b/vendor/github.com/alingse/nilnesserr/nilerr.go
new file mode 100644
index 0000000000..c05ec90031
--- /dev/null
+++ b/vendor/github.com/alingse/nilnesserr/nilerr.go
@@ -0,0 +1,83 @@
+// some code was copied from https://github.com/gostaticanalysis/nilerr/blob/master/nilerr.go
+
+package nilnesserr
+
+import (
+ "go/types"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/ssa"
+)
+
+var errType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) // nolint: forcetypeassert
+
+func isErrType(res ssa.Value) bool {
+ return types.Implements(res.Type(), errType)
+}
+
+func isConstNil(res ssa.Value) bool {
+ v, ok := res.(*ssa.Const)
+ if ok && v.IsNil() {
+ return true
+ }
+
+ return false
+}
+
+func extractCheckedErrorValue(binOp *ssa.BinOp) ssa.Value {
+ if isErrType(binOp.X) && isConstNil(binOp.Y) {
+ return binOp.X
+ }
+ if isErrType(binOp.Y) && isConstNil(binOp.X) {
+ return binOp.Y
+ }
+
+ return nil
+}
+
+type errFact fact
+
+func findLastNonnilValue(errors []errFact, res ssa.Value) ssa.Value {
+ if len(errors) == 0 {
+ return nil
+ }
+
+ for j := len(errors) - 1; j >= 0; j-- {
+ last := errors[j]
+ if last.value == res {
+ return nil
+ } else if last.nilness == isnonnil {
+ return last.value
+ }
+ }
+
+ return nil
+}
+
+func checkNilnesserr(pass *analysis.Pass, b *ssa.BasicBlock, errors []errFact, isNilnees func(value ssa.Value) bool) {
+ for i := range b.Instrs {
+ instr, ok := b.Instrs[i].(*ssa.Return)
+ if !ok {
+ continue
+ }
+
+ for _, res := range instr.Results {
+ if !isErrType(res) || isConstNil(res) || !isNilnees(res) {
+ continue
+ }
+ // find the last checked error value that is known to be non-nil
+ lastValue := findLastNonnilValue(errors, res)
+ if lastValue == nil {
+ continue
+ }
+ // report
+ pos := instr.Pos()
+ if pos.IsValid() {
+ pass.Report(analysis.Diagnostic{
+ Pos: pos,
+ Message: linterMessage,
+ })
+ }
+ }
+ }
+}
diff --git a/vendor/github.com/alingse/nilnesserr/nilness.go b/vendor/github.com/alingse/nilnesserr/nilness.go
new file mode 100644
index 0000000000..cd5a691070
--- /dev/null
+++ b/vendor/github.com/alingse/nilnesserr/nilness.go
@@ -0,0 +1,374 @@
+// This file was copied from https://cs.opensource.google/go/x/tools/+/master:go/analysis/passes/nilness/nilness.go
+// I modified it slightly to check error return values.
+
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package nilnesserr
+
+import (
+ "go/token"
+ "go/types"
+
+ "github.com/alingse/nilnesserr/internal/typeparams"
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/buildssa"
+ "golang.org/x/tools/go/ssa"
+)
+
+func (a *analyzer) checkNilnesserr(pass *analysis.Pass) (interface{}, error) {
+ ssainput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
+ for _, fn := range ssainput.SrcFuncs {
+ runFunc(pass, fn)
+ }
+ return nil, nil
+}
+
+func runFunc(pass *analysis.Pass, fn *ssa.Function) {
+ // visit visits reachable blocks of the CFG in dominance order,
+ // maintaining a stack of dominating nilness facts.
+ //
+ // By traversing the dom tree, we can pop facts off the stack as
+ // soon as we've visited a subtree. Had we traversed the CFG,
+ // we would need to retain the set of facts for each block.
+ seen := make([]bool, len(fn.Blocks)) // seen[i] means visit should ignore block i
+
+ var visit func(b *ssa.BasicBlock, stack []fact, errors []errFact)
+
+ visit = func(b *ssa.BasicBlock, stack []fact, errors []errFact) {
+ if seen[b.Index] {
+ return
+ }
+ seen[b.Index] = true
+
+ // check whether this block returns a nil value error
+ checkNilnesserr(
+ pass, b,
+ errors,
+ func(v ssa.Value) bool {
+ return nilnessOf(stack, v) == isnil
+ })
+
+ // For nil comparison blocks, report an error if the condition
+ // is degenerate, and push a nilness fact on the stack when
+ // visiting its true and false successor blocks.
+ if binop, tsucc, fsucc := eq(b); binop != nil {
+ // extract the err != nil or err == nil
+ errValue := extractCheckedErrorValue(binop)
+
+ xnil := nilnessOf(stack, binop.X)
+ ynil := nilnessOf(stack, binop.Y)
+
+ if ynil != unknown && xnil != unknown && (xnil == isnil || ynil == isnil) {
+ // Degenerate condition:
+ // the nilness of both operands is known,
+ // and at least one of them is nil.
+
+ // If tsucc's or fsucc's sole incoming edge is impossible,
+ // it is unreachable. Prune traversal of it and
+ // all the blocks it dominates.
+ // (We could be more precise with full dataflow
+ // analysis of control-flow joins.)
+ var skip *ssa.BasicBlock
+ if xnil == ynil {
+ skip = fsucc
+ } else {
+ skip = tsucc
+ }
+ for _, d := range b.Dominees() {
+ if d == skip && len(d.Preds) == 1 {
+ continue
+ }
+
+ visit(d, stack, errors)
+ }
+
+ return
+ }
+
+ // "if x == nil" or "if nil == y" condition; x, y are unknown.
+ if xnil == isnil || ynil == isnil {
+ var newFacts facts
+ if xnil == isnil {
+ // x is nil, y is unknown:
+ // t successor learns y is nil.
+ newFacts = expandFacts(fact{binop.Y, isnil})
+ } else {
+ // y is nil, x is unknown:
+ // t successor learns x is nil.
+ newFacts = expandFacts(fact{binop.X, isnil})
+ }
+
+ for _, d := range b.Dominees() {
+ // Successor blocks learn a fact
+ // only at non-critical edges.
+ // (We could be more precise with full dataflow
+ // analysis of control-flow joins.)
+ s := stack
+ errs := errors
+ if len(d.Preds) == 1 {
+ if d == tsucc {
+ s = append(s, newFacts...)
+ // add nil error
+ if errValue != nil {
+ errs = append(errs, errFact{value: errValue, nilness: isnil})
+ }
+ } else if d == fsucc {
+ s = append(s, newFacts.negate()...)
+ // add non-nil error
+ if errValue != nil {
+ errs = append(errs, errFact{value: errValue, nilness: isnonnil})
+ }
+ }
+ }
+
+ visit(d, s, errs)
+ }
+ return
+ }
+ }
+
+ // In code of the form:
+ //
+ // if ptr, ok := x.(*T); ok { ... } else { fsucc }
+ //
+ // the fsucc block learns that ptr == nil,
+ // since that's its zero value.
+ if If, ok := b.Instrs[len(b.Instrs)-1].(*ssa.If); ok {
+ // Handle "if ok" and "if !ok" variants.
+ cond, fsucc := If.Cond, b.Succs[1]
+ if unop, ok := cond.(*ssa.UnOp); ok && unop.Op == token.NOT {
+ cond, fsucc = unop.X, b.Succs[0]
+ }
+
+ // Match pattern:
+ // t0 = typeassert (pointerlike)
+ // t1 = extract t0 #0 // ptr
+ // t2 = extract t0 #1 // ok
+ // if t2 goto tsucc, fsucc
+ if extract1, ok := cond.(*ssa.Extract); ok && extract1.Index == 1 {
+ if assert, ok := extract1.Tuple.(*ssa.TypeAssert); ok &&
+ isNillable(assert.AssertedType) {
+ for _, pinstr := range *assert.Referrers() {
+ if extract0, ok := pinstr.(*ssa.Extract); ok &&
+ extract0.Index == 0 &&
+ extract0.Tuple == extract1.Tuple {
+ for _, d := range b.Dominees() {
+ if len(d.Preds) == 1 && d == fsucc {
+ visit(d, append(stack, fact{extract0, isnil}), errors)
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ for _, d := range b.Dominees() {
+ visit(d, stack, errors)
+ }
+ }
+
+ // Visit the entry block. No need to visit fn.Recover.
+ if fn.Blocks != nil {
+ visit(fn.Blocks[0], make([]fact, 0, 20), nil) // 20 is plenty
+ }
+}
+
+// A fact records that a block is dominated
+// by the condition v == nil or v != nil.
+type fact struct {
+ value ssa.Value
+ nilness nilness
+}
+
+func (f fact) negate() fact { return fact{f.value, -f.nilness} }
+
+type nilness int
+
+const (
+ isnonnil = -1
+ unknown nilness = 0
+ isnil = 1
+)
+
+var nilnessStrings = []string{"non-nil", "unknown", "nil"}
+
+func (n nilness) String() string { return nilnessStrings[n+1] }
+
+// nilnessOf reports whether v is definitely nil, definitely not nil,
+// or unknown given the dominating stack of facts.
+func nilnessOf(stack []fact, v ssa.Value) nilness {
+ switch v := v.(type) {
+ // unwrap ChangeInterface and Slice values recursively, to detect if underlying
+ // values have any facts recorded or are otherwise known with regard to nilness.
+ //
+ // This work must be in addition to expanding facts about
+ // ChangeInterfaces during inference/fact gathering because this covers
+ // cases where the nilness of a value is intrinsic, rather than based
+ // on inferred facts, such as a zero value interface variable. That
+ // said, this work alone would only inform us when facts are about
+ // underlying values, rather than outer values, when the analysis is
+ // transitive in both directions.
+ case *ssa.ChangeInterface:
+ if underlying := nilnessOf(stack, v.X); underlying != unknown {
+ return underlying
+ }
+ case *ssa.MakeInterface:
+ // A MakeInterface is non-nil unless its operand is a type parameter.
+ tparam, ok := types.Unalias(v.X.Type()).(*types.TypeParam)
+ if !ok {
+ return isnonnil
+ }
+
+ // A MakeInterface of a type parameter is non-nil if
+ // the type parameter cannot be instantiated as an
+ // interface type (#66835).
+ if terms, err := typeparams.NormalTerms(tparam.Constraint()); err == nil && len(terms) > 0 {
+ return isnonnil
+ }
+
+ // If the type parameter can be instantiated as an
+ // interface (and thus also as a concrete type),
+ // we can't determine the nilness.
+
+ case *ssa.Slice:
+ if underlying := nilnessOf(stack, v.X); underlying != unknown {
+ return underlying
+ }
+ case *ssa.SliceToArrayPointer:
+ nn := nilnessOf(stack, v.X)
+ if slice2ArrayPtrLen(v) > 0 {
+ if nn == isnil {
+ // We know that *(*[1]byte)(nil) is going to panic because of the
+ // conversion. So return unknown to the caller, prevent useless
+ // nil deference reporting due to * operator.
+ return unknown
+ }
+ // Otherwise, the conversion will yield a non-nil pointer to array.
+ // Note that the instruction can still panic if array length greater
+ // than slice length. If the value is used by another instruction,
+ // that instruction can assume the panic did not happen when that
+ // instruction is reached.
+ return isnonnil
+ }
+ // In case array length is zero, the conversion result depends on nilness of the slice.
+ if nn != unknown {
+ return nn
+ }
+ }
+
+ // Is value intrinsically nil or non-nil?
+ switch v := v.(type) {
+ case *ssa.Alloc,
+ *ssa.FieldAddr,
+ *ssa.FreeVar,
+ *ssa.Function,
+ *ssa.Global,
+ *ssa.IndexAddr,
+ *ssa.MakeChan,
+ *ssa.MakeClosure,
+ *ssa.MakeMap,
+ *ssa.MakeSlice:
+ return isnonnil
+
+ case *ssa.Const:
+ if v.IsNil() {
+ return isnil // nil or zero value of a pointer-like type
+ } else {
+ return unknown // non-pointer
+ }
+ }
+
+ // Search dominating control-flow facts.
+ for _, f := range stack {
+ if f.value == v {
+ return f.nilness
+ }
+ }
+ return unknown
+}
+
+func slice2ArrayPtrLen(v *ssa.SliceToArrayPointer) int64 {
+ return v.Type().(*types.Pointer).Elem().Underlying().(*types.Array).Len()
+}
+
+// If b ends with an equality comparison, eq returns the operation and
+// its true (equal) and false (not equal) successors.
+func eq(b *ssa.BasicBlock) (op *ssa.BinOp, tsucc, fsucc *ssa.BasicBlock) {
+ if If, ok := b.Instrs[len(b.Instrs)-1].(*ssa.If); ok {
+ if binop, ok := If.Cond.(*ssa.BinOp); ok {
+ switch binop.Op {
+ case token.EQL:
+ return binop, b.Succs[0], b.Succs[1]
+ case token.NEQ:
+ return binop, b.Succs[1], b.Succs[0]
+ }
+ }
+ }
+ return nil, nil, nil
+}
+
+// expandFacts takes a single fact and returns the set of facts that can be
+// known about it or any of its related values. Some operations, like
+// ChangeInterface, have transitive nilness, such that if you know the
+// underlying value is nil, you also know the value itself is nil, and vice
+// versa. This operation allows callers to match on any of the related values
+// in analyses, rather than just the one form of the value that happened to
+// appear in a comparison.
+//
+// This work must be in addition to unwrapping values within nilnessOf because
+// while this work helps give facts about transitively known values based on
+// inferred facts, the recursive check within nilnessOf covers cases where
+// nilness facts are intrinsic to the underlying value, such as a zero value
+// interface variables.
+//
+// ChangeInterface is the only expansion currently supported, but others, like
+// Slice, could be added. At this time, this tool does not check slice
+// operations in a way this expansion could help. See
+// https://play.golang.org/p/mGqXEp7w4fR for an example.
+func expandFacts(f fact) []fact {
+ ff := []fact{f}
+
+Loop:
+ for {
+ switch v := f.value.(type) {
+ case *ssa.ChangeInterface:
+ f = fact{v.X, f.nilness}
+ ff = append(ff, f)
+ default:
+ break Loop
+ }
+ }
+
+ return ff
+}
+
+type facts []fact
+
+func (ff facts) negate() facts {
+ nn := make([]fact, len(ff))
+ for i, f := range ff {
+ nn[i] = f.negate()
+ }
+ return nn
+}
+
+func isNillable(t types.Type) bool {
+ // TODO(adonovan): CoreType (+ case *Interface) looks wrong.
+ // This should probably use Underlying, and handle TypeParam
+ // by computing the union across its normal terms.
+ switch t := typeparams.CoreType(t).(type) {
+ case *types.Pointer,
+ *types.Map,
+ *types.Signature,
+ *types.Chan,
+ *types.Interface,
+ *types.Slice:
+ return true
+ case *types.Basic:
+ return t == types.Typ[types.UnsafePointer]
+ }
+ return false
+}
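
The `*ssa.SliceToArrayPointer` handling above mirrors the language spec: converting a slice to a pointer to a zero-length array propagates the slice's nilness, while a positive array length either panics (slice too short) or yields a non-nil pointer. A minimal standalone sketch of the runtime behavior the analyzer relies on (not part of the vendored analyzer; requires Go 1.17+ for slice-to-array-pointer conversions):

```go
package main

import "fmt"

func main() {
	var nilSlice []byte // nil slice
	empty := []byte{}   // non-nil, zero-length slice

	// Zero-length array: the result mirrors the nilness of the source slice.
	fmt.Println((*[0]byte)(nilSlice) == nil) // true
	fmt.Println((*[0]byte)(empty) == nil)    // false

	// Positive array length: a successful conversion is always non-nil...
	buf := []byte{1, 2, 3, 4}
	fmt.Println((*[4]byte)(buf) == nil) // false

	// ...and converting a too-short (here nil) slice panics at run time.
	defer func() { fmt.Println("recovered:", recover() != nil) }()
	_ = (*[4]byte)(nilSlice) // panics: array length 4 exceeds slice length 0
}
```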
diff --git a/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go b/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go
index eaf408d6f3..1972379df4 100644
--- a/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go
+++ b/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go
@@ -19,9 +19,13 @@ var (
skipTests bool
)
+const (
+ defaultMaxComplexity = 10
+)
+
//nolint:gochecknoinits
func init() {
- flagSet.IntVar(&maxComplexity, "maxComplexity", 10, "max complexity the function can have")
+ flagSet.IntVar(&maxComplexity, "maxComplexity", defaultMaxComplexity, "max complexity the function can have")
flagSet.Float64Var(&packageAverage, "packageAverage", 0, "max average complexity in package")
flagSet.BoolVar(&skipTests, "skipTests", false, "should the linter execute on test files as well")
}
@@ -29,7 +33,7 @@ func init() {
func NewAnalyzer() *analysis.Analyzer {
return &analysis.Analyzer{
Name: "cyclop",
- Doc: "calculates cyclomatic complexity",
+ Doc: "checks function and package cyclomatic complexity",
Run: run,
Flags: flagSet,
}
@@ -40,9 +44,9 @@ func run(pass *analysis.Pass) (interface{}, error) {
var pkgName string
var pkgPos token.Pos
- for _, f := range pass.Files {
- ast.Inspect(f, func(node ast.Node) bool {
- f, ok := node.(*ast.FuncDecl)
+ for _, file := range pass.Files {
+ ast.Inspect(file, func(node ast.Node) bool {
+ funcDecl, ok := node.(*ast.FuncDecl)
if !ok {
if node == nil {
return true
@@ -55,15 +59,15 @@ func run(pass *analysis.Pass) (interface{}, error) {
return true
}
- if skipTests && testFunc(f) {
+ if skipTests && testFunc(funcDecl) {
return true
}
count++
- comp := complexity(f)
+ comp := complexity(funcDecl)
sum += float64(comp)
if comp > maxComplexity {
- pass.Reportf(node.Pos(), "calculated cyclomatic complexity for function %s is %d, max is %d", f.Name.Name, comp, maxComplexity)
+ pass.Reportf(node.Pos(), "calculated cyclomatic complexity for function %s is %d, max is %d", funcDecl.Name.Name, comp, maxComplexity)
}
return true
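
For context on what the `maxComplexity` threshold measures: under the usual cyclomatic counting (1 for the function plus 1 per branch point such as `if`, `for`, `case`, `&&` and `||`), the default of 10 allows roughly nine decision points per function. A rough illustration with made-up names; the authoritative rules are whatever the vendored `complexity` helper implements:

```go
package main

import "fmt"

// classify has cyclomatic complexity 5 under the common counting:
// 1 (function) + 1 (for) + 1 (if) + 1 (&&) + 1 (second if).
func classify(xs []int, limit int) string {
	count := 0
	for _, x := range xs { // +1
		if x > 0 && x < limit { // +1 (if) +1 (&&)
			count++
		}
	}
	if count == 0 { // +1
		return "none"
	}
	return fmt.Sprintf("%d in range", count)
}

func main() {
	fmt.Println(classify([]int{1, 5, 9, -2}, 8)) // "2 in range"
}
```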
diff --git a/vendor/github.com/bombsimon/wsl/v4/.gitignore b/vendor/github.com/bombsimon/wsl/v4/.gitignore
index 1c8eba613e..b37c694812 100644
--- a/vendor/github.com/bombsimon/wsl/v4/.gitignore
+++ b/vendor/github.com/bombsimon/wsl/v4/.gitignore
@@ -68,3 +68,5 @@ tags
# End of https://www.gitignore.io/api/go,vim,macos
+
+.idea/
diff --git a/vendor/github.com/bombsimon/wsl/v4/.golangci.yml b/vendor/github.com/bombsimon/wsl/v4/.golangci.yml
index 543012008f..bc79b83961 100644
--- a/vendor/github.com/bombsimon/wsl/v4/.golangci.yml
+++ b/vendor/github.com/bombsimon/wsl/v4/.golangci.yml
@@ -1,24 +1,25 @@
---
run:
- deadline: 1m
+ timeout: 1m
issues-exit-code: 1
tests: true
- skip-dirs:
- - vendor$
output:
- format: colored-line-number
print-issued-lines: false
+ sort-results: true
+ formats:
+ - format: colored-line-number
linters-settings:
gocognit:
min-complexity: 10
depguard:
- list-type: blacklist
- include-go-root: false
- packages:
- - github.com/davecgh/go-spew/spew
+ rules:
+ main:
+ deny:
+ - pkg: "github.com/davecgh/go-spew/spew"
+ desc: not allowed
misspell:
locale: US
@@ -38,44 +39,33 @@ linters:
enable-all: true
disable:
- cyclop
- - deadcode
- depguard
- dupl
- dupword
- - exhaustivestruct
- exhaustruct
+ - exportloopref
- forbidigo
- funlen
- gci
- gocognit
- gocyclo
- godox
- - golint
- - gomnd
- - ifshort
- - interfacer
+ - mnd
- lll
- maintidx
- - maligned
- nakedret
- nestif
- nlreturn
- - nosnakecase
- paralleltest
- prealloc
- rowserrcheck
- - scopelint
- - structcheck
- testpackage
- - varcheck
+ - tparallel
- varnamelen
- wastedassign
- fast: false
-
issues:
exclude-use-default: true
max-issues-per-linter: 0
max-same-issues: 0
-
# vim: set sw=2 ts=2 et:
diff --git a/vendor/github.com/bombsimon/wsl/v4/README.md b/vendor/github.com/bombsimon/wsl/v4/README.md
index 0bcf01d96a..c9c42341ef 100644
--- a/vendor/github.com/bombsimon/wsl/v4/README.md
+++ b/vendor/github.com/bombsimon/wsl/v4/README.md
@@ -20,7 +20,7 @@ make something configurable!
```sh
# Latest release
-go install github.com/bombsimon/wsl/v4/cmd/wsl
+go install github.com/bombsimon/wsl/v4/cmd/wsl@latest
# Main branch
go install github.com/bombsimon/wsl/v4/cmd/wsl@master
diff --git a/vendor/github.com/bombsimon/wsl/v4/analyzer.go b/vendor/github.com/bombsimon/wsl/v4/analyzer.go
index b8eac15875..e51df89c6c 100644
--- a/vendor/github.com/bombsimon/wsl/v4/analyzer.go
+++ b/vendor/github.com/bombsimon/wsl/v4/analyzer.go
@@ -2,6 +2,8 @@ package wsl
import (
"flag"
+ "go/ast"
+ "go/token"
"strings"
"golang.org/x/tools/go/analysis"
@@ -30,6 +32,7 @@ func defaultConfig() *Configuration {
ForceCuddleErrCheckAndAssign: false,
ForceExclusiveShortDeclarations: false,
StrictAppend: true,
+ IncludeGenerated: false,
AllowCuddleWithCalls: []string{"Lock", "RLock"},
AllowCuddleWithRHS: []string{"Unlock", "RUnlock"},
ErrorVariableNames: []string{"err"},
@@ -64,6 +67,7 @@ func (wa *wslAnalyzer) flags() flag.FlagSet {
flags.BoolVar(&wa.config.ForceCuddleErrCheckAndAssign, "force-err-cuddling", false, "Force cuddling of error checks with error var assignment")
flags.BoolVar(&wa.config.ForceExclusiveShortDeclarations, "force-short-decl-cuddling", false, "Force short declarations to cuddle by themselves")
flags.BoolVar(&wa.config.StrictAppend, "strict-append", true, "Strict rules for append")
+ flags.BoolVar(&wa.config.IncludeGenerated, "include-generated", false, "Include generated files")
flags.IntVar(&wa.config.ForceCaseTrailingWhitespaceLimit, "force-case-trailing-whitespace", 0, "Force newlines for case blocks > this number.")
flags.Var(&multiStringValue{slicePtr: &wa.config.AllowCuddleWithCalls}, "allow-cuddle-with-calls", "Comma separated list of idents that can have cuddles after")
@@ -75,11 +79,20 @@ func (wa *wslAnalyzer) flags() flag.FlagSet {
func (wa *wslAnalyzer) run(pass *analysis.Pass) (interface{}, error) {
for _, file := range pass.Files {
- filename := pass.Fset.PositionFor(file.Pos(), false).Filename
+ filename := getFilename(pass.Fset, file)
if !strings.HasSuffix(filename, ".go") {
continue
}
+		// if the file is related to cgo, the filename of the unadjusted position is not a '.go' file.
+ fn := pass.Fset.PositionFor(file.Pos(), false).Filename
+
+ // The file is skipped if the "unadjusted" file is a Go file, and it's a generated file (ex: "_test.go" file).
+ // The other non-Go files are skipped by the first 'if' with the adjusted position.
+ if !wa.config.IncludeGenerated && ast.IsGenerated(file) && strings.HasSuffix(fn, ".go") {
+ continue
+ }
+
processor := newProcessorWithConfig(file, pass.Fset, wa.config)
processor.parseAST()
@@ -120,7 +133,7 @@ type multiStringValue struct {
// Set implements the flag.Value interface and will overwrite the pointer to the
// slice with a new pointer after splitting the flag by comma.
func (m *multiStringValue) Set(value string) error {
- s := []string{}
+ var s []string
for _, v := range strings.Split(value, ",") {
s = append(s, strings.TrimSpace(v))
@@ -139,3 +152,12 @@ func (m *multiStringValue) String() string {
return strings.Join(*m.slicePtr, ", ")
}
+
+func getFilename(fset *token.FileSet, file *ast.File) string {
+ filename := fset.PositionFor(file.Pos(), true).Filename
+ if !strings.HasSuffix(filename, ".go") {
+ return fset.PositionFor(file.Pos(), false).Filename
+ }
+
+ return filename
+}
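
The new skip logic combines two standard-library helpers: `ast.IsGenerated` (Go 1.21+) detects the conventional "Code generated ... DO NOT EDIT." header, and `token.FileSet.PositionFor` with `adjusted=false` ignores `//line` directives so cgo intermediates can be told apart from real source. A small standalone sketch of the same checks; the file name and source text are made up for illustration:

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

const src = `// Code generated by example-tool. DO NOT EDIT.

package demo
`

func main() {
	fset := token.NewFileSet()
	// ParseComments is required so the generated-code marker is retained.
	file, err := parser.ParseFile(fset, "demo_gen.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	// True: the header matches the conventional generated-code marker.
	fmt.Println(ast.IsGenerated(file))

	// Adjusted positions honour //line directives; unadjusted positions report
	// the physical file, which is how the analyzer spots cgo intermediates.
	fmt.Println(fset.PositionFor(file.Pos(), true).Filename)
	fmt.Println(fset.PositionFor(file.Pos(), false).Filename)
}
```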
diff --git a/vendor/github.com/bombsimon/wsl/v4/wsl.go b/vendor/github.com/bombsimon/wsl/v4/wsl.go
index 6fd33335a1..44c7abe219 100644
--- a/vendor/github.com/bombsimon/wsl/v4/wsl.go
+++ b/vendor/github.com/bombsimon/wsl/v4/wsl.go
@@ -174,6 +174,11 @@ type Configuration struct {
//
// is not allowed. This logic overrides ForceCuddleErrCheckAndAssign among others.
ForceExclusiveShortDeclarations bool
+
+ // IncludeGenerated will include generated files in the analysis and report
+ // errors even for generated files. Can be useful when developing
+ // generators.
+ IncludeGenerated bool
}
// fix is a range to fixup.
@@ -348,7 +353,7 @@ func (p *processor) parseBlockStatements(statements []ast.Stmt) {
return false
}
- for j := 0; j < n; j++ {
+ for j := range n {
s1 := statements[i+j]
s2 := statements[i+j+1]
@@ -578,7 +583,7 @@ func (p *processor) parseBlockStatements(statements []ast.Stmt) {
}
p.addWhitespaceBeforeError(t, reasonExpressionCuddledWithDeclOrRet)
- case *ast.IfStmt, *ast.RangeStmt, *ast.SwitchStmt:
+ case *ast.IfStmt, *ast.RangeStmt, *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.ForStmt:
p.addWhitespaceBeforeError(t, reasonExpressionCuddledWithBlock)
}
@@ -1108,8 +1113,8 @@ func (p *processor) findLeadingAndTrailingWhitespaces(ident *ast.Ident, stmt, ne
return
}
- blockStartLine = p.fileSet.PositionFor(blockStartPos, false).Line
- blockEndLine = p.fileSet.PositionFor(blockEndPos, false).Line
+ blockStartLine = p.fileSet.Position(blockStartPos).Line
+ blockEndLine = p.fileSet.Position(blockEndPos).Line
// No whitespace possible if LBrace and RBrace is on the same line.
if blockStartLine == blockEndLine {
@@ -1357,14 +1362,14 @@ func isExampleFunc(ident *ast.Ident) bool {
}
func (p *processor) nodeStart(node ast.Node) int {
- return p.fileSet.PositionFor(node.Pos(), false).Line
+ return p.fileSet.Position(node.Pos()).Line
}
func (p *processor) nodeEnd(node ast.Node) int {
- line := p.fileSet.PositionFor(node.End(), false).Line
+ line := p.fileSet.Position(node.End()).Line
if isEmptyLabeledStmt(node) {
- return p.fileSet.PositionFor(node.Pos(), false).Line
+ return p.fileSet.Position(node.Pos()).Line
}
return line
@@ -1403,7 +1408,7 @@ func (p *processor) addErrorRange(reportAt, start, end token.Pos, reason string)
}
func (p *processor) addWarning(w string, pos token.Pos, t interface{}) {
- position := p.fileSet.PositionFor(pos, false)
+ position := p.fileSet.Position(pos)
p.warnings = append(p.warnings,
fmt.Sprintf("%s:%d: %s (%T)", position.Filename, position.Line, w, t),
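
The `for j := 0; j < n; j++` to `for j := range n` rewrite above relies on Go 1.22's range-over-integer form, which iterates 0 through n-1 and is a drop-in replacement when the loop variable is not mutated. A quick equivalence sketch:

```go
package main

import "fmt"

func main() {
	n := 3

	// Classic counted loop.
	for j := 0; j < n; j++ {
		fmt.Print(j, " ") // 0 1 2
	}
	fmt.Println()

	// Go 1.22+ range over an integer: same iteration space, 0 through n-1.
	for j := range n {
		fmt.Print(j, " ") // 0 1 2
	}
	fmt.Println()
}
```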
diff --git a/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go b/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go
index f1bf20faba..39d3cd44ec 100644
--- a/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go
+++ b/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go
@@ -14,7 +14,7 @@ import (
)
const (
- doc = "bidichk detects dangerous unicode character sequences"
+ doc = "Checks for dangerous unicode character sequences"
disallowedDoc = `comma separated list of disallowed runes (full name or short name)
Supported runes
@@ -142,25 +142,28 @@ func NewAnalyzer() *analysis.Analyzer {
}
func (b bidichk) run(pass *analysis.Pass) (interface{}, error) {
- var err error
+ readFile := pass.ReadFile
+ if readFile == nil {
+ readFile = os.ReadFile
+ }
- pass.Fset.Iterate(func(f *token.File) bool {
- if strings.HasPrefix(f.Name(), "$GOROOT") {
- return true
+ for _, astFile := range pass.Files {
+ f := pass.Fset.File(astFile.FileStart)
+ if f == nil {
+ continue
}
- return b.check(f.Name(), f.Pos(0), pass) == nil
- })
-
- return nil, err
-}
+ body, err := readFile(f.Name())
+ if err != nil {
+ return nil, err
+ }
-func (b bidichk) check(filename string, pos token.Pos, pass *analysis.Pass) error {
- body, err := os.ReadFile(filename)
- if err != nil {
- return err
+ b.check(body, f.Pos(0), pass)
}
+ return nil, nil
+}
+func (b bidichk) check(body []byte, pos token.Pos, pass *analysis.Pass) {
for name, r := range b.disallowedRunes {
start := 0
for {
@@ -175,6 +178,4 @@ func (b bidichk) check(filename string, pos token.Pos, pass *analysis.Pass) erro
start += utf8.RuneLen(r)
}
}
-
- return nil
}
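
bidichk flags runes such as U+202E (RIGHT-TO-LEFT OVERRIDE) that can visually reorder source text. A minimal standalone sketch of the underlying detection using only the standard library; the rune list here is a small made-up subset, whereas the vendored linter checks a configurable set read from the file bodies:

```go
package main

import (
	"fmt"
	"strings"
	"unicode/utf8"
)

func main() {
	disallowed := map[string]rune{
		"RIGHT-TO-LEFT-OVERRIDE": '\u202e',
		"LEFT-TO-RIGHT-OVERRIDE": '\u202d',
	}

	src := "fmt.Println(\"user\u202e \")" // contains a hidden RLO rune

	for name, r := range disallowed {
		if idx := strings.IndexRune(src, r); idx >= 0 {
			fmt.Printf("found %s (%d bytes) at byte offset %d\n",
				name, utf8.RuneLen(r), idx)
		}
	}
}
```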
diff --git a/vendor/github.com/breml/errchkjson/.goreleaser.yml b/vendor/github.com/breml/errchkjson/.goreleaser.yml
index a05c172cb6..1113690539 100644
--- a/vendor/github.com/breml/errchkjson/.goreleaser.yml
+++ b/vendor/github.com/breml/errchkjson/.goreleaser.yml
@@ -1,3 +1,6 @@
+---
+version: 2
+
# This is an example .goreleaser.yml file with some sane defaults.
# Make sure to check the documentation at http://goreleaser.com
before:
@@ -23,9 +26,9 @@ archives:
{{- else }}{{ .Arch }}{{ end }}
{{- if .Arm }}v{{ .Arm }}{{ end -}}
snapshot:
- name_template: "{{ .Tag }}-next"
+ version_template: "{{ .Tag }}-next"
changelog:
- skip: true
+ disable: true
release:
github:
owner: breml
diff --git a/vendor/github.com/breml/errchkjson/README.md b/vendor/github.com/breml/errchkjson/README.md
index 1979597387..a387ea23d2 100644
--- a/vendor/github.com/breml/errchkjson/README.md
+++ b/vendor/github.com/breml/errchkjson/README.md
@@ -55,7 +55,7 @@ response type, the linter will warn you.
Download `errchkjson` from the [releases](https://github.com/breml/errchkjson/releases) or get the latest version from source with:
```shell
-go get github.com/breml/errchkjson/cmd/errchkjson
+go install github.com/breml/errchkjson/cmd/errchkjson@latest
```
## Usage
diff --git a/vendor/github.com/breml/errchkjson/errchkjson.go b/vendor/github.com/breml/errchkjson/errchkjson.go
index 4a23929cf2..7c8cd82e96 100644
--- a/vendor/github.com/breml/errchkjson/errchkjson.go
+++ b/vendor/github.com/breml/errchkjson/errchkjson.go
@@ -25,7 +25,7 @@ func NewAnalyzer() *analysis.Analyzer {
a := &analysis.Analyzer{
Name: "errchkjson",
- Doc: "Checks types passed to the json encoding functions. Reports unsupported types and reports occations, where the check for the returned error can be omitted.",
+ Doc: "Checks types passed to the json encoding functions. Reports unsupported types and reports occurrences where the check for the returned error can be omitted.",
Run: errchkjson.run,
}
diff --git a/vendor/github.com/butuzov/ireturn/analyzer/analyzer.go b/vendor/github.com/butuzov/ireturn/analyzer/analyzer.go
index f68170fb31..ebf2a0dbea 100644
--- a/vendor/github.com/butuzov/ireturn/analyzer/analyzer.go
+++ b/vendor/github.com/butuzov/ireturn/analyzer/analyzer.go
@@ -8,12 +8,12 @@ import (
"strings"
"sync"
- "github.com/butuzov/ireturn/analyzer/internal/config"
- "github.com/butuzov/ireturn/analyzer/internal/types"
-
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
+
+ "github.com/butuzov/ireturn/analyzer/internal/config"
+ "github.com/butuzov/ireturn/analyzer/internal/types"
)
const name string = "ireturn" // linter name
@@ -23,11 +23,11 @@ type validator interface {
}
type analyzer struct {
- once sync.Once
- mu sync.RWMutex
- handler validator
- err error
- diabledNolint bool
+ once sync.Once
+ mu sync.RWMutex
+ handler validator
+ err error
+ disabledNolint bool
found []analysis.Diagnostic
}
@@ -63,7 +63,7 @@ func (a *analyzer) run(pass *analysis.Pass) (interface{}, error) {
}
// 003. Is it allowed to be checked?
- if !a.diabledNolint && hasDisallowDirective(f.Doc) {
+ if !a.disabledNolint && hasDisallowDirective(f.Doc) {
return
}
@@ -115,7 +115,7 @@ func (a *analyzer) readConfiguration(fs *flag.FlagSet) {
// First: checking nonolint directive
val := fs.Lookup("nonolint")
if val != nil {
- a.diabledNolint = fs.Lookup("nonolint").Value.String() == "true"
+ a.disabledNolint = fs.Lookup("nonolint").Value.String() == "true"
}
// Second: validators implementation next
@@ -128,7 +128,7 @@ func (a *analyzer) readConfiguration(fs *flag.FlagSet) {
}
func NewAnalyzer() *analysis.Analyzer {
- a := analyzer{} //nolint: exhaustivestruct
+ a := analyzer{}
return &analysis.Analyzer{
Name: name,
@@ -196,7 +196,7 @@ func filterInterfaces(p *analysis.Pass, ft *ast.FuncType, di map[string]struct{}
typeParams := val.String()
prefix, suffix := "interface{", "}"
- if strings.HasPrefix(typeParams, prefix) { // nolint: gosimple
+ if strings.HasPrefix(typeParams, prefix) { //nolint:gosimple
typeParams = typeParams[len(prefix):]
}
if strings.HasSuffix(typeParams, suffix) {
diff --git a/vendor/github.com/butuzov/ireturn/analyzer/internal/config/allow.go b/vendor/github.com/butuzov/ireturn/analyzer/internal/config/allow.go
index 6a294ca35f..da101c7862 100644
--- a/vendor/github.com/butuzov/ireturn/analyzer/internal/config/allow.go
+++ b/vendor/github.com/butuzov/ireturn/analyzer/internal/config/allow.go
@@ -2,7 +2,7 @@ package config
import "github.com/butuzov/ireturn/analyzer/internal/types"
-// allowConfig specifies a list of interfaces (keywords, patters and regular expressions)
+// allowConfig specifies a list of interfaces (keywords, patterns and regular expressions)
// that are allowed by ireturn as valid to return, any non listed interface are rejected.
type allowConfig struct {
*defaultConfig
diff --git a/vendor/github.com/butuzov/ireturn/analyzer/internal/config/new.go b/vendor/github.com/butuzov/ireturn/analyzer/internal/config/new.go
index 6aa04e52e8..d6914af862 100644
--- a/vendor/github.com/butuzov/ireturn/analyzer/internal/config/new.go
+++ b/vendor/github.com/butuzov/ireturn/analyzer/internal/config/new.go
@@ -10,7 +10,6 @@ import (
var ErrCollisionOfInterests = errors.New("can't have both `-accept` and `-reject` specified at same time")
-// nolint: exhaustivestruct
func DefaultValidatorConfig() *allowConfig {
return allowAll([]string{
types.NameEmpty, // "empty": empty interfaces (interface{})
diff --git a/vendor/github.com/butuzov/ireturn/analyzer/internal/config/reject.go b/vendor/github.com/butuzov/ireturn/analyzer/internal/config/reject.go
index bef6913bb8..b2cde910ce 100644
--- a/vendor/github.com/butuzov/ireturn/analyzer/internal/config/reject.go
+++ b/vendor/github.com/butuzov/ireturn/analyzer/internal/config/reject.go
@@ -2,7 +2,7 @@ package config
import "github.com/butuzov/ireturn/analyzer/internal/types"
-// rejectConfig specifies a list of interfaces (keywords, patters and regular expressions)
+// rejectConfig specifies a list of interfaces (keywords, patterns and regular expressions)
// that are rejected by ireturn as valid to return, any non listed interface are allowed.
type rejectConfig struct {
*defaultConfig
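
The allow/reject lists above decide which returned interfaces ireturn accepts ("accept interfaces, return concrete types"); anything not permitted by the configured list, such as a project-defined interface, is reported. A sketch of the pattern with made-up names, assuming a default configuration that does not allow the custom interface:

```go
package main

import "fmt"

type Store interface {
	Get(key string) (string, bool)
}

type memStore struct{ m map[string]string }

func (s *memStore) Get(key string) (string, bool) { v, ok := s.m[key]; return v, ok }

// Typically reported by ireturn: the constructor returns an interface type.
func NewStoreIface() Store { return &memStore{m: map[string]string{}} }

// Preferred shape: return the concrete type; callers can still use it as a Store.
func NewStore() *memStore { return &memStore{m: map[string]string{}} }

func main() {
	var s Store = NewStore()
	fmt.Println(s.Get("missing"))
}
```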
diff --git a/vendor/github.com/butuzov/ireturn/analyzer/internal/types/iface.go b/vendor/github.com/butuzov/ireturn/analyzer/internal/types/iface.go
index 5e576374d5..0f4286515f 100644
--- a/vendor/github.com/butuzov/ireturn/analyzer/internal/types/iface.go
+++ b/vendor/github.com/butuzov/ireturn/analyzer/internal/types/iface.go
@@ -47,7 +47,7 @@ func (i IFace) HashString() string {
}
func (i IFace) ExportDiagnostic() analysis.Diagnostic {
- return analysis.Diagnostic{ //nolint: exhaustivestruct
+ return analysis.Diagnostic{
Pos: i.Pos,
Message: i.String(),
}
diff --git a/vendor/github.com/butuzov/mirror/MIRROR_FUNCS.md b/vendor/github.com/butuzov/mirror/MIRROR_FUNCS.md
index 3dcc01e960..da30c8e00f 100644
--- a/vendor/github.com/butuzov/mirror/MIRROR_FUNCS.md
+++ b/vendor/github.com/butuzov/mirror/MIRROR_FUNCS.md
@@ -1,201 +1,55 @@
-
-func (*bufio.Writer) Write([]byte) (int, error) |
-func (*bufio.Writer) WriteString(string) (int, error) |
-
-
-func (*bufio.Writer) WriteRune(rune) (int, error) |
-func (*bufio.Writer) WriteString(string) (int, error) |
-
-
-func (*bytes.Buffer) Write([]byte) (int, error) |
-func (*bytes.Buffer) WriteString(string) (int, error) |
-
-
-func (*bytes.Buffer) WriteRune(rune) (int, error) |
-func (*bytes.Buffer) WriteString(string) (int, error) |
-
-
-func bytes.Compare([]byte, []byte) int |
-func strings.Compare(string, string) int |
-
-
-func bytes.Contains([]byte, []byte) bool |
-func strings.Contains(string, string) bool |
-
-
-func bytes.ContainsAny([]byte, string) bool |
-func strings.ContainsAny(string, string) bool |
-
-
-func bytes.ContainsRune([]byte, byte) bool |
-func strings.ContainsRune(string, byte) bool |
-
-
-func bytes.Count([]byte, []byte) int |
-func strings.Count(string, string) int |
-
-
-func bytes.EqualFold([]byte, []byte) bool |
-func strings.EqualFold(string, string) bool |
-
-
-func bytes.HasPrefix([]byte, []byte) bool |
-func strings.HasPrefix(string, string) bool |
-
-
-func bytes.HasSuffix([]byte, []byte) bool |
-func strings.HasSuffix(string, string) bool |
-
-
-func bytes.Index([]byte, []byte) int |
-func strings.Index(string, string) int |
-
-
-func bytes.IndexAny([]byte, string) int |
-func strings.IndexAny(string, string) int |
-
-
-func bytes.IndexByte([]byte, byte) int |
-func strings.IndexByte(string, byte) int |
-
-
-func bytes.IndexFunc([]byte, func(rune) bool) int |
-func strings.IndexFunc(string, func(rune) bool) int |
-
-
-func bytes.IndexRune([]byte, rune) int |
-func strings.IndexRune(string, rune) int |
-
-
-func bytes.LastIndex([]byte, []byte) int |
-func strings.LastIndex(string, string) int |
-
-
-func bytes.LastIndexAny([]byte, string) int |
-func strings.LastIndexAny(string, string) int |
-
-
-func bytes.LastIndexByte([]byte, byte) int |
-func strings.LastIndexByte(string, byte) int |
-
-
-func bytes.LastIndexFunc([]byte, func(rune) bool) int |
-func strings.LastIndexFunc(string, func(rune) bool) int |
-
-
-func bytes.NewBuffer([]byte) *bytes.Buffer |
-func bytes.NewBufferString(string) *bytes.Buffer |
-
-
-func (*httptest.ResponseRecorder) Write([]byte) (int, error) |
-func (*httptest.ResponseRecorder) WriteString(string) (int, error) |
-
-
-func (*maphash.Hash) Write([]byte) (int, error) |
-func (*maphash.Hash) WriteString(string) (int, error) |
-
-
-func (*os.File) Write([]byte) (int, error) |
-func (*os.File) WriteString(string) (int, error) |
-
-
-func regexp.Match(string, []byte) (bool, error) |
-func regexp.MatchString(string, string) (bool, error) |
-
-
-func (*regexp.Regexp) FindAllIndex([]byte, int) [][]int |
-func (*regexp.Regexp) FindAllStringIndex(string, int) [][]int |
-
-
-func (*regexp.Regexp) FindAllSubmatchIndex([]byte, int) [][]int |
-func (*regexp.Regexp) FindAllStringSubmatchIndex(string, int) [][]int |
-
-
-func (*regexp.Regexp) FindIndex([]byte) []int |
-func (*regexp.Regexp) FindStringIndex(string) []int |
-
-
-func (*regexp.Regexp) FindSubmatchIndex([]byte) []int |
-func (*regexp.Regexp) FindStringSubmatchIndex(string) []int |
-
-
-func (*regexp.Regexp) Match([]byte) bool |
-func (*regexp.Regexp) MatchString(string) bool |
-
-
-func (*strings.Builder) Write([]byte) (int, error) |
-func (*strings.Builder) WriteString(string) (int, error) |
-
-
-func (*strings.Builder) WriteRune(rune) (int, error) |
-func (*strings.Builder) WriteString(string) (int, error) |
-
-
-func strings.Compare(string) int |
-func bytes.Compare([]byte) int |
-
-
-func strings.Contains(string) bool |
-func bytes.Contains([]byte) bool |
-
-
-func strings.ContainsAny(string) bool |
-func bytes.ContainsAny([]byte) bool |
-
-
-func strings.ContainsRune(string) bool |
-func bytes.ContainsRune([]byte) bool |
-
-
-func strings.EqualFold(string) bool |
-func bytes.EqualFold([]byte) bool |
-
-
-func strings.HasPrefix(string) bool |
-func bytes.HasPrefix([]byte) bool |
-
-
-func strings.HasSuffix(string) bool |
-func bytes.HasSuffix([]byte) bool |
-
-
-func strings.Index(string) int |
-func bytes.Index([]byte) int |
-
-
-func strings.IndexFunc(string, func(r rune) bool) int |
-func bytes.IndexFunc([]byte, func(r rune) bool) int |
-
-
-func strings.LastIndex(string) int |
-func bytes.LastIndex([]byte) int |
-
-
-func strings.LastIndexAny(string) int |
-func bytes.LastIndexAny([]byte) int |
-
-
-func strings.LastIndexFunc(string, func(r rune) bool) int |
-func bytes.LastIndexFunc([]byte, func(r rune) bool) int |
-
-
-func utf8.DecodeLastRune([]byte) (rune, int) |
-func utf8.DecodeLastRuneInString(string) (rune, int) |
-
-
-func utf8.DecodeRune([]byte) (rune, int) |
-func utf8.DecodeRuneInString(string) (rune, int) |
-
-
-func utf8.FullRune([]byte) bool |
-func utf8.FullRuneInString(string) bool |
-
-
-func utf8.RuneCount([]byte) int |
-func utf8.RuneCountInString(string) int |
-
-
-func utf8.Valid([]byte) bool |
-func utf8.ValidString(string) bool |
-
+
+| Function | Mirror |
+|----------|--------|
+| `func (*bufio.Writer) Write([]byte) (int, error)` | `func (*bufio.Writer) WriteString(string) (int, error)` |
+| `func (*bufio.Writer) WriteRune(rune) (int, error)` | `func (*bufio.Writer) WriteString(string) (int, error)` |
+| `func (*bytes.Buffer) Write([]byte) (int, error)` | `func (*bytes.Buffer) WriteString(string) (int, error)` |
+| `func (*bytes.Buffer) WriteRune(rune) (int, error)` | `func (*bytes.Buffer) WriteString(string) (int, error)` |
+| `func bytes.Compare([]byte, []byte) int` | `func strings.Compare(string, string) int` |
+| `func bytes.Contains([]byte, []byte) bool` | `func strings.Contains(string, string) bool` |
+| `func bytes.ContainsAny([]byte, string) bool` | `func strings.ContainsAny(string, string) bool` |
+| `func bytes.ContainsRune([]byte, byte) bool` | `func strings.ContainsRune(string, byte) bool` |
+| `func bytes.Count([]byte, []byte) int` | `func strings.Count(string, string) int` |
+| `func bytes.EqualFold([]byte, []byte) bool` | `func strings.EqualFold(string, string) bool` |
+| `func bytes.HasPrefix([]byte, []byte) bool` | `func strings.HasPrefix(string, string) bool` |
+| `func bytes.HasSuffix([]byte, []byte) bool` | `func strings.HasSuffix(string, string) bool` |
+| `func bytes.Index([]byte, []byte) int` | `func strings.Index(string, string) int` |
+| `func bytes.IndexAny([]byte, string) int` | `func strings.IndexAny(string, string) int` |
+| `func bytes.IndexByte([]byte, byte) int` | `func strings.IndexByte(string, byte) int` |
+| `func bytes.IndexFunc([]byte, func(rune) bool) int` | `func strings.IndexFunc(string, func(rune) bool) int` |
+| `func bytes.IndexRune([]byte, rune) int` | `func strings.IndexRune(string, rune) int` |
+| `func bytes.LastIndex([]byte, []byte) int` | `func strings.LastIndex(string, string) int` |
+| `func bytes.LastIndexAny([]byte, string) int` | `func strings.LastIndexAny(string, string) int` |
+| `func bytes.LastIndexByte([]byte, byte) int` | `func strings.LastIndexByte(string, byte) int` |
+| `func bytes.LastIndexFunc([]byte, func(rune) bool) int` | `func strings.LastIndexFunc(string, func(rune) bool) int` |
+| `func bytes.NewBuffer([]byte) *bytes.Buffer` | `func bytes.NewBufferString(string) *bytes.Buffer` |
+| `func (*httptest.ResponseRecorder) Write([]byte) (int, error)` | `func (*httptest.ResponseRecorder) WriteString(string) (int, error)` |
+| `func maphash.Bytes([]byte) uint64` | `func maphash.String(string) uint64` |
+| `func (*maphash.Hash) Write([]byte) (int, error)` | `func (*maphash.Hash) WriteString(string) (int, error)` |
+| `func (*os.File) Write([]byte) (int, error)` | `func (*os.File) WriteString(string) (int, error)` |
+| `func regexp.Match(string, []byte) (bool, error)` | `func regexp.MatchString(string, string) (bool, error)` |
+| `func (*regexp.Regexp) FindAllIndex([]byte, int) [][]int` | `func (*regexp.Regexp) FindAllStringIndex(string, int) [][]int` |
+| `func (*regexp.Regexp) FindAllSubmatchIndex([]byte, int) [][]int` | `func (*regexp.Regexp) FindAllStringSubmatchIndex(string, int) [][]int` |
+| `func (*regexp.Regexp) FindIndex([]byte) []int` | `func (*regexp.Regexp) FindStringIndex(string) []int` |
+| `func (*regexp.Regexp) FindSubmatchIndex([]byte) []int` | `func (*regexp.Regexp) FindStringSubmatchIndex(string) []int` |
+| `func (*regexp.Regexp) Match([]byte) bool` | `func (*regexp.Regexp) MatchString(string) bool` |
+| `func (*strings.Builder) Write([]byte) (int, error)` | `func (*strings.Builder) WriteString(string) (int, error)` |
+| `func (*strings.Builder) WriteRune(rune) (int, error)` | `func (*strings.Builder) WriteString(string) (int, error)` |
+| `func strings.Compare(string) int` | `func bytes.Compare([]byte) int` |
+| `func strings.Contains(string) bool` | `func bytes.Contains([]byte) bool` |
+| `func strings.ContainsAny(string) bool` | `func bytes.ContainsAny([]byte) bool` |
+| `func strings.ContainsRune(string) bool` | `func bytes.ContainsRune([]byte) bool` |
+| `func strings.EqualFold(string) bool` | `func bytes.EqualFold([]byte) bool` |
+| `func strings.HasPrefix(string) bool` | `func bytes.HasPrefix([]byte) bool` |
+| `func strings.HasSuffix(string) bool` | `func bytes.HasSuffix([]byte) bool` |
+| `func strings.Index(string) int` | `func bytes.Index([]byte) int` |
+| `func strings.IndexFunc(string, func(r rune) bool) int` | `func bytes.IndexFunc([]byte, func(r rune) bool) int` |
+| `func strings.LastIndex(string) int` | `func bytes.LastIndex([]byte) int` |
+| `func strings.LastIndexAny(string) int` | `func bytes.LastIndexAny([]byte) int` |
+| `func strings.LastIndexFunc(string, func(r rune) bool) int` | `func bytes.LastIndexFunc([]byte, func(r rune) bool) int` |
+| `func utf8.DecodeLastRune([]byte) (rune, int)` | `func utf8.DecodeLastRuneInString(string) (rune, int)` |
+| `func utf8.DecodeRune([]byte) (rune, int)` | `func utf8.DecodeRuneInString(string) (rune, int)` |
+| `func utf8.FullRune([]byte) bool` | `func utf8.FullRuneInString(string) bool` |
+| `func utf8.RuneCount([]byte) int` | `func utf8.RuneCountInString(string) int` |
+| `func utf8.Valid([]byte) bool` | `func utf8.ValidString(string) bool` |
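
Each row pairs a []byte-based call with its string-based mirror; calling the variant that matches the data you already hold avoids an intermediate `[]byte(s)` or `string(b)` allocation, which is what the mirror linter reports. A small before/after sketch (standalone, illustrative values):

```go
package main

import (
	"bytes"
	"fmt"
	"strings"
)

func main() {
	s := "hello, world"
	b := []byte("hello, world")

	// Reported: converts the string to []byte just to call the bytes variant.
	_ = bytes.Contains([]byte(s), []byte("world"))

	// Preferred mirror: operate on the string directly.
	fmt.Println(strings.Contains(s, "world")) // true

	// And the other direction: keep []byte data in the bytes package.
	fmt.Println(bytes.HasPrefix(b, []byte("hello"))) // true
}
```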
diff --git a/vendor/github.com/butuzov/mirror/Makefile b/vendor/github.com/butuzov/mirror/Makefile
index ac267208fb..dab6f160ae 100644
--- a/vendor/github.com/butuzov/mirror/Makefile
+++ b/vendor/github.com/butuzov/mirror/Makefile
@@ -10,7 +10,8 @@ endef
# Generate Artifacts ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
generate: ## Generate Assets
- $(MAKE)
+ $(MAKE) generate-tests
+ $(MAKE) generate-mirror-table
generate-tests: ## Generates Assets at testdata
go run ./cmd/internal/tests/ "$(PWD)/testdata"
@@ -52,7 +53,7 @@ tests-summary: bin/tparse
lints: ## Run golangci-lint
lints: bin/golangci-lint
lints:
- golangci-lint run --no-config ./... --skip-dirs "^(cmd|testdata)"
+ golangci-lint run --no-config ./... --exclude-dirs "^(cmd|testdata)"
cover: ## Run Coverage
@@ -71,8 +72,8 @@ bin/tparse: INSTALL_URL=github.com/mfridman/tparse@v0.13.2
bin/tparse:
$(call install_go_bin, tparse, $(INSTALL_URL))
-bin/golangci-lint: ## Installs golangci-lint@v1.55.2 (if not exists)
-bin/golangci-lint: INSTALL_URL=github.com/golangci/golangci-lint@v1.55.2
+bin/golangci-lint: ## Installs golangci-lint@v1.62.0 (if not exists)
+bin/golangci-lint: INSTALL_URL=github.com/golangci/golangci-lint@v1.62.0
bin/golangci-lint:
$(call install_go_bin, golangci-lint, $(INSTALL_URL))
@@ -99,7 +100,7 @@ help: dep-gawk
@ echo ""
-# Helper Mehtods ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+# Helper Methods ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
dep-gawk:
@ if [ -z "$(shell command -v gawk)" ]; then \
if [ -x /usr/local/bin/brew ]; then $(MAKE) _brew_gawk_install; exit 0; fi; \
@@ -111,21 +112,21 @@ dep-gawk:
fi
_brew_gawk_install:
- @ echo "Instaling gawk using brew... "
+ @ echo "Installing gawk using brew... "
@ brew install gawk --quiet
@ echo "done"
_ubuntu_gawk_install:
- @ echo "Instaling gawk using apt-get... "
+ @ echo "Installing gawk using apt-get... "
@ apt-get -q install gawk -y
@ echo "done"
_alpine_gawk_install:
- @ echo "Instaling gawk using yum... "
+ @ echo "Installing gawk using yum... "
@ apk add --update --no-cache gawk
@ echo "done"
_centos_gawk_install:
- @ echo "Instaling gawk using yum... "
+ @ echo "Installing gawk using yum... "
@ yum install -q -y gawk;
@ echo "done"
diff --git a/vendor/github.com/butuzov/mirror/analyzer.go b/vendor/github.com/butuzov/mirror/analyzer.go
index 13ded46c6d..b15019ce1f 100644
--- a/vendor/github.com/butuzov/mirror/analyzer.go
+++ b/vendor/github.com/butuzov/mirror/analyzer.go
@@ -44,9 +44,9 @@ func Run(pass *analysis.Pass, withTests bool) []*checker.Violation {
BytesFunctions, BytesBufferMethods,
RegexpFunctions, RegexpRegexpMethods,
StringFunctions, StringsBuilderMethods,
+ MaphashMethods, MaphashFunctions,
BufioMethods, HTTPTestMethods,
- OsFileMethods, MaphashMethods,
- UTF8Functions,
+ OsFileMethods, UTF8Functions,
)
check.Type = checker.WrapType(pass.TypesInfo)
diff --git a/vendor/github.com/butuzov/mirror/checkers_maphash.go b/vendor/github.com/butuzov/mirror/checkers_maphash.go
index 0aa43ff7bb..345a64123e 100644
--- a/vendor/github.com/butuzov/mirror/checkers_maphash.go
+++ b/vendor/github.com/butuzov/mirror/checkers_maphash.go
@@ -2,35 +2,66 @@ package mirror
import "github.com/butuzov/mirror/internal/checker"
-var MaphashMethods = []checker.Violation{
- { // (*hash/maphash).Write
- Targets: checker.Bytes,
- Type: checker.Method,
- Package: "hash/maphash",
- Struct: "Hash",
- Caller: "Write",
- Args: []int{0},
- AltCaller: "WriteString",
+var (
+ MaphashFunctions = []checker.Violation{
+ { // maphash.Bytes
+ Targets: checker.Bytes,
+ Type: checker.Function,
+ Package: "hash/maphash",
+ Caller: "Bytes",
+ Args: []int{1},
+ AltCaller: "String",
- Generate: &checker.Generate{
- PreCondition: `h := maphash.Hash{}`,
- Pattern: `Write($0)`,
- Returns: []string{"int", "error"},
+ Generate: &checker.Generate{
+ Pattern: `Bytes(maphash.MakeSeed(), $0)`,
+ Returns: []string{"uint64"},
+ },
},
- },
- { // (*hash/maphash).WriteString
- Targets: checker.Strings,
- Type: checker.Method,
- Package: "hash/maphash",
- Struct: "Hash",
- Caller: "WriteString",
- Args: []int{0},
- AltCaller: "Write",
+ { // maphash.String
+ Targets: checker.Strings,
+ Type: checker.Function,
+ Package: "hash/maphash",
+ Caller: "String",
+ Args: []int{1},
+ AltCaller: "Bytes",
- Generate: &checker.Generate{
- PreCondition: `h := maphash.Hash{}`,
- Pattern: `WriteString($0)`,
- Returns: []string{"int", "error"},
+ Generate: &checker.Generate{
+ Pattern: `String(maphash.MakeSeed(), $0)`,
+ Returns: []string{"uint64"},
+ },
},
- },
-}
+ }
+
+ MaphashMethods = []checker.Violation{
+ { // (*hash/maphash).Write
+ Targets: checker.Bytes,
+ Type: checker.Method,
+ Package: "hash/maphash",
+ Struct: "Hash",
+ Caller: "Write",
+ Args: []int{0},
+ AltCaller: "WriteString",
+
+ Generate: &checker.Generate{
+ PreCondition: `h := maphash.Hash{}`,
+ Pattern: `Write($0)`,
+ Returns: []string{"int", "error"},
+ },
+ },
+ { // (*hash/maphash).WriteString
+ Targets: checker.Strings,
+ Type: checker.Method,
+ Package: "hash/maphash",
+ Struct: "Hash",
+ Caller: "WriteString",
+ Args: []int{0},
+ AltCaller: "Write",
+
+ Generate: &checker.Generate{
+ PreCondition: `h := maphash.Hash{}`,
+ Pattern: `WriteString($0)`,
+ Returns: []string{"int", "error"},
+ },
+ },
+ }
+)
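
The new MaphashFunctions entries cover the top-level `maphash.Bytes` and `maphash.String` helpers (Go 1.19+); argument index 1 is the hashed value because index 0 is the seed, which is what `Args: []int{1}` encodes above. A short sketch of the pair being mirrored:

```go
package main

import (
	"fmt"
	"hash/maphash"
)

func main() {
	seed := maphash.MakeSeed()
	s := "hello"

	// Reported by mirror: hashing a string through an extra []byte conversion.
	h1 := maphash.Bytes(seed, []byte(s))

	// Suggested mirror: hash the string directly with the same seed.
	h2 := maphash.String(seed, s)

	fmt.Println(h1 == h2) // true: both hash the same bytes under one seed
}
```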
diff --git a/vendor/github.com/butuzov/mirror/internal/checker/checker.go b/vendor/github.com/butuzov/mirror/internal/checker/checker.go
index c1a9416314..fb9ba41729 100644
--- a/vendor/github.com/butuzov/mirror/internal/checker/checker.go
+++ b/vendor/github.com/butuzov/mirror/internal/checker/checker.go
@@ -9,12 +9,12 @@ import (
"strings"
)
-// Checker will perform standart check on package and its methods.
+// Checker will perform standard check on package and its methods.
type Checker struct {
Violations []Violation // List of available violations
Packages map[string][]int // Storing indexes of Violations per pkg/kg.Struct
Type func(ast.Expr) string // Type Checker closure.
- Print func(ast.Node) []byte // String representation of the expresion.
+ Print func(ast.Node) []byte // String representation of the expression.
}
func New(violations ...[]Violation) Checker {
@@ -76,7 +76,7 @@ func (c *Checker) Handle(v *Violation, ce *ast.CallExpr) (map[int]ast.Expr, bool
continue
}
- // is it convertsion call
+ // is it conversion call
if !c.callConverts(call) {
continue
}
diff --git a/vendor/github.com/butuzov/mirror/internal/checker/violation.go b/vendor/github.com/butuzov/mirror/internal/checker/violation.go
index 3d8acf1415..c2c1492086 100644
--- a/vendor/github.com/butuzov/mirror/internal/checker/violation.go
+++ b/vendor/github.com/butuzov/mirror/internal/checker/violation.go
@@ -28,7 +28,7 @@ const (
UntypedRune string = "untyped rune"
)
-// Violation describs what message we going to give to a particular code violation
+// Violation describes what message we are going to give to a particular code violation
type Violation struct {
Type ViolationType //
Args []int // Indexes of the arguments needs to be checked
@@ -143,7 +143,7 @@ func (v *Violation) Diagnostic(fSet *token.FileSet) analysis.Diagnostic {
v.AltPackage = v.Package
}
- // Hooray! we dont need to change package and redo imports.
+ // Hooray! we don't need to change package and redo imports.
if v.Type == Function && v.AltPackage == v.Package && noNl {
diagnostic.SuggestedFixes = []analysis.SuggestedFix{{
Message: "Fix Issue With",
@@ -166,7 +166,7 @@ type GolangIssue struct {
Original string
}
-// Issue intended to be used only within `golangci-lint`, bu you can use use it
+// Issue intended to be used only within `golangci-lint`, but you can use it
// alongside Diagnostic if you wish.
func (v *Violation) Issue(fSet *token.FileSet) GolangIssue {
issue := GolangIssue{
diff --git a/vendor/github.com/butuzov/mirror/readme.md b/vendor/github.com/butuzov/mirror/readme.md
index f830ea72ea..f5cfa47a68 100644
--- a/vendor/github.com/butuzov/mirror/readme.md
+++ b/vendor/github.com/butuzov/mirror/readme.md
@@ -2,6 +2,13 @@
`mirror` suggests use of alternative functions/methods in order to gain performance boosts by avoiding unnecessary `[]byte/string` conversion calls. See [MIRROR_FUNCS.md](MIRROR_FUNCS.md) list of mirror functions you can use in go's stdlib.
+---
+
+[](https://u24.gov.ua/)
+[](https://github.com/butuzov)
+
+---
+
## Linter Use Cases
### `github.com/argoproj/argo-cd`
@@ -86,13 +93,13 @@ util/cert/cert.go:82:10: avoid allocations with (*regexp.Regexp).MatchString (mi
- flag `--tests` (e.g. `--tests=false`)
- flag `--skip-files` (e.g. `--skip-files="_test.go"`)
- - yaml confguration `run.skip-files`:
+ - yaml configuration `run.skip-files`:
```yaml
run:
skip-files:
- '(.+)_test\.go'
```
- - yaml confguration `issues.exclude-rules`:
+ - yaml configuration `issues.exclude-rules`:
```yaml
issues:
exclude-rules:
@@ -106,7 +113,7 @@ util/cert/cert.go:82:10: avoid allocations with (*regexp.Regexp).MatchString (mi
```shell
# Update Assets (testdata/(strings|bytes|os|utf8|maphash|regexp|bufio).go)
-(task|make) generated
+(task|make) generate
# Run Tests
(task|make) tests
# Lint Code
diff --git a/vendor/github.com/catenacyber/perfsprint/analyzer/analyzer.go b/vendor/github.com/catenacyber/perfsprint/analyzer/analyzer.go
index 543b4bdbc7..866d11083a 100644
--- a/vendor/github.com/catenacyber/perfsprint/analyzer/analyzer.go
+++ b/vendor/github.com/catenacyber/perfsprint/analyzer/analyzer.go
@@ -16,23 +16,41 @@ import (
"golang.org/x/tools/go/analysis"
)
+type optionInt struct {
+ enabled bool
+ intConv bool
+}
+
+type optionErr struct {
+ enabled bool
+ errError bool
+ errorf bool
+}
+
+type optionStr struct {
+ enabled bool
+ sprintf1 bool
+ strconcat bool
+}
+
type perfSprint struct {
- intConv bool
- errError bool
- errorf bool
- sprintf1 bool
+ intFormat optionInt
+ errFormat optionErr
+ strFormat optionStr
+
+ boolFormat bool
+ hexFormat bool
fiximports bool
- strconcat bool
}
func newPerfSprint() *perfSprint {
return &perfSprint{
- intConv: true,
- errError: false,
- errorf: true,
- sprintf1: true,
+ intFormat: optionInt{enabled: true, intConv: true},
+ errFormat: optionErr{enabled: true, errError: false, errorf: true},
+ strFormat: optionStr{enabled: true, sprintf1: true, strconcat: true},
+ boolFormat: true,
+ hexFormat: true,
fiximports: true,
- strconcat: true,
}
}
@@ -40,27 +58,32 @@ func New() *analysis.Analyzer {
n := newPerfSprint()
r := &analysis.Analyzer{
Name: "perfsprint",
+ URL: "https://github.com/catenacyber/perfsprint",
Doc: "Checks that fmt.Sprintf can be replaced with a faster alternative.",
Run: n.run,
Requires: []*analysis.Analyzer{inspect.Analyzer},
}
- r.Flags.BoolVar(&n.intConv, "int-conversion", true, "optimizes even if it requires an int or uint type cast")
- r.Flags.BoolVar(&n.errError, "err-error", false, "optimizes into err.Error() even if it is only equivalent for non-nil errors")
- r.Flags.BoolVar(&n.errorf, "errorf", true, "optimizes fmt.Errorf")
- r.Flags.BoolVar(&n.sprintf1, "sprintf1", true, "optimizes fmt.Sprintf with only one argument")
- r.Flags.BoolVar(&n.fiximports, "fiximports", true, "fix needed imports from other fixes")
- r.Flags.BoolVar(&n.strconcat, "strconcat", true, "optimizes into strings concatenation")
+ r.Flags.BoolVar(&n.intFormat.enabled, "integer-format", n.intFormat.enabled, "enable/disable optimization of integer formatting")
+ r.Flags.BoolVar(&n.intFormat.intConv, "int-conversion", n.intFormat.intConv, "optimizes even if it requires an int or uint type cast")
+ r.Flags.BoolVar(&n.errFormat.enabled, "error-format", n.errFormat.enabled, "enable/disable optimization of error formatting")
+ r.Flags.BoolVar(&n.errFormat.errError, "err-error", n.errFormat.errError, "optimizes into err.Error() even if it is only equivalent for non-nil errors")
+ r.Flags.BoolVar(&n.errFormat.errorf, "errorf", n.errFormat.errorf, "optimizes fmt.Errorf")
+ r.Flags.BoolVar(&n.boolFormat, "bool-format", n.boolFormat, "enable/disable optimization of bool formatting")
+ r.Flags.BoolVar(&n.hexFormat, "hex-format", n.hexFormat, "enable/disable optimization of hex formatting")
+ r.Flags.BoolVar(&n.strFormat.enabled, "string-format", n.strFormat.enabled, "enable/disable optimization of string formatting")
+ r.Flags.BoolVar(&n.strFormat.sprintf1, "sprintf1", n.strFormat.sprintf1, "optimizes fmt.Sprintf with only one argument")
+ r.Flags.BoolVar(&n.strFormat.strconcat, "strconcat", n.strFormat.strconcat, "optimizes into strings concatenation")
+ r.Flags.BoolVar(&n.fiximports, "fiximports", n.fiximports, "fix needed imports from other fixes")
+
return r
}
// true if verb is a format string that could be replaced with concatenation.
func isConcatable(verb string) bool {
- hasPrefix :=
- (strings.HasPrefix(verb, "%s") && !strings.Contains(verb, "%[1]s")) ||
- (strings.HasPrefix(verb, "%[1]s") && !strings.Contains(verb, "%s"))
- hasSuffix :=
- (strings.HasSuffix(verb, "%s") && !strings.Contains(verb, "%[1]s")) ||
- (strings.HasSuffix(verb, "%[1]s") && !strings.Contains(verb, "%s"))
+ hasPrefix := (strings.HasPrefix(verb, "%s") && !strings.Contains(verb, "%[1]s")) ||
+ (strings.HasPrefix(verb, "%[1]s") && !strings.Contains(verb, "%s"))
+ hasSuffix := (strings.HasSuffix(verb, "%s") && !strings.Contains(verb, "%[1]s")) ||
+ (strings.HasSuffix(verb, "%[1]s") && !strings.Contains(verb, "%s"))
if strings.Count(verb, "%[1]s") > 1 {
return false
@@ -69,6 +92,18 @@ func isConcatable(verb string) bool {
}
func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
+ if !n.intFormat.enabled {
+ n.intFormat.intConv = false
+ }
+ if !n.errFormat.enabled {
+ n.errFormat.errError = false
+ n.errFormat.errorf = false
+ }
+ if !n.strFormat.enabled {
+ n.strFormat.sprintf1 = false
+ n.strFormat.strconcat = false
+ }
+
var fmtSprintObj, fmtSprintfObj, fmtErrorfObj types.Object
for _, pkg := range pass.Pkg.Imports() {
if pkg.Path() == "fmt" {
@@ -81,7 +116,7 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
return nil, nil
}
removedFmtUsages := make(map[string]int)
- neededPackages := make(map[string]map[string]bool)
+ neededPackages := make(map[string]map[string]struct{})
insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
nodeFilter := []ast.Node{
@@ -102,28 +137,20 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
err error
)
switch {
- case calledObj == fmtErrorfObj && len(call.Args) == 1:
- if n.errorf {
- fn = "fmt.Errorf"
- verb = "%s"
- value = call.Args[0]
- } else {
- return
- }
+ case calledObj == fmtErrorfObj && len(call.Args) == 1 && n.errFormat.errorf:
+ fn = "fmt.Errorf"
+ verb = "%s"
+ value = call.Args[0]
case calledObj == fmtSprintObj && len(call.Args) == 1:
fn = "fmt.Sprint"
verb = "%v"
value = call.Args[0]
- case calledObj == fmtSprintfObj && len(call.Args) == 1:
- if n.sprintf1 {
- fn = "fmt.Sprintf"
- verb = "%s"
- value = call.Args[0]
- } else {
- return
- }
+ case calledObj == fmtSprintfObj && len(call.Args) == 1 && n.strFormat.sprintf1:
+ fn = "fmt.Sprintf"
+ verb = "%s"
+ value = call.Args[0]
case calledObj == fmtSprintfObj && len(call.Args) == 2:
verbLit, ok := call.Args[0].(*ast.BasicLit)
@@ -149,7 +176,7 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
switch verb {
default:
- if fn == "fmt.Sprintf" && isConcatable(verb) && n.strconcat {
+ if fn == "fmt.Sprintf" && isConcatable(verb) && n.strFormat.strconcat {
break
}
return
@@ -164,18 +191,17 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
switch {
case isBasicType(valueType, types.String) && oneOf(verb, "%v", "%s"):
fname := pass.Fset.File(call.Pos()).Name()
- _, ok := neededPackages[fname]
- if !ok {
- neededPackages[fname] = make(map[string]bool)
+ if _, ok := neededPackages[fname]; !ok {
+ neededPackages[fname] = make(map[string]struct{})
}
removedFmtUsages[fname]++
- if fn == "fmt.Errorf" {
- neededPackages[fname]["errors"] = true
- d = &analysis.Diagnostic{
- Pos: call.Pos(),
- End: call.End(),
- Message: fn + " can be replaced with errors.New",
- SuggestedFixes: []analysis.SuggestedFix{
+ if fn == "fmt.Errorf" && n.errFormat.enabled {
+ neededPackages[fname]["errors"] = struct{}{}
+ d = newAnalysisDiagnostic(
+ "", // TODO: precise checker
+ call,
+ fn+" can be replaced with errors.New",
+ []analysis.SuggestedFix{
{
Message: "Use errors.New",
TextEdits: []analysis.TextEdit{{
@@ -185,13 +211,13 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
}},
},
},
- }
- } else {
- d = &analysis.Diagnostic{
- Pos: call.Pos(),
- End: call.End(),
- Message: fn + " can be replaced with just using the string",
- SuggestedFixes: []analysis.SuggestedFix{
+ )
+ } else if fn != "fmt.Errorf" && n.strFormat.enabled {
+ d = newAnalysisDiagnostic(
+ "", // TODO: precise checker
+ call,
+ fn+" can be replaced with just using the string",
+ []analysis.SuggestedFix{
{
Message: "Just use string value",
TextEdits: []analysis.TextEdit{{
@@ -201,19 +227,19 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
}},
},
},
- }
+ )
}
- case types.Implements(valueType, errIface) && oneOf(verb, "%v", "%s") && n.errError:
+ case types.Implements(valueType, errIface) && oneOf(verb, "%v", "%s") && n.errFormat.errError:
// known false positive if this error is nil
// fmt.Sprint(nil) does not panic like nil.Error() does
errMethodCall := formatNode(pass.Fset, value) + ".Error()"
fname := pass.Fset.File(call.Pos()).Name()
removedFmtUsages[fname]++
- d = &analysis.Diagnostic{
- Pos: call.Pos(),
- End: call.End(),
- Message: fn + " can be replaced with " + errMethodCall,
- SuggestedFixes: []analysis.SuggestedFix{
+ d = newAnalysisDiagnostic(
+ "", // TODO: precise checker
+ call,
+ fn+" can be replaced with "+errMethodCall,
+ []analysis.SuggestedFix{
{
Message: "Use " + errMethodCall,
TextEdits: []analysis.TextEdit{{
@@ -223,21 +249,20 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
}},
},
},
- }
+ )
- case isBasicType(valueType, types.Bool) && oneOf(verb, "%v", "%t"):
+ case isBasicType(valueType, types.Bool) && oneOf(verb, "%v", "%t") && n.boolFormat:
fname := pass.Fset.File(call.Pos()).Name()
removedFmtUsages[fname]++
- _, ok := neededPackages[fname]
- if !ok {
- neededPackages[fname] = make(map[string]bool)
- }
- neededPackages[fname]["strconv"] = true
- d = &analysis.Diagnostic{
- Pos: call.Pos(),
- End: call.End(),
- Message: fn + " can be replaced with faster strconv.FormatBool",
- SuggestedFixes: []analysis.SuggestedFix{
+ if _, ok := neededPackages[fname]; !ok {
+ neededPackages[fname] = make(map[string]struct{})
+ }
+ neededPackages[fname]["strconv"] = struct{}{}
+ d = newAnalysisDiagnostic(
+ "", // TODO: precise checker
+ call,
+ fn+" can be replaced with faster strconv.FormatBool",
+ []analysis.SuggestedFix{
{
Message: "Use strconv.FormatBool",
TextEdits: []analysis.TextEdit{{
@@ -247,9 +272,9 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
}},
},
},
- }
+ )
- case isArray && isBasicType(a.Elem(), types.Uint8) && oneOf(verb, "%x"):
+ case isArray && isBasicType(a.Elem(), types.Uint8) && oneOf(verb, "%x") && n.hexFormat:
if _, ok := value.(*ast.Ident); !ok {
// Doesn't support array literals.
return
@@ -257,16 +282,15 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
fname := pass.Fset.File(call.Pos()).Name()
removedFmtUsages[fname]++
- _, ok := neededPackages[fname]
- if !ok {
- neededPackages[fname] = make(map[string]bool)
- }
- neededPackages[fname]["encoding/hex"] = true
- d = &analysis.Diagnostic{
- Pos: call.Pos(),
- End: call.End(),
- Message: fn + " can be replaced with faster hex.EncodeToString",
- SuggestedFixes: []analysis.SuggestedFix{
+ if _, ok := neededPackages[fname]; !ok {
+ neededPackages[fname] = make(map[string]struct{})
+ }
+ neededPackages[fname]["encoding/hex"] = struct{}{}
+ d = newAnalysisDiagnostic(
+ "", // TODO: precise checker
+ call,
+ fn+" can be replaced with faster hex.EncodeToString",
+ []analysis.SuggestedFix{
{
Message: "Use hex.EncodeToString",
TextEdits: []analysis.TextEdit{
@@ -283,20 +307,19 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
},
},
},
- }
- case isSlice && isBasicType(s.Elem(), types.Uint8) && oneOf(verb, "%x"):
+ )
+ case isSlice && isBasicType(s.Elem(), types.Uint8) && oneOf(verb, "%x") && n.hexFormat:
fname := pass.Fset.File(call.Pos()).Name()
removedFmtUsages[fname]++
- _, ok := neededPackages[fname]
- if !ok {
- neededPackages[fname] = make(map[string]bool)
- }
- neededPackages[fname]["encoding/hex"] = true
- d = &analysis.Diagnostic{
- Pos: call.Pos(),
- End: call.End(),
- Message: fn + " can be replaced with faster hex.EncodeToString",
- SuggestedFixes: []analysis.SuggestedFix{
+ if _, ok := neededPackages[fname]; !ok {
+ neededPackages[fname] = make(map[string]struct{})
+ }
+ neededPackages[fname]["encoding/hex"] = struct{}{}
+ d = newAnalysisDiagnostic(
+ "", // TODO: precise checker
+ call,
+ fn+" can be replaced with faster hex.EncodeToString",
+ []analysis.SuggestedFix{
{
Message: "Use hex.EncodeToString",
TextEdits: []analysis.TextEdit{{
@@ -306,21 +329,20 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
}},
},
},
- }
+ )
- case isBasicType(valueType, types.Int8, types.Int16, types.Int32) && oneOf(verb, "%v", "%d") && n.intConv:
+ case isBasicType(valueType, types.Int8, types.Int16, types.Int32) && oneOf(verb, "%v", "%d") && n.intFormat.intConv:
fname := pass.Fset.File(call.Pos()).Name()
removedFmtUsages[fname]++
- _, ok := neededPackages[fname]
- if !ok {
- neededPackages[fname] = make(map[string]bool)
- }
- neededPackages[fname]["strconv"] = true
- d = &analysis.Diagnostic{
- Pos: call.Pos(),
- End: call.End(),
- Message: fn + " can be replaced with faster strconv.Itoa",
- SuggestedFixes: []analysis.SuggestedFix{
+ if _, ok := neededPackages[fname]; !ok {
+ neededPackages[fname] = make(map[string]struct{})
+ }
+ neededPackages[fname]["strconv"] = struct{}{}
+ d = newAnalysisDiagnostic(
+ "", // TODO: precise checker
+ call,
+ fn+" can be replaced with faster strconv.Itoa",
+ []analysis.SuggestedFix{
{
Message: "Use strconv.Itoa",
TextEdits: []analysis.TextEdit{
@@ -337,20 +359,19 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
},
},
},
- }
- case isBasicType(valueType, types.Int) && oneOf(verb, "%v", "%d"):
+ )
+ case isBasicType(valueType, types.Int) && oneOf(verb, "%v", "%d") && n.intFormat.enabled:
fname := pass.Fset.File(call.Pos()).Name()
removedFmtUsages[fname]++
- _, ok := neededPackages[fname]
- if !ok {
- neededPackages[fname] = make(map[string]bool)
- }
- neededPackages[fname]["strconv"] = true
- d = &analysis.Diagnostic{
- Pos: call.Pos(),
- End: call.End(),
- Message: fn + " can be replaced with faster strconv.Itoa",
- SuggestedFixes: []analysis.SuggestedFix{
+ if _, ok := neededPackages[fname]; !ok {
+ neededPackages[fname] = make(map[string]struct{})
+ }
+ neededPackages[fname]["strconv"] = struct{}{}
+ d = newAnalysisDiagnostic(
+ "", // TODO: precise checker
+ call,
+ fn+" can be replaced with faster strconv.Itoa",
+ []analysis.SuggestedFix{
{
Message: "Use strconv.Itoa",
TextEdits: []analysis.TextEdit{{
@@ -360,20 +381,19 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
}},
},
},
- }
- case isBasicType(valueType, types.Int64) && oneOf(verb, "%v", "%d"):
+ )
+ case isBasicType(valueType, types.Int64) && oneOf(verb, "%v", "%d") && n.intFormat.enabled:
fname := pass.Fset.File(call.Pos()).Name()
removedFmtUsages[fname]++
- _, ok := neededPackages[fname]
- if !ok {
- neededPackages[fname] = make(map[string]bool)
- }
- neededPackages[fname]["strconv"] = true
- d = &analysis.Diagnostic{
- Pos: call.Pos(),
- End: call.End(),
- Message: fn + " can be replaced with faster strconv.FormatInt",
- SuggestedFixes: []analysis.SuggestedFix{
+ if _, ok := neededPackages[fname]; !ok {
+ neededPackages[fname] = make(map[string]struct{})
+ }
+ neededPackages[fname]["strconv"] = struct{}{}
+ d = newAnalysisDiagnostic(
+ "", // TODO: precise checker
+ call,
+ fn+" can be replaced with faster strconv.FormatInt",
+ []analysis.SuggestedFix{
{
Message: "Use strconv.FormatInt",
TextEdits: []analysis.TextEdit{
@@ -390,25 +410,24 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
},
},
},
- }
+ )
- case isBasicType(valueType, types.Uint8, types.Uint16, types.Uint32, types.Uint) && oneOf(verb, "%v", "%d", "%x") && n.intConv:
+ case isBasicType(valueType, types.Uint8, types.Uint16, types.Uint32, types.Uint) && oneOf(verb, "%v", "%d", "%x") && n.intFormat.intConv:
base := []byte("), 10")
if verb == "%x" {
base = []byte("), 16")
}
fname := pass.Fset.File(call.Pos()).Name()
removedFmtUsages[fname]++
- _, ok := neededPackages[fname]
- if !ok {
- neededPackages[fname] = make(map[string]bool)
- }
- neededPackages[fname]["strconv"] = true
- d = &analysis.Diagnostic{
- Pos: call.Pos(),
- End: call.End(),
- Message: fn + " can be replaced with faster strconv.FormatUint",
- SuggestedFixes: []analysis.SuggestedFix{
+ if _, ok := neededPackages[fname]; !ok {
+ neededPackages[fname] = make(map[string]struct{})
+ }
+ neededPackages[fname]["strconv"] = struct{}{}
+ d = newAnalysisDiagnostic(
+ "", // TODO: precise checker
+ call,
+ fn+" can be replaced with faster strconv.FormatUint",
+ []analysis.SuggestedFix{
{
Message: "Use strconv.FormatUint",
TextEdits: []analysis.TextEdit{
@@ -425,24 +444,23 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
},
},
},
- }
- case isBasicType(valueType, types.Uint64) && oneOf(verb, "%v", "%d", "%x"):
+ )
+ case isBasicType(valueType, types.Uint64) && oneOf(verb, "%v", "%d", "%x") && n.intFormat.enabled:
base := []byte(", 10")
if verb == "%x" {
base = []byte(", 16")
}
fname := pass.Fset.File(call.Pos()).Name()
removedFmtUsages[fname]++
- _, ok := neededPackages[fname]
- if !ok {
- neededPackages[fname] = make(map[string]bool)
- }
- neededPackages[fname]["strconv"] = true
- d = &analysis.Diagnostic{
- Pos: call.Pos(),
- End: call.End(),
- Message: fn + " can be replaced with faster strconv.FormatUint",
- SuggestedFixes: []analysis.SuggestedFix{
+ if _, ok := neededPackages[fname]; !ok {
+ neededPackages[fname] = make(map[string]struct{})
+ }
+ neededPackages[fname]["strconv"] = struct{}{}
+ d = newAnalysisDiagnostic(
+ "", // TODO: precise checker
+ call,
+ fn+" can be replaced with faster strconv.FormatUint",
+ []analysis.SuggestedFix{
{
Message: "Use strconv.FormatUint",
TextEdits: []analysis.TextEdit{
@@ -459,8 +477,8 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
},
},
},
- }
- case isBasicType(valueType, types.String) && fn == "fmt.Sprintf" && isConcatable(verb):
+ )
+ case isBasicType(valueType, types.String) && fn == "fmt.Sprintf" && isConcatable(verb) && n.strFormat.enabled:
var fix string
if strings.HasSuffix(verb, "%s") {
fix = strconv.Quote(verb[:len(verb)-2]) + "+" + formatNode(pass.Fset, value)
@@ -473,11 +491,11 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
}
fname := pass.Fset.File(call.Pos()).Name()
removedFmtUsages[fname]++
- d = &analysis.Diagnostic{
- Pos: call.Pos(),
- End: call.End(),
- Message: fn + " can be replaced with string concatenation",
- SuggestedFixes: []analysis.SuggestedFix{
+ d = newAnalysisDiagnostic(
+ "", // TODO: precise checker
+ call,
+ fn+" can be replaced with string concatenation",
+ []analysis.SuggestedFix{
{
Message: "Use string concatenation",
TextEdits: []analysis.TextEdit{{
@@ -487,7 +505,7 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
}},
},
},
- }
+ )
}
if d != nil {
@@ -522,8 +540,7 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
gd := node.(*ast.ImportSpec)
if gd.Path.Value == strconv.Quote(pkg.Path()) {
fname := pass.Fset.File(gd.Pos()).Name()
- _, ok := neededPackages[fname]
- if ok {
+ if _, ok := neededPackages[fname]; ok {
delete(neededPackages[fname], pkg.Path())
}
}
@@ -553,11 +570,11 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
for _, k := range keys {
fix = fix + "\n\t\"" + k + `"`
}
- pass.Report(analysis.Diagnostic{
- Pos: gd.Pos(),
- End: gd.End(),
- Message: "Fix imports",
- SuggestedFixes: []analysis.SuggestedFix{
+ pass.Report(*newAnalysisDiagnostic(
+ "", // TODO: precise checker
+ gd,
+ "Fix imports",
+ []analysis.SuggestedFix{
{
Message: "Fix imports",
TextEdits: []analysis.TextEdit{{
@@ -566,7 +583,7 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) {
NewText: []byte(fix),
}},
},
- }})
+ }))
}
})
}
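
For context on the suggested fixes above: when the argument is a `uint64`, perfsprint proposes replacing the `fmt` call with `strconv.FormatUint` in base 10 or 16. A minimal before/after sketch, illustrative only and not part of the vendored code:

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	var u uint64 = 255

	// Flagged by perfsprint.
	a := fmt.Sprintf("%d", u)
	b := fmt.Sprintf("%x", u)

	// Suggested fix: strconv.FormatUint with the matching base.
	c := strconv.FormatUint(u, 10)
	d := strconv.FormatUint(u, 16)

	fmt.Println(a == c, b == d) // true true
}
```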
diff --git a/vendor/github.com/catenacyber/perfsprint/analyzer/diagnostic.go b/vendor/github.com/catenacyber/perfsprint/analyzer/diagnostic.go
new file mode 100644
index 0000000000..f1d8d090e5
--- /dev/null
+++ b/vendor/github.com/catenacyber/perfsprint/analyzer/diagnostic.go
@@ -0,0 +1,24 @@
+package analyzer
+
+import (
+ "golang.org/x/tools/go/analysis"
+)
+
+func newAnalysisDiagnostic(
+ checker string,
+ analysisRange analysis.Range,
+ message string,
+ suggestedFixes []analysis.SuggestedFix,
+) *analysis.Diagnostic {
+ if checker != "" {
+ message = checker + ": " + message
+ }
+
+ return &analysis.Diagnostic{
+ Pos: analysisRange.Pos(),
+ End: analysisRange.End(),
+ SuggestedFixes: suggestedFixes,
+ Message: message,
+ Category: checker, // Possible hashtag available on the documentation
+ }
+}
diff --git a/vendor/github.com/cespare/xxhash/v2/README.md b/vendor/github.com/cespare/xxhash/v2/README.md
index 8bf0e5b781..33c88305c4 100644
--- a/vendor/github.com/cespare/xxhash/v2/README.md
+++ b/vendor/github.com/cespare/xxhash/v2/README.md
@@ -70,3 +70,5 @@ benchstat <(go test -benchtime 500ms -count 15 -bench 'Sum64$')
- [VictoriaMetrics](https://github.com/VictoriaMetrics/VictoriaMetrics)
- [FreeCache](https://github.com/coocood/freecache)
- [FastCache](https://github.com/VictoriaMetrics/fastcache)
+- [Ristretto](https://github.com/dgraph-io/ristretto)
+- [Badger](https://github.com/dgraph-io/badger)
diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash.go b/vendor/github.com/cespare/xxhash/v2/xxhash.go
index a9e0d45c9d..78bddf1cee 100644
--- a/vendor/github.com/cespare/xxhash/v2/xxhash.go
+++ b/vendor/github.com/cespare/xxhash/v2/xxhash.go
@@ -19,10 +19,13 @@ const (
// Store the primes in an array as well.
//
// The consts are used when possible in Go code to avoid MOVs but we need a
-// contiguous array of the assembly code.
+// contiguous array for the assembly code.
var primes = [...]uint64{prime1, prime2, prime3, prime4, prime5}
// Digest implements hash.Hash64.
+//
+// Note that a zero-valued Digest is not ready to receive writes.
+// Call Reset or create a Digest using New before calling other methods.
type Digest struct {
v1 uint64
v2 uint64
@@ -33,19 +36,31 @@ type Digest struct {
n int // how much of mem is used
}
-// New creates a new Digest that computes the 64-bit xxHash algorithm.
+// New creates a new Digest with a zero seed.
func New() *Digest {
+ return NewWithSeed(0)
+}
+
+// NewWithSeed creates a new Digest with the given seed.
+func NewWithSeed(seed uint64) *Digest {
var d Digest
- d.Reset()
+ d.ResetWithSeed(seed)
return &d
}
// Reset clears the Digest's state so that it can be reused.
+// It uses a seed value of zero.
func (d *Digest) Reset() {
- d.v1 = primes[0] + prime2
- d.v2 = prime2
- d.v3 = 0
- d.v4 = -primes[0]
+ d.ResetWithSeed(0)
+}
+
+// ResetWithSeed clears the Digest's state so that it can be reused.
+// It uses the given seed to initialize the state.
+func (d *Digest) ResetWithSeed(seed uint64) {
+ d.v1 = seed + prime1 + prime2
+ d.v2 = seed + prime2
+ d.v3 = seed
+ d.v4 = seed - prime1
d.total = 0
d.n = 0
}
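
The seeded constructors added in this hunk can be used as follows; a minimal usage sketch assuming the vendored `github.com/cespare/xxhash/v2` API shown above:

```go
package main

import (
	"fmt"

	"github.com/cespare/xxhash/v2"
)

func main() {
	const seed = 42

	// Seeded streaming digest.
	d := xxhash.NewWithSeed(seed)
	d.Write([]byte("hello "))
	d.Write([]byte("world"))
	sum := d.Sum64()

	// Reusing the Digest with the same seed reproduces the hash.
	d.ResetWithSeed(seed)
	d.Write([]byte("hello world"))
	fmt.Println(sum == d.Sum64()) // true
}
```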
diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash_asm.go b/vendor/github.com/cespare/xxhash/v2/xxhash_asm.go
index 9216e0a40c..78f95f2561 100644
--- a/vendor/github.com/cespare/xxhash/v2/xxhash_asm.go
+++ b/vendor/github.com/cespare/xxhash/v2/xxhash_asm.go
@@ -6,7 +6,7 @@
package xxhash
-// Sum64 computes the 64-bit xxHash digest of b.
+// Sum64 computes the 64-bit xxHash digest of b with a zero seed.
//
//go:noescape
func Sum64(b []byte) uint64
diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash_other.go b/vendor/github.com/cespare/xxhash/v2/xxhash_other.go
index 26df13bba4..118e49e819 100644
--- a/vendor/github.com/cespare/xxhash/v2/xxhash_other.go
+++ b/vendor/github.com/cespare/xxhash/v2/xxhash_other.go
@@ -3,7 +3,7 @@
package xxhash
-// Sum64 computes the 64-bit xxHash digest of b.
+// Sum64 computes the 64-bit xxHash digest of b with a zero seed.
func Sum64(b []byte) uint64 {
// A simpler version would be
// d := New()
diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash_safe.go b/vendor/github.com/cespare/xxhash/v2/xxhash_safe.go
index e86f1b5fd8..05f5e7dfe7 100644
--- a/vendor/github.com/cespare/xxhash/v2/xxhash_safe.go
+++ b/vendor/github.com/cespare/xxhash/v2/xxhash_safe.go
@@ -5,7 +5,7 @@
package xxhash
-// Sum64String computes the 64-bit xxHash digest of s.
+// Sum64String computes the 64-bit xxHash digest of s with a zero seed.
func Sum64String(s string) uint64 {
return Sum64([]byte(s))
}
diff --git a/vendor/github.com/cespare/xxhash/v2/xxhash_unsafe.go b/vendor/github.com/cespare/xxhash/v2/xxhash_unsafe.go
index 1c1638fd88..cf9d42aed5 100644
--- a/vendor/github.com/cespare/xxhash/v2/xxhash_unsafe.go
+++ b/vendor/github.com/cespare/xxhash/v2/xxhash_unsafe.go
@@ -33,7 +33,7 @@ import (
//
// See https://github.com/golang/go/issues/42739 for discussion.
-// Sum64String computes the 64-bit xxHash digest of s.
+// Sum64String computes the 64-bit xxHash digest of s with a zero seed.
// It may be faster than Sum64([]byte(s)) by avoiding a copy.
func Sum64String(s string) uint64 {
b := *(*[]byte)(unsafe.Pointer(&sliceHeader{s, len(s)}))
diff --git a/vendor/github.com/ckaznocha/intrange/.golangci.yml b/vendor/github.com/ckaznocha/intrange/.golangci.yml
index 2ad830d1b2..b240f85ce9 100644
--- a/vendor/github.com/ckaznocha/intrange/.golangci.yml
+++ b/vendor/github.com/ckaznocha/intrange/.golangci.yml
@@ -1,6 +1,9 @@
linters-settings:
gci:
- local-prefixes: github.com/ckaznocha/intrange
+ sections:
+ - standard
+ - default
+ - localmodule
gocritic:
enabled-tags:
- diagnostic
@@ -10,10 +13,7 @@ linters-settings:
- style
goimports:
local-prefixes: github.com/ckaznocha/intrange
- golint:
- min-confidence: 0
govet:
- check-shadowing: true
enable:
- asmdecl
- assign
@@ -24,6 +24,7 @@ linters-settings:
- cgocall
- composite
- copylock
+ - copyloopvar
- deepequalerrors
- errorsas
- fieldalignment
@@ -57,18 +58,16 @@ linters:
- dupl
- errcheck
- errorlint
- - exportloopref
- gci
- gochecknoinits
- goconst
- gocritic
- godot
- godox
- - goerr113
+ - err113
- gofmt
- gofumpt
- goimports
- - gomnd
- goprintffuncname
- gosec
- gosimple
@@ -94,6 +93,6 @@ linters:
- wastedassign
- whitespace
- wsl
-run:
- skip-dirs:
+issues:
+ exclude-dirs:
- testdata/
diff --git a/vendor/github.com/ckaznocha/intrange/go.work b/vendor/github.com/ckaznocha/intrange/go.work
index f41a04a2fb..3814c99f95 100644
--- a/vendor/github.com/ckaznocha/intrange/go.work
+++ b/vendor/github.com/ckaznocha/intrange/go.work
@@ -1,4 +1,4 @@
-go 1.22.0
+go 1.22
use (
.
diff --git a/vendor/github.com/ckaznocha/intrange/intrange.go b/vendor/github.com/ckaznocha/intrange/intrange.go
index fac4e3deae..229c847d5a 100644
--- a/vendor/github.com/ckaznocha/intrange/intrange.go
+++ b/vendor/github.com/ckaznocha/intrange/intrange.go
@@ -5,6 +5,7 @@ import (
"fmt"
"go/ast"
"go/token"
+ "go/types"
"strconv"
"golang.org/x/tools/go/analysis"
@@ -23,7 +24,11 @@ var (
errFailedAnalysis = errors.New("failed analysis")
)
-const msg = "for loop can be changed to use an integer range (Go 1.22+)"
+const (
+ msg = "for loop can be changed to use an integer range (Go 1.22+)"
+ msgLenRange = "for loop can be changed to `%s := range %s`"
+ msgLenRangeNoIdent = "for loop can be changed to `range %s`"
+)
func run(pass *analysis.Pass) (any, error) {
result, ok := pass.ResultOf[inspect.Analyzer]
@@ -44,90 +49,133 @@ func run(pass *analysis.Pass) (any, error) {
)
}
- resultInspector.Preorder([]ast.Node{(*ast.ForStmt)(nil)}, check(pass))
+ resultInspector.Preorder([]ast.Node{(*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)}, check(pass))
return nil, nil
}
func check(pass *analysis.Pass) func(node ast.Node) {
return func(node ast.Node) {
- forStmt, ok := node.(*ast.ForStmt)
- if !ok {
+ switch stmt := node.(type) {
+ case *ast.ForStmt:
+ checkForStmt(pass, stmt)
+ case *ast.RangeStmt:
+ checkRangeStmt(pass, stmt)
+ default:
return
}
+ }
+}
- if forStmt.Init == nil || forStmt.Cond == nil || forStmt.Post == nil {
- return
- }
+func checkForStmt(pass *analysis.Pass, forStmt *ast.ForStmt) {
+ // Existing checks for other patterns
+ if forStmt.Init == nil || forStmt.Cond == nil || forStmt.Post == nil {
+ return
+ }
+
+ // i := 0;;
+ init, ok := forStmt.Init.(*ast.AssignStmt)
+ if !ok {
+ return
+ }
+
+ initAssign := init.Tok == token.ASSIGN
+
+ if len(init.Lhs) != 1 || len(init.Rhs) != 1 {
+ return
+ }
+
+ initIdent, ok := init.Lhs[0].(*ast.Ident)
+ if !ok {
+ return
+ }
+
+ if !compareNumberLit(init.Rhs[0], 0) {
+ return
+ }
+
+ cond, ok := forStmt.Cond.(*ast.BinaryExpr)
+ if !ok {
+ return
+ }
+
+ var (
+ operand ast.Expr
+ hasEquivalentOperator bool
+ )
- // i := 0;;
- init, ok := forStmt.Init.(*ast.AssignStmt)
+ switch cond.Op {
+ case token.LSS, token.LEQ: // ;i < n; || ;i <= n;
+ x, ok := cond.X.(*ast.Ident)
if !ok {
return
}
- if len(init.Lhs) != 1 || len(init.Rhs) != 1 {
+ if x.Name != initIdent.Name {
return
}
- initIdent, ok := init.Lhs[0].(*ast.Ident)
+ hasEquivalentOperator = cond.Op == token.LEQ
+ operand = cond.Y
+ case token.GTR, token.GEQ: // ;n > i; || ;n >= i;
+ y, ok := cond.Y.(*ast.Ident)
if !ok {
return
}
- if !compareNumberLit(init.Rhs[0], 0) {
+ if y.Name != initIdent.Name {
return
}
- cond, ok := forStmt.Cond.(*ast.BinaryExpr)
- if !ok {
+ hasEquivalentOperator = cond.Op == token.GEQ
+ operand = cond.X
+ default:
+ return
+ }
+
+ switch post := forStmt.Post.(type) {
+ case *ast.IncDecStmt: // ;;i++
+ if post.Tok != token.INC {
return
}
- var nExpr ast.Expr
+ ident, ok := post.X.(*ast.Ident)
+ if !ok {
+ return
+ }
- switch cond.Op {
- case token.LSS: // ;i < n;
- if isBenchmark(cond.Y) {
+ if ident.Name != initIdent.Name {
+ return
+ }
+ case *ast.AssignStmt:
+ switch post.Tok {
+ case token.ADD_ASSIGN: // ;;i += 1
+ if len(post.Lhs) != 1 {
return
}
- nExpr = findNExpr(cond.Y)
-
- x, ok := cond.X.(*ast.Ident)
+ ident, ok := post.Lhs[0].(*ast.Ident)
if !ok {
return
}
- if x.Name != initIdent.Name {
- return
- }
- case token.GTR: // ;n > i;
- if isBenchmark(cond.X) {
+ if ident.Name != initIdent.Name {
return
}
- nExpr = findNExpr(cond.X)
-
- y, ok := cond.Y.(*ast.Ident)
- if !ok {
+ if len(post.Rhs) != 1 {
return
}
- if y.Name != initIdent.Name {
+ if !compareNumberLit(post.Rhs[0], 1) {
return
}
- default:
- return
- }
-
- switch post := forStmt.Post.(type) {
- case *ast.IncDecStmt: // ;;i++
- if post.Tok != token.INC {
+ case token.ASSIGN: // ;;i = i + 1 && ;;i = 1 + i
+ if len(post.Lhs) != 1 || len(post.Rhs) != 1 {
return
}
- ident, ok := post.X.(*ast.Ident)
+ ident, ok := post.Lhs[0].(*ast.Ident)
if !ok {
return
}
@@ -135,35 +183,31 @@ func check(pass *analysis.Pass) func(node ast.Node) {
if ident.Name != initIdent.Name {
return
}
- case *ast.AssignStmt:
- switch post.Tok {
- case token.ADD_ASSIGN: // ;;i += 1
- if len(post.Lhs) != 1 {
- return
- }
- ident, ok := post.Lhs[0].(*ast.Ident)
- if !ok {
- return
- }
+ bin, ok := post.Rhs[0].(*ast.BinaryExpr)
+ if !ok {
+ return
+ }
- if ident.Name != initIdent.Name {
- return
- }
+ if bin.Op != token.ADD {
+ return
+ }
- if len(post.Rhs) != 1 {
+ switch x := bin.X.(type) {
+ case *ast.Ident: // ;;i = i + 1
+ if x.Name != initIdent.Name {
return
}
- if !compareNumberLit(post.Rhs[0], 1) {
+ if !compareNumberLit(bin.Y, 1) {
return
}
- case token.ASSIGN: // ;;i = i + 1 && ;;i = 1 + i
- if len(post.Lhs) != 1 || len(post.Rhs) != 1 {
+ case *ast.BasicLit: // ;;i = 1 + i
+ if !compareNumberLit(x, 1) {
return
}
- ident, ok := post.Lhs[0].(*ast.Ident)
+ ident, ok := bin.Y.(*ast.Ident)
if !ok {
return
}
@@ -171,64 +215,171 @@ func check(pass *analysis.Pass) func(node ast.Node) {
if ident.Name != initIdent.Name {
return
}
-
- bin, ok := post.Rhs[0].(*ast.BinaryExpr)
- if !ok {
- return
- }
-
- if bin.Op != token.ADD {
- return
- }
-
- switch x := bin.X.(type) {
- case *ast.Ident: // ;;i = i + 1
- if x.Name != initIdent.Name {
- return
- }
-
- if !compareNumberLit(bin.Y, 1) {
- return
- }
- case *ast.BasicLit: // ;;i = 1 + i
- if !compareNumberLit(x, 1) {
- return
- }
-
- ident, ok := bin.Y.(*ast.Ident)
- if !ok {
- return
- }
-
- if ident.Name != initIdent.Name {
- return
- }
- default:
- return
- }
default:
return
}
default:
return
}
+ default:
+ return
+ }
- bc := &bodyChecker{
- initIdent: initIdent,
- nExpr: nExpr,
- }
+ bc := &bodyChecker{
+ initIdent: initIdent,
+ nExpr: findNExpr(operand),
+ }
+
+ ast.Inspect(forStmt.Body, bc.check)
+
+ if bc.modified {
+ return
+ }
+
+ if initAssign {
+ pass.Report(analysis.Diagnostic{
+ Pos: forStmt.Pos(),
+ Message: msg + "\nBecause the key is not part of the loop's scope, take care to consider side effects.",
+ })
+
+ return
+ }
+
+ operandIsNumberLit := isNumberLit(operand)
- ast.Inspect(forStmt.Body, bc.check)
+ if hasEquivalentOperator && !operandIsNumberLit {
+ return
+ }
- if bc.modified {
+ rangeX := operandToString(
+ pass,
+ initIdent,
+ operand,
+ hasEquivalentOperator && operandIsNumberLit,
+ )
+
+ var replacement string
+ if bc.accessed {
+ replacement = fmt.Sprintf("%s := range %s", initIdent.Name, rangeX)
+ } else {
+ replacement = fmt.Sprintf("range %s", rangeX)
+ }
+
+ pass.Report(analysis.Diagnostic{
+ Pos: forStmt.Pos(),
+ Message: msg,
+ SuggestedFixes: []analysis.SuggestedFix{
+ {
+ Message: fmt.Sprintf("Replace loop with `%s`", replacement),
+ TextEdits: []analysis.TextEdit{
+ {
+ Pos: forStmt.Init.Pos(),
+ End: forStmt.Post.End(),
+ NewText: []byte(replacement),
+ },
+ },
+ },
+ },
+ })
+}
+
+func checkRangeStmt(pass *analysis.Pass, rangeStmt *ast.RangeStmt) {
+ if rangeStmt.Value != nil {
+ return
+ }
+
+ startPos := rangeStmt.Range
+ usesKey := rangeStmt.Key != nil
+ identName := ""
+
+ if usesKey {
+ ident, ok := rangeStmt.Key.(*ast.Ident)
+ if !ok {
return
}
+ if ident.Name == "_" {
+ usesKey = false
+ }
+
+ identName = ident.Name
+ startPos = ident.Pos()
+ }
+
+ if rangeStmt.X == nil {
+ return
+ }
+
+ x, ok := rangeStmt.X.(*ast.CallExpr)
+ if !ok {
+ return
+ }
+
+ fn, ok := x.Fun.(*ast.Ident)
+ if !ok {
+ return
+ }
+
+ if fn.Name != "len" || len(x.Args) != 1 {
+ return
+ }
+
+ arg, ok := x.Args[0].(*ast.Ident)
+ if !ok {
+ return
+ }
+
+ // make sure arg is a slice or array
+ obj := pass.TypesInfo.ObjectOf(arg)
+ if obj == nil {
+ return
+ }
+
+ switch obj.Type().Underlying().(type) {
+ case *types.Slice, *types.Array:
+ default:
+ return
+ }
+
+ if usesKey {
pass.Report(analysis.Diagnostic{
- Pos: forStmt.Pos(),
- Message: msg,
+ Pos: startPos,
+ End: x.End(),
+ Message: fmt.Sprintf(msgLenRange, identName, arg.Name),
+ SuggestedFixes: []analysis.SuggestedFix{
+ {
+ Message: fmt.Sprintf("Replace `len(%s)` with `%s`", arg.Name, arg.Name),
+ TextEdits: []analysis.TextEdit{
+ {
+ Pos: x.Pos(),
+ End: x.End(),
+ NewText: []byte(arg.Name),
+ },
+ },
+ },
+ },
})
+
+ return
}
+
+ pass.Report(analysis.Diagnostic{
+ Pos: startPos,
+ End: x.End(),
+ Message: fmt.Sprintf(msgLenRangeNoIdent, arg.Name),
+ SuggestedFixes: []analysis.SuggestedFix{
+ {
+ Message: fmt.Sprintf("Replace `len(%s)` with `%s`", arg.Name, arg.Name),
+ TextEdits: []analysis.TextEdit{
+ {
+ Pos: startPos,
+ End: x.End(),
+ NewText: []byte(fmt.Sprintf("range %s", arg.Name)),
+ },
+ },
+ },
+ },
+ })
}
func findNExpr(expr ast.Expr) ast.Expr {
@@ -252,26 +403,45 @@ func findNExpr(expr ast.Expr) ast.Expr {
}
}
-func isBenchmark(expr ast.Expr) bool {
- selectorExpr, ok := expr.(*ast.SelectorExpr)
- if !ok {
- return false
- }
+func recursiveOperandToString(
+ expr ast.Expr,
+ incrementInt bool,
+) string {
+ switch e := expr.(type) {
+ case *ast.CallExpr:
+ args := ""
- if selectorExpr.Sel.Name != "N" {
- return false
- }
+ for i, v := range e.Args {
+ if i > 0 {
+ args += ", "
+ }
- ident, ok := selectorExpr.X.(*ast.Ident)
- if !ok {
- return false
- }
+ args += recursiveOperandToString(v, incrementInt && len(e.Args) == 1)
+ }
- if ident.Name == "b" {
- return true
- }
+ return recursiveOperandToString(e.Fun, false) + "(" + args + ")"
+ case *ast.BasicLit:
+ if incrementInt && e.Kind == token.INT {
+ v, err := strconv.Atoi(e.Value)
+ if err == nil {
+ return strconv.Itoa(v + 1)
+ }
+
+ return e.Value
+ }
- return false
+ return e.Value
+ case *ast.Ident:
+ return e.Name
+ case *ast.SelectorExpr:
+ return recursiveOperandToString(e.X, false) + "." + recursiveOperandToString(e.Sel, false)
+ case *ast.IndexExpr:
+ return recursiveOperandToString(e.X, false) + "[" + recursiveOperandToString(e.Index, false) + "]"
+ case *ast.BinaryExpr:
+ return recursiveOperandToString(e.X, false) + " " + e.Op.String() + " " + recursiveOperandToString(e.Y, false)
+ default:
+ return ""
+ }
}
func identEqual(a, b ast.Expr) bool {
@@ -317,6 +487,7 @@ type bodyChecker struct {
initIdent *ast.Ident
nExpr ast.Expr
modified bool
+ accessed bool
}
func (b *bodyChecker) check(n ast.Node) bool {
@@ -335,11 +506,55 @@ func (b *bodyChecker) check(n ast.Node) bool {
return false
}
+ case *ast.Ident:
+ if identEqual(stmt, b.initIdent) {
+ b.accessed = true
+ }
}
return true
}
+func isNumberLit(exp ast.Expr) bool {
+ switch lit := exp.(type) {
+ case *ast.BasicLit:
+ if lit.Kind == token.INT {
+ return true
+ }
+
+ return false
+ case *ast.CallExpr:
+ switch fun := lit.Fun.(type) {
+ case *ast.Ident:
+ switch fun.Name {
+ case
+ "int",
+ "int8",
+ "int16",
+ "int32",
+ "int64",
+ "uint",
+ "uint8",
+ "uint16",
+ "uint32",
+ "uint64":
+ default:
+ return false
+ }
+ default:
+ return false
+ }
+
+ if len(lit.Args) != 1 {
+ return false
+ }
+
+ return isNumberLit(lit.Args[0])
+ default:
+ return false
+ }
+}
+
func compareNumberLit(exp ast.Expr, val int) bool {
switch lit := exp.(type) {
case *ast.BasicLit:
@@ -386,3 +601,27 @@ func compareNumberLit(exp ast.Expr, val int) bool {
return false
}
}
+
+func operandToString(
+ pass *analysis.Pass,
+ i *ast.Ident,
+ operand ast.Expr,
+ increment bool,
+) string {
+ s := recursiveOperandToString(operand, increment)
+ t := pass.TypesInfo.TypeOf(i)
+
+ if t == types.Typ[types.Int] {
+ if len(s) > 5 && s[:4] == "int(" && s[len(s)-1] == ')' {
+ s = s[4 : len(s)-1]
+ }
+
+ return s
+ }
+
+ if len(s) > 2 && s[len(s)-1] == ')' {
+ return s
+ }
+
+ return t.String() + "(" + s + ")"
+}
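
For reference, the loops the updated intrange checker now rewrites look like this; a hedged before/after sketch (integer ranges require Go 1.22+):

```go
package main

import "fmt"

func main() {
	nums := []int{10, 20, 30}

	// Flagged: classic counting loop over an integer bound.
	for i := 0; i < 3; i++ {
		fmt.Println(i)
	}
	// Suggested replacement: range over an integer.
	for i := range 3 {
		fmt.Println(i)
	}

	// Newly flagged: ranging over len() of a slice or array.
	for i := range len(nums) {
		fmt.Println(nums[i])
	}
	// Suggested replacement: range the slice itself.
	for i := range nums {
		fmt.Println(nums[i])
	}
}
```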
diff --git a/vendor/github.com/curioswitch/go-reassign/.golangci.yml b/vendor/github.com/curioswitch/go-reassign/.golangci.yml
index e3bf79ae72..fdf0bb2f22 100644
--- a/vendor/github.com/curioswitch/go-reassign/.golangci.yml
+++ b/vendor/github.com/curioswitch/go-reassign/.golangci.yml
@@ -5,14 +5,12 @@ linters:
- bodyclose
- decorder
- durationcheck
+ - err113
- errchkjson
- errname
- errorlint
- - execinquery
- exhaustive
- - exportloopref
- gocritic
- - goerr113
- gofmt
- goimports
- goprintffuncname
@@ -20,7 +18,6 @@ linters:
- importas
- misspell
- nolintlint
- - nosnakecase
- prealloc
- predeclared
- promlinter
diff --git a/vendor/github.com/curioswitch/go-reassign/README.md b/vendor/github.com/curioswitch/go-reassign/README.md
index ac9c131df2..190756f928 100644
--- a/vendor/github.com/curioswitch/go-reassign/README.md
+++ b/vendor/github.com/curioswitch/go-reassign/README.md
@@ -47,7 +47,8 @@ Package variable reassignment is generally confusing, though, and we recommend a
The `pattern` flag can be set to a regular expression to define what variables cannot be reassigned, and `.*` is
recommended if it works with your code.
-## Limitations
+## Development
-If a variable shadows the name of an import, an assignment of a field in the variable will trigger the linter. Shadowing
-can be confusing, so it's recommended to rename the variable.
+[mage](https://magefile.org/) is used for development. Run `go run mage.go -l` to see available targets.
+
+For example, to run checks before sending a PR, run `go run mage.go check`.
diff --git a/vendor/github.com/curioswitch/go-reassign/internal/analyzer/analyzer.go b/vendor/github.com/curioswitch/go-reassign/internal/analyzer/analyzer.go
index e1b47d5b95..c2a29c5299 100644
--- a/vendor/github.com/curioswitch/go-reassign/internal/analyzer/analyzer.go
+++ b/vendor/github.com/curioswitch/go-reassign/internal/analyzer/analyzer.go
@@ -48,23 +48,35 @@ func run(pass *analysis.Pass) (interface{}, error) {
func reportImported(pass *analysis.Pass, expr ast.Expr, checkRE *regexp.Regexp, prefix string) {
switch x := expr.(type) {
case *ast.SelectorExpr:
- if !checkRE.MatchString(x.Sel.Name) {
- return
- }
-
selectIdent, ok := x.X.(*ast.Ident)
if !ok {
return
}
+ var pkgPath string
if selectObj, ok := pass.TypesInfo.Uses[selectIdent]; ok {
- if pkg, ok := selectObj.(*types.PkgName); !ok || pkg.Imported() == pass.Pkg {
+ pkg, ok := selectObj.(*types.PkgName)
+ if !ok || pkg.Imported() == pass.Pkg {
return
}
+ pkgPath = pkg.Imported().Path()
}
- pass.Reportf(expr.Pos(), "%s variable %s in other package %s", prefix, x.Sel.Name, selectIdent.Name)
+ matches := false
+ if checkRE.MatchString(x.Sel.Name) {
+ matches = true
+ }
+ if !matches {
+ // Expression may include a package name, so check that too. Support was added later so we check
+ // just name and qualified name separately for compatibility.
+ if checkRE.MatchString(pkgPath + "." + x.Sel.Name) {
+ matches = true
+ }
+ }
+ if matches {
+ pass.Reportf(expr.Pos(), "%s variable %s in other package %s", prefix, x.Sel.Name, selectIdent.Name)
+ }
case *ast.Ident:
use, ok := pass.TypesInfo.Uses[x].(*types.Var)
if !ok {
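
The change above lets the reassign pattern match the qualified name (package path plus selector) in addition to the bare identifier. A hypothetical example of the kind of code the analyzer reports, assuming a permissive pattern such as `.*`:

```go
package main

import "net/http"

func main() {
	// Reported by go-reassign: reassigning a variable that lives in another
	// package. With this change, a pattern can also target the variable by
	// its qualified name, e.g. net/http.DefaultClient.
	http.DefaultClient = &http.Client{}
}
```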
diff --git a/vendor/github.com/daixiang0/gci/pkg/config/config.go b/vendor/github.com/daixiang0/gci/pkg/config/config.go
index cc43f2fa07..814201a006 100644
--- a/vendor/github.com/daixiang0/gci/pkg/config/config.go
+++ b/vendor/github.com/daixiang0/gci/pkg/config/config.go
@@ -26,6 +26,7 @@ type BoolConfig struct {
SkipGenerated bool `yaml:"skipGenerated"`
SkipVendor bool `yaml:"skipVendor"`
CustomOrder bool `yaml:"customOrder"`
+ NoLexOrder bool `yaml:"noLexOrder"`
}
type Config struct {
@@ -63,10 +64,11 @@ func (g YamlConfig) Parse() (*Config, error) {
sort.Slice(sections, func(i, j int) bool {
sectionI, sectionJ := sections[i].Type(), sections[j].Type()
- if strings.Compare(sectionI, sectionJ) == 0 {
- return strings.Compare(sections[i].String(), sections[j].String()) < 0
+ if g.Cfg.NoLexOrder || strings.Compare(sectionI, sectionJ) != 0 {
+ return defaultOrder[sectionI] < defaultOrder[sectionJ]
}
- return defaultOrder[sectionI] < defaultOrder[sectionJ]
+
+ return strings.Compare(sections[i].String(), sections[j].String()) < 0
})
}
diff --git a/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go b/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go
index a2cd0a6dd2..5a2dcdc899 100644
--- a/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go
+++ b/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go
@@ -1,6 +1,6 @@
package section
-// Code generated based on go1.22.0 X:boringcrypto,arenas. DO NOT EDIT.
+// Code generated based on go1.23.0 X:boringcrypto,arenas. DO NOT EDIT.
var standardPackages = map[string]struct{}{
"archive/tar": {},
@@ -100,6 +100,7 @@ var standardPackages = map[string]struct{}{
"io": {},
"io/fs": {},
"io/ioutil": {},
+ "iter": {},
"log": {},
"log/slog": {},
"log/syslog": {},
@@ -151,6 +152,7 @@ var standardPackages = map[string]struct{}{
"sort": {},
"strconv": {},
"strings": {},
+ "structs": {},
"sync": {},
"sync/atomic": {},
"syscall": {},
@@ -168,5 +170,6 @@ var standardPackages = map[string]struct{}{
"unicode": {},
"unicode/utf16": {},
"unicode/utf8": {},
+ "unique": {},
"unsafe": {},
}
diff --git a/vendor/github.com/fatih/color/README.md b/vendor/github.com/fatih/color/README.md
index be82827cac..d135bfe023 100644
--- a/vendor/github.com/fatih/color/README.md
+++ b/vendor/github.com/fatih/color/README.md
@@ -9,7 +9,7 @@ suits you.
## Install
-```bash
+```
go get github.com/fatih/color
```
@@ -30,6 +30,18 @@ color.Magenta("And many others ..")
```
+### RGB colors
+
+If your terminal supports 24-bit colors, you can use RGB color codes.
+
+```go
+color.RGB(255, 128, 0).Println("foreground orange")
+color.RGB(230, 42, 42).Println("foreground red")
+
+color.BgRGB(255, 128, 0).Println("background orange")
+color.BgRGB(230, 42, 42).Println("background red")
+```
+
### Mix and reuse colors
```go
@@ -49,6 +61,11 @@ boldRed.Println("This will print text in bold red.")
whiteBackground := red.Add(color.BgWhite)
whiteBackground.Println("Red text with white background.")
+
+// Mix with RGB color codes
+color.RGB(255, 128, 0).AddBgRGB(0, 0, 0).Println("orange with black background")
+
+color.BgRGB(255, 128, 0).AddRGB(255, 255, 255).Println("orange background with white foreground")
```
### Use your own output (io.Writer)
@@ -161,10 +178,6 @@ c.Println("This prints again cyan...")
To output color in GitHub Actions (or other CI systems that support ANSI colors), make sure to set `color.NoColor = false` so that it bypasses the check for non-tty output streams.
-## Todo
-
-* Save/Return previous values
-* Evaluate fmt.Formatter interface
## Credits
diff --git a/vendor/github.com/fatih/color/color.go b/vendor/github.com/fatih/color/color.go
index 81094e87c5..ee39b408e9 100644
--- a/vendor/github.com/fatih/color/color.go
+++ b/vendor/github.com/fatih/color/color.go
@@ -98,6 +98,9 @@ const (
FgMagenta
FgCyan
FgWhite
+
+ // used internally for 256 and 24-bit coloring
+ foreground
)
// Foreground Hi-Intensity text colors
@@ -122,6 +125,9 @@ const (
BgMagenta
BgCyan
BgWhite
+
+ // used internally for 256 and 24-bit coloring
+ background
)
// Background Hi-Intensity text colors
@@ -150,6 +156,30 @@ func New(value ...Attribute) *Color {
return c
}
+// RGB returns a new foreground color in 24-bit RGB.
+func RGB(r, g, b int) *Color {
+ return New(foreground, 2, Attribute(r), Attribute(g), Attribute(b))
+}
+
+// BgRGB returns a new background color in 24-bit RGB.
+func BgRGB(r, g, b int) *Color {
+ return New(background, 2, Attribute(r), Attribute(g), Attribute(b))
+}
+
+// AddRGB is used to chain foreground RGB SGR parameters. Use as many as parameters to combine
+// and create custom color objects. Example: .Add(34, 0, 12).Add(255, 128, 0).
+func (c *Color) AddRGB(r, g, b int) *Color {
+ c.params = append(c.params, foreground, 2, Attribute(r), Attribute(g), Attribute(b))
+ return c
+}
+
+// AddRGB is used to chain background RGB SGR parameters. Use as many as parameters to combine
+// and create custom color objects. Example: .Add(34, 0, 12).Add(255, 128, 0).
+func (c *Color) AddBgRGB(r, g, b int) *Color {
+ c.params = append(c.params, background, 2, Attribute(r), Attribute(g), Attribute(b))
+ return c
+}
+
// Set sets the given parameters immediately. It will change the color of
// output with the given SGR parameters until color.Unset() is called.
func Set(p ...Attribute) *Color {
@@ -401,7 +431,7 @@ func (c *Color) format() string {
func (c *Color) unformat() string {
//return fmt.Sprintf("%s[%dm", escape, Reset)
- //for each element in sequence let's use the speficic reset escape, ou the generic one if not found
+ //for each element in sequence let's use the specific reset escape, or the generic one if not found
format := make([]string, len(c.params))
for i, v := range c.params {
format[i] = strconv.Itoa(int(Reset))
diff --git a/vendor/github.com/ghostiam/protogetter/.goreleaser.yaml b/vendor/github.com/ghostiam/protogetter/.goreleaser.yaml
index a70d0fb006..cc5d4cffb9 100644
--- a/vendor/github.com/ghostiam/protogetter/.goreleaser.yaml
+++ b/vendor/github.com/ghostiam/protogetter/.goreleaser.yaml
@@ -1,3 +1,4 @@
+version: 2
before:
hooks:
- go mod tidy
@@ -21,4 +22,4 @@ changelog:
exclude:
- '^docs:'
- '^test:'
- - '^ci:'
\ No newline at end of file
+ - '^ci:'
diff --git a/vendor/github.com/ghostiam/protogetter/processor.go b/vendor/github.com/ghostiam/protogetter/processor.go
index eca82939d8..44f346e850 100644
--- a/vendor/github.com/ghostiam/protogetter/processor.go
+++ b/vendor/github.com/ghostiam/protogetter/processor.go
@@ -218,7 +218,7 @@ func (c *processor) processInner(expr ast.Expr) {
c.write("*")
c.processInner(x.X)
- case *ast.CompositeLit, *ast.TypeAssertExpr, *ast.ArrayType, *ast.FuncLit:
+ case *ast.CompositeLit, *ast.TypeAssertExpr, *ast.ArrayType, *ast.FuncLit, *ast.SliceExpr:
// Process the node as is.
c.write(formatNode(x))
diff --git a/vendor/github.com/ghostiam/protogetter/protogetter.go b/vendor/github.com/ghostiam/protogetter/protogetter.go
index 31eee8572a..c1c42c75d0 100644
--- a/vendor/github.com/ghostiam/protogetter/protogetter.go
+++ b/vendor/github.com/ghostiam/protogetter/protogetter.go
@@ -16,13 +16,6 @@ import (
"golang.org/x/tools/go/ast/inspector"
)
-type Mode int
-
-const (
- StandaloneMode Mode = iota
- GolangciLintMode
-)
-
const msgFormat = "avoid direct access to proto field %s, use %s instead"
func NewAnalyzer(cfg *Config) *analysis.Analyzer {
@@ -35,7 +28,7 @@ func NewAnalyzer(cfg *Config) *analysis.Analyzer {
Doc: "Reports direct reads from proto message fields when getters should be used",
Flags: flags(cfg),
Run: func(pass *analysis.Pass) (any, error) {
- _, err := Run(pass, cfg)
+ err := Run(pass, cfg)
return nil, err
},
}
@@ -62,14 +55,13 @@ func flags(opts *Config) flag.FlagSet {
}
type Config struct {
- Mode Mode // Zero value is StandaloneMode.
SkipGeneratedBy []string
SkipFiles []string
SkipAnyGenerated bool
ReplaceFirstArgInAppend bool
}
-func Run(pass *analysis.Pass, cfg *Config) ([]Issue, error) {
+func Run(pass *analysis.Pass, cfg *Config) error {
skipGeneratedBy := make([]string, 0, len(cfg.SkipGeneratedBy)+3)
// Always skip files generated by protoc-gen-go, protoc-gen-go-grpc and protoc-gen-grpc-gateway.
skipGeneratedBy = append(skipGeneratedBy, "protoc-gen-go", "protoc-gen-go-grpc", "protoc-gen-grpc-gateway")
@@ -90,7 +82,7 @@ func Run(pass *analysis.Pass, cfg *Config) ([]Issue, error) {
compile, err := glob.Compile(s)
if err != nil {
- return nil, fmt.Errorf("invalid glob pattern: %w", err)
+ return fmt.Errorf("invalid glob pattern: %w", err)
}
skipFilesGlobPatterns = append(skipFilesGlobPatterns, compile)
@@ -124,24 +116,16 @@ func Run(pass *analysis.Pass, cfg *Config) ([]Issue, error) {
ins := inspector.New(files)
- var issues []Issue
-
filter := NewPosFilter()
ins.Preorder(nodeTypes, func(node ast.Node) {
report := analyse(pass, filter, node, cfg)
if report == nil {
return
}
-
- switch cfg.Mode {
- case StandaloneMode:
- pass.Report(report.ToDiagReport())
- case GolangciLintMode:
- issues = append(issues, report.ToIssue(pass.Fset))
- }
+ pass.Report(report.ToDiagReport())
})
- return issues, nil
+ return nil
}
func analyse(pass *analysis.Pass, filter *PosFilter, n ast.Node, cfg *Config) *Report {
@@ -185,19 +169,6 @@ func analyse(pass *analysis.Pass, filter *PosFilter, n ast.Node, cfg *Config) *R
}
}
-// Issue is used to integrate with golangci-lint's inline auto fix.
-type Issue struct {
- Pos token.Position
- Message string
- InlineFix InlineFix
-}
-
-type InlineFix struct {
- StartCol int // zero-based
- Length int
- NewString string
-}
-
type Report struct {
node ast.Node
result *Result
@@ -225,27 +196,13 @@ func (r *Report) ToDiagReport() analysis.Diagnostic {
}
}
-func (r *Report) ToIssue(fset *token.FileSet) Issue {
- msg := fmt.Sprintf(msgFormat, r.result.From, r.result.To)
- return Issue{
- Pos: fset.Position(r.node.Pos()),
- Message: msg,
- InlineFix: InlineFix{
- StartCol: fset.Position(r.node.Pos()).Column - 1,
- Length: len(r.result.From),
- NewString: r.result.To,
- },
- }
-}
-
func skipGeneratedFile(f *ast.File, prefixes []string, skipAny bool) bool {
if len(f.Comments) == 0 {
return false
}
firstComment := f.Comments[0].Text()
- // https://golang.org/s/generatedcode
- if skipAny && strings.HasPrefix(firstComment, "Code generated") {
+ if skipAny && ast.IsGenerated(f) {
return true
}
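
With the golangci-lint-specific mode and the `Issue` type removed above, the analyzer is driven purely through the standard `go/analysis` API. A minimal sketch of standalone use (the configuration values are illustrative):

```go
package main

import (
	"github.com/ghostiam/protogetter"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	// Reports now always flow through pass.Report via the analysis framework.
	singlechecker.Main(protogetter.NewAnalyzer(&protogetter.Config{
		SkipAnyGenerated: true,
	}))
}
```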
diff --git a/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go b/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go
index 306756834b..345274f1c8 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go
@@ -82,7 +82,7 @@ func (c *caseOrderChecker) warnUnknownType(cause, concrete ast.Node) {
c.ctx.Warn(cause, "type is not defined %s", concrete)
}
-func (c *caseOrderChecker) checkSwitch(s *ast.SwitchStmt) {
+func (c *caseOrderChecker) checkSwitch(_ *ast.SwitchStmt) {
// TODO(quasilyte): can handle expression cases that overlap.
// Cases that have narrower value range should go before wider ones.
}
diff --git a/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go b/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go
index 7b7a3c538b..170c3f4171 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go
@@ -57,6 +57,7 @@ func (*hugeParamChecker) isImplementStringer(decl *ast.FuncDecl) bool {
decl.Name.Name == "String" &&
decl.Type != nil &&
len(decl.Type.Params.List) == 0 &&
+ decl.Type.Results != nil &&
len(decl.Type.Results.List) == 1 &&
astcast.ToIdent(decl.Type.Results.List[0].Type).Name == "string" {
return true
diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go
index 1f6e948d5c..96d2dd0e6f 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go
@@ -17,7 +17,7 @@ type WalkHandler struct {
// EnterFile is a default walkerEvents.EnterFile implementation
// that reports every file as accepted candidate for checking.
-func (w *WalkHandler) EnterFile(f *ast.File) bool {
+func (w *WalkHandler) EnterFile(_ *ast.File) bool {
return true
}
diff --git a/vendor/github.com/go-critic/go-critic/checkers/rangeAppendAll_checker.go b/vendor/github.com/go-critic/go-critic/checkers/rangeAppendAll_checker.go
new file mode 100644
index 0000000000..f4851d4024
--- /dev/null
+++ b/vendor/github.com/go-critic/go-critic/checkers/rangeAppendAll_checker.go
@@ -0,0 +1,100 @@
+package checkers
+
+import (
+ "go/ast"
+ "go/token"
+
+ "github.com/go-critic/go-critic/checkers/internal/astwalk"
+ "github.com/go-critic/go-critic/linter"
+ "github.com/go-toolsmith/astcast"
+ "golang.org/x/tools/go/ast/astutil"
+)
+
+func init() {
+ var info linter.CheckerInfo
+ info.Name = "rangeAppendAll"
+ info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag}
+ info.Summary = "Detects append all its data while range it"
+ info.Before = `for _, n := range ns {
+ ...
+ rs = append(rs, ns...) // append all slice data
+ }
+}`
+ info.After = `for _, n := range ns {
+ ...
+ rs = append(rs, n)
+ }
+}`
+
+ collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) {
+ c := &rangeAppendAllChecker{ctx: ctx}
+ return astwalk.WalkerForStmt(c), nil
+ })
+}
+
+type rangeAppendAllChecker struct {
+ astwalk.WalkHandler
+ ctx *linter.CheckerContext
+}
+
+func (c *rangeAppendAllChecker) VisitStmt(stmt ast.Stmt) {
+ rangeStmt, ok := stmt.(*ast.RangeStmt)
+ if !ok || len(rangeStmt.Body.List) == 0 {
+ return
+ }
+ rangeIdent, ok := rangeStmt.X.(*ast.Ident)
+ if !ok {
+ return
+ }
+ rangeObj := c.ctx.TypesInfo.ObjectOf(rangeIdent)
+
+ astutil.Apply(rangeStmt.Body, nil, func(cur *astutil.Cursor) bool {
+ appendFrom := c.getValidAppendFrom(cur.Node())
+ if appendFrom != nil {
+ appendFromObj := c.ctx.TypesInfo.ObjectOf(appendFrom)
+ if appendFromObj == rangeObj {
+ c.warn(appendFrom)
+ }
+ }
+ return true
+ })
+}
+
+func (c *rangeAppendAllChecker) getValidAppendFrom(expr ast.Node) *ast.Ident {
+ call := astcast.ToCallExpr(expr)
+ if len(call.Args) != 2 || call.Ellipsis == token.NoPos {
+ return nil
+ }
+ if qualifiedName(call.Fun) != "append" {
+ return nil
+ }
+ if c.isSliceLiteral(call.Args[0]) {
+ return nil
+ }
+ appendFrom, ok := call.Args[1].(*ast.Ident)
+ if !ok {
+ return nil
+ }
+ return appendFrom
+}
+
+func (c *rangeAppendAllChecker) isSliceLiteral(arg ast.Expr) bool {
+ switch v := arg.(type) {
+ // []T{}, []T{n}
+ case *ast.CompositeLit:
+ return true
+ // []T(nil)
+ case *ast.CallExpr:
+ if astcast.ToArrayType(v.Fun) != astcast.NilArrayType && len(v.Args) == 1 {
+ id := astcast.ToIdent(v.Args[0])
+ return id.Name == "nil" && id.Obj == nil
+ }
+ return false
+ default:
+ return false
+ }
+}
+
+func (c *rangeAppendAllChecker) warn(appendFrom *ast.Ident) {
+ c.ctx.Warn(appendFrom, "append all `%s` data while range it", appendFrom)
+}
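
The new rangeAppendAll checker targets the pattern below; a small illustrative sketch of what it warns about:

```go
package main

import "fmt"

func main() {
	ns := []int{1, 2, 3}
	var rs []int

	// Flagged: the whole ranged slice is appended on every iteration.
	for range ns {
		rs = append(rs, ns...)
	}

	// Likely intent: append one element per iteration.
	rs = rs[:0]
	for _, n := range ns {
		rs = append(rs, n)
	}

	fmt.Println(rs) // [1 2 3]
}
```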
diff --git a/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go
index 29723a69a9..485819842c 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go
@@ -87,7 +87,7 @@ func newErrorHandler(failOnErrorFlag string) (*parseErrorHandler, error) {
failOnErrorPredicates := map[string]func(error) bool{
"dsl": func(err error) bool { var e *ruleguard.ImportError; return !errors.As(err, &e) },
"import": func(err error) bool { var e *ruleguard.ImportError; return errors.As(err, &e) },
- "all": func(err error) bool { return true },
+ "all": func(_ error) bool { return true },
}
for _, k := range strings.Split(failOnErrorFlag, ",") {
if k == "" {
diff --git a/vendor/github.com/go-critic/go-critic/linter/helpers.go b/vendor/github.com/go-critic/go-critic/linter/helpers.go
index 0a3fc0292f..d5110df642 100644
--- a/vendor/github.com/go-critic/go-critic/linter/helpers.go
+++ b/vendor/github.com/go-critic/go-critic/linter/helpers.go
@@ -116,7 +116,7 @@ func validateCheckerName(info *CheckerInfo) error {
return nil
}
-func validateCheckerDocumentation(info *CheckerInfo) error {
+func validateCheckerDocumentation(_ *CheckerInfo) error {
// TODO(quasilyte): validate documentation.
return nil
}
diff --git a/vendor/github.com/go-jose/go-jose/v3/jwe.go b/vendor/github.com/go-jose/go-jose/v3/jwe.go
index 4267ac7502..1ba4ae0c00 100644
--- a/vendor/github.com/go-jose/go-jose/v3/jwe.go
+++ b/vendor/github.com/go-jose/go-jose/v3/jwe.go
@@ -202,10 +202,11 @@ func (parsed *rawJSONWebEncryption) sanitized() (*JSONWebEncryption, error) {
// parseEncryptedCompact parses a message in compact format.
func parseEncryptedCompact(input string) (*JSONWebEncryption, error) {
- parts := strings.Split(input, ".")
- if len(parts) != 5 {
+ // Five parts is four separators
+ if strings.Count(input, ".") != 4 {
return nil, fmt.Errorf("go-jose/go-jose: compact JWE format must have five parts")
}
+ parts := strings.SplitN(input, ".", 5)
rawProtected, err := base64URLDecode(parts[0])
if err != nil {
diff --git a/vendor/github.com/go-jose/go-jose/v3/jws.go b/vendor/github.com/go-jose/go-jose/v3/jws.go
index e37007dbb8..401fc18ac4 100644
--- a/vendor/github.com/go-jose/go-jose/v3/jws.go
+++ b/vendor/github.com/go-jose/go-jose/v3/jws.go
@@ -275,10 +275,11 @@ func (parsed *rawJSONWebSignature) sanitized() (*JSONWebSignature, error) {
// parseSignedCompact parses a message in compact format.
func parseSignedCompact(input string, payload []byte) (*JSONWebSignature, error) {
- parts := strings.Split(input, ".")
- if len(parts) != 3 {
+ // Three parts is two separators
+ if strings.Count(input, ".") != 2 {
return nil, fmt.Errorf("go-jose/go-jose: compact JWS format must have three parts")
}
+ parts := strings.SplitN(input, ".", 3)
if parts[1] != "" && payload != nil {
return nil, fmt.Errorf("go-jose/go-jose: payload is not detached")
diff --git a/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go b/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go
index 24d82f07c3..1f3c69d4b8 100644
--- a/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go
+++ b/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go
@@ -6,6 +6,7 @@ import (
"fmt"
"net"
"net/netip"
+ "net/url"
"reflect"
"strconv"
"strings"
@@ -36,6 +37,30 @@ func typedDecodeHook(h DecodeHookFunc) DecodeHookFunc {
return nil
}
+// cachedDecodeHook takes a raw DecodeHookFunc (an interface{}) and turns
+// it into a closure to be used directly
+// if the type fails to convert we return a closure always erroring to keep the previous behaviour
+func cachedDecodeHook(raw DecodeHookFunc) func(from reflect.Value, to reflect.Value) (interface{}, error) {
+ switch f := typedDecodeHook(raw).(type) {
+ case DecodeHookFuncType:
+ return func(from reflect.Value, to reflect.Value) (interface{}, error) {
+ return f(from.Type(), to.Type(), from.Interface())
+ }
+ case DecodeHookFuncKind:
+ return func(from reflect.Value, to reflect.Value) (interface{}, error) {
+ return f(from.Kind(), to.Kind(), from.Interface())
+ }
+ case DecodeHookFuncValue:
+ return func(from reflect.Value, to reflect.Value) (interface{}, error) {
+ return f(from, to)
+ }
+ default:
+ return func(from reflect.Value, to reflect.Value) (interface{}, error) {
+ return nil, errors.New("invalid decode hook signature")
+ }
+ }
+}
+
// DecodeHookExec executes the given decode hook. This should be used
// since it'll naturally degrade to the older backwards compatible DecodeHookFunc
// that took reflect.Kind instead of reflect.Type.
@@ -61,13 +86,17 @@ func DecodeHookExec(
// The composed funcs are called in order, with the result of the
// previous transformation.
func ComposeDecodeHookFunc(fs ...DecodeHookFunc) DecodeHookFunc {
+ cached := make([]func(from reflect.Value, to reflect.Value) (interface{}, error), 0, len(fs))
+ for _, f := range fs {
+ cached = append(cached, cachedDecodeHook(f))
+ }
return func(f reflect.Value, t reflect.Value) (interface{}, error) {
var err error
data := f.Interface()
newFrom := f
- for _, f1 := range fs {
- data, err = DecodeHookExec(f1, newFrom, t)
+ for _, c := range cached {
+ data, err = c(newFrom, t)
if err != nil {
return nil, err
}
@@ -81,13 +110,17 @@ func ComposeDecodeHookFunc(fs ...DecodeHookFunc) DecodeHookFunc {
// OrComposeDecodeHookFunc executes all input hook functions until one of them returns no error. In that case its value is returned.
// If all hooks return an error, OrComposeDecodeHookFunc returns an error concatenating all error messages.
func OrComposeDecodeHookFunc(ff ...DecodeHookFunc) DecodeHookFunc {
+ cached := make([]func(from reflect.Value, to reflect.Value) (interface{}, error), 0, len(ff))
+ for _, f := range ff {
+ cached = append(cached, cachedDecodeHook(f))
+ }
return func(a, b reflect.Value) (interface{}, error) {
var allErrs string
var out interface{}
var err error
- for _, f := range ff {
- out, err = DecodeHookExec(f, a, b)
+ for _, c := range cached {
+ out, err = c(a, b)
if err != nil {
allErrs += err.Error() + "\n"
continue
@@ -144,6 +177,26 @@ func StringToTimeDurationHookFunc() DecodeHookFunc {
}
}
+// StringToURLHookFunc returns a DecodeHookFunc that converts
+// strings to *url.URL.
+func StringToURLHookFunc() DecodeHookFunc {
+ return func(
+ f reflect.Type,
+ t reflect.Type,
+ data interface{},
+ ) (interface{}, error) {
+ if f.Kind() != reflect.String {
+ return data, nil
+ }
+ if t != reflect.TypeOf(&url.URL{}) {
+ return data, nil
+ }
+
+ // Convert it by parsing
+ return url.Parse(data.(string))
+ }
+}
+
// StringToIPHookFunc returns a DecodeHookFunc that converts
// strings to net.IP
func StringToIPHookFunc() DecodeHookFunc {
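
The new `StringToURLHookFunc` above can be wired into a decoder like any other hook; a minimal usage sketch:

```go
package main

import (
	"fmt"
	"net/url"

	"github.com/go-viper/mapstructure/v2"
)

type Config struct {
	Endpoint *url.URL `mapstructure:"endpoint"`
}

func main() {
	var cfg Config

	dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		DecodeHook: mapstructure.StringToURLHookFunc(),
		Result:     &cfg,
	})
	if err != nil {
		panic(err)
	}
	if err := dec.Decode(map[string]any{"endpoint": "https://example.com/api"}); err != nil {
		panic(err)
	}
	fmt.Println(cfg.Endpoint.Host) // example.com
}
```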
diff --git a/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go b/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go
index 4b54fae087..e77e63ba38 100644
--- a/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go
+++ b/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go
@@ -266,6 +266,10 @@ type DecoderConfig struct {
// defaults to "mapstructure"
TagName string
+ // The option of the value in the tag that indicates a field should
+ // be squashed. This defaults to "squash".
+ SquashTagOption string
+
// IgnoreUntaggedFields ignores all struct fields without explicit
// TagName, comparable to `mapstructure:"-"` as default behaviour.
IgnoreUntaggedFields bool
@@ -274,6 +278,10 @@ type DecoderConfig struct {
// field name or tag. Defaults to `strings.EqualFold`. This can be used
// to implement case-sensitive tag values, support snake casing, etc.
MatchName func(mapKey, fieldName string) bool
+
+ // DecodeNil, if set to true, will cause the DecodeHook (if present) to run
+ // even if the input is nil. This can be used to provide default values.
+ DecodeNil bool
}
// A Decoder takes a raw interface value and turns it into structured
@@ -283,7 +291,8 @@ type DecoderConfig struct {
// structure. The top-level Decode method is just a convenience that sets
// up the most basic Decoder.
type Decoder struct {
- config *DecoderConfig
+ config *DecoderConfig
+ cachedDecodeHook func(from reflect.Value, to reflect.Value) (interface{}, error)
}
// Metadata contains information about decoding a structure that
@@ -401,6 +410,10 @@ func NewDecoder(config *DecoderConfig) (*Decoder, error) {
config.TagName = "mapstructure"
}
+ if config.SquashTagOption == "" {
+ config.SquashTagOption = "squash"
+ }
+
if config.MatchName == nil {
config.MatchName = strings.EqualFold
}
@@ -408,6 +421,9 @@ func NewDecoder(config *DecoderConfig) (*Decoder, error) {
result := &Decoder{
config: config,
}
+ if config.DecodeHook != nil {
+ result.cachedDecodeHook = cachedDecodeHook(config.DecodeHook)
+ }
return result, nil
}
@@ -426,19 +442,26 @@ func (d *Decoder) Decode(input interface{}) error {
return err
}
+// isNil returns true if the input is nil or a typed nil pointer.
+func isNil(input interface{}) bool {
+ if input == nil {
+ return true
+ }
+ val := reflect.ValueOf(input)
+ return val.Kind() == reflect.Ptr && val.IsNil()
+}
+
// Decodes an unknown data type into a specific reflection value.
func (d *Decoder) decode(name string, input interface{}, outVal reflect.Value) error {
- var inputVal reflect.Value
- if input != nil {
- inputVal = reflect.ValueOf(input)
-
- // We need to check here if input is a typed nil. Typed nils won't
- // match the "input == nil" below so we check that here.
- if inputVal.Kind() == reflect.Ptr && inputVal.IsNil() {
- input = nil
- }
+ var (
+ inputVal = reflect.ValueOf(input)
+ outputKind = getKind(outVal)
+ decodeNil = d.config.DecodeNil && d.cachedDecodeHook != nil
+ )
+ if isNil(input) {
+ // Typed nils won't match the "input == nil" below, so reset input.
+ input = nil
}
-
if input == nil {
// If the data is nil, then we don't set anything, unless ZeroFields is set
// to true.
@@ -449,30 +472,46 @@ func (d *Decoder) decode(name string, input interface{}, outVal reflect.Value) e
d.config.Metadata.Keys = append(d.config.Metadata.Keys, name)
}
}
- return nil
+ if !decodeNil {
+ return nil
+ }
}
-
if !inputVal.IsValid() {
- // If the input value is invalid, then we just set the value
- // to be the zero value.
- outVal.Set(reflect.Zero(outVal.Type()))
- if d.config.Metadata != nil && name != "" {
- d.config.Metadata.Keys = append(d.config.Metadata.Keys, name)
+ if !decodeNil {
+ // If the input value is invalid, then we just set the value
+ // to be the zero value.
+ outVal.Set(reflect.Zero(outVal.Type()))
+ if d.config.Metadata != nil && name != "" {
+ d.config.Metadata.Keys = append(d.config.Metadata.Keys, name)
+ }
+ return nil
+ }
+ // Hooks need a valid inputVal, so reset it to zero value of outVal type.
+ switch outputKind {
+ case reflect.Struct, reflect.Map:
+ var mapVal map[string]interface{}
+ inputVal = reflect.ValueOf(mapVal) // create nil map pointer
+ case reflect.Slice, reflect.Array:
+ var sliceVal []interface{}
+ inputVal = reflect.ValueOf(sliceVal) // create nil slice pointer
+ default:
+ inputVal = reflect.Zero(outVal.Type())
}
- return nil
}
- if d.config.DecodeHook != nil {
+ if d.cachedDecodeHook != nil {
// We have a DecodeHook, so let's pre-process the input.
var err error
- input, err = DecodeHookExec(d.config.DecodeHook, inputVal, outVal)
+ input, err = d.cachedDecodeHook(inputVal, outVal)
if err != nil {
return fmt.Errorf("error decoding '%s': %w", name, err)
}
}
+ if isNil(input) {
+ return nil
+ }
var err error
- outputKind := getKind(outVal)
addMetaKey := true
switch outputKind {
case reflect.Bool:
@@ -753,8 +792,8 @@ func (d *Decoder) decodeBool(name string, data interface{}, val reflect.Value) e
}
default:
return fmt.Errorf(
- "'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
- name, val.Type(), dataVal.Type(), data)
+ "'%s' expected type '%s', got unconvertible type '%#v', value: '%#v'",
+ name, val, dataVal, data)
}
return nil
@@ -973,7 +1012,7 @@ func (d *Decoder) decodeMapFromStruct(name string, dataVal reflect.Value, val re
}
// If "squash" is specified in the tag, we squash the field down.
- squash = squash || strings.Index(tagValue[index+1:], "squash") != -1
+ squash = squash || strings.Contains(tagValue[index+1:], d.config.SquashTagOption)
if squash {
// When squashing, the embedded type can be a pointer to a struct.
if v.Kind() == reflect.Ptr && v.Elem().Kind() == reflect.Struct {
@@ -1351,7 +1390,7 @@ func (d *Decoder) decodeStructFromMap(name string, dataVal, val reflect.Value) e
// We always parse the tags cause we're looking for other tags too
tagParts := strings.Split(fieldType.Tag.Get(d.config.TagName), ",")
for _, tag := range tagParts[1:] {
- if tag == "squash" {
+ if tag == d.config.SquashTagOption {
squash = true
break
}
@@ -1363,10 +1402,15 @@ func (d *Decoder) decodeStructFromMap(name string, dataVal, val reflect.Value) e
}
if squash {
- if fieldVal.Kind() != reflect.Struct {
- errs = append(errs, fmt.Errorf("%s: unsupported type for squash: %s", fieldType.Name, fieldVal.Kind()))
- } else {
+ switch fieldVal.Kind() {
+ case reflect.Struct:
structs = append(structs, fieldVal)
+ case reflect.Interface:
+ if !fieldVal.IsNil() {
+ structs = append(structs, fieldVal.Elem().Elem())
+ }
+ default:
+ errs = append(errs, fmt.Errorf("%s: unsupported type for squash: %s", fieldType.Name, fieldVal.Kind()))
}
continue
}
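
The new `SquashTagOption` above makes the squash tag value configurable; a minimal sketch using a hypothetical `flatten` tag value:

```go
package main

import (
	"fmt"

	"github.com/go-viper/mapstructure/v2"
)

type Base struct {
	ID string `mapstructure:"id"`
}

type Wrapper struct {
	// Squashed because "flatten" is configured as the squash tag option.
	Base `mapstructure:",flatten"`
	Name string `mapstructure:"name"`
}

func main() {
	var w Wrapper

	dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		Result:          &w,
		SquashTagOption: "flatten",
	})
	if err != nil {
		panic(err)
	}
	if err := dec.Decode(map[string]any{"id": "42", "name": "demo"}); err != nil {
		panic(err)
	}
	fmt.Println(w.ID, w.Name) // 42 demo
}
```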
diff --git a/vendor/github.com/go-xmlfmt/xmlfmt/xmlfmt.go b/vendor/github.com/go-xmlfmt/xmlfmt/xmlfmt.go
index 4245e5ad72..365a1d0477 100644
--- a/vendor/github.com/go-xmlfmt/xmlfmt/xmlfmt.go
+++ b/vendor/github.com/go-xmlfmt/xmlfmt/xmlfmt.go
@@ -14,7 +14,10 @@ import (
)
var (
- reg = regexp.MustCompile(`<([/!]?)([^>]+?)(/?)>`)
+ reg = regexp.MustCompile(`<([/!]?)([^>]+?)(/?)>`)
+ reXMLComments = regexp.MustCompile(`(?s)(<!--)(.+?)(-->)`)
+ reSpaces = regexp.MustCompile(`(?s)>\s+<`)
+ reNewlines = regexp.MustCompile(`\r*\n`)
// NL is the newline string used in XML output.
NL = "\n"
)
@@ -33,20 +36,19 @@ func FormatXML(xmls, prefix, indent string, nestedTagsInComments ...bool) string
if len(nestedTagsInComments) > 0 {
nestedTagsInComment = nestedTagsInComments[0]
}
- reXmlComments := regexp.MustCompile(`(?s)(<!--)(.+?)(-->)`)
- src := regexp.MustCompile(`(?s)>\s+<`).ReplaceAllString(xmls, "><")
+ src := reSpaces.ReplaceAllString(xmls, "><")
if nestedTagsInComment {
- src = reXmlComments.ReplaceAllStringFunc(src, func(m string) string {
- parts := reXmlComments.FindStringSubmatch(m)
- p2 := regexp.MustCompile(`\r*\n`).ReplaceAllString(parts[2], " ")
+ src = reXMLComments.ReplaceAllStringFunc(src, func(m string) string {
+ parts := reXMLComments.FindStringSubmatch(m)
+ p2 := reNewlines.ReplaceAllString(parts[2], " ")
return parts[1] + html.EscapeString(p2) + parts[3]
})
}
rf := replaceTag(prefix, indent)
r := prefix + reg.ReplaceAllStringFunc(src, rf)
if nestedTagsInComment {
- r = reXmlComments.ReplaceAllStringFunc(r, func(m string) string {
- parts := reXmlComments.FindStringSubmatch(m)
+ r = reXMLComments.ReplaceAllStringFunc(r, func(m string) string {
+ parts := reXMLComments.FindStringSubmatch(m)
return parts[1] + html.UnescapeString(parts[2]) + parts[3]
})
}
diff --git a/vendor/github.com/gofrs/flock/.golangci.yml b/vendor/github.com/gofrs/flock/.golangci.yml
new file mode 100644
index 0000000000..3ad88a38fc
--- /dev/null
+++ b/vendor/github.com/gofrs/flock/.golangci.yml
@@ -0,0 +1,114 @@
+run:
+ timeout: 10m
+
+linters:
+ enable:
+ - asasalint
+ - bidichk
+ - dogsled
+ - dupword
+ - durationcheck
+ - err113
+ - errname
+ - errorlint
+ - fatcontext
+ - forbidigo
+ - gocheckcompilerdirectives
+ - gochecknoinits
+ - gocritic
+ - godot
+ - godox
+ - gofumpt
+ - goheader
+ - goimports
+ - gomoddirectives
+ - goprintffuncname
+ - gosec
+ - inamedparam
+ - interfacebloat
+ - ireturn
+ - mirror
+ - misspell
+ - nolintlint
+ - revive
+ - stylecheck
+ - tenv
+ - testifylint
+ - thelper
+ - unconvert
+ - unparam
+ - usestdlibvars
+ - whitespace
+
+linters-settings:
+ misspell:
+ locale: US
+ godox:
+ keywords:
+ - FIXME
+ goheader:
+ template: |-
+ Copyright 2015 Tim Heckman. All rights reserved.
+ Copyright 2018-{{ YEAR }} The Gofrs. All rights reserved.
+ Use of this source code is governed by the BSD 3-Clause
+ license that can be found in the LICENSE file.
+ gofumpt:
+ extra-rules: true
+ gocritic:
+ enabled-tags:
+ - diagnostic
+ - style
+ - performance
+ disabled-checks:
+ - paramTypeCombine # already handle by gofumpt.extra-rules
+ - whyNoLint # already handle by nonolint
+ - unnamedResult
+ - hugeParam
+ - sloppyReassign
+ - rangeValCopy
+ - octalLiteral
+ - ptrToRefParam
+ - appendAssign
+ - ruleguard
+ - httpNoBody
+ - exposedSyncMutex
+
+ revive:
+ rules:
+ - name: struct-tag
+ - name: blank-imports
+ - name: context-as-argument
+ - name: context-keys-type
+ - name: dot-imports
+ - name: error-return
+ - name: error-strings
+ - name: error-naming
+ - name: exported
+ - name: if-return
+ - name: increment-decrement
+ - name: var-naming
+ - name: var-declaration
+ - name: package-comments
+ - name: range
+ - name: receiver-naming
+ - name: time-naming
+ - name: unexported-return
+ - name: indent-error-flow
+ - name: errorf
+ - name: empty-block
+ - name: superfluous-else
+ - name: unused-parameter
+ - name: unreachable-code
+ - name: redefines-builtin-id
+
+issues:
+ exclude-use-default: true
+ max-issues-per-linter: 0
+ max-same-issues: 0
+
+output:
+ show-stats: true
+ sort-results: true
+ sort-order:
+ - linter
+ - file
diff --git a/vendor/github.com/gofrs/flock/.travis.yml b/vendor/github.com/gofrs/flock/.travis.yml
deleted file mode 100644
index b16d040fa8..0000000000
--- a/vendor/github.com/gofrs/flock/.travis.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-language: go
-go:
- - 1.14.x
- - 1.15.x
-script: go test -v -check.vv -race ./...
-sudo: false
-notifications:
- email:
- on_success: never
- on_failure: always
diff --git a/vendor/github.com/gofrs/flock/LICENSE b/vendor/github.com/gofrs/flock/LICENSE
index 8b8ff36fe4..7de525bf02 100644
--- a/vendor/github.com/gofrs/flock/LICENSE
+++ b/vendor/github.com/gofrs/flock/LICENSE
@@ -1,3 +1,4 @@
+Copyright (c) 2018-2024, The Gofrs
Copyright (c) 2015-2020, Tim Heckman
All rights reserved.
diff --git a/vendor/github.com/gofrs/flock/Makefile b/vendor/github.com/gofrs/flock/Makefile
new file mode 100644
index 0000000000..65c139d68c
--- /dev/null
+++ b/vendor/github.com/gofrs/flock/Makefile
@@ -0,0 +1,15 @@
+.PHONY: lint test test_race build_cross_os
+
+default: lint test build_cross_os
+
+test:
+ go test -v -cover ./...
+
+test_race:
+ CGO_ENABLED=1 go test -v -race ./...
+
+lint:
+ golangci-lint run
+
+build_cross_os:
+ ./build.sh
diff --git a/vendor/github.com/gofrs/flock/README.md b/vendor/github.com/gofrs/flock/README.md
index 71ce63692e..f7ca0dd9c2 100644
--- a/vendor/github.com/gofrs/flock/README.md
+++ b/vendor/github.com/gofrs/flock/README.md
@@ -1,26 +1,22 @@
# flock
-[](https://travis-ci.org/gofrs/flock)
-[](https://godoc.org/github.com/gofrs/flock)
-[](https://github.com/gofrs/flock/blob/master/LICENSE)
-[](https://goreportcard.com/report/github.com/gofrs/flock)
-`flock` implements a thread-safe sync.Locker interface for file locking. It also
-includes a non-blocking TryLock() function to allow locking without blocking execution.
+[](https://pkg.go.dev/github.com/gofrs/flock)
+[](https://github.com/gofrs/flock/blob/main/LICENSE)
+[](https://goreportcard.com/report/github.com/gofrs/flock)
-## License
-`flock` is released under the BSD 3-Clause License. See the `LICENSE` file for more details.
+`flock` implements a thread-safe file lock.
-## Go Compatibility
-This package makes use of the `context` package that was introduced in Go 1.7. As such, this
-package has an implicit dependency on Go 1.7+.
+It also includes a non-blocking `TryLock()` function to allow locking without blocking execution.
## Installation
-```
+
+```bash
go get -u github.com/gofrs/flock
```
## Usage
-```Go
+
+```go
import "github.com/gofrs/flock"
fileLock := flock.New("/var/lock/go-lock.lock")
@@ -38,4 +34,12 @@ if locked {
```
For more detailed usage information take a look at the package API docs on
-[GoDoc](https://godoc.org/github.com/gofrs/flock).
+[GoDoc](https://pkg.go.dev/github.com/gofrs/flock).
+
+## License
+
+`flock` is released under the BSD 3-Clause License. See the [`LICENSE`](./LICENSE) file for more details.
+
+## Project History
+
+This project was originally `github.com/theckman/go-flock`; it was transferred to Gofrs by the original author [Tim Heckman](https://github.com/theckman).
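
The trimmed README keeps only the basic `TryLock` snippet. For the context-based variant provided by this vendored version, a minimal sketch (the lock path and retry delay are illustrative, not taken from the upstream docs) could look like:

```go
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/gofrs/flock"
)

func main() {
	fileLock := flock.New("/var/lock/go-lock.lock")

	// Give up after 30 seconds, retrying roughly twice per second in between.
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	locked, err := fileLock.TryLockContext(ctx, 500*time.Millisecond)
	if err != nil {
		fmt.Println("could not obtain lock:", err)
		return
	}
	if locked {
		fmt.Println("lock acquired")
		// do work while holding the lock...
		_ = fileLock.Unlock()
	}
}
```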
diff --git a/vendor/github.com/gofrs/flock/SECURITY.md b/vendor/github.com/gofrs/flock/SECURITY.md
new file mode 100644
index 0000000000..01419bd592
--- /dev/null
+++ b/vendor/github.com/gofrs/flock/SECURITY.md
@@ -0,0 +1,21 @@
+# Security Policy
+
+## Supported Versions
+
+We support the latest version of this library.
+We do not guarantee support of previous versions.
+
+If a defect is reported, it will generally be fixed on the latest version (provided it exists) irrespective of whether it was introduced in a prior version.
+
+## Reporting a Vulnerability
+
+To report a potential security vulnerability, please create a [security advisory](https://github.com/gofrs/flock/security/advisories/new).
+
+For us to respond to your report most effectively, please include any of the following:
+
+- Steps to reproduce or a proof-of-concept
+- Any relevant information, including the versions used
+
+## Security Scorecard
+
+This project submits security [results](https://scorecard.dev/viewer/?uri=github.com/gofrs/flock) to the [OpenSSF Scorecard](https://securityscorecards.dev/).
diff --git a/vendor/github.com/gofrs/flock/appveyor.yml b/vendor/github.com/gofrs/flock/appveyor.yml
deleted file mode 100644
index 909b4bf7cb..0000000000
--- a/vendor/github.com/gofrs/flock/appveyor.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-version: '{build}'
-
-build: false
-deploy: false
-
-clone_folder: 'c:\gopath\src\github.com\gofrs\flock'
-
-environment:
- GOPATH: 'c:\gopath'
- GOVERSION: '1.15'
-
-init:
- - git config --global core.autocrlf input
-
-install:
- - rmdir c:\go /s /q
- - appveyor DownloadFile https://storage.googleapis.com/golang/go%GOVERSION%.windows-amd64.msi
- - msiexec /i go%GOVERSION%.windows-amd64.msi /q
- - set Path=c:\go\bin;c:\gopath\bin;%Path%
- - go version
- - go env
-
-test_script:
- - go get -t ./...
- - go test -race -v ./...
diff --git a/vendor/github.com/gofrs/flock/build.sh b/vendor/github.com/gofrs/flock/build.sh
new file mode 100644
index 0000000000..60f7809f06
--- /dev/null
+++ b/vendor/github.com/gofrs/flock/build.sh
@@ -0,0 +1,18 @@
+#!/bin/bash -e
+
+# Not supported by flock:
+# - plan9/*
+# - js/wasm
+# - wasip1/wasm
+
+for row in $(go tool dist list -json | jq -r '.[] | @base64'); do
+ _jq() {
+ echo ${row} | base64 --decode | jq -r ${1}
+ }
+
+ GOOS=$(_jq '.GOOS')
+ GOARCH=$(_jq '.GOARCH')
+
+ echo "$GOOS/$GOARCH"
+ GOOS=$GOOS GOARCH=$GOARCH go build
+done
diff --git a/vendor/github.com/gofrs/flock/flock.go b/vendor/github.com/gofrs/flock/flock.go
index 95c784ca50..ff942b228a 100644
--- a/vendor/github.com/gofrs/flock/flock.go
+++ b/vendor/github.com/gofrs/flock/flock.go
@@ -1,4 +1,5 @@
// Copyright 2015 Tim Heckman. All rights reserved.
+// Copyright 2018-2024 The Gofrs. All rights reserved.
// Use of this source code is governed by the BSD 3-Clause
// license that can be found in the LICENSE file.
@@ -18,12 +19,29 @@ package flock
import (
"context"
+ "io/fs"
"os"
"runtime"
"sync"
"time"
)
+type Option func(f *Flock)
+
+// SetFlag sets the flag used to create/open the file.
+func SetFlag(flag int) Option {
+ return func(f *Flock) {
+ f.flag = flag
+ }
+}
+
+// SetPermissions sets the OS permissions to set on the file.
+func SetPermissions(perm fs.FileMode) Option {
+ return func(f *Flock) {
+ f.perm = perm
+ }
+}
+
// Flock is the struct type to handle file locking. All fields are unexported,
// with access to some of the fields provided by getter methods (Path() and Locked()).
type Flock struct {
@@ -32,12 +50,37 @@ type Flock struct {
fh *os.File
l bool
r bool
+
+ // flag is the flag used to create/open the file.
+ flag int
+ // perm is the OS permissions to set on the file.
+ perm fs.FileMode
}
// New returns a new instance of *Flock. The only parameter
// it takes is the path to the desired lockfile.
-func New(path string) *Flock {
- return &Flock{path: path}
+func New(path string, opts ...Option) *Flock {
+ // create it if it doesn't exist, and open the file read-only.
+ flags := os.O_CREATE
+ switch runtime.GOOS {
+ case "aix", "solaris", "illumos":
+ // AIX cannot perform write-lock (i.e. exclusive) on a read-only file.
+ flags |= os.O_RDWR
+ default:
+ flags |= os.O_RDONLY
+ }
+
+ f := &Flock{
+ path: path,
+ flag: flags,
+ perm: fs.FileMode(0o600),
+ }
+
+ for _, opt := range opts {
+ opt(f)
+ }
+
+ return f
}
// NewFlock returns a new instance of *Flock. The only parameter
@@ -67,6 +110,7 @@ func (f *Flock) Path() string {
func (f *Flock) Locked() bool {
f.m.RLock()
defer f.m.RUnlock()
+
return f.l
}
@@ -76,6 +120,7 @@ func (f *Flock) Locked() bool {
func (f *Flock) RLocked() bool {
f.m.RLock()
defer f.m.RUnlock()
+
return f.r
}
@@ -83,16 +128,18 @@ func (f *Flock) String() string {
return f.path
}
-// TryLockContext repeatedly tries to take an exclusive lock until one of the
-// conditions is met: TryLock succeeds, TryLock fails with error, or Context
-// Done channel is closed.
+// TryLockContext repeatedly tries to take an exclusive lock until one of the conditions is met:
+// - TryLock succeeds
+// - TryLock fails with error
+// - Context Done channel is closed.
func (f *Flock) TryLockContext(ctx context.Context, retryDelay time.Duration) (bool, error) {
return tryCtx(ctx, f.TryLock, retryDelay)
}
-// TryRLockContext repeatedly tries to take a shared lock until one of the
-// conditions is met: TryRLock succeeds, TryRLock fails with error, or Context
-// Done channel is closed.
+// TryRLockContext repeatedly tries to take a shared lock until one of the conditions is met:
+// - TryRLock succeeds
+// - TryRLock fails with error
+// - Context Done channel is closed.
func (f *Flock) TryRLockContext(ctx context.Context, retryDelay time.Duration) (bool, error) {
return tryCtx(ctx, f.TryRLock, retryDelay)
}
@@ -101,10 +148,12 @@ func tryCtx(ctx context.Context, fn func() (bool, error), retryDelay time.Durati
if ctx.Err() != nil {
return false, ctx.Err()
}
+
for {
if ok, err := fn(); ok || err != nil {
return ok, err
}
+
select {
case <-ctx.Done():
return false, ctx.Err()
@@ -114,31 +163,44 @@ func tryCtx(ctx context.Context, fn func() (bool, error), retryDelay time.Durati
}
}
-func (f *Flock) setFh() error {
+func (f *Flock) setFh(flag int) error {
// open a new os.File instance
- // create it if it doesn't exist, and open the file read-only.
- flags := os.O_CREATE
- if runtime.GOOS == "aix" {
- // AIX cannot preform write-lock (ie exclusive) on a
- // read-only file.
- flags |= os.O_RDWR
- } else {
- flags |= os.O_RDONLY
- }
- fh, err := os.OpenFile(f.path, flags, os.FileMode(0600))
+ fh, err := os.OpenFile(f.path, flag, f.perm)
if err != nil {
return err
}
- // set the filehandle on the struct
+ // set the file handle on the struct
f.fh = fh
+
return nil
}
-// ensure the file handle is closed if no lock is held
+// resetFh resets file handle:
+// - tries to close the file (ignore errors)
+// - sets fh to nil.
+func (f *Flock) resetFh() {
+ if f.fh == nil {
+ return
+ }
+
+ _ = f.fh.Close()
+
+ f.fh = nil
+}
+
+// ensure the file handle is closed if no lock is held.
func (f *Flock) ensureFhState() {
- if !f.l && !f.r && f.fh != nil {
- f.fh.Close()
- f.fh = nil
+ if f.l || f.r || f.fh == nil {
+ return
}
+
+ f.resetFh()
+}
+
+func (f *Flock) reset() {
+ f.l = false
+ f.r = false
+
+ f.resetFh()
}
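
The new `Option` functions above (`SetFlag`, `SetPermissions`) replace the previously hard-coded open flags and `0600` permissions. A hedged sketch of how a caller could override both — the path, flags, and mode below are purely illustrative:

```go
package main

import (
	"io/fs"
	"log"
	"os"

	"github.com/gofrs/flock"
)

func main() {
	// Open the lock file read-write and group-readable instead of the
	// defaults (read-only on most platforms, 0o600 permissions).
	f := flock.New(
		"/tmp/example.lock",
		flock.SetFlag(os.O_CREATE|os.O_RDWR),
		flock.SetPermissions(fs.FileMode(0o640)),
	)

	locked, err := f.TryLock()
	if err != nil {
		log.Fatal(err)
	}
	if locked {
		defer f.Unlock()
		// critical section...
	}
}
```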
diff --git a/vendor/github.com/gofrs/flock/flock_aix.go b/vendor/github.com/gofrs/flock/flock_aix.go
deleted file mode 100644
index 7277c1b6b2..0000000000
--- a/vendor/github.com/gofrs/flock/flock_aix.go
+++ /dev/null
@@ -1,281 +0,0 @@
-// Copyright 2019 Tim Heckman. All rights reserved. Use of this source code is
-// governed by the BSD 3-Clause license that can be found in the LICENSE file.
-
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This code implements the filelock API using POSIX 'fcntl' locks, which attach
-// to an (inode, process) pair rather than a file descriptor. To avoid unlocking
-// files prematurely when the same file is opened through different descriptors,
-// we allow only one read-lock at a time.
-//
-// This code is adapted from the Go package:
-// cmd/go/internal/lockedfile/internal/filelock
-
-//+build aix
-
-package flock
-
-import (
- "errors"
- "io"
- "os"
- "sync"
- "syscall"
-
- "golang.org/x/sys/unix"
-)
-
-type lockType int16
-
-const (
- readLock lockType = unix.F_RDLCK
- writeLock lockType = unix.F_WRLCK
-)
-
-type cmdType int
-
-const (
- tryLock cmdType = unix.F_SETLK
- waitLock cmdType = unix.F_SETLKW
-)
-
-type inode = uint64
-
-type inodeLock struct {
- owner *Flock
- queue []<-chan *Flock
-}
-
-var (
- mu sync.Mutex
- inodes = map[*Flock]inode{}
- locks = map[inode]inodeLock{}
-)
-
-// Lock is a blocking call to try and take an exclusive file lock. It will wait
-// until it is able to obtain the exclusive file lock. It's recommended that
-// TryLock() be used over this function. This function may block the ability to
-// query the current Locked() or RLocked() status due to a RW-mutex lock.
-//
-// If we are already exclusive-locked, this function short-circuits and returns
-// immediately assuming it can take the mutex lock.
-//
-// If the *Flock has a shared lock (RLock), this may transparently replace the
-// shared lock with an exclusive lock on some UNIX-like operating systems. Be
-// careful when using exclusive locks in conjunction with shared locks
-// (RLock()), because calling Unlock() may accidentally release the exclusive
-// lock that was once a shared lock.
-func (f *Flock) Lock() error {
- return f.lock(&f.l, writeLock)
-}
-
-// RLock is a blocking call to try and take a shared file lock. It will wait
-// until it is able to obtain the shared file lock. It's recommended that
-// TryRLock() be used over this function. This function may block the ability to
-// query the current Locked() or RLocked() status due to a RW-mutex lock.
-//
-// If we are already shared-locked, this function short-circuits and returns
-// immediately assuming it can take the mutex lock.
-func (f *Flock) RLock() error {
- return f.lock(&f.r, readLock)
-}
-
-func (f *Flock) lock(locked *bool, flag lockType) error {
- f.m.Lock()
- defer f.m.Unlock()
-
- if *locked {
- return nil
- }
-
- if f.fh == nil {
- if err := f.setFh(); err != nil {
- return err
- }
- defer f.ensureFhState()
- }
-
- if _, err := f.doLock(waitLock, flag, true); err != nil {
- return err
- }
-
- *locked = true
- return nil
-}
-
-func (f *Flock) doLock(cmd cmdType, lt lockType, blocking bool) (bool, error) {
- // POSIX locks apply per inode and process, and the lock for an inode is
- // released when *any* descriptor for that inode is closed. So we need to
- // synchronize access to each inode internally, and must serialize lock and
- // unlock calls that refer to the same inode through different descriptors.
- fi, err := f.fh.Stat()
- if err != nil {
- return false, err
- }
- ino := inode(fi.Sys().(*syscall.Stat_t).Ino)
-
- mu.Lock()
- if i, dup := inodes[f]; dup && i != ino {
- mu.Unlock()
- return false, &os.PathError{
- Path: f.Path(),
- Err: errors.New("inode for file changed since last Lock or RLock"),
- }
- }
-
- inodes[f] = ino
-
- var wait chan *Flock
- l := locks[ino]
- if l.owner == f {
- // This file already owns the lock, but the call may change its lock type.
- } else if l.owner == nil {
- // No owner: it's ours now.
- l.owner = f
- } else if !blocking {
- // Already owned: cannot take the lock.
- mu.Unlock()
- return false, nil
- } else {
- // Already owned: add a channel to wait on.
- wait = make(chan *Flock)
- l.queue = append(l.queue, wait)
- }
- locks[ino] = l
- mu.Unlock()
-
- if wait != nil {
- wait <- f
- }
-
- err = setlkw(f.fh.Fd(), cmd, lt)
-
- if err != nil {
- f.doUnlock()
- if cmd == tryLock && err == unix.EACCES {
- return false, nil
- }
- return false, err
- }
-
- return true, nil
-}
-
-func (f *Flock) Unlock() error {
- f.m.Lock()
- defer f.m.Unlock()
-
- // if we aren't locked or if the lockfile instance is nil
- // just return a nil error because we are unlocked
- if (!f.l && !f.r) || f.fh == nil {
- return nil
- }
-
- if err := f.doUnlock(); err != nil {
- return err
- }
-
- f.fh.Close()
-
- f.l = false
- f.r = false
- f.fh = nil
-
- return nil
-}
-
-func (f *Flock) doUnlock() (err error) {
- var owner *Flock
- mu.Lock()
- ino, ok := inodes[f]
- if ok {
- owner = locks[ino].owner
- }
- mu.Unlock()
-
- if owner == f {
- err = setlkw(f.fh.Fd(), waitLock, unix.F_UNLCK)
- }
-
- mu.Lock()
- l := locks[ino]
- if len(l.queue) == 0 {
- // No waiters: remove the map entry.
- delete(locks, ino)
- } else {
- // The first waiter is sending us their file now.
- // Receive it and update the queue.
- l.owner = <-l.queue[0]
- l.queue = l.queue[1:]
- locks[ino] = l
- }
- delete(inodes, f)
- mu.Unlock()
-
- return err
-}
-
-// TryLock is the preferred function for taking an exclusive file lock. This
-// function takes an RW-mutex lock before it tries to lock the file, so there is
-// the possibility that this function may block for a short time if another
-// goroutine is trying to take any action.
-//
-// The actual file lock is non-blocking. If we are unable to get the exclusive
-// file lock, the function will return false instead of waiting for the lock. If
-// we get the lock, we also set the *Flock instance as being exclusive-locked.
-func (f *Flock) TryLock() (bool, error) {
- return f.try(&f.l, writeLock)
-}
-
-// TryRLock is the preferred function for taking a shared file lock. This
-// function takes an RW-mutex lock before it tries to lock the file, so there is
-// the possibility that this function may block for a short time if another
-// goroutine is trying to take any action.
-//
-// The actual file lock is non-blocking. If we are unable to get the shared file
-// lock, the function will return false instead of waiting for the lock. If we
-// get the lock, we also set the *Flock instance as being share-locked.
-func (f *Flock) TryRLock() (bool, error) {
- return f.try(&f.r, readLock)
-}
-
-func (f *Flock) try(locked *bool, flag lockType) (bool, error) {
- f.m.Lock()
- defer f.m.Unlock()
-
- if *locked {
- return true, nil
- }
-
- if f.fh == nil {
- if err := f.setFh(); err != nil {
- return false, err
- }
- defer f.ensureFhState()
- }
-
- haslock, err := f.doLock(tryLock, flag, false)
- if err != nil {
- return false, err
- }
-
- *locked = haslock
- return haslock, nil
-}
-
-// setlkw calls FcntlFlock with cmd for the entire file indicated by fd.
-func setlkw(fd uintptr, cmd cmdType, lt lockType) error {
- for {
- err := unix.FcntlFlock(fd, int(cmd), &unix.Flock_t{
- Type: int16(lt),
- Whence: io.SeekStart,
- Start: 0,
- Len: 0, // All bytes.
- })
- if err != unix.EINTR {
- return err
- }
- }
-}
diff --git a/vendor/github.com/gofrs/flock/flock_others.go b/vendor/github.com/gofrs/flock/flock_others.go
new file mode 100644
index 0000000000..18b14f1bd7
--- /dev/null
+++ b/vendor/github.com/gofrs/flock/flock_others.go
@@ -0,0 +1,40 @@
+//go:build (!unix && !windows) || plan9
+
+package flock
+
+import (
+ "errors"
+ "io/fs"
+)
+
+func (f *Flock) Lock() error {
+ return &fs.PathError{
+ Op: "Lock",
+ Path: f.Path(),
+ Err: errors.ErrUnsupported,
+ }
+}
+
+func (f *Flock) RLock() error {
+ return &fs.PathError{
+ Op: "RLock",
+ Path: f.Path(),
+ Err: errors.ErrUnsupported,
+ }
+}
+
+func (f *Flock) Unlock() error {
+ return &fs.PathError{
+ Op: "Unlock",
+ Path: f.Path(),
+ Err: errors.ErrUnsupported,
+ }
+}
+
+func (f *Flock) TryLock() (bool, error) {
+ return false, f.Lock()
+}
+
+func (f *Flock) TryRLock() (bool, error) {
+ return false, f.RLock()
+}
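
On platforms with no flock implementation, every call now returns a `*fs.PathError` wrapping `errors.ErrUnsupported`, so callers can branch on it with `errors.Is`. A minimal sketch (the lock path is illustrative):

```go
package main

import (
	"errors"
	"log"

	"github.com/gofrs/flock"
)

func main() {
	fileLock := flock.New("/tmp/example.lock")

	locked, err := fileLock.TryLock()
	switch {
	case errors.Is(err, errors.ErrUnsupported):
		// File locking is not available on this platform (e.g. plan9);
		// fall back to another coordination mechanism.
		log.Println("file locking unsupported, continuing without a lock")
	case err != nil:
		log.Fatal(err)
	case locked:
		defer fileLock.Unlock()
		// critical section...
	}
}
```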
diff --git a/vendor/github.com/gofrs/flock/flock_unix.go b/vendor/github.com/gofrs/flock/flock_unix.go
index c315a3e290..cf8919c7ad 100644
--- a/vendor/github.com/gofrs/flock/flock_unix.go
+++ b/vendor/github.com/gofrs/flock/flock_unix.go
@@ -1,42 +1,44 @@
// Copyright 2015 Tim Heckman. All rights reserved.
+// Copyright 2018-2024 The Gofrs. All rights reserved.
// Use of this source code is governed by the BSD 3-Clause
// license that can be found in the LICENSE file.
-// +build !aix,!windows
+//go:build darwin || dragonfly || freebsd || illumos || linux || netbsd || openbsd
package flock
import (
+ "errors"
"os"
- "syscall"
+
+ "golang.org/x/sys/unix"
)
-// Lock is a blocking call to try and take an exclusive file lock. It will wait
-// until it is able to obtain the exclusive file lock. It's recommended that
-// TryLock() be used over this function. This function may block the ability to
-// query the current Locked() or RLocked() status due to a RW-mutex lock.
+// Lock is a blocking call to try and take an exclusive file lock.
+// It will wait until it is able to obtain the exclusive file lock.
+// It's recommended that TryLock() be used over this function.
+// This function may block the ability to query the current Locked() or RLocked() status due to a RW-mutex lock.
//
-// If we are already exclusive-locked, this function short-circuits and returns
-// immediately assuming it can take the mutex lock.
+// If we are already exclusive-locked,
+// this function short-circuits and returns immediately assuming it can take the mutex lock.
//
-// If the *Flock has a shared lock (RLock), this may transparently replace the
-// shared lock with an exclusive lock on some UNIX-like operating systems. Be
-// careful when using exclusive locks in conjunction with shared locks
-// (RLock()), because calling Unlock() may accidentally release the exclusive
-// lock that was once a shared lock.
+// If the *Flock has a shared lock (RLock),
+// this may transparently replace the shared lock with an exclusive lock on some UNIX-like operating systems.
+// Be careful when using exclusive locks in conjunction with shared locks (RLock()),
+// because calling Unlock() may accidentally release the exclusive lock that was once a shared lock.
func (f *Flock) Lock() error {
- return f.lock(&f.l, syscall.LOCK_EX)
+ return f.lock(&f.l, unix.LOCK_EX)
}
-// RLock is a blocking call to try and take a shared file lock. It will wait
-// until it is able to obtain the shared file lock. It's recommended that
-// TryRLock() be used over this function. This function may block the ability to
-// query the current Locked() or RLocked() status due to a RW-mutex lock.
+// RLock is a blocking call to try and take a shared file lock.
+// It will wait until it is able to obtain the shared file lock.
+// It's recommended that TryRLock() be used over this function.
+// This function may block the ability to query the current Locked() or RLocked() status due to a RW-mutex lock.
//
-// If we are already shared-locked, this function short-circuits and returns
-// immediately assuming it can take the mutex lock.
+// If we are already shared-locked,
+// this function short-circuits and returns immediately assuming it can take the mutex lock.
func (f *Flock) RLock() error {
- return f.lock(&f.r, syscall.LOCK_SH)
+ return f.lock(&f.r, unix.LOCK_SH)
}
func (f *Flock) lock(locked *bool, flag int) error {
@@ -48,13 +50,15 @@ func (f *Flock) lock(locked *bool, flag int) error {
}
if f.fh == nil {
- if err := f.setFh(); err != nil {
+ if err := f.setFh(f.flag); err != nil {
return err
}
+
defer f.ensureFhState()
}
- if err := syscall.Flock(int(f.fh.Fd()), flag); err != nil {
+ err := unix.Flock(int(f.fh.Fd()), flag)
+ if err != nil {
shouldRetry, reopenErr := f.reopenFDOnError(err)
if reopenErr != nil {
return reopenErr
@@ -64,71 +68,74 @@ func (f *Flock) lock(locked *bool, flag int) error {
return err
}
- if err = syscall.Flock(int(f.fh.Fd()), flag); err != nil {
+ err = unix.Flock(int(f.fh.Fd()), flag)
+ if err != nil {
return err
}
}
*locked = true
+
return nil
}
-// Unlock is a function to unlock the file. This file takes a RW-mutex lock, so
-// while it is running the Locked() and RLocked() functions will be blocked.
+// Unlock is a function to unlock the file.
+// This function takes a RW-mutex lock,
+// so while it is running the Locked() and RLocked() functions will be blocked.
//
-// This function short-circuits if we are unlocked already. If not, it calls
-// syscall.LOCK_UN on the file and closes the file descriptor. It does not
-// remove the file from disk. It's up to your application to do.
+// This function short-circuits if we are unlocked already.
+// If not, it calls unix.LOCK_UN on the file and closes the file descriptor.
+// It does not remove the file from disk. It's up to your application to do so.
//
-// Please note, if your shared lock became an exclusive lock this may
-// unintentionally drop the exclusive lock if called by the consumer that
-// believes they have a shared lock. Please see Lock() for more details.
+// Please note,
+// if your shared lock became an exclusive lock,
+// this may unintentionally drop the exclusive lock if called by the consumer that believes they have a shared lock.
+// Please see Lock() for more details.
func (f *Flock) Unlock() error {
f.m.Lock()
defer f.m.Unlock()
- // if we aren't locked or if the lockfile instance is nil
- // just return a nil error because we are unlocked
+ // If we aren't locked or if the lockfile instance is nil
+ // just return a nil error because we are unlocked.
if (!f.l && !f.r) || f.fh == nil {
return nil
}
- // mark the file as unlocked
- if err := syscall.Flock(int(f.fh.Fd()), syscall.LOCK_UN); err != nil {
+ // Mark the file as unlocked.
+ err := unix.Flock(int(f.fh.Fd()), unix.LOCK_UN)
+ if err != nil {
return err
}
- f.fh.Close()
-
- f.l = false
- f.r = false
- f.fh = nil
+ f.reset()
return nil
}
-// TryLock is the preferred function for taking an exclusive file lock. This
-// function takes an RW-mutex lock before it tries to lock the file, so there is
-// the possibility that this function may block for a short time if another
-// goroutine is trying to take any action.
+// TryLock is the preferred function for taking an exclusive file lock.
+// This function takes an RW-mutex lock before it tries to lock the file,
+// so there is the possibility that this function may block for a short time
+// if another goroutine is trying to take any action.
//
-// The actual file lock is non-blocking. If we are unable to get the exclusive
-// file lock, the function will return false instead of waiting for the lock. If
-// we get the lock, we also set the *Flock instance as being exclusive-locked.
+// The actual file lock is non-blocking.
+// If we are unable to get the exclusive file lock,
+// the function will return false instead of waiting for the lock.
+// If we get the lock, we also set the *Flock instance as being exclusive-locked.
func (f *Flock) TryLock() (bool, error) {
- return f.try(&f.l, syscall.LOCK_EX)
+ return f.try(&f.l, unix.LOCK_EX)
}
-// TryRLock is the preferred function for taking a shared file lock. This
-// function takes an RW-mutex lock before it tries to lock the file, so there is
-// the possibility that this function may block for a short time if another
-// goroutine is trying to take any action.
+// TryRLock is the preferred function for taking a shared file lock.
+// This function takes an RW-mutex lock before it tries to lock the file,
+// so there is the possibility that this function may block for a short time
+// if another goroutine is trying to take any action.
//
-// The actual file lock is non-blocking. If we are unable to get the shared file
-// lock, the function will return false instead of waiting for the lock. If we
-// get the lock, we also set the *Flock instance as being share-locked.
+// The actual file lock is non-blocking.
+// If we are unable to get the shared file lock,
+// the function will return false instead of waiting for the lock.
+// If we get the lock, we also set the *Flock instance as being share-locked.
func (f *Flock) TryRLock() (bool, error) {
- return f.try(&f.r, syscall.LOCK_SH)
+ return f.try(&f.r, unix.LOCK_SH)
}
func (f *Flock) try(locked *bool, flag int) (bool, error) {
@@ -140,25 +147,28 @@ func (f *Flock) try(locked *bool, flag int) (bool, error) {
}
if f.fh == nil {
- if err := f.setFh(); err != nil {
+ if err := f.setFh(f.flag); err != nil {
return false, err
}
+
defer f.ensureFhState()
}
var retried bool
retry:
- err := syscall.Flock(int(f.fh.Fd()), flag|syscall.LOCK_NB)
+ err := unix.Flock(int(f.fh.Fd()), flag|unix.LOCK_NB)
- switch err {
- case syscall.EWOULDBLOCK:
+ switch {
+ case errors.Is(err, unix.EWOULDBLOCK):
return false, nil
- case nil:
+ case err == nil:
*locked = true
return true, nil
}
+
if !retried {
- if shouldRetry, reopenErr := f.reopenFDOnError(err); reopenErr != nil {
+ shouldRetry, reopenErr := f.reopenFDOnError(err)
+ if reopenErr != nil {
return false, reopenErr
} else if shouldRetry {
retried = true
@@ -169,29 +179,32 @@ retry:
return false, err
}
-// reopenFDOnError determines whether we should reopen the file handle
-// in readwrite mode and try again. This comes from util-linux/sys-utils/flock.c:
-// Since Linux 3.4 (commit 55725513)
-// Probably NFSv4 where flock() is emulated by fcntl().
+// reopenFDOnError determines whether we should reopen the file handle in read-write mode and try again.
+// This comes from `util-linux/sys-utils/flock.c`:
+// > Since Linux 3.4 (commit 55725513)
+// > Probably NFSv4 where flock() is emulated by fcntl().
+// > https://github.com/util-linux/util-linux/blob/198e920aa24743ef6ace4e07cf6237de527f9261/sys-utils/flock.c#L374-L390
func (f *Flock) reopenFDOnError(err error) (bool, error) {
- if err != syscall.EIO && err != syscall.EBADF {
+ if !errors.Is(err, unix.EIO) && !errors.Is(err, unix.EBADF) {
return false, nil
}
- if st, err := f.fh.Stat(); err == nil {
- // if the file is able to be read and written
- if st.Mode()&0600 == 0600 {
- f.fh.Close()
- f.fh = nil
-
- // reopen in read-write mode and set the filehandle
- fh, err := os.OpenFile(f.path, os.O_CREATE|os.O_RDWR, os.FileMode(0600))
- if err != nil {
- return false, err
- }
- f.fh = fh
- return true, nil
- }
+
+ st, err := f.fh.Stat()
+ if err != nil {
+ return false, nil
+ }
+
+ if st.Mode()&f.perm != f.perm {
+ return false, nil
+ }
+
+ f.resetFh()
+
+ // reopen in read-write mode and set the file handle
+ err = f.setFh(f.flag | os.O_RDWR)
+ if err != nil {
+ return false, err
}
- return false, nil
+ return true, nil
}
diff --git a/vendor/github.com/gofrs/flock/flock_unix_fcntl.go b/vendor/github.com/gofrs/flock/flock_unix_fcntl.go
new file mode 100644
index 0000000000..ea007b47d9
--- /dev/null
+++ b/vendor/github.com/gofrs/flock/flock_unix_fcntl.go
@@ -0,0 +1,393 @@
+// Copyright 2015 Tim Heckman. All rights reserved.
+// Copyright 2018-2024 The Gofrs. All rights reserved.
+// Use of this source code is governed by the BSD 3-Clause
+// license that can be found in the LICENSE file.
+
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This code implements the filelock API using POSIX 'fcntl' locks,
+// which attach to an (inode, process) pair rather than a file descriptor.
+// To avoid unlocking files prematurely when the same file is opened through different descriptors,
+// we allow only one read-lock at a time.
+//
+// This code is adapted from the Go package (Go 1.22):
+// https://github.com/golang/go/blob/release-branch.go1.22/src/cmd/go/internal/lockedfile/internal/filelock/filelock_fcntl.go
+
+//go:build aix || (solaris && !illumos)
+
+package flock
+
+import (
+ "errors"
+ "io"
+ "io/fs"
+ "math/rand"
+ "sync"
+ "syscall"
+ "time"
+
+ "golang.org/x/sys/unix"
+)
+
+// https://github.com/golang/go/blob/09aeb6e33ab426eff4676a3baf694d5a3019e9fc/src/cmd/go/internal/lockedfile/internal/filelock/filelock_fcntl.go#L28
+type lockType int16
+
+// String returns the name of the function corresponding to lt
+// (Lock, RLock, or Unlock).
+// https://github.com/golang/go/blob/09aeb6e33ab426eff4676a3baf694d5a3019e9fc/src/cmd/go/internal/lockedfile/internal/filelock/filelock.go#L67
+func (lt lockType) String() string {
+ switch lt {
+ case readLock:
+ return "RLock"
+ case writeLock:
+ return "Lock"
+ default:
+ return "Unlock"
+ }
+}
+
+// https://github.com/golang/go/blob/09aeb6e33ab426eff4676a3baf694d5a3019e9fc/src/cmd/go/internal/lockedfile/internal/filelock/filelock_fcntl.go#L30-L33
+const (
+ readLock lockType = unix.F_RDLCK
+ writeLock lockType = unix.F_WRLCK
+)
+
+// https://github.com/golang/go/blob/09aeb6e33ab426eff4676a3baf694d5a3019e9fc/src/cmd/go/internal/lockedfile/internal/filelock/filelock_fcntl.go#L35
+type inode = uint64
+
+// https://github.com/golang/go/blob/09aeb6e33ab426eff4676a3baf694d5a3019e9fc/src/cmd/go/internal/lockedfile/internal/filelock/filelock_fcntl.go#L37-L40
+type inodeLock struct {
+ owner *Flock
+ queue []<-chan *Flock
+}
+
+type cmdType int
+
+const (
+ tryLock cmdType = unix.F_SETLK
+ waitLock cmdType = unix.F_SETLKW
+)
+
+var (
+ mu sync.Mutex
+ inodes = map[*Flock]inode{}
+ locks = map[inode]inodeLock{}
+)
+
+// Lock is a blocking call to try and take an exclusive file lock.
+// It will wait until it is able to obtain the exclusive file lock.
+// It's recommended that TryLock() be used over this function.
+// This function may block the ability to query the current Locked() or RLocked() status due to a RW-mutex lock.
+//
+// If we are already exclusive-locked, this function short-circuits and
+// returns immediately assuming it can take the mutex lock.
+//
+// If the *Flock has a shared lock (RLock),
+// this may transparently replace the shared lock with an exclusive lock on some UNIX-like operating systems.
+// Be careful when using exclusive locks in conjunction with shared locks (RLock()),
+// because calling Unlock() may accidentally release the exclusive lock that was once a shared lock.
+func (f *Flock) Lock() error {
+ return f.lock(&f.l, writeLock)
+}
+
+// RLock is a blocking call to try and take a shared file lock.
+// It will wait until it is able to obtain the shared file lock.
+// It's recommended that TryRLock() be used over this function.
+// This function may block the ability to query the current Locked() or RLocked() status due to a RW-mutex lock.
+//
+// If we are already shared-locked, this function short-circuits and
+// returns immediately assuming it can take the mutex lock.
+func (f *Flock) RLock() error {
+ return f.lock(&f.r, readLock)
+}
+
+func (f *Flock) lock(locked *bool, flag lockType) error {
+ f.m.Lock()
+ defer f.m.Unlock()
+
+ if *locked {
+ return nil
+ }
+
+ if f.fh == nil {
+ if err := f.setFh(f.flag); err != nil {
+ return err
+ }
+
+ defer f.ensureFhState()
+ }
+
+ _, err := f.doLock(waitLock, flag, true)
+ if err != nil {
+ return err
+ }
+
+ *locked = true
+
+ return nil
+}
+
+// https://github.com/golang/go/blob/09aeb6e33ab426eff4676a3baf694d5a3019e9fc/src/cmd/go/internal/lockedfile/internal/filelock/filelock_fcntl.go#L48
+func (f *Flock) doLock(cmd cmdType, lt lockType, blocking bool) (bool, error) {
+ // POSIX locks apply per inode and process,
+ // and the lock for an inode is released when *any* descriptor for that inode is closed.
+ // So we need to synchronize access to each inode internally,
+ // and must serialize lock and unlock calls that refer to the same inode through different descriptors.
+ fi, err := f.fh.Stat()
+ if err != nil {
+ return false, err
+ }
+
+ // Note(ldez): don't replace `syscall.Stat_t` by `unix.Stat_t` because `FileInfo.Sys()` returns `syscall.Stat_t`
+ ino := fi.Sys().(*syscall.Stat_t).Ino
+
+ mu.Lock()
+
+ if i, dup := inodes[f]; dup && i != ino {
+ mu.Unlock()
+ return false, &fs.PathError{
+ Op: lt.String(),
+ Path: f.Path(),
+ Err: errors.New("inode for file changed since last Lock or RLock"),
+ }
+ }
+
+ inodes[f] = ino
+
+ var wait chan *Flock
+
+ l := locks[ino]
+
+ switch {
+ case l.owner == f:
+ // This file already owns the lock, but the call may change its lock type.
+ case l.owner == nil:
+ // No owner: it's ours now.
+ l.owner = f
+
+ case !blocking:
+ // Already owned: cannot take the lock.
+ mu.Unlock()
+ return false, nil
+
+ default:
+ // Already owned: add a channel to wait on.
+ wait = make(chan *Flock)
+ l.queue = append(l.queue, wait)
+ }
+
+ locks[ino] = l
+
+ mu.Unlock()
+
+ if wait != nil {
+ wait <- f
+ }
+
+ // Spurious EDEADLK errors arise on platforms that compute deadlock graphs at
+ // the process, rather than thread, level. Consider processes P and Q, with
+ // threads P.1, P.2, and Q.3. The following trace is NOT a deadlock, but will be
+ // reported as a deadlock on systems that consider only process granularity:
+ //
+ // P.1 locks file A.
+ // Q.3 locks file B.
+ // Q.3 blocks on file A.
+ // P.2 blocks on file B. (This is erroneously reported as a deadlock.)
+ // P.1 unlocks file A.
+ // Q.3 unblocks and locks file A.
+ // Q.3 unlocks files A and B.
+ // P.2 unblocks and locks file B.
+ // P.2 unlocks file B.
+ //
+ // These spurious errors were observed in practice on AIX and Solaris in
+ // cmd/go: see https://golang.org/issue/32817.
+ //
+ // We work around this bug by treating EDEADLK as always spurious. If there
+ // really is a lock-ordering bug between the interacting processes, it will
+ // become a livelock instead, but that's not appreciably worse than if we had
+ // a proper flock implementation (which generally does not even attempt to
+ // diagnose deadlocks).
+ //
+ // In the above example, that changes the trace to:
+ //
+ // P.1 locks file A.
+ // Q.3 locks file B.
+ // Q.3 blocks on file A.
+ // P.2 spuriously fails to lock file B and goes to sleep.
+ // P.1 unlocks file A.
+ // Q.3 unblocks and locks file A.
+ // Q.3 unlocks files A and B.
+ // P.2 wakes up and locks file B.
+ // P.2 unlocks file B.
+ //
+ // We know that the retry loop will not introduce a *spurious* livelock
+ // because, according to the POSIX specification, EDEADLK is only to be
+ // returned when “the lock is blocked by a lock from another process”.
+ // If that process is blocked on some lock that we are holding, then the
+ // resulting livelock is due to a real deadlock (and would manifest as such
+ // when using, for example, the flock implementation of this package).
+ // If the other process is *not* blocked on some other lock that we are
+ // holding, then it will eventually release the requested lock.
+
+ nextSleep := 1 * time.Millisecond
+ const maxSleep = 500 * time.Millisecond
+ for {
+ err = setlkw(f.fh.Fd(), cmd, lt)
+ if !errors.Is(err, unix.EDEADLK) {
+ break
+ }
+
+ time.Sleep(nextSleep)
+
+ nextSleep += nextSleep
+ if nextSleep > maxSleep {
+ nextSleep = maxSleep
+ }
+ // Apply 10% jitter to avoid synchronizing collisions when we finally unblock.
+ nextSleep += time.Duration((0.1*rand.Float64() - 0.05) * float64(nextSleep))
+ }
+
+ if err != nil {
+ f.doUnlock()
+
+ if cmd == tryLock && errors.Is(err, unix.EACCES) {
+ return false, nil
+ }
+
+ return false, &fs.PathError{
+ Op: lt.String(),
+ Path: f.Path(),
+ Err: err,
+ }
+ }
+
+ return true, nil
+}
+
+func (f *Flock) Unlock() error {
+ f.m.Lock()
+ defer f.m.Unlock()
+
+ // If we aren't locked or if the lockfile instance is nil
+ // just return a nil error because we are unlocked.
+ if (!f.l && !f.r) || f.fh == nil {
+ return nil
+ }
+
+ if err := f.doUnlock(); err != nil {
+ return err
+ }
+
+ f.reset()
+
+ return nil
+}
+
+// https://github.com/golang/go/blob/09aeb6e33ab426eff4676a3baf694d5a3019e9fc/src/cmd/go/internal/lockedfile/internal/filelock/filelock_fcntl.go#L163
+func (f *Flock) doUnlock() (err error) {
+ var owner *Flock
+
+ mu.Lock()
+
+ ino, ok := inodes[f]
+ if ok {
+ owner = locks[ino].owner
+ }
+
+ mu.Unlock()
+
+ if owner == f {
+ err = setlkw(f.fh.Fd(), waitLock, unix.F_UNLCK)
+ }
+
+ mu.Lock()
+
+ l := locks[ino]
+
+ if len(l.queue) == 0 {
+ // No waiters: remove the map entry.
+ delete(locks, ino)
+ } else {
+ // The first waiter is sending us their file now.
+ // Receive it and update the queue.
+ l.owner = <-l.queue[0]
+ l.queue = l.queue[1:]
+ locks[ino] = l
+ }
+
+ delete(inodes, f)
+
+ mu.Unlock()
+
+ return err
+}
+
+// TryLock is the preferred function for taking an exclusive file lock.
+// This function takes an RW-mutex lock before it tries to lock the file,
+// so there is the possibility that this function may block for a short time
+// if another goroutine is trying to take any action.
+//
+// The actual file lock is non-blocking.
+// If we are unable to get the exclusive file lock,
+// the function will return false instead of waiting for the lock.
+// If we get the lock, we also set the *Flock instance as being exclusive-locked.
+func (f *Flock) TryLock() (bool, error) {
+ return f.try(&f.l, writeLock)
+}
+
+// TryRLock is the preferred function for taking a shared file lock.
+// This function takes an RW-mutex lock before it tries to lock the file,
+// so there is the possibility that this function may block for a short time
+// if another goroutine is trying to take any action.
+//
+// The actual file lock is non-blocking.
+// If we are unable to get the shared file lock,
+// the function will return false instead of waiting for the lock.
+// If we get the lock, we also set the *Flock instance as being share-locked.
+func (f *Flock) TryRLock() (bool, error) {
+ return f.try(&f.r, readLock)
+}
+
+func (f *Flock) try(locked *bool, flag lockType) (bool, error) {
+ f.m.Lock()
+ defer f.m.Unlock()
+
+ if *locked {
+ return true, nil
+ }
+
+ if f.fh == nil {
+ if err := f.setFh(f.flag); err != nil {
+ return false, err
+ }
+
+ defer f.ensureFhState()
+ }
+
+ hasLock, err := f.doLock(tryLock, flag, false)
+ if err != nil {
+ return false, err
+ }
+
+ *locked = hasLock
+
+ return hasLock, nil
+}
+
+// setlkw calls FcntlFlock with cmd for the entire file indicated by fd.
+// https://github.com/golang/go/blob/09aeb6e33ab426eff4676a3baf694d5a3019e9fc/src/cmd/go/internal/lockedfile/internal/filelock/filelock_fcntl.go#L198
+func setlkw(fd uintptr, cmd cmdType, lt lockType) error {
+ for {
+ err := unix.FcntlFlock(fd, int(cmd), &unix.Flock_t{
+ Type: int16(lt),
+ Whence: io.SeekStart,
+ Start: 0,
+ Len: 0, // All bytes.
+ })
+ if !errors.Is(err, unix.EINTR) {
+ return err
+ }
+ }
+}
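
The EDEADLK workaround above retries `setlkw` on a doubling backoff capped at 500ms, then adds a small jitter so unblocked waiters do not wake in lockstep. A standalone sketch of just that schedule — the constants and jitter formula mirror the vendored code, while the loop count here is arbitrary — prints the delays it would sleep between attempts:

```go
package main

import (
	"fmt"
	"math/rand"
	"time"
)

func main() {
	const maxSleep = 500 * time.Millisecond
	nextSleep := 1 * time.Millisecond

	for attempt := 1; attempt <= 12; attempt++ {
		fmt.Printf("attempt %2d: sleep %v\n", attempt, nextSleep)

		nextSleep += nextSleep // double the delay
		if nextSleep > maxSleep {
			nextSleep = maxSleep
		}
		// Spread retries out by up to roughly ±5% to avoid synchronized wake-ups.
		nextSleep += time.Duration((0.1*rand.Float64() - 0.05) * float64(nextSleep))
	}
}
```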
diff --git a/vendor/github.com/gofrs/flock/flock_winapi.go b/vendor/github.com/gofrs/flock/flock_winapi.go
deleted file mode 100644
index fe405a255a..0000000000
--- a/vendor/github.com/gofrs/flock/flock_winapi.go
+++ /dev/null
@@ -1,76 +0,0 @@
-// Copyright 2015 Tim Heckman. All rights reserved.
-// Use of this source code is governed by the BSD 3-Clause
-// license that can be found in the LICENSE file.
-
-// +build windows
-
-package flock
-
-import (
- "syscall"
- "unsafe"
-)
-
-var (
- kernel32, _ = syscall.LoadLibrary("kernel32.dll")
- procLockFileEx, _ = syscall.GetProcAddress(kernel32, "LockFileEx")
- procUnlockFileEx, _ = syscall.GetProcAddress(kernel32, "UnlockFileEx")
-)
-
-const (
- winLockfileFailImmediately = 0x00000001
- winLockfileExclusiveLock = 0x00000002
- winLockfileSharedLock = 0x00000000
-)
-
-// Use of 0x00000000 for the shared lock is a guess based on some the MS Windows
-// `LockFileEX` docs, which document the `LOCKFILE_EXCLUSIVE_LOCK` flag as:
-//
-// > The function requests an exclusive lock. Otherwise, it requests a shared
-// > lock.
-//
-// https://msdn.microsoft.com/en-us/library/windows/desktop/aa365203(v=vs.85).aspx
-
-func lockFileEx(handle syscall.Handle, flags uint32, reserved uint32, numberOfBytesToLockLow uint32, numberOfBytesToLockHigh uint32, offset *syscall.Overlapped) (bool, syscall.Errno) {
- r1, _, errNo := syscall.Syscall6(
- uintptr(procLockFileEx),
- 6,
- uintptr(handle),
- uintptr(flags),
- uintptr(reserved),
- uintptr(numberOfBytesToLockLow),
- uintptr(numberOfBytesToLockHigh),
- uintptr(unsafe.Pointer(offset)))
-
- if r1 != 1 {
- if errNo == 0 {
- return false, syscall.EINVAL
- }
-
- return false, errNo
- }
-
- return true, 0
-}
-
-func unlockFileEx(handle syscall.Handle, reserved uint32, numberOfBytesToLockLow uint32, numberOfBytesToLockHigh uint32, offset *syscall.Overlapped) (bool, syscall.Errno) {
- r1, _, errNo := syscall.Syscall6(
- uintptr(procUnlockFileEx),
- 5,
- uintptr(handle),
- uintptr(reserved),
- uintptr(numberOfBytesToLockLow),
- uintptr(numberOfBytesToLockHigh),
- uintptr(unsafe.Pointer(offset)),
- 0)
-
- if r1 != 1 {
- if errNo == 0 {
- return false, syscall.EINVAL
- }
-
- return false, errNo
- }
-
- return true, 0
-}
diff --git a/vendor/github.com/gofrs/flock/flock_windows.go b/vendor/github.com/gofrs/flock/flock_windows.go
index ddb534ccef..dfd31e15f5 100644
--- a/vendor/github.com/gofrs/flock/flock_windows.go
+++ b/vendor/github.com/gofrs/flock/flock_windows.go
@@ -1,35 +1,48 @@
// Copyright 2015 Tim Heckman. All rights reserved.
+// Copyright 2018-2024 The Gofrs. All rights reserved.
// Use of this source code is governed by the BSD 3-Clause
// license that can be found in the LICENSE file.
+//go:build windows
+
package flock
import (
- "syscall"
+ "errors"
+
+ "golang.org/x/sys/windows"
)
-// ErrorLockViolation is the error code returned from the Windows syscall when a
-// lock would block and you ask to fail immediately.
-const ErrorLockViolation syscall.Errno = 0x21 // 33
+// Use of 0x00000000 for the shared lock is a guess based on some of the MS Windows `LockFileEX` docs,
+// which document the `LOCKFILE_EXCLUSIVE_LOCK` flag as:
+//
+// > The function requests an exclusive lock. Otherwise, it requests a shared lock.
+//
+// https://msdn.microsoft.com/en-us/library/windows/desktop/aa365203(v=vs.85).aspx
+const winLockfileSharedLock = 0x00000000
+
+// ErrorLockViolation is the error code returned from the Windows syscall when a lock would block,
+// and you ask to fail immediately.
+const ErrorLockViolation windows.Errno = 0x21 // 33
-// Lock is a blocking call to try and take an exclusive file lock. It will wait
-// until it is able to obtain the exclusive file lock. It's recommended that
-// TryLock() be used over this function. This function may block the ability to
-// query the current Locked() or RLocked() status due to a RW-mutex lock.
+// Lock is a blocking call to try and take an exclusive file lock.
+// It will wait until it is able to obtain the exclusive file lock.
+// It's recommended that TryLock() be used over this function.
+// This function may block the ability to query the current Locked() or RLocked() status due to a RW-mutex lock.
//
-// If we are already locked, this function short-circuits and returns
-// immediately assuming it can take the mutex lock.
+// If we are already locked, this function short-circuits and
+// returns immediately assuming it can take the mutex lock.
func (f *Flock) Lock() error {
- return f.lock(&f.l, winLockfileExclusiveLock)
+ return f.lock(&f.l, windows.LOCKFILE_EXCLUSIVE_LOCK)
}
-// RLock is a blocking call to try and take a shared file lock. It will wait
-// until it is able to obtain the shared file lock. It's recommended that
-// TryRLock() be used over this function. This function may block the ability to
-// query the current Locked() or RLocked() status due to a RW-mutex lock.
+// RLock is a blocking call to try and take a shared file lock.
+// It will wait until it is able to obtain the shared file lock.
+// It's recommended that TryRLock() be used over this function.
+// This function may block the ability to query the current Locked() or RLocked() status due to a RW-mutex lock.
//
-// If we are already locked, this function short-circuits and returns
-// immediately assuming it can take the mutex lock.
+// If we are already locked, this function short-circuits and
+// returns immediately assuming it can take the mutex lock.
func (f *Flock) RLock() error {
return f.lock(&f.r, winLockfileSharedLock)
}
@@ -43,26 +56,31 @@ func (f *Flock) lock(locked *bool, flag uint32) error {
}
if f.fh == nil {
- if err := f.setFh(); err != nil {
+ if err := f.setFh(f.flag); err != nil {
return err
}
+
defer f.ensureFhState()
}
- if _, errNo := lockFileEx(syscall.Handle(f.fh.Fd()), flag, 0, 1, 0, &syscall.Overlapped{}); errNo > 0 {
- return errNo
+ err := windows.LockFileEx(windows.Handle(f.fh.Fd()), flag, 0, 1, 0, &windows.Overlapped{})
+ if err != nil && !errors.Is(err, windows.Errno(0)) {
+ return err
}
*locked = true
+
return nil
}
-// Unlock is a function to unlock the file. This file takes a RW-mutex lock, so
-// while it is running the Locked() and RLocked() functions will be blocked.
+// Unlock is a function to unlock the file.
+// This function takes a RW-mutex lock,
+// so while it is running the Locked() and RLocked() functions will be blocked.
//
-// This function short-circuits if we are unlocked already. If not, it calls
-// UnlockFileEx() on the file and closes the file descriptor. It does not remove
-// the file from disk. It's up to your application to do.
+// This function short-circuits if we are unlocked already.
+// If not, it calls UnlockFileEx() on the file and closes the file descriptor.
+// It does not remove the file from disk.
+// It's up to your application to do so.
func (f *Flock) Unlock() error {
f.m.Lock()
defer f.m.Unlock()
@@ -74,39 +92,37 @@ func (f *Flock) Unlock() error {
}
// mark the file as unlocked
- if _, errNo := unlockFileEx(syscall.Handle(f.fh.Fd()), 0, 1, 0, &syscall.Overlapped{}); errNo > 0 {
- return errNo
+ err := windows.UnlockFileEx(windows.Handle(f.fh.Fd()), 0, 1, 0, &windows.Overlapped{})
+ if err != nil && !errors.Is(err, windows.Errno(0)) {
+ return err
}
- f.fh.Close()
-
- f.l = false
- f.r = false
- f.fh = nil
+ f.reset()
return nil
}
-// TryLock is the preferred function for taking an exclusive file lock. This
-// function does take a RW-mutex lock before it tries to lock the file, so there
-// is the possibility that this function may block for a short time if another
-// goroutine is trying to take any action.
+// TryLock is the preferred function for taking an exclusive file lock.
+// This function does take a RW-mutex lock before it tries to lock the file,
+// so there is the possibility that this function may block for a short time
+// if another goroutine is trying to take any action.
//
-// The actual file lock is non-blocking. If we are unable to get the exclusive
-// file lock, the function will return false instead of waiting for the lock. If
-// we get the lock, we also set the *Flock instance as being exclusive-locked.
+// The actual file lock is non-blocking.
+// If we are unable to get the exclusive file lock,
+// the function will return false instead of waiting for the lock.
+// If we get the lock, we also set the *Flock instance as being exclusive-locked.
func (f *Flock) TryLock() (bool, error) {
- return f.try(&f.l, winLockfileExclusiveLock)
+ return f.try(&f.l, windows.LOCKFILE_EXCLUSIVE_LOCK)
}
-// TryRLock is the preferred function for taking a shared file lock. This
-// function does take a RW-mutex lock before it tries to lock the file, so there
-// is the possibility that this function may block for a short time if another
-// goroutine is trying to take any action.
+// TryRLock is the preferred function for taking a shared file lock.
+// This function does take a RW-mutex lock before it tries to lock the file,
+// so there is the possibility that this function may block for a short time if another goroutine is trying to take any action.
//
-// The actual file lock is non-blocking. If we are unable to get the shared file
-// lock, the function will return false instead of waiting for the lock. If we
-// get the lock, we also set the *Flock instance as being shared-locked.
+// The actual file lock is non-blocking.
+// If we are unable to get the shared file lock,
+// the function will return false instead of waiting for the lock.
+// If we get the lock, we also set the *Flock instance as being shared-locked.
func (f *Flock) TryRLock() (bool, error) {
return f.try(&f.r, winLockfileSharedLock)
}
@@ -120,20 +136,20 @@ func (f *Flock) try(locked *bool, flag uint32) (bool, error) {
}
if f.fh == nil {
- if err := f.setFh(); err != nil {
+ if err := f.setFh(f.flag); err != nil {
return false, err
}
+
defer f.ensureFhState()
}
- _, errNo := lockFileEx(syscall.Handle(f.fh.Fd()), flag|winLockfileFailImmediately, 0, 1, 0, &syscall.Overlapped{})
-
- if errNo > 0 {
- if errNo == ErrorLockViolation || errNo == syscall.ERROR_IO_PENDING {
+ err := windows.LockFileEx(windows.Handle(f.fh.Fd()), flag|windows.LOCKFILE_FAIL_IMMEDIATELY, 0, 1, 0, &windows.Overlapped{})
+ if err != nil && !errors.Is(err, windows.Errno(0)) {
+ if errors.Is(err, ErrorLockViolation) || errors.Is(err, windows.ERROR_IO_PENDING) {
return false, nil
}
- return false, errNo
+ return false, err
}
*locked = true
diff --git a/vendor/github.com/golangci/dupl/.travis.yml b/vendor/github.com/golangci/dupl/.travis.yml
deleted file mode 100644
index 33de24c0fd..0000000000
--- a/vendor/github.com/golangci/dupl/.travis.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-language: go
-go:
- - 1.3
- - 1.8
- - 1.9
diff --git a/vendor/github.com/golangci/dupl/README.md b/vendor/github.com/golangci/dupl/README.md
deleted file mode 100644
index f34901d7ac..0000000000
--- a/vendor/github.com/golangci/dupl/README.md
+++ /dev/null
@@ -1,63 +0,0 @@
-# dupl [](https://travis-ci.org/mibk/dupl)
-
-**dupl** is a tool written in Go for finding code clones. So far it can find clones only
-in the Go source files. The method uses suffix tree for serialized ASTs. It ignores values
-of AST nodes. It just operates with their types (e.g. `if a == 13 {}` and `if x == 100 {}` are
-considered the same provided it exceeds the minimal token sequence size).
-
-Due to the used method dupl can report so called "false positives" on the output. These are
-the ones we do not consider clones (whether they are too small, or the values of the matched
-tokens are completely different).
-
-## Installation
-
-```bash
-go get -u github.com/golangci/dupl
-```
-
-## Usage
-
-```
-Usage of dupl:
- dupl [flags] [paths]
-
-Paths:
- If the given path is a file, dupl will use it regardless of
- the file extension. If it is a directory it will recursively
- search for *.go files in that directory.
-
- If no path is given dupl will recursively search for *.go
- files in the current directory.
-
-Flags:
- -files
- read file names from stdin one at each line
- -html
- output the results as HTML, including duplicate code fragments
- -plumbing
- plumbing (easy-to-parse) output for consumption by scripts or tools
- -t, -threshold size
- minimum token sequence size as a clone (default 15)
- -vendor
- check files in vendor directory
- -v, -verbose
- explain what is being done
-
-Examples:
- dupl -t 100
- Search clones in the current directory of size at least
- 100 tokens.
- dupl $(find app/ -name '*_test.go')
- Search for clones in tests in the app directory.
- find app/ -name '*_test.go' |dupl -files
- The same as above.
-```
-
-## Example
-
-The reduced output of this command with the following parameters for the [Docker](https://www.docker.com) source code
-looks like [this](http://htmlpreview.github.io/?https://github.com/golangci/dupl/blob/master/_output_example/docker.html).
-
-```bash
-$ dupl -t 200 -html >docker.html
-```
diff --git a/vendor/github.com/golangci/dupl/main.go b/vendor/github.com/golangci/dupl/lib/lib.go
similarity index 51%
rename from vendor/github.com/golangci/dupl/main.go
rename to vendor/github.com/golangci/dupl/lib/lib.go
index 3030a97aec..3000a8f38c 100644
--- a/vendor/github.com/golangci/dupl/main.go
+++ b/vendor/github.com/golangci/dupl/lib/lib.go
@@ -1,11 +1,8 @@
-package dupl
+// Package lib Golangci-lint: altered version of main.go
+package lib
import (
- "flag"
- "fmt"
- "io/ioutil"
"os"
- "path/filepath"
"sort"
"github.com/golangci/dupl/job"
@@ -13,27 +10,6 @@ import (
"github.com/golangci/dupl/syntax"
)
-const defaultThreshold = 15
-
-var (
- paths = []string{"."}
- vendor = flag.Bool("dupl.vendor", false, "")
- verbose = flag.Bool("dupl.verbose", false, "")
- files = flag.Bool("dupl.files", false, "")
-
- html = flag.Bool("dupl.html", false, "")
- plumbing = flag.Bool("dupl.plumbing", false, "")
-)
-
-const (
- vendorDirPrefix = "vendor" + string(filepath.Separator)
- vendorDirInPath = string(filepath.Separator) + vendorDirPrefix
-)
-
-func init() {
- flag.BoolVar(verbose, "dupl.v", false, "alias for -verbose")
-}
-
func Run(files []string, threshold int) ([]printer.Issue, error) {
fchan := make(chan string, 1024)
go func() {
@@ -75,7 +51,7 @@ func makeIssues(duplChan <-chan syntax.Match) ([]printer.Issue, error) {
}
sort.Strings(keys)
- p := printer.NewPlumbing(ioutil.ReadFile)
+ p := printer.NewIssuer(os.ReadFile)
var issues []printer.Issue
for _, k := range keys {
@@ -110,39 +86,3 @@ func unique(group [][]*syntax.Node) [][]*syntax.Node {
}
return newGroup
}
-
-func usage() {
- fmt.Fprintln(os.Stderr, `Usage: dupl [flags] [paths]
-
-Paths:
- If the given path is a file, dupl will use it regardless of
- the file extension. If it is a directory, it will recursively
- search for *.go files in that directory.
-
- If no path is given, dupl will recursively search for *.go
- files in the current directory.
-
-Flags:
- -files
- read file names from stdin one at each line
- -html
- output the results as HTML, including duplicate code fragments
- -plumbing
- plumbing (easy-to-parse) output for consumption by scripts or tools
- -t, -threshold size
- minimum token sequence size as a clone (default 15)
- -vendor
- check files in vendor directory
- -v, -verbose
- explain what is being done
-
-Examples:
- dupl -t 100
- Search clones in the current directory of size at least
- 100 tokens.
- dupl $(find app/ -name '*_test.go')
- Search for clones in tests in the app directory.
- find app/ -name '*_test.go' |dupl -files
- The same as above.`)
- os.Exit(2)
-}
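
With the flag handling and `usage` text removed, `lib.Run` is now the only entry point; golangci-lint supplies the file list and token threshold itself. A hedged sketch of calling it directly (file names and threshold are illustrative; the `Issue`/`Clone` accessors come from the new `printer/issuer.go` shown further below):

```go
package main

import (
	"fmt"
	"log"

	"github.com/golangci/dupl/lib"
)

func main() {
	// Report clones of at least 15 tokens across the given files.
	issues, err := lib.Run([]string{"a.go", "b.go"}, 15)
	if err != nil {
		log.Fatal(err)
	}

	for _, i := range issues {
		fmt.Printf("%s:%d-%d duplicates %s:%d-%d\n",
			i.From.Filename(), i.From.LineStart(), i.From.LineEnd(),
			i.To.Filename(), i.To.LineStart(), i.To.LineEnd())
	}
}
```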
diff --git a/vendor/github.com/golangci/dupl/printer/html.go b/vendor/github.com/golangci/dupl/printer/html.go
index 5ad9e25c7f..ac14741419 100644
--- a/vendor/github.com/golangci/dupl/printer/html.go
+++ b/vendor/github.com/golangci/dupl/printer/html.go
@@ -3,6 +3,7 @@ package printer
import (
"bytes"
"fmt"
+ "html"
"io"
"regexp"
"sort"
@@ -10,17 +11,17 @@ import (
"github.com/golangci/dupl/syntax"
)
-type html struct {
+type htmlprinter struct {
iota int
w io.Writer
ReadFile
}
func NewHTML(w io.Writer, fread ReadFile) Printer {
- return &html{w: w, ReadFile: fread}
+ return &htmlprinter{w: w, ReadFile: fread}
}
-func (p *html) PrintHeader() error {
+func (p *htmlprinter) PrintHeader() error {
_, err := fmt.Fprint(p.w, `
Duplicates
@@ -35,7 +36,7 @@ func (p *html) PrintHeader() error {
return err
}
-func (p *html) PrintClones(dups [][]*syntax.Node) error {
+func (p *htmlprinter) PrintClones(dups [][]*syntax.Node) error {
p.iota++
fmt.Fprintf(p.w, "#%d found %d clones
\n", p.iota, len(dups))
@@ -63,12 +64,13 @@ func (p *html) PrintClones(dups [][]*syntax.Node) error {
sort.Sort(byNameAndLine(clones))
for _, cl := range clones {
- fmt.Fprintf(p.w, "%s:%d
\n%s
\n", cl.filename, cl.lineStart, cl.fragment)
+ fmt.Fprintf(p.w, "%s:%d
\n%s
\n", cl.filename, cl.lineStart,
+ html.EscapeString(string(cl.fragment)))
}
return nil
}
-func (*html) PrintFooter() error { return nil }
+func (*htmlprinter) PrintFooter() error { return nil }
func findLineBeg(file []byte, index int) int {
for i := index; i >= 0; i-- {
diff --git a/vendor/github.com/golangci/dupl/printer/issuer.go b/vendor/github.com/golangci/dupl/printer/issuer.go
new file mode 100644
index 0000000000..9b79f57056
--- /dev/null
+++ b/vendor/github.com/golangci/dupl/printer/issuer.go
@@ -0,0 +1,56 @@
+package printer
+
+// Golangci-lint: altered version of plumbing.go
+
+import (
+ "sort"
+
+ "github.com/golangci/dupl/syntax"
+)
+
+type Clone clone
+
+func (c Clone) Filename() string {
+ return c.filename
+}
+
+func (c Clone) LineStart() int {
+ return c.lineStart
+}
+
+func (c Clone) LineEnd() int {
+ return c.lineEnd
+}
+
+type Issue struct {
+ From, To Clone
+}
+
+type Issuer struct {
+ ReadFile
+}
+
+func NewIssuer(fread ReadFile) *Issuer {
+ return &Issuer{fread}
+}
+
+func (p *Issuer) MakeIssues(dups [][]*syntax.Node) ([]Issue, error) {
+ clones, err := prepareClonesInfo(p.ReadFile, dups)
+ if err != nil {
+ return nil, err
+ }
+
+ sort.Sort(byNameAndLine(clones))
+
+ var issues []Issue
+
+ for i, cl := range clones {
+ nextCl := clones[(i+1)%len(clones)]
+ issues = append(issues, Issue{
+ From: Clone(cl),
+ To: Clone(nextCl),
+ })
+ }
+
+ return issues, nil
+}
diff --git a/vendor/github.com/golangci/dupl/printer/plumbing.go b/vendor/github.com/golangci/dupl/printer/plumbing.go
index cf39d01b78..b0577ddd56 100644
--- a/vendor/github.com/golangci/dupl/printer/plumbing.go
+++ b/vendor/github.com/golangci/dupl/printer/plumbing.go
@@ -1,50 +1,36 @@
package printer
import (
+ "fmt"
+ "io"
"sort"
"github.com/golangci/dupl/syntax"
)
-type Clone clone
-
-func (c Clone) Filename() string {
- return c.filename
-}
-
-func (c Clone) LineStart() int {
- return c.lineStart
-}
-
-func (c Clone) LineEnd() int {
- return c.lineEnd
-}
-
-type Issue struct {
- From, To Clone
-}
-
-type Plumbing struct {
+type plumbing struct {
+ w io.Writer
ReadFile
}
-func NewPlumbing(fread ReadFile) *Plumbing {
- return &Plumbing{fread}
+func NewPlumbing(w io.Writer, fread ReadFile) Printer {
+ return &plumbing{w, fread}
}
-func (p *Plumbing) MakeIssues(dups [][]*syntax.Node) ([]Issue, error) {
+func (p *plumbing) PrintHeader() error { return nil }
+
+func (p *plumbing) PrintClones(dups [][]*syntax.Node) error {
clones, err := prepareClonesInfo(p.ReadFile, dups)
if err != nil {
- return nil, err
+ return err
}
sort.Sort(byNameAndLine(clones))
- var issues []Issue
for i, cl := range clones {
nextCl := clones[(i+1)%len(clones)]
- issues = append(issues, Issue{
- From: Clone(cl),
- To: Clone(nextCl),
- })
+ fmt.Fprintf(p.w, "%s:%d-%d: duplicate of %s:%d-%d\n", cl.filename, cl.lineStart, cl.lineEnd,
+ nextCl.filename, nextCl.lineStart, nextCl.lineEnd)
}
- return issues, nil
+ return nil
}
+
+func (p *plumbing) PrintFooter() error { return nil }
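
For reference (not part of the patch): the plumbing printer now implements the Printer interface and writes one line per clone instead of returning Issue values. A sketch of the output format with sample values:

```go
package main

import (
	"fmt"
	"os"
)

func main() {
	// One line per clone, pointing at the next clone in the sorted list.
	fmt.Fprintf(os.Stdout, "%s:%d-%d: duplicate of %s:%d-%d\n",
		"a.go", 10, 20, "b.go", 5, 15)
}
```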
diff --git a/vendor/github.com/golangci/dupl/suffixtree/suffixtree.go b/vendor/github.com/golangci/dupl/suffixtree/suffixtree.go
index 7380150258..871469e8d5 100644
--- a/vendor/github.com/golangci/dupl/suffixtree/suffixtree.go
+++ b/vendor/github.com/golangci/dupl/suffixtree/suffixtree.go
@@ -41,7 +41,7 @@ func New() *STree {
// Update refreshes the suffix tree with new data.
func (t *STree) Update(data ...Token) {
t.data = append(t.data, data...)
- for _ = range data {
+ for range data {
t.update()
t.s, t.start = t.canonize(t.s, t.start, t.end)
t.end++
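
For reference (not part of the patch): `for _ = range data` and `for range data` are equivalent; the latter is the idiomatic form when neither the index nor the element is used:

```go
package main

import "fmt"

func main() {
	data := []int{1, 2, 3}

	n := 0
	for range data { // preferred over `for _ = range data`
		n++
	}
	fmt.Println(n) // 3
}
```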
diff --git a/vendor/github.com/golangci/dupl/syntax/syntax.go b/vendor/github.com/golangci/dupl/syntax/syntax.go
index e2c750afd5..9b11d3119b 100644
--- a/vendor/github.com/golangci/dupl/syntax/syntax.go
+++ b/vendor/github.com/golangci/dupl/syntax/syntax.go
@@ -6,6 +6,19 @@ import (
"github.com/golangci/dupl/suffixtree"
)
+// To avoid "goroutine stack exceeds" with gigantic slices (Composite Literals).
+// 10_000 => 0.89s
+// 20_000 => 1.53s
+// 30_000 => 2.57s
+// 40_000 => 3.89s
+// 50_000 => 5.58s
+// 60_000 => 7.95s
+// 70_000 => 10.15s
+// 80_000 => 13.11s
+// 90_000 => 16.62s
+// 100_000 => 21.42s
+const maxChildrenSerial = 10_000
+
type Node struct {
Type int
Filename string
@@ -40,7 +53,12 @@ func Serialize(n *Node) []*Node {
func serial(n *Node, stream *[]*Node) int {
*stream = append(*stream, n)
var count int
- for _, child := range n.Children {
+ for i, child := range n.Children {
+ // To avoid "goroutine stack exceeds" with gigantic slices (Composite Literals).
+ if i > maxChildrenSerial {
+ break
+ }
+
count += serial(child, stream)
}
n.Owns = count
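
For reference (not part of the patch): the maxChildrenSerial guard simply stops walking a node's children after a fixed count, so serialization stays bounded on huge composite literals. A simplified sketch on a generic tree (the node type here is illustrative, not dupl's):

```go
package main

import "fmt"

const maxChildren = 10_000

type node struct {
	children []*node
}

// serialize appends n and a bounded number of its descendants to out.
func serialize(n *node, out *[]*node) int {
	*out = append(*out, n)

	count := 0
	for i, child := range n.children {
		// Stop descending into enormous child slices to bound the work.
		if i > maxChildren {
			break
		}
		count += serialize(child, out)
	}
	return count + 1
}

func main() {
	root := &node{children: []*node{{}, {}, {}}}

	var stream []*node
	fmt.Println(serialize(root, &stream), len(stream)) // 4 4
}
```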
diff --git a/vendor/github.com/t-yuki/gocover-cobertura/LICENSE b/vendor/github.com/golangci/go-printf-func-name/LICENSE
similarity index 59%
rename from vendor/github.com/t-yuki/gocover-cobertura/LICENSE
rename to vendor/github.com/golangci/go-printf-func-name/LICENSE
index 7ec1b3d853..4585140d18 100644
--- a/vendor/github.com/t-yuki/gocover-cobertura/LICENSE
+++ b/vendor/github.com/golangci/go-printf-func-name/LICENSE
@@ -1,11 +1,14 @@
-Copyright (c) 2013 Yukinari Toyota
+MIT License
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is furnished to do
-so, subject to the following conditions:
+Copyright (c) 2024 Golangci-lint authors
+Copyright (c) 2020 Isaev Denis
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
diff --git a/vendor/github.com/jirfag/go-printf-func-name/pkg/analyzer/analyzer.go b/vendor/github.com/golangci/go-printf-func-name/pkg/analyzer/analyzer.go
similarity index 93%
rename from vendor/github.com/jirfag/go-printf-func-name/pkg/analyzer/analyzer.go
rename to vendor/github.com/golangci/go-printf-func-name/pkg/analyzer/analyzer.go
index 7937dd4337..bce4b242ed 100644
--- a/vendor/github.com/jirfag/go-printf-func-name/pkg/analyzer/analyzer.go
+++ b/vendor/github.com/golangci/go-printf-func-name/pkg/analyzer/analyzer.go
@@ -4,10 +4,9 @@ import (
"go/ast"
"strings"
+ "golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
-
- "golang.org/x/tools/go/analysis"
)
var Analyzer = &analysis.Analyzer{
@@ -18,12 +17,13 @@ var Analyzer = &analysis.Analyzer{
}
func run(pass *analysis.Pass) (interface{}, error) {
- inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+ insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
nodeFilter := []ast.Node{
(*ast.FuncDecl)(nil),
}
- inspector.Preorder(nodeFilter, func(node ast.Node) {
+ insp.Preorder(nodeFilter, func(node ast.Node) {
funcDecl := node.(*ast.FuncDecl)
if res := funcDecl.Type.Results; res != nil && len(res.List) != 0 {
diff --git a/vendor/github.com/golangci/gofmt/gofmt/gofmt.go b/vendor/github.com/golangci/gofmt/gofmt/gofmt.go
index be046f34cf..a4f252e86d 100644
--- a/vendor/github.com/golangci/gofmt/gofmt/gofmt.go
+++ b/vendor/github.com/golangci/gofmt/gofmt/gofmt.go
@@ -16,13 +16,15 @@ import (
"go/token"
"io"
"io/fs"
+ "math/rand"
"os"
"path/filepath"
"runtime"
"runtime/pprof"
+ "strconv"
"strings"
- "github.com/golangci/gofmt/gofmt/internal/diff"
+ "github.com/rogpeppe/go-internal/diff"
"golang.org/x/sync/semaphore"
)
@@ -233,12 +235,9 @@ func processFile(filename string, info fs.FileInfo, in io.Reader, r *reporter) e
}
fileSet := token.NewFileSet()
- fragmentOk := false
- if info == nil {
- // If we are formatting stdin, we accept a program fragment in lieu of a
- // complete source file.
- fragmentOk = true
- }
+ // If we are formatting stdin, we accept a program fragment in lieu of a
+ // complete source file.
+ fragmentOk := info == nil
file, sourceAdj, indentAdj, err := parse(fileSet, filename, src, fragmentOk)
if err != nil {
return err
@@ -272,21 +271,9 @@ func processFile(filename string, info fs.FileInfo, in io.Reader, r *reporter) e
if info == nil {
panic("-w should not have been allowed with stdin")
}
- // make a temporary backup before overwriting original
+
perm := info.Mode().Perm()
- bakname, err := backupFile(filename+".", src, perm)
- if err != nil {
- return err
- }
- fdSem <- true
- err = os.WriteFile(filename, res, perm)
- <-fdSem
- if err != nil {
- os.Rename(bakname, filename)
- return err
- }
- err = os.Remove(bakname)
- if err != nil {
+ if err := writeFile(filename, src, res, perm, info.Size()); err != nil {
return err
}
}
@@ -470,32 +457,111 @@ func fileWeight(path string, info fs.FileInfo) int64 {
return info.Size()
}
-const chmodSupported = runtime.GOOS != "windows"
+// writeFile updates a file with the new formatted data.
+func writeFile(filename string, orig, formatted []byte, perm fs.FileMode, size int64) error {
+ // Make a temporary backup file before rewriting the original file.
+ bakname, err := backupFile(filename, orig, perm)
+ if err != nil {
+ return err
+ }
+
+ fdSem <- true
+ defer func() { <-fdSem }()
+
+ fout, err := os.OpenFile(filename, os.O_WRONLY, perm)
+ if err != nil {
+ // We couldn't even open the file, so it should
+ // not have changed.
+ os.Remove(bakname)
+ return err
+ }
+ defer fout.Close() // for error paths
+
+ restoreFail := func(err error) {
+ fmt.Fprintf(os.Stderr, "gofmt: %s: error restoring file to original: %v; backup in %s\n", filename, err, bakname)
+ }
+
+ n, err := fout.Write(formatted)
+ if err == nil && int64(n) < size {
+ err = fout.Truncate(int64(n))
+ }
+
+ if err != nil {
+ // Rewriting the file failed.
+
+ if n == 0 {
+ // Original file unchanged.
+ os.Remove(bakname)
+ return err
+ }
+
+ // Try to restore the original contents.
+
+ no, erro := fout.WriteAt(orig, 0)
+ if erro != nil {
+ // That failed too.
+ restoreFail(erro)
+ return err
+ }
+
+ if no < n {
+ // Original file is shorter. Truncate.
+ if erro = fout.Truncate(int64(no)); erro != nil {
+ restoreFail(erro)
+ return err
+ }
+ }
+
+ if erro := fout.Close(); erro != nil {
+ restoreFail(erro)
+ return err
+ }
+
+ // Original contents restored.
+ os.Remove(bakname)
+ return err
+ }
+
+ if err := fout.Close(); err != nil {
+ restoreFail(err)
+ return err
+ }
+
+ // File updated.
+ os.Remove(bakname)
+ return nil
+}
// backupFile writes data to a new file named filename<number> with permissions perm,
-// with <number> randomly chosen such that the file name is unique. backupFile returns
// the chosen file name.
func backupFile(filename string, data []byte, perm fs.FileMode) (string, error) {
fdSem <- true
defer func() { <-fdSem }()
- // create backup file
- f, err := os.CreateTemp(filepath.Dir(filename), filepath.Base(filename))
- if err != nil {
- return "", err
+ nextRandom := func() string {
+ return strconv.Itoa(rand.Int())
}
- bakname := f.Name()
- if chmodSupported {
- err = f.Chmod(perm)
- if err != nil {
- f.Close()
- os.Remove(bakname)
- return bakname, err
+
+ dir, base := filepath.Split(filename)
+ var (
+ bakname string
+ f *os.File
+ )
+ for {
+ bakname = filepath.Join(dir, base+"."+nextRandom())
+ var err error
+ f, err = os.OpenFile(bakname, os.O_RDWR|os.O_CREATE|os.O_EXCL, perm)
+ if err == nil {
+ break
+ }
+ if err != nil && !os.IsExist(err) {
+ return "", err
}
}
// write data to backup file
- _, err = f.Write(data)
+ _, err := f.Write(data)
if err1 := f.Close(); err == nil {
err = err1
}
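
For reference (not part of the patch): the new writeFile keeps a uniquely named backup of the original contents, rewrites the file in place, and drops the backup only once the write succeeds (restoring the original on failure). A simplified sketch of that backup-then-rewrite pattern, without the restore logic or the fd semaphore; all names below are illustrative:

```go
package main

import (
	"fmt"
	"os"
)

// writeWithBackup is an illustrative reduction of the pattern above: write a
// backup, rewrite the original, and remove the backup only on success.
func writeWithBackup(filename string, orig, formatted []byte, perm os.FileMode) error {
	bak := filename + ".bak"
	if err := os.WriteFile(bak, orig, perm); err != nil {
		return err
	}
	if err := os.WriteFile(filename, formatted, perm); err != nil {
		// The real writeFile tries to restore orig here; the backup is kept.
		return fmt.Errorf("rewrite failed, backup kept in %s: %w", bak, err)
	}
	return os.Remove(bak)
}

func main() {
	f, err := os.CreateTemp("", "demo-*.go")
	if err != nil {
		panic(err)
	}
	name := f.Name()
	f.WriteString("package   main\n")
	f.Close()
	defer os.Remove(name)

	if err := writeWithBackup(name, []byte("package   main\n"), []byte("package main\n"), 0o644); err != nil {
		fmt.Println(err)
	}
}
```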
diff --git a/vendor/github.com/golangci/gofmt/gofmt/golangci.go b/vendor/github.com/golangci/gofmt/gofmt/golangci.go
index a69611e1d3..a7f3ef6e73 100644
--- a/vendor/github.com/golangci/gofmt/gofmt/golangci.go
+++ b/vendor/github.com/golangci/gofmt/gofmt/golangci.go
@@ -11,9 +11,14 @@ import (
"path/filepath"
"sync"
- "github.com/golangci/gofmt/gofmt/internal/diff"
+ "github.com/rogpeppe/go-internal/diff"
)
+type Options struct {
+ NeedSimplify bool
+ RewriteRules []RewriteRule
+}
+
var parserModeMu sync.RWMutex
type RewriteRule struct {
@@ -22,13 +27,13 @@ type RewriteRule struct {
}
// Run runs gofmt.
-// Deprecated: use RunRewrite instead.
+// Deprecated: use [Source] instead.
func Run(filename string, needSimplify bool) ([]byte, error) {
return RunRewrite(filename, needSimplify, nil)
}
// RunRewrite runs gofmt.
-// empty string `rewrite` will be ignored.
+// Deprecated: use [Source] instead.
func RunRewrite(filename string, needSimplify bool, rewriteRules []RewriteRule) ([]byte, error) {
src, err := os.ReadFile(filename)
if err != nil {
@@ -73,6 +78,34 @@ func RunRewrite(filename string, needSimplify bool, rewriteRules []RewriteRule)
return diff.Diff(oldName, src, newName, res), nil
}
+// Source formats the code like gofmt.
+// Empty string `rewrite` will be ignored.
+func Source(filename string, src []byte, opts Options) ([]byte, error) {
+ fset := token.NewFileSet()
+
+ parserModeMu.Lock()
+ initParserMode()
+ parserModeMu.Unlock()
+
+ file, sourceAdj, indentAdj, err := parse(fset, filename, src, false)
+ if err != nil {
+ return nil, err
+ }
+
+ file, err = rewriteFileContent(fset, file, opts.RewriteRules)
+ if err != nil {
+ return nil, err
+ }
+
+ ast.SortImports(fset, file)
+
+ if opts.NeedSimplify {
+ simplify(file)
+ }
+
+ return format(fset, file, sourceAdj, indentAdj, src, printer.Config{Mode: printerMode, Tabwidth: tabWidth})
+}
+
func rewriteFileContent(fset *token.FileSet, file *ast.File, rewriteRules []RewriteRule) (*ast.File, error) {
for _, rewriteRule := range rewriteRules {
pattern, err := parseExpression(rewriteRule.Pattern, "pattern")
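
For reference (not part of the patch): the fork now exposes Source, which formats an in-memory buffer instead of reading a file from disk. A hedged usage sketch, assuming the vendored import path and the Options fields shown in the diff:

```go
package main

import (
	"fmt"

	"github.com/golangci/gofmt/gofmt"
)

func main() {
	src := []byte("package main\nfunc main(){x:=[]int{1,2,};_=x}\n")

	// Format the buffer directly; NeedSimplify mirrors `gofmt -s`.
	out, err := gofmt.Source("example.go", src, gofmt.Options{NeedSimplify: true})
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}
```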
diff --git a/vendor/github.com/golangci/gofmt/gofmt/internal.go b/vendor/github.com/golangci/gofmt/gofmt/internal.go
index 31a825bf83..231a250915 100644
--- a/vendor/github.com/golangci/gofmt/gofmt/internal.go
+++ b/vendor/github.com/golangci/gofmt/gofmt/internal.go
@@ -27,11 +27,11 @@ func parse(fset *token.FileSet, filename string, src []byte, fragmentOk bool) (
err error,
) {
- // START - Change related to usgae inside golangci-lint
+ // START - Change related to usage inside golangci-lint
parserModeMu.Lock()
parserMode := parserMode
parserModeMu.Unlock()
- // END - Change related to usgae inside golangci-lint
+ // END - Change related to usage inside golangci-lint
// Try as whole source file.
file, err = parser.ParseFile(fset, filename, src, parserMode)
diff --git a/vendor/github.com/golangci/gofmt/gofmt/readme.md b/vendor/github.com/golangci/gofmt/gofmt/readme.md
index c2faaab82d..907973116f 100644
--- a/vendor/github.com/golangci/gofmt/gofmt/readme.md
+++ b/vendor/github.com/golangci/gofmt/gofmt/readme.md
@@ -1,5 +1,15 @@
# Hard Fork of gofmt
-2022-08-31: Sync with go1.18.5
-2023-10-04: Sync with go1.19.13
-2023-10-04: Sync with go1.20.8
+- https://github.com/golang/go/blob/master/src/cmd/gofmt/
+- https://github.com/golang/go/blob/master/src/internal/testenv
+- https://github.com/golang/go/blob/master/src/internal/platform
+- https://github.com/golang/go/blob/master/src/internal/diff -> replaced by `github.com/rogpeppe/go-internal/diff`
+- https://github.com/golang/go/blob/master/src/internal/cfg
+
+## Updates
+
+- 2024-08-17: Sync with go1.22.6
+- 2023-02-28: Sync with go1.21.7
+- 2023-10-04: Sync with go1.20.8
+- 2023-10-04: Sync with go1.19.13
+- 2022-08-31: Sync with go1.18.5
diff --git a/vendor/github.com/golangci/gofmt/gofmt/rewrite.go b/vendor/github.com/golangci/gofmt/gofmt/rewrite.go
index f1299a42b4..c95d44f61b 100644
--- a/vendor/github.com/golangci/gofmt/gofmt/rewrite.go
+++ b/vendor/github.com/golangci/gofmt/gofmt/rewrite.go
@@ -69,9 +69,7 @@ func rewriteFile(fileSet *token.FileSet, pattern, replace ast.Expr, p *ast.File)
return reflect.Value{}
}
val = apply(rewriteVal, val)
- for k := range m {
- delete(m, k)
- }
+ clear(m)
if match(m, pat, val) {
val = subst(m, repl, reflect.ValueOf(val.Interface().(ast.Node).Pos()))
}
@@ -199,7 +197,7 @@ func match(m map[string]reflect.Value, pattern, val reflect.Value) bool {
// object pointers and token positions always match
return true
case callExprType:
- // For calls, the Ellipsis fields (token.Position) must
+ // For calls, the Ellipsis fields (token.Pos) must
// match since that is how f(x) and f(x...) are different.
// Check them here but fall through for the remaining fields.
p := pattern.Interface().(*ast.CallExpr)
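
For reference (not part of the patch): `clear(m)` (Go 1.21+) empties a map in place, which is what replaces the manual delete loop removed above:

```go
package main

import "fmt"

func main() {
	m := map[string]int{"a": 1, "b": 2}
	clear(m) // removes every entry; len(m) becomes 0
	fmt.Println(len(m))
}
```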
diff --git a/vendor/github.com/golangci/gofmt/goimports/goimports.go b/vendor/github.com/golangci/gofmt/goimports/goimports.go
deleted file mode 100644
index 20d92e119c..0000000000
--- a/vendor/github.com/golangci/gofmt/goimports/goimports.go
+++ /dev/null
@@ -1,89 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package goimports
-
-import (
- "bytes"
- "fmt"
- "io/ioutil"
- "os"
- "os/exec"
- "path/filepath"
- "runtime"
-)
-
-// Extracted from golang.org/x/tools@v0.13.0/cmd/goimports/goimports.go
-
-func writeTempFile(dir, prefix string, data []byte) (string, error) {
- file, err := ioutil.TempFile(dir, prefix)
- if err != nil {
- return "", err
- }
- _, err = file.Write(data)
- if err1 := file.Close(); err == nil {
- err = err1
- }
- if err != nil {
- os.Remove(file.Name())
- return "", err
- }
- return file.Name(), nil
-}
-
-func diff(b1, b2 []byte, filename string) (data []byte, err error) {
- f1, err := writeTempFile("", "gofmt", b1)
- if err != nil {
- return
- }
- defer os.Remove(f1)
-
- f2, err := writeTempFile("", "gofmt", b2)
- if err != nil {
- return
- }
- defer os.Remove(f2)
-
- cmd := "diff"
- if runtime.GOOS == "plan9" {
- cmd = "/bin/ape/diff"
- }
-
- data, err = exec.Command(cmd, "-u", f1, f2).CombinedOutput()
- if len(data) > 0 {
- // diff exits with a non-zero status when the files don't match.
- // Ignore that failure as long as we get output.
- return replaceTempFilename(data, filename)
- }
- return
-}
-
-// replaceTempFilename replaces temporary filenames in diff with actual one.
-//
-// --- /tmp/gofmt316145376 2017-02-03 19:13:00.280468375 -0500
-// +++ /tmp/gofmt617882815 2017-02-03 19:13:00.280468375 -0500
-// ...
-// ->
-// --- path/to/file.go.orig 2017-02-03 19:13:00.280468375 -0500
-// +++ path/to/file.go 2017-02-03 19:13:00.280468375 -0500
-// ...
-func replaceTempFilename(diff []byte, filename string) ([]byte, error) {
- bs := bytes.SplitN(diff, []byte{'\n'}, 3)
- if len(bs) < 3 {
- return nil, fmt.Errorf("got unexpected diff for %s", filename)
- }
- // Preserve timestamps.
- var t0, t1 []byte
- if i := bytes.LastIndexByte(bs[0], '\t'); i != -1 {
- t0 = bs[0][i:]
- }
- if i := bytes.LastIndexByte(bs[1], '\t'); i != -1 {
- t1 = bs[1][i:]
- }
- // Always print filepath with slash separator.
- f := filepath.ToSlash(filename)
- bs[0] = []byte(fmt.Sprintf("--- %s%s", f+".orig", t0))
- bs[1] = []byte(fmt.Sprintf("+++ %s%s", f, t1))
- return bytes.Join(bs, []byte{'\n'}), nil
-}
diff --git a/vendor/github.com/golangci/gofmt/goimports/golangci.go b/vendor/github.com/golangci/gofmt/goimports/golangci.go
deleted file mode 100644
index 6ff286ae06..0000000000
--- a/vendor/github.com/golangci/gofmt/goimports/golangci.go
+++ /dev/null
@@ -1,35 +0,0 @@
-package goimports
-
-import (
- "bytes"
- "fmt"
- "os"
-
- "golang.org/x/tools/imports"
-)
-
-// Run runs goimports.
-// The local prefixes (comma separated) must be defined through the global variable imports.LocalPrefix.
-func Run(filename string) ([]byte, error) {
- src, err := os.ReadFile(filename)
- if err != nil {
- return nil, err
- }
-
- res, err := imports.Process(filename, src, nil)
- if err != nil {
- return nil, err
- }
-
- if bytes.Equal(src, res) {
- return nil, nil
- }
-
- // formatting has changed
- data, err := diff(src, res, filename)
- if err != nil {
- return nil, fmt.Errorf("error computing diff: %s", err)
- }
-
- return data, nil
-}
diff --git a/vendor/github.com/golangci/gofmt/goimports/readme.md b/vendor/github.com/golangci/gofmt/goimports/readme.md
deleted file mode 100644
index e57ed550b1..0000000000
--- a/vendor/github.com/golangci/gofmt/goimports/readme.md
+++ /dev/null
@@ -1,4 +0,0 @@
-# Hard Fork of goimports
-
-2022-08-31: Sync with golang.org/x/tools v0.1.12
-2023-10-04: Sync with golang.org/x/tools v0.13.0
diff --git a/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go b/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go
index 413e071d65..bf235bf17f 100644
--- a/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go
+++ b/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go
@@ -1,6 +1,7 @@
package main
import (
+ "cmp"
"fmt"
"os"
"runtime/debug"
@@ -63,17 +64,9 @@ func createBuildInfo() commands.BuildInfo {
}
}
- if revision == "" {
- revision = "unknown"
- }
-
- if modified == "" {
- modified = "?"
- }
-
- if info.Date == "" {
- info.Date = "(unknown)"
- }
+ revision = cmp.Or(revision, "unknown")
+ modified = cmp.Or(modified, "?")
+ info.Date = cmp.Or(info.Date, "(unknown)")
info.Commit = fmt.Sprintf("(%s, modified: %s, mod sum: %q)", revision, modified, buildInfo.Main.Sum)
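
For reference (not part of the patch): cmp.Or (Go 1.22+) returns its first non-zero argument, which is why it can replace the three fallback if-blocks above:

```go
package main

import (
	"cmp"
	"fmt"
)

func main() {
	revision := ""
	fmt.Println(cmp.Or(revision, "unknown")) // "unknown": revision is the zero value

	revision = "abc1234"
	fmt.Println(cmp.Or(revision, "unknown")) // "abc1234": first non-zero value wins
}
```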
diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go b/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go
index 299fd52790..41eb5c82be 100644
--- a/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go
+++ b/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go
@@ -1,525 +1,296 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package cache implements a build artifact cache.
-//
-// This package is a slightly modified fork of Go's
-// cmd/go/internal/cache package.
package cache
import (
"bytes"
- "crypto/sha256"
+ "encoding/gob"
"encoding/hex"
"errors"
"fmt"
- "io"
- "os"
- "path/filepath"
- "strconv"
+ "maps"
+ "runtime"
+ "slices"
"strings"
- "time"
+ "sync"
- "github.com/golangci/golangci-lint/internal/renameio"
- "github.com/golangci/golangci-lint/internal/robustio"
+ "golang.org/x/tools/go/packages"
+
+ "github.com/golangci/golangci-lint/internal/go/cache"
+ "github.com/golangci/golangci-lint/pkg/logutils"
+ "github.com/golangci/golangci-lint/pkg/timeutils"
+)
+
+type HashMode int
+
+const (
+ HashModeNeedOnlySelf HashMode = iota
+ HashModeNeedDirectDeps
+ HashModeNeedAllDeps
)
-// An ActionID is a cache action key, the hash of a complete description of a
-// repeatable computation (command line, environment variables,
-// input file contents, executable contents).
-type ActionID [HashSize]byte
+var ErrMissing = errors.New("missing data")
-// An OutputID is a cache output key, the hash of an output of a computation.
-type OutputID [HashSize]byte
+type hashResults map[HashMode]string
-// A Cache is a package cache, backed by a file system directory tree.
+// Cache is a per-package data cache.
+// Cached data is invalidated when the package
+// or its dependencies change.
type Cache struct {
- dir string
- now func() time.Time
+ lowLevelCache cache.Cache
+ pkgHashes sync.Map
+ sw *timeutils.Stopwatch
+ log logutils.Log
+ ioSem chan struct{} // semaphore limiting parallel IO
+}
+
+func NewCache(sw *timeutils.Stopwatch, log logutils.Log) (*Cache, error) {
+ return &Cache{
+ lowLevelCache: cache.Default(),
+ sw: sw,
+ log: log,
+ ioSem: make(chan struct{}, runtime.GOMAXPROCS(-1)),
+ }, nil
}
-// Open opens and returns the cache in the given directory.
-//
-// It is safe for multiple processes on a single machine to use the
-// same cache directory in a local file system simultaneously.
-// They will coordinate using operating system file locks and may
-// duplicate effort but will not corrupt the cache.
-//
-// However, it is NOT safe for multiple processes on different machines
-// to share a cache directory (for example, if the directory were stored
-// in a network file system). File locking is notoriously unreliable in
-// network file systems and may not suffice to protect the cache.
-func Open(dir string) (*Cache, error) {
- info, err := os.Stat(dir)
+func (c *Cache) Close() {
+ err := c.sw.TrackStageErr("close", c.lowLevelCache.Close)
if err != nil {
- return nil, err
+ c.log.Errorf("cache close: %v", err)
}
- if !info.IsDir() {
- return nil, &os.PathError{Op: "open", Path: dir, Err: errors.New("not a directory")}
- }
- for i := 0; i < 256; i++ {
- name := filepath.Join(dir, fmt.Sprintf("%02x", i))
- if err := os.MkdirAll(name, 0744); err != nil {
- return nil, err
- }
- }
- c := &Cache{
- dir: dir,
- now: time.Now,
- }
- return c, nil
}
-// fileName returns the name of the file corresponding to the given id.
-func (c *Cache) fileName(id [HashSize]byte, key string) string {
- return filepath.Join(c.dir, fmt.Sprintf("%02x", id[0]), fmt.Sprintf("%x", id)+"-"+key)
-}
+func (c *Cache) Put(pkg *packages.Package, mode HashMode, key string, data any) error {
+ buf, err := c.encode(data)
+ if err != nil {
+ return err
+ }
-var errMissing = errors.New("cache entry not found")
+ actionID, err := c.buildKey(pkg, mode, key)
+ if err != nil {
+ return fmt.Errorf("failed to calculate package %s action id: %w", pkg.Name, err)
+ }
+
+ err = c.putBytes(actionID, buf)
+ if err != nil {
+ return fmt.Errorf("failed to save data to low-level cache by key %s for package %s: %w", key, pkg.Name, err)
+ }
-func IsErrMissing(err error) bool {
- return errors.Is(err, errMissing)
+ return nil
}
-const (
- // action entry file is "v1 <hex id> <hex out> <decimal size space-padded to 20 bytes> <unixnano space-padded to 20 bytes>\n"
- hexSize = HashSize * 2
- entrySize = 2 + 1 + hexSize + 1 + hexSize + 1 + 20 + 1 + 20 + 1
-)
+func (c *Cache) Get(pkg *packages.Package, mode HashMode, key string, data any) error {
+ actionID, err := c.buildKey(pkg, mode, key)
+ if err != nil {
+ return fmt.Errorf("failed to calculate package %s action id: %w", pkg.Name, err)
+ }
-// verify controls whether to run the cache in verify mode.
-// In verify mode, the cache always returns errMissing from Get
-// but then double-checks in Put that the data being written
-// exactly matches any existing entry. This provides an easy
-// way to detect program behavior that would have been different
-// had the cache entry been returned from Get.
-//
-// verify is enabled by setting the environment variable
-// GODEBUG=gocacheverify=1.
-var verify = false
-
-// DebugTest is set when GODEBUG=gocachetest=1 is in the environment.
-var DebugTest = false
-
-func init() { initEnv() }
-
-func initEnv() {
- verify = false
- debugHash = false
- debug := strings.Split(os.Getenv("GODEBUG"), ",")
- for _, f := range debug {
- if f == "gocacheverify=1" {
- verify = true
- }
- if f == "gocachehash=1" {
- debugHash = true
- }
- if f == "gocachetest=1" {
- DebugTest = true
+ cachedData, err := c.getBytes(actionID)
+ if err != nil {
+ if cache.IsErrMissing(err) {
+ return ErrMissing
}
+ return fmt.Errorf("failed to get data from low-level cache by key %s for package %s: %w", key, pkg.Name, err)
}
-}
-// Get looks up the action ID in the cache,
-// returning the corresponding output ID and file size, if any.
-// Note that finding an output ID does not guarantee that the
-// saved file for that output ID is still available.
-func (c *Cache) Get(id ActionID) (Entry, error) {
- if verify {
- return Entry{}, errMissing
- }
- return c.get(id)
+ return c.decode(cachedData, data)
}
-type Entry struct {
- OutputID OutputID
- Size int64
- Time time.Time
+func (c *Cache) buildKey(pkg *packages.Package, mode HashMode, key string) (cache.ActionID, error) {
+ return timeutils.TrackStage(c.sw, "key build", func() (cache.ActionID, error) {
+ actionID, err := c.pkgActionID(pkg, mode)
+ if err != nil {
+ return actionID, err
+ }
+
+ subkey, subkeyErr := cache.Subkey(actionID, key)
+ if subkeyErr != nil {
+ return actionID, fmt.Errorf("failed to build subkey: %w", subkeyErr)
+ }
+
+ return subkey, nil
+ })
}
-// get is Get but does not respect verify mode, so that Put can use it.
-func (c *Cache) get(id ActionID) (Entry, error) {
- missing := func() (Entry, error) {
- return Entry{}, errMissing
- }
- failed := func(err error) (Entry, error) {
- return Entry{}, err
- }
- fileName := c.fileName(id, "a")
- f, err := os.Open(fileName)
+func (c *Cache) pkgActionID(pkg *packages.Package, mode HashMode) (cache.ActionID, error) {
+ hash, err := c.packageHash(pkg, mode)
if err != nil {
- if os.IsNotExist(err) {
- return missing()
- }
- return failed(err)
- }
- defer f.Close()
- entry := make([]byte, entrySize+1) // +1 to detect whether f is too long
- if n, readErr := io.ReadFull(f, entry); n != entrySize || readErr != io.ErrUnexpectedEOF {
- return failed(fmt.Errorf("read %d/%d bytes from %s with error %w", n, entrySize, fileName, readErr))
- }
- if entry[0] != 'v' || entry[1] != '1' || entry[2] != ' ' || entry[3+hexSize] != ' ' || entry[3+hexSize+1+hexSize] != ' ' || entry[3+hexSize+1+hexSize+1+20] != ' ' || entry[entrySize-1] != '\n' {
- return failed(fmt.Errorf("bad data in %s", fileName))
- }
- eid, entry := entry[3:3+hexSize], entry[3+hexSize:]
- eout, entry := entry[1:1+hexSize], entry[1+hexSize:]
- esize, entry := entry[1:1+20], entry[1+20:]
- etime := entry[1 : 1+20]
- var buf [HashSize]byte
- if _, err = hex.Decode(buf[:], eid); err != nil || buf != id {
- return failed(fmt.Errorf("failed to hex decode eid data in %s: %w", fileName, err))
- }
- if _, err = hex.Decode(buf[:], eout); err != nil {
- return failed(fmt.Errorf("failed to hex decode eout data in %s: %w", fileName, err))
- }
- i := 0
- for i < len(esize) && esize[i] == ' ' {
- i++
- }
- size, err := strconv.ParseInt(string(esize[i:]), 10, 64)
- if err != nil || size < 0 {
- return failed(fmt.Errorf("failed to parse esize int from %s with error %w", fileName, err))
- }
- i = 0
- for i < len(etime) && etime[i] == ' ' {
- i++
- }
- tm, err := strconv.ParseInt(string(etime[i:]), 10, 64)
- if err != nil || tm < 0 {
- return failed(fmt.Errorf("failed to parse etime int from %s with error %w", fileName, err))
+ return cache.ActionID{}, fmt.Errorf("failed to get package hash: %w", err)
}
- if err = c.used(fileName); err != nil {
- return failed(fmt.Errorf("failed to mark %s as used: %w", fileName, err))
+ key, err := cache.NewHash("action ID")
+ if err != nil {
+ return cache.ActionID{}, fmt.Errorf("failed to make a hash: %w", err)
}
- return Entry{buf, size, time.Unix(0, tm)}, nil
+ fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath)
+ fmt.Fprintf(key, "pkghash %s\n", hash)
+
+ return key.Sum(), nil
}
-// GetBytes looks up the action ID in the cache and returns
-// the corresponding output bytes.
-// GetBytes should only be used for data that can be expected to fit in memory.
-func (c *Cache) GetBytes(id ActionID) ([]byte, Entry, error) {
- entry, err := c.Get(id)
- if err != nil {
- return nil, entry, err
- }
- outputFile, err := c.OutputFile(entry.OutputID)
- if err != nil {
- return nil, entry, err
+func (c *Cache) packageHash(pkg *packages.Package, mode HashMode) (string, error) {
+ results, found := c.pkgHashes.Load(pkg)
+ if found {
+ hashRes := results.(hashResults)
+ if result, ok := hashRes[mode]; ok {
+ return result, nil
+ }
+
+ return "", fmt.Errorf("no mode %d in hash result", mode)
}
- data, err := robustio.ReadFile(outputFile)
+ hashRes, err := c.computePkgHash(pkg)
if err != nil {
- return nil, entry, err
+ return "", err
}
- if sha256.Sum256(data) != entry.OutputID {
- return nil, entry, errMissing
+ result, found := hashRes[mode]
+ if !found {
+ return "", fmt.Errorf("invalid mode %d", mode)
}
- return data, entry, nil
+
+ c.pkgHashes.Store(pkg, hashRes)
+
+ return result, nil
}
-// OutputFile returns the name of the cache file storing output with the given OutputID.
-func (c *Cache) OutputFile(out OutputID) (string, error) {
- file := c.fileName(out, "d")
- if err := c.used(file); err != nil {
- return "", err
+// computePkgHash computes a package's hash.
+// The hash is based on all Go files that make up the package,
+// as well as the hashes of imported packages.
+func (c *Cache) computePkgHash(pkg *packages.Package) (hashResults, error) {
+ key, err := cache.NewHash("package hash")
+ if err != nil {
+ return nil, fmt.Errorf("failed to make a hash: %w", err)
}
- return file, nil
-}
-// Time constants for cache expiration.
-//
-// We set the mtime on a cache file on each use, but at most one per mtimeInterval (1 hour),
-// to avoid causing many unnecessary inode updates. The mtimes therefore
-// roughly reflect "time of last use" but may in fact be older by at most an hour.
-//
-// We scan the cache for entries to delete at most once per trimInterval (1 day).
-//
-// When we do scan the cache, we delete entries that have not been used for
-// at least trimLimit (5 days). Statistics gathered from a month of usage by
-// Go developers found that essentially all reuse of cached entries happened
-// within 5 days of the previous reuse. See golang.org/issue/22990.
-const (
- mtimeInterval = 1 * time.Hour
- trimInterval = 24 * time.Hour
- trimLimit = 5 * 24 * time.Hour
-)
+ hashRes := hashResults{}
-// used makes a best-effort attempt to update mtime on file,
-// so that mtime reflects cache access time.
-//
-// Because the reflection only needs to be approximate,
-// and to reduce the amount of disk activity caused by using
-// cache entries, used only updates the mtime if the current
-// mtime is more than an hour old. This heuristic eliminates
-// nearly all the mtime updates that would otherwise happen,
-// while still keeping the mtimes useful for cache trimming.
-func (c *Cache) used(file string) error {
- info, err := os.Stat(file)
- if err != nil {
- if os.IsNotExist(err) {
- return errMissing
+ fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath)
+
+ for _, f := range pkg.CompiledGoFiles {
+ h, fErr := c.fileHash(f)
+ if fErr != nil {
+ return nil, fmt.Errorf("failed to calculate file %s hash: %w", f, fErr)
}
- return fmt.Errorf("failed to stat file %s: %w", file, err)
- }
- if c.now().Sub(info.ModTime()) < mtimeInterval {
- return nil
+ fmt.Fprintf(key, "file %s %x\n", f, h)
}
- if err := os.Chtimes(file, c.now(), c.now()); err != nil {
- return fmt.Errorf("failed to change time of file %s: %w", file, err)
- }
+ curSum := key.Sum()
+ hashRes[HashModeNeedOnlySelf] = hex.EncodeToString(curSum[:])
- return nil
-}
+ imps := slices.SortedFunc(maps.Values(pkg.Imports), func(a, b *packages.Package) int {
+ return strings.Compare(a.PkgPath, b.PkgPath)
+ })
-// Trim removes old cache entries that are likely not to be reused.
-func (c *Cache) Trim() {
- now := c.now()
-
- // We maintain in dir/trim.txt the time of the last completed cache trim.
- // If the cache has been trimmed recently enough, do nothing.
- // This is the common case.
- data, _ := renameio.ReadFile(filepath.Join(c.dir, "trim.txt"))
- t, err := strconv.ParseInt(strings.TrimSpace(string(data)), 10, 64)
- if err == nil && now.Sub(time.Unix(t, 0)) < trimInterval {
- return
+ if err := c.computeDepsHash(HashModeNeedOnlySelf, imps, key); err != nil {
+ return nil, err
}
- // Trim each of the 256 subdirectories.
- // We subtract an additional mtimeInterval
- // to account for the imprecision of our "last used" mtimes.
- cutoff := now.Add(-trimLimit - mtimeInterval)
- for i := 0; i < 256; i++ {
- subdir := filepath.Join(c.dir, fmt.Sprintf("%02x", i))
- c.trimSubdir(subdir, cutoff)
+ curSum = key.Sum()
+ hashRes[HashModeNeedDirectDeps] = hex.EncodeToString(curSum[:])
+
+ if err := c.computeDepsHash(HashModeNeedAllDeps, imps, key); err != nil {
+ return nil, err
}
- // Ignore errors from here: if we don't write the complete timestamp, the
- // cache will appear older than it is, and we'll trim it again next time.
- _ = renameio.WriteFile(filepath.Join(c.dir, "trim.txt"), []byte(fmt.Sprintf("%d", now.Unix())), 0666)
-}
+ curSum = key.Sum()
+ hashRes[HashModeNeedAllDeps] = hex.EncodeToString(curSum[:])
-// trimSubdir trims a single cache subdirectory.
-func (c *Cache) trimSubdir(subdir string, cutoff time.Time) {
- // Read all directory entries from subdir before removing
- // any files, in case removing files invalidates the file offset
- // in the directory scan. Also, ignore error from f.Readdirnames,
- // because we don't care about reporting the error, and we still
- // want to process any entries found before the error.
- f, err := os.Open(subdir)
- if err != nil {
- return
- }
- names, _ := f.Readdirnames(-1)
- f.Close()
+ return hashRes, nil
+}
- for _, name := range names {
- // Remove only cache entries (xxxx-a and xxxx-d).
- if !strings.HasSuffix(name, "-a") && !strings.HasSuffix(name, "-d") {
+func (c *Cache) computeDepsHash(depMode HashMode, imps []*packages.Package, key *cache.Hash) error {
+ for _, dep := range imps {
+ if dep.PkgPath == "unsafe" {
continue
}
- entry := filepath.Join(subdir, name)
- info, err := os.Stat(entry)
- if err == nil && info.ModTime().Before(cutoff) {
- os.Remove(entry)
+
+ depHash, err := c.packageHash(dep, depMode)
+ if err != nil {
+ return fmt.Errorf("failed to calculate hash for dependency %s with mode %d: %w", dep.Name, depMode, err)
}
+
+ fmt.Fprintf(key, "import %s %s\n", dep.PkgPath, depHash)
}
+
+ return nil
}
-// putIndexEntry adds an entry to the cache recording that executing the action
-// with the given id produces an output with the given output id (hash) and size.
-func (c *Cache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify bool) error {
- // Note: We expect that for one reason or another it may happen
- // that repeating an action produces a different output hash
- // (for example, if the output contains a time stamp or temp dir name).
- // While not ideal, this is also not a correctness problem, so we
- // don't make a big deal about it. In particular, we leave the action
- // cache entries writable specifically so that they can be overwritten.
- //
- // Setting GODEBUG=gocacheverify=1 does make a big deal:
- // in verify mode we are double-checking that the cache entries
- // are entirely reproducible. As just noted, this may be unrealistic
- // in some cases but the check is also useful for shaking out real bugs.
- entry := fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano())
-
- if verify && allowVerify {
- old, err := c.get(id)
- if err == nil && (old.OutputID != out || old.Size != size) {
- // panic to show stack trace, so we can see what code is generating this cache entry.
- msg := fmt.Sprintf("go: internal cache error: cache verify failed: id=%x changed:<<<\n%s\n>>>\nold: %x %d\nnew: %x %d", id, reverseHash(id), out, size, old.OutputID, old.Size)
- panic(msg)
- }
- }
- file := c.fileName(id, "a")
+func (c *Cache) putBytes(actionID cache.ActionID, buf *bytes.Buffer) error {
+ c.ioSem <- struct{}{}
+
+ err := c.sw.TrackStageErr("cache io", func() error {
+ return cache.PutBytes(c.lowLevelCache, actionID, buf.Bytes())
+ })
+
+ <-c.ioSem
- // Copy file to cache directory.
- mode := os.O_WRONLY | os.O_CREATE
- f, err := os.OpenFile(file, mode, 0666)
- if err != nil {
- return err
- }
- _, err = f.WriteString(entry)
- if err == nil {
- // Truncate the file only *after* writing it.
- // (This should be a no-op, but truncate just in case of previous corruption.)
- //
- // This differs from os.WriteFile, which truncates to 0 *before* writing
- // via os.O_TRUNC. Truncating only after writing ensures that a second write
- // of the same content to the same file is idempotent, and does not — even
- // temporarily! — undo the effect of the first write.
- err = f.Truncate(int64(len(entry)))
- }
- if closeErr := f.Close(); err == nil {
- err = closeErr
- }
if err != nil {
- // TODO(bcmills): This Remove potentially races with another go command writing to file.
- // Can we eliminate it?
- os.Remove(file)
return err
}
- if err = os.Chtimes(file, c.now(), c.now()); err != nil { // mainly for tests
- return fmt.Errorf("failed to change time of file %s: %w", file, err)
- }
return nil
}
-// Put stores the given output in the cache as the output for the action ID.
-// It may read file twice. The content of file must not change between the two passes.
-func (c *Cache) Put(id ActionID, file io.ReadSeeker) (OutputID, int64, error) {
- return c.put(id, file, true)
-}
+func (c *Cache) getBytes(actionID cache.ActionID) ([]byte, error) {
+ c.ioSem <- struct{}{}
-// PutNoVerify is like Put but disables the verify check
-// when GODEBUG=goverifycache=1 is set.
-// It is meant for data that is OK to cache but that we expect to vary slightly from run to run,
-// like test output containing times and the like.
-func (c *Cache) PutNoVerify(id ActionID, file io.ReadSeeker) (OutputID, int64, error) {
- return c.put(id, file, false)
-}
+ cachedData, err := timeutils.TrackStage(c.sw, "cache io", func() ([]byte, error) {
+ b, _, errGB := cache.GetBytes(c.lowLevelCache, actionID)
+ return b, errGB
+ })
-func (c *Cache) put(id ActionID, file io.ReadSeeker, allowVerify bool) (OutputID, int64, error) {
- // Compute output ID.
- h := sha256.New()
- if _, err := file.Seek(0, 0); err != nil {
- return OutputID{}, 0, err
- }
- size, err := io.Copy(h, file)
- if err != nil {
- return OutputID{}, 0, err
- }
- var out OutputID
- h.Sum(out[:0])
+ <-c.ioSem
- // Copy to cached output file (if not already present).
- if err := c.copyFile(file, out, size); err != nil {
- return out, size, err
+ if err != nil {
+ return nil, err
}
- // Add to cache index.
- return out, size, c.putIndexEntry(id, out, size, allowVerify)
+ return cachedData, nil
}
-// PutBytes stores the given bytes in the cache as the output for the action ID.
-func (c *Cache) PutBytes(id ActionID, data []byte) error {
- _, _, err := c.Put(id, bytes.NewReader(data))
- return err
-}
+func (c *Cache) fileHash(f string) ([cache.HashSize]byte, error) {
+ c.ioSem <- struct{}{}
-// copyFile copies file into the cache, expecting it to have the given
-// output ID and size, if that file is not present already.
-func (c *Cache) copyFile(file io.ReadSeeker, out OutputID, size int64) error {
- name := c.fileName(out, "d")
- info, err := os.Stat(name)
- if err == nil && info.Size() == size {
- // Check hash.
- if f, openErr := os.Open(name); openErr == nil {
- h := sha256.New()
- if _, copyErr := io.Copy(h, f); copyErr != nil {
- return fmt.Errorf("failed to copy to sha256: %w", copyErr)
- }
-
- f.Close()
- var out2 OutputID
- h.Sum(out2[:0])
- if out == out2 {
- return nil
- }
- }
- // Hash did not match. Fall through and rewrite file.
- }
+ h, err := cache.FileHash(f)
+
+ <-c.ioSem
- // Copy file to cache directory.
- mode := os.O_RDWR | os.O_CREATE
- if err == nil && info.Size() > size { // shouldn't happen but fix in case
- mode |= os.O_TRUNC
- }
- f, err := os.OpenFile(name, mode, 0666)
if err != nil {
- return err
- }
- defer f.Close()
- if size == 0 {
- // File now exists with correct size.
- // Only one possible zero-length file, so contents are OK too.
- // Early return here makes sure there's a "last byte" for code below.
- return nil
+ return [cache.HashSize]byte{}, err
}
- // From here on, if any of the I/O writing the file fails,
- // we make a best-effort attempt to truncate the file f
- // before returning, to avoid leaving bad bytes in the file.
+ return h, nil
+}
- // Copy file to f, but also into h to double-check hash.
- if _, err = file.Seek(0, 0); err != nil {
- _ = f.Truncate(0)
- return err
- }
- h := sha256.New()
- w := io.MultiWriter(f, h)
- if _, err = io.CopyN(w, file, size-1); err != nil {
- _ = f.Truncate(0)
- return err
- }
- // Check last byte before writing it; writing it will make the size match
- // what other processes expect to find and might cause them to start
- // using the file.
- buf := make([]byte, 1)
- if _, err = file.Read(buf); err != nil {
- _ = f.Truncate(0)
- return err
- }
- if n, wErr := h.Write(buf); n != len(buf) {
- return fmt.Errorf("wrote to hash %d/%d bytes with error %w", n, len(buf), wErr)
+func (c *Cache) encode(data any) (*bytes.Buffer, error) {
+ buf := &bytes.Buffer{}
+ err := c.sw.TrackStageErr("gob", func() error {
+ return gob.NewEncoder(buf).Encode(data)
+ })
+ if err != nil {
+ return nil, fmt.Errorf("failed to gob encode: %w", err)
}
- sum := h.Sum(nil)
- if !bytes.Equal(sum, out[:]) {
- _ = f.Truncate(0)
- return errors.New("file content changed underfoot")
- }
+ return buf, nil
+}
- // Commit cache file entry.
- if _, err = f.Write(buf); err != nil {
- _ = f.Truncate(0)
- return err
- }
- if err = f.Close(); err != nil {
- // Data might not have been written,
- // but file may look like it is the right size.
- // To be extra careful, remove cached file.
- os.Remove(name)
- return err
- }
- if err = os.Chtimes(name, c.now(), c.now()); err != nil { // mainly for tests
- return fmt.Errorf("failed to change time of file %s: %w", name, err)
+func (c *Cache) decode(b []byte, data any) error {
+ err := c.sw.TrackStageErr("gob", func() error {
+ return gob.NewDecoder(bytes.NewReader(b)).Decode(data)
+ })
+ if err != nil {
+ return fmt.Errorf("failed to gob decode: %w", err)
}
return nil
}
+
+func SetSalt(b *bytes.Buffer) {
+ cache.SetSalt(b.Bytes())
+}
+
+func DefaultDir() string {
+ cacheDir, _ := cache.DefaultDir()
+ return cacheDir
+}
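
For reference (not part of the patch): the rewritten per-package cache gob-encodes arbitrary values on Put and decodes them on Get, keyed by a hash of the package and its dependencies. A sketch of just the gob round-trip, with a bytes.Buffer standing in for the low-level cache (the issueData type is made up):

```go
package main

import (
	"bytes"
	"encoding/gob"
	"fmt"
)

// issueData is a made-up payload; any gob-encodable value works.
type issueData struct {
	File string
	Line int
}

func main() {
	// Put side: encode the value into a buffer (the real code then stores the
	// bytes in the low-level cache under a package-derived action ID).
	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(issueData{File: "main.go", Line: 42}); err != nil {
		panic(err)
	}

	// Get side: decode the cached bytes back into the caller's value.
	var out issueData
	if err := gob.NewDecoder(bytes.NewReader(buf.Bytes())).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", out) // {File:main.go Line:42}
}
```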
diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/readme.md b/vendor/github.com/golangci/golangci-lint/internal/cache/readme.md
deleted file mode 100644
index b469711edd..0000000000
--- a/vendor/github.com/golangci/golangci-lint/internal/cache/readme.md
+++ /dev/null
@@ -1,18 +0,0 @@
-# cache
-
-Extracted from go/src/cmd/go/internal/cache/
-I don't know what version of Go this package was pulled from.
-
-Adapted for golangci-lint:
-- https://github.com/golangci/golangci-lint/pull/699
-- https://github.com/golangci/golangci-lint/pull/779
-- https://github.com/golangci/golangci-lint/pull/788
-- https://github.com/golangci/golangci-lint/pull/808
-- https://github.com/golangci/golangci-lint/pull/1063
-- https://github.com/golangci/golangci-lint/pull/1070
-- https://github.com/golangci/golangci-lint/pull/1162
-- https://github.com/golangci/golangci-lint/pull/2318
-- https://github.com/golangci/golangci-lint/pull/2352
-- https://github.com/golangci/golangci-lint/pull/3012
-- https://github.com/golangci/golangci-lint/pull/3096
-- https://github.com/golangci/golangci-lint/pull/3204
diff --git a/vendor/golang.org/x/exp/LICENSE b/vendor/github.com/golangci/golangci-lint/internal/go/LICENSE
similarity index 100%
rename from vendor/golang.org/x/exp/LICENSE
rename to vendor/github.com/golangci/golangci-lint/internal/go/LICENSE
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache.go
new file mode 100644
index 0000000000..85899ebc92
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache.go
@@ -0,0 +1,663 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package cache implements a build artifact cache.
+//
+// This package is a slightly modified fork of Go's
+// cmd/go/internal/cache package.
+package cache
+
+import (
+ "bytes"
+ "crypto/sha256"
+ "encoding/hex"
+ "errors"
+ "fmt"
+ "io"
+ "io/fs"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/golangci/golangci-lint/internal/go/mmap"
+ "github.com/golangci/golangci-lint/internal/go/robustio"
+ "github.com/rogpeppe/go-internal/lockedfile"
+)
+
+// An ActionID is a cache action key, the hash of a complete description of a
+// repeatable computation (command line, environment variables,
+// input file contents, executable contents).
+type ActionID [HashSize]byte
+
+// An OutputID is a cache output key, the hash of an output of a computation.
+type OutputID [HashSize]byte
+
+// Cache is the interface as used by the cmd/go.
+type Cache interface {
+ // Get returns the cache entry for the provided ActionID.
+ // On miss, the error type should be of type *entryNotFoundError.
+ //
+ // After a successful call to Get, OutputFile(Entry.OutputID) must
+ // exist on disk until Close is called (at the end of the process).
+ Get(ActionID) (Entry, error)
+
+ // Put adds an item to the cache.
+ //
+ // The seeker is only used to seek to the beginning. After a call to Put,
+ // the seek position is not guaranteed to be in any particular state.
+ //
+ // As a special case, if the ReadSeeker is of type noVerifyReadSeeker,
+ // the verification from GODEBUG=goverifycache=1 is skipped.
+ //
+ // After a successful call to Get, OutputFile(Entry.OutputID) must
+ // exist on disk until Close is called (at the end of the process).
+ Put(ActionID, io.ReadSeeker) (_ OutputID, size int64, _ error)
+
+ // Close is called at the end of the go process. Implementations can do
+ // cache cleanup work at this phase, or wait for and report any errors from
+ // background cleanup work started earlier. Any cache trimming in one
+ // process should not cause the invariants of this interface to be
+ // violated in another process. Namely, a cache trim from one process should
+ // not delete an ObjectID from disk that was recently Get or Put from
+ // another process. As a rule of thumb, don't trim things used in the last
+ // day.
+ Close() error
+
+ // OutputFile returns the path on disk where OutputID is stored.
+ //
+ // It's only called after a successful get or put call so it doesn't need
+ // to return an error; it's assumed that if the previous get or put succeeded,
+ // it's already on disk.
+ OutputFile(OutputID) string
+
+ // FuzzDir returns where fuzz files are stored.
+ FuzzDir() string
+}
+
+// A Cache is a package cache, backed by a file system directory tree.
+type DiskCache struct {
+ dir string
+ now func() time.Time
+}
+
+// Open opens and returns the cache in the given directory.
+//
+// It is safe for multiple processes on a single machine to use the
+// same cache directory in a local file system simultaneously.
+// They will coordinate using operating system file locks and may
+// duplicate effort but will not corrupt the cache.
+//
+// However, it is NOT safe for multiple processes on different machines
+// to share a cache directory (for example, if the directory were stored
+// in a network file system). File locking is notoriously unreliable in
+// network file systems and may not suffice to protect the cache.
+func Open(dir string) (*DiskCache, error) {
+ info, err := os.Stat(dir)
+ if err != nil {
+ return nil, err
+ }
+ if !info.IsDir() {
+ return nil, &fs.PathError{Op: "open", Path: dir, Err: fmt.Errorf("not a directory")}
+ }
+ for i := 0; i < 256; i++ {
+ name := filepath.Join(dir, fmt.Sprintf("%02x", i))
+ if err := os.MkdirAll(name, 0744); err != nil {
+ return nil, err
+ }
+ }
+ c := &DiskCache{
+ dir: dir,
+ now: time.Now,
+ }
+ return c, nil
+}
+
+// fileName returns the name of the file corresponding to the given id.
+func (c *DiskCache) fileName(id [HashSize]byte, key string) string {
+ return filepath.Join(c.dir, fmt.Sprintf("%02x", id[0]), fmt.Sprintf("%x", id)+"-"+key)
+}
+
+// An entryNotFoundError indicates that a cache entry was not found, with an
+// optional underlying reason.
+type entryNotFoundError struct {
+ Err error
+}
+
+func (e *entryNotFoundError) Error() string {
+ if e.Err == nil {
+ return "cache entry not found"
+ }
+ return fmt.Sprintf("cache entry not found: %v", e.Err)
+}
+
+func (e *entryNotFoundError) Unwrap() error {
+ return e.Err
+}
+
+const (
+ // action entry file is "v1 <hex id> <hex out> <decimal size space-padded to 20 bytes> <unixnano space-padded to 20 bytes>\n"
+ hexSize = HashSize * 2
+ entrySize = 2 + 1 + hexSize + 1 + hexSize + 1 + 20 + 1 + 20 + 1
+)
+
+// verify controls whether to run the cache in verify mode.
+// In verify mode, the cache always returns errMissing from Get
+// but then double-checks in Put that the data being written
+// exactly matches any existing entry. This provides an easy
+// way to detect program behavior that would have been different
+// had the cache entry been returned from Get.
+//
+// verify is enabled by setting the environment variable
+// GODEBUG=gocacheverify=1.
+var verify = false
+
+var errVerifyMode = errors.New("gocacheverify=1")
+
+// DebugTest is set when GODEBUG=gocachetest=1 is in the environment.
+var DebugTest = false
+
+// func init() { initEnv() }
+
+// var (
+// gocacheverify = godebug.New("gocacheverify")
+// gocachehash = godebug.New("gocachehash")
+// gocachetest = godebug.New("gocachetest")
+// )
+
+// func initEnv() {
+// if gocacheverify.Value() == "1" {
+// gocacheverify.IncNonDefault()
+// verify = true
+// }
+// if gocachehash.Value() == "1" {
+// gocachehash.IncNonDefault()
+// debugHash = true
+// }
+// if gocachetest.Value() == "1" {
+// gocachetest.IncNonDefault()
+// DebugTest = true
+// }
+// }
+
+// Get looks up the action ID in the cache,
+// returning the corresponding output ID and file size, if any.
+// Note that finding an output ID does not guarantee that the
+// saved file for that output ID is still available.
+func (c *DiskCache) Get(id ActionID) (Entry, error) {
+ if verify {
+ return Entry{}, &entryNotFoundError{Err: errVerifyMode}
+ }
+ return c.get(id)
+}
+
+type Entry struct {
+ OutputID OutputID
+ Size int64
+ Time time.Time // when added to cache
+}
+
+// get is Get but does not respect verify mode, so that Put can use it.
+func (c *DiskCache) get(id ActionID) (Entry, error) {
+ missing := func(reason error) (Entry, error) {
+ return Entry{}, &entryNotFoundError{Err: reason}
+ }
+ f, err := os.Open(c.fileName(id, "a"))
+ if err != nil {
+ return missing(err)
+ }
+ defer f.Close()
+ entry := make([]byte, entrySize+1) // +1 to detect whether f is too long
+ if n, err := io.ReadFull(f, entry); n > entrySize {
+ return missing(errors.New("too long"))
+ } else if err != io.ErrUnexpectedEOF {
+ if err == io.EOF {
+ return missing(errors.New("file is empty"))
+ }
+ return missing(err)
+ } else if n < entrySize {
+ return missing(errors.New("entry file incomplete"))
+ }
+ if entry[0] != 'v' || entry[1] != '1' || entry[2] != ' ' || entry[3+hexSize] != ' ' || entry[3+hexSize+1+hexSize] != ' ' || entry[3+hexSize+1+hexSize+1+20] != ' ' || entry[entrySize-1] != '\n' {
+ return missing(errors.New("invalid header"))
+ }
+ eid, entry := entry[3:3+hexSize], entry[3+hexSize:]
+ eout, entry := entry[1:1+hexSize], entry[1+hexSize:]
+ esize, entry := entry[1:1+20], entry[1+20:]
+ etime, entry := entry[1:1+20], entry[1+20:]
+ var buf [HashSize]byte
+ if _, err := hex.Decode(buf[:], eid); err != nil {
+ return missing(fmt.Errorf("decoding ID: %v", err))
+ } else if buf != id {
+ return missing(errors.New("mismatched ID"))
+ }
+ if _, err := hex.Decode(buf[:], eout); err != nil {
+ return missing(fmt.Errorf("decoding output ID: %v", err))
+ }
+ i := 0
+ for i < len(esize) && esize[i] == ' ' {
+ i++
+ }
+ size, err := strconv.ParseInt(string(esize[i:]), 10, 64)
+ if err != nil {
+ return missing(fmt.Errorf("parsing size: %v", err))
+ } else if size < 0 {
+ return missing(errors.New("negative size"))
+ }
+ i = 0
+ for i < len(etime) && etime[i] == ' ' {
+ i++
+ }
+ tm, err := strconv.ParseInt(string(etime[i:]), 10, 64)
+ if err != nil {
+ return missing(fmt.Errorf("parsing timestamp: %v", err))
+ } else if tm < 0 {
+ return missing(errors.New("negative timestamp"))
+ }
+
+ err = c.used(c.fileName(id, "a"))
+ if err != nil {
+ return Entry{}, fmt.Errorf("failed to mark %s as used: %w", c.fileName(id, "a"), err)
+ }
+
+ return Entry{buf, size, time.Unix(0, tm)}, nil
+}
+
+// GetFile looks up the action ID in the cache and returns
+// the name of the corresponding data file.
+func GetFile(c Cache, id ActionID) (file string, entry Entry, err error) {
+ entry, err = c.Get(id)
+ if err != nil {
+ return "", Entry{}, err
+ }
+ file = c.OutputFile(entry.OutputID)
+ info, err := os.Stat(file)
+ if err != nil {
+ return "", Entry{}, &entryNotFoundError{Err: err}
+ }
+ if info.Size() != entry.Size {
+ return "", Entry{}, &entryNotFoundError{Err: errors.New("file incomplete")}
+ }
+ return file, entry, nil
+}
+
+// GetBytes looks up the action ID in the cache and returns
+// the corresponding output bytes.
+// GetBytes should only be used for data that can be expected to fit in memory.
+func GetBytes(c Cache, id ActionID) ([]byte, Entry, error) {
+ entry, err := c.Get(id)
+ if err != nil {
+ return nil, entry, err
+ }
+ data, err := robustio.ReadFile(c.OutputFile(entry.OutputID))
+ if err != nil {
+ return nil, entry, &entryNotFoundError{Err: err}
+ }
+ if sha256.Sum256(data) != entry.OutputID {
+ return nil, entry, &entryNotFoundError{Err: errors.New("bad checksum")}
+ }
+ return data, entry, nil
+}
+
+// GetMmap looks up the action ID in the cache and returns
+// the corresponding output bytes.
+// GetMmap should only be used for data that can be expected to fit in memory.
+func GetMmap(c Cache, id ActionID) ([]byte, Entry, error) {
+ entry, err := c.Get(id)
+ if err != nil {
+ return nil, entry, err
+ }
+ md, err := mmap.Mmap(c.OutputFile(entry.OutputID))
+ if err != nil {
+ return nil, Entry{}, err
+ }
+ if int64(len(md.Data)) != entry.Size {
+ return nil, Entry{}, &entryNotFoundError{Err: errors.New("file incomplete")}
+ }
+ return md.Data, entry, nil
+}
+
+// OutputFile returns the name of the cache file storing output with the given OutputID.
+func (c *DiskCache) OutputFile(out OutputID) string {
+ file := c.fileName(out, "d")
+ c.used(file)
+ return file
+}
+
+// Time constants for cache expiration.
+//
+// We set the mtime on a cache file on each use, but at most one per mtimeInterval (1 hour),
+// to avoid causing many unnecessary inode updates. The mtimes therefore
+// roughly reflect "time of last use" but may in fact be older by at most an hour.
+//
+// We scan the cache for entries to delete at most once per trimInterval (1 day).
+//
+// When we do scan the cache, we delete entries that have not been used for
+// at least trimLimit (5 days). Statistics gathered from a month of usage by
+// Go developers found that essentially all reuse of cached entries happened
+// within 5 days of the previous reuse. See golang.org/issue/22990.
+const (
+ mtimeInterval = 1 * time.Hour
+ trimInterval = 24 * time.Hour
+ trimLimit = 5 * 24 * time.Hour
+)
+
+// used makes a best-effort attempt to update mtime on file,
+// so that mtime reflects cache access time.
+//
+// Because the reflection only needs to be approximate,
+// and to reduce the amount of disk activity caused by using
+// cache entries, used only updates the mtime if the current
+// mtime is more than an hour old. This heuristic eliminates
+// nearly all of the mtime updates that would otherwise happen,
+// while still keeping the mtimes useful for cache trimming.
+func (c *DiskCache) used(file string) error {
+ info, err := os.Stat(file)
+ if err == nil && c.now().Sub(info.ModTime()) < mtimeInterval {
+ return nil
+ }
+
+ if err != nil {
+ if os.IsNotExist(err) {
+ return &entryNotFoundError{Err: err}
+ }
+ return &entryNotFoundError{Err: fmt.Errorf("failed to stat file %s: %w", file, err)}
+ }
+
+ err = os.Chtimes(file, c.now(), c.now())
+ if err != nil {
+ return fmt.Errorf("failed to change time of file %s: %w", file, err)
+ }
+
+ return nil
+}
+
+func (c *DiskCache) Close() error { return c.Trim() }
+
+// Trim removes old cache entries that are likely not to be reused.
+func (c *DiskCache) Trim() error {
+ now := c.now()
+
+ // We maintain in dir/trim.txt the time of the last completed cache trim.
+ // If the cache has been trimmed recently enough, do nothing.
+ // This is the common case.
+ // If the trim file is corrupt (detected when the file can't be parsed), or the
+ // trim time is too far in the future, attempt the trim anyway. It's possible that
+ // the cache was full when the corruption happened. Attempting a trim on
+ // an empty cache is cheap, so there wouldn't be a big performance hit in that case.
+ if data, err := lockedfile.Read(filepath.Join(c.dir, "trim.txt")); err == nil {
+ if t, err := strconv.ParseInt(strings.TrimSpace(string(data)), 10, 64); err == nil {
+ lastTrim := time.Unix(t, 0)
+ if d := now.Sub(lastTrim); d < trimInterval && d > -mtimeInterval {
+ return nil
+ }
+ }
+ }
+
+ // Trim each of the 256 subdirectories.
+ // We subtract an additional mtimeInterval
+ // to account for the imprecision of our "last used" mtimes.
+ cutoff := now.Add(-trimLimit - mtimeInterval)
+ for i := 0; i < 256; i++ {
+ subdir := filepath.Join(c.dir, fmt.Sprintf("%02x", i))
+ c.trimSubdir(subdir, cutoff)
+ }
+
+ // Ignore errors from here: if we don't write the complete timestamp, the
+ // cache will appear older than it is, and we'll trim it again next time.
+ var b bytes.Buffer
+ fmt.Fprintf(&b, "%d", now.Unix())
+ if err := lockedfile.Write(filepath.Join(c.dir, "trim.txt"), &b, 0666); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// trimSubdir trims a single cache subdirectory.
+func (c *DiskCache) trimSubdir(subdir string, cutoff time.Time) {
+ // Read all directory entries from subdir before removing
+ // any files, in case removing files invalidates the file offset
+ // in the directory scan. Also, ignore error from f.Readdirnames,
+ // because we don't care about reporting the error and we still
+ // want to process any entries found before the error.
+ f, err := os.Open(subdir)
+ if err != nil {
+ return
+ }
+ names, _ := f.Readdirnames(-1)
+ f.Close()
+
+ for _, name := range names {
+ // Remove only cache entries (xxxx-a and xxxx-d).
+ if !strings.HasSuffix(name, "-a") && !strings.HasSuffix(name, "-d") {
+ continue
+ }
+ entry := filepath.Join(subdir, name)
+ info, err := os.Stat(entry)
+ if err == nil && info.ModTime().Before(cutoff) {
+ os.Remove(entry)
+ }
+ }
+}
+
+// putIndexEntry adds an entry to the cache recording that executing the action
+// with the given id produces an output with the given output id (hash) and size.
+func (c *DiskCache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify bool) error {
+ // Note: We expect that for one reason or another it may happen
+ // that repeating an action produces a different output hash
+ // (for example, if the output contains a time stamp or temp dir name).
+ // While not ideal, this is also not a correctness problem, so we
+ // don't make a big deal about it. In particular, we leave the action
+ // cache entries writable specifically so that they can be overwritten.
+ //
+ // Setting GODEBUG=gocacheverify=1 does make a big deal:
+ // in verify mode we are double-checking that the cache entries
+ // are entirely reproducible. As just noted, this may be unrealistic
+ // in some cases but the check is also useful for shaking out real bugs.
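+ //
+ // The index entry written below is a single fixed-width text line; assuming
+ // the 32-byte hashes used by this package, it looks like (illustrative):
+ //   "v1 <64-hex action ID> <64-hex output ID> <size, width 20> <unix nanos, width 20>\n"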
+ entry := fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano())
+ if verify && allowVerify {
+ old, err := c.get(id)
+ if err == nil && (old.OutputID != out || old.Size != size) {
+ // panic to show stack trace, so we can see what code is generating this cache entry.
+ msg := fmt.Sprintf("go: internal cache error: cache verify failed: id=%x changed:<<<\n%s\n>>>\nold: %x %d\nnew: %x %d", id, reverseHash(id), old.OutputID, old.Size, out, size)
+ panic(msg)
+ }
+ }
+ file := c.fileName(id, "a")
+
+ // Copy file to cache directory.
+ mode := os.O_WRONLY | os.O_CREATE
+ f, err := os.OpenFile(file, mode, 0666)
+ if err != nil {
+ return err
+ }
+ _, err = f.WriteString(entry)
+ if err == nil {
+ // Truncate the file only *after* writing it.
+ // (This should be a no-op, but truncate just in case of previous corruption.)
+ //
+ // This differs from os.WriteFile, which truncates to 0 *before* writing
+ // via os.O_TRUNC. Truncating only after writing ensures that a second write
+ // of the same content to the same file is idempotent, and does not — even
+ // temporarily! — undo the effect of the first write.
+ err = f.Truncate(int64(len(entry)))
+ }
+ if closeErr := f.Close(); err == nil {
+ err = closeErr
+ }
+ if err != nil {
+ // TODO(bcmills): This Remove potentially races with another go command writing to file.
+ // Can we eliminate it?
+ os.Remove(file)
+ return err
+ }
+ err = os.Chtimes(file, c.now(), c.now()) // mainly for tests
+ if err != nil {
+ return fmt.Errorf("failed to change time of file %s: %w", file, err)
+ }
+
+ return nil
+}
+
+// noVerifyReadSeeker is an io.ReadSeeker wrapper sentinel type
+// that says that Cache.Put should skip the verify check
+// (from GODEBUG=gocacheverify=1).
+type noVerifyReadSeeker struct {
+ io.ReadSeeker
+}
+
+// Put stores the given output in the cache as the output for the action ID.
+// It may read file twice. The content of file must not change between the two passes.
+func (c *DiskCache) Put(id ActionID, file io.ReadSeeker) (OutputID, int64, error) {
+ wrapper, isNoVerify := file.(noVerifyReadSeeker)
+ if isNoVerify {
+ file = wrapper.ReadSeeker
+ }
+ return c.put(id, file, !isNoVerify)
+}
+
+// PutNoVerify is like Put but disables the verify check
+// when GODEBUG=gocacheverify=1 is set.
+// It is meant for data that is OK to cache but that we expect to vary slightly from run to run,
+// like test output containing times and the like.
+func PutNoVerify(c Cache, id ActionID, file io.ReadSeeker) (OutputID, int64, error) {
+ return c.Put(id, noVerifyReadSeeker{file})
+}
+
+func (c *DiskCache) put(id ActionID, file io.ReadSeeker, allowVerify bool) (OutputID, int64, error) {
+ // Compute output ID.
+ h := sha256.New()
+ if _, err := file.Seek(0, 0); err != nil {
+ return OutputID{}, 0, err
+ }
+ size, err := io.Copy(h, file)
+ if err != nil {
+ return OutputID{}, 0, err
+ }
+ var out OutputID
+ h.Sum(out[:0])
+
+ // Copy to cached output file (if not already present).
+ if err := c.copyFile(file, out, size); err != nil {
+ return out, size, err
+ }
+
+ // Add to cache index.
+ return out, size, c.putIndexEntry(id, out, size, allowVerify)
+}
+
+// PutBytes stores the given bytes in the cache as the output for the action ID.
+func PutBytes(c Cache, id ActionID, data []byte) error {
+ _, _, err := c.Put(id, bytes.NewReader(data))
+ return err
+}
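+
+// A typical round trip through the helpers above (illustrative sketch,
+// assuming ActionID is the package's 32-byte hash array):
+//   id := ActionID(sha256.Sum256([]byte("my-action-key")))
+//   _ = PutBytes(c, id, []byte("payload"))
+//   data, _, _ := GetBytes(c, id) // data == []byte("payload") until Trim expires the entry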
+
+// copyFile copies file into the cache, expecting it to have the given
+// output ID and size, if that file is not present already.
+func (c *DiskCache) copyFile(file io.ReadSeeker, out OutputID, size int64) error {
+ name := c.fileName(out, "d")
+ info, err := os.Stat(name)
+ if err == nil && info.Size() == size {
+ // Check hash.
+ if f, err := os.Open(name); err == nil {
+ h := sha256.New()
+ _, copyErr := io.Copy(h, f)
+ if copyErr != nil {
+ return fmt.Errorf("failed to copy to sha256: %w", copyErr)
+ }
+
+ f.Close()
+ var out2 OutputID
+ h.Sum(out2[:0])
+ if out == out2 {
+ return nil
+ }
+ }
+ // Hash did not match. Fall through and rewrite file.
+ }
+
+ // Copy file to cache directory.
+ mode := os.O_RDWR | os.O_CREATE
+ if err == nil && info.Size() > size { // shouldn't happen but fix in case
+ mode |= os.O_TRUNC
+ }
+ f, err := os.OpenFile(name, mode, 0666)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+ if size == 0 {
+ // File now exists with correct size.
+ // Only one possible zero-length file, so contents are OK too.
+ // Early return here makes sure there's a "last byte" for code below.
+ return nil
+ }
+
+ // From here on, if any of the I/O writing the file fails,
+ // we make a best-effort attempt to truncate the file f
+ // before returning, to avoid leaving bad bytes in the file.
+
+ // Copy file to f, but also into h to double-check hash.
+ if _, err := file.Seek(0, 0); err != nil {
+ f.Truncate(0)
+ return err
+ }
+ h := sha256.New()
+ w := io.MultiWriter(f, h)
+ if _, err := io.CopyN(w, file, size-1); err != nil {
+ f.Truncate(0)
+ return err
+ }
+ // Check last byte before writing it; writing it will make the size match
+ // what other processes expect to find and might cause them to start
+ // using the file.
+ buf := make([]byte, 1)
+ if _, err := file.Read(buf); err != nil {
+ f.Truncate(0)
+ return err
+ }
+ n, wErr := h.Write(buf)
+ if n != len(buf) {
+ return fmt.Errorf("wrote to hash %d/%d bytes with error %w", n, len(buf), wErr)
+ }
+
+ sum := h.Sum(nil)
+ if !bytes.Equal(sum, out[:]) {
+ f.Truncate(0)
+ return fmt.Errorf("file content changed underfoot")
+ }
+
+ // Commit cache file entry.
+ if _, err := f.Write(buf); err != nil {
+ f.Truncate(0)
+ return err
+ }
+ if err := f.Close(); err != nil {
+ // Data might not have been written,
+ // but file may look like it is the right size.
+ // To be extra careful, remove cached file.
+ os.Remove(name)
+ return err
+ }
+ err = os.Chtimes(name, c.now(), c.now()) // mainly for tests
+ if err != nil {
+ return fmt.Errorf("failed to change time of file %s: %w", name, err)
+ }
+
+ return nil
+}
+
+// FuzzDir returns a subdirectory within the cache for storing fuzzing data.
+// The subdirectory may not exist.
+//
+// This directory is managed by the internal/fuzz package. Files in this
+// directory aren't removed by the 'go clean -cache' command or by Trim.
+// They may be removed with 'go clean -fuzzcache'.
+//
+// TODO(#48526): make Trim remove unused files from this directory.
+func (c *DiskCache) FuzzDir() string {
+ return filepath.Join(c.dir, "fuzz")
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache_gcil.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache_gcil.go
new file mode 100644
index 0000000000..b4f07738e6
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache_gcil.go
@@ -0,0 +1,12 @@
+package cache
+
+import (
+ "errors"
+)
+
+// IsErrMissing reports whether err wraps the internal entryNotFoundError.
+// TODO(ldez) the handling of this error inside runner_action.go should be refactored.
+func IsErrMissing(err error) bool {
+ var errENF *entryNotFoundError
+ return errors.As(err, &errENF)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/default.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/default.go
similarity index 58%
rename from vendor/github.com/golangci/golangci-lint/internal/cache/default.go
rename to vendor/github.com/golangci/golangci-lint/internal/go/cache/default.go
index 399cc84cf0..7232f1ef3e 100644
--- a/vendor/github.com/golangci/golangci-lint/internal/cache/default.go
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/default.go
@@ -6,23 +6,22 @@ package cache
import (
"fmt"
- "log"
+ base "log"
"os"
"path/filepath"
"sync"
)
-const envGolangciLintCache = "GOLANGCI_LINT_CACHE"
-
// Default returns the default cache to use.
-func Default() (*Cache, error) {
+// It never returns nil.
+func Default() Cache {
defaultOnce.Do(initDefaultCache)
- return defaultCache, defaultDirErr
+ return defaultCache
}
var (
defaultOnce sync.Once
- defaultCache *Cache
+ defaultCache Cache
)
// cacheREADME is a message stored in a README in the cache directory.
@@ -34,32 +33,46 @@ const cacheREADME = `This directory holds cached build artifacts from golangci-l
// initDefaultCache does the work of finding the default cache
// the first time Default is called.
func initDefaultCache() {
- dir := DefaultDir()
+ dir, _ := DefaultDir()
+ if dir == "off" {
+ if defaultDirErr != nil {
+ base.Fatalf("build cache is required, but could not be located: %v", defaultDirErr)
+ }
+ base.Fatalf("build cache is disabled by %s=off, but required", envGolangciLintCache)
+ }
if err := os.MkdirAll(dir, 0744); err != nil {
- log.Fatalf("failed to initialize build cache at %s: %s\n", dir, err)
+ base.Fatalf("failed to initialize build cache at %s: %s\n", dir, err)
}
if _, err := os.Stat(filepath.Join(dir, "README")); err != nil {
// Best effort.
if wErr := os.WriteFile(filepath.Join(dir, "README"), []byte(cacheREADME), 0666); wErr != nil {
- log.Fatalf("Failed to write README file to cache dir %s: %s", dir, err)
+ base.Fatalf("Failed to write README file to cache dir %s: %s", dir, err)
}
}
- c, err := Open(dir)
+ diskCache, err := Open(dir)
if err != nil {
- log.Fatalf("failed to initialize build cache at %s: %s\n", dir, err)
+ base.Fatalf("failed to initialize build cache at %s: %s\n", dir, err)
+ }
+
+ if v := os.Getenv(envGolangciLintCacheProg); v != "" {
+ defaultCache = startCacheProg(v, diskCache)
+ } else {
+ defaultCache = diskCache
}
- defaultCache = c
}
var (
- defaultDirOnce sync.Once
- defaultDir string
- defaultDirErr error
+ defaultDirOnce sync.Once
+ defaultDir string
+ defaultDirChanged bool // effective value differs from $GOLANGCI_LINT_CACHE
+ defaultDirErr error
)
// DefaultDir returns the effective GOLANGCI_LINT_CACHE setting.
-func DefaultDir() string {
+// It returns "off" if the cache is disabled,
+// and reports whether the effective value differs from GOLANGCI_LINT_CACHE.
+func DefaultDir() (string, bool) {
// Save the result of the first call to DefaultDir for later use in
// initDefaultCache. cmd/go/main.go explicitly sets GOCACHE so that
// subprocesses will inherit it, but that means initDefaultCache can't
@@ -67,10 +80,12 @@ func DefaultDir() string {
defaultDirOnce.Do(func() {
defaultDir = os.Getenv(envGolangciLintCache)
- if filepath.IsAbs(defaultDir) {
- return
- }
if defaultDir != "" {
+ defaultDirChanged = true
+ if filepath.IsAbs(defaultDir) || defaultDir == "off" {
+ return
+ }
+ defaultDir = "off"
defaultDirErr = fmt.Errorf("%s is not an absolute path", envGolangciLintCache)
return
}
@@ -78,11 +93,13 @@ func DefaultDir() string {
// Compute default location.
dir, err := os.UserCacheDir()
if err != nil {
+ defaultDir = "off"
+ defaultDirChanged = true
defaultDirErr = fmt.Errorf("%s is not defined and %w", envGolangciLintCache, err)
return
}
defaultDir = filepath.Join(dir, "golangci-lint")
})
- return defaultDir
+ return defaultDir, defaultDirChanged
}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/default_gcil.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/default_gcil.go
new file mode 100644
index 0000000000..a801f67f47
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/default_gcil.go
@@ -0,0 +1,6 @@
+package cache
+
+const (
+ envGolangciLintCache = "GOLANGCI_LINT_CACHE"
+ envGolangciLintCacheProg = "GOLANGCI_LINT_CACHEPROG"
+)
diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/hash.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/hash.go
similarity index 91%
rename from vendor/github.com/golangci/golangci-lint/internal/cache/hash.go
rename to vendor/github.com/golangci/golangci-lint/internal/go/cache/hash.go
index 4ce79e325b..d5169dd4c4 100644
--- a/vendor/github.com/golangci/golangci-lint/internal/cache/hash.go
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/hash.go
@@ -11,6 +11,7 @@ import (
"hash"
"io"
"os"
+ "strings"
"sync"
)
@@ -36,22 +37,26 @@ type Hash struct {
// which are still addressed by unsalted SHA256.
var hashSalt []byte
-func SetSalt(b []byte) {
- hashSalt = b
+// stripExperiment strips any GOEXPERIMENT configuration from the Go
+// version string.
+func stripExperiment(version string) string {
+ if i := strings.Index(version, " X:"); i >= 0 {
+ return version[:i]
+ }
+ return version
}
// Subkey returns an action ID corresponding to mixing a parent
// action ID with a string description of the subkey.
func Subkey(parent ActionID, desc string) (ActionID, error) {
h := sha256.New()
- const subkeyPrefix = "subkey:"
- if n, err := h.Write([]byte(subkeyPrefix)); n != len(subkeyPrefix) {
- return ActionID{}, fmt.Errorf("wrote %d/%d bytes of subkey prefix with error %s", n, len(subkeyPrefix), err)
- }
- if n, err := h.Write(parent[:]); n != len(parent) {
+ h.Write([]byte("subkey:"))
+ n, err := h.Write(parent[:])
+ if n != len(parent) {
return ActionID{}, fmt.Errorf("wrote %d/%d bytes of parent with error %s", n, len(parent), err)
}
- if n, err := h.Write([]byte(desc)); n != len(desc) {
+ n, err = h.Write([]byte(desc))
+ if n != len(desc) {
return ActionID{}, fmt.Errorf("wrote %d/%d bytes of desc with error %s", n, len(desc), err)
}
@@ -75,7 +80,8 @@ func NewHash(name string) (*Hash, error) {
if debugHash {
fmt.Fprintf(os.Stderr, "HASH[%s]\n", h.name)
}
- if n, err := h.Write(hashSalt); n != len(hashSalt) {
+ n, err := h.Write(hashSalt)
+ if n != len(hashSalt) {
return nil, fmt.Errorf("wrote %d/%d bytes of hash salt with error %s", n, len(hashSalt), err)
}
if verify {
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/hash_gcil.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/hash_gcil.go
new file mode 100644
index 0000000000..08749036bd
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/hash_gcil.go
@@ -0,0 +1,5 @@
+package cache
+
+func SetSalt(b []byte) {
+ hashSalt = b
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/prog.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/prog.go
new file mode 100644
index 0000000000..a93740a3cf
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/prog.go
@@ -0,0 +1,428 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cache
+
+import (
+ "bufio"
+ "context"
+ "crypto/sha256"
+ "encoding/base64"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "log"
+ base "log"
+ "os"
+ "os/exec"
+ "sync"
+ "sync/atomic"
+ "time"
+
+ "github.com/golangci/golangci-lint/internal/go/quoted"
+)
+
+// ProgCache implements Cache via JSON messages over stdin/stdout to a child
+// helper process which can then implement whatever caching policy/mechanism it
+// wants.
+//
+// See https://github.com/golang/go/issues/59719
+type ProgCache struct {
+ cmd *exec.Cmd
+ stdout io.ReadCloser // from the child process
+ stdin io.WriteCloser // to the child process
+ bw *bufio.Writer // to stdin
+ jenc *json.Encoder // to bw
+
+ // can are the commands that the child process declared that it supports.
+ // This is effectively the versioning mechanism.
+ can map[ProgCmd]bool
+
+ // fuzzDirCache is another Cache implementation to use for the FuzzDir
+ // method. In practice this is the default GOCACHE disk-based
+ // implementation.
+ //
+ // TODO(bradfitz): maybe this isn't ideal. But we'd need to extend the Cache
+ // interface and the fuzzing callers to be less disk-y to do more here.
+ fuzzDirCache Cache
+
+ closing atomic.Bool
+ ctx context.Context // valid until Close via ctxClose
+ ctxCancel context.CancelFunc // called on Close
+ readLoopDone chan struct{} // closed when readLoop returns
+
+ mu sync.Mutex // guards following fields
+ nextID int64
+ inFlight map[int64]chan<- *ProgResponse
+ outputFile map[OutputID]string // object => abs path on disk
+
+ // writeMu serializes writing to the child process.
+ // It must never be held at the same time as mu.
+ writeMu sync.Mutex
+}
+
+// ProgCmd is a command that can be issued to a child process.
+//
+// If the interface needs to grow, we can add new commands or new versioned
+// commands like "get2".
+type ProgCmd string
+
+const (
+ cmdGet = ProgCmd("get")
+ cmdPut = ProgCmd("put")
+ cmdClose = ProgCmd("close")
+)
+
+// ProgRequest is the JSON-encoded message that's sent from cmd/go to
+// the GOLANGCI_LINT_CACHEPROG child process over stdin. Each JSON object is on its
+// own line. A ProgRequest of Type "put" with BodySize > 0 will be followed
+// by a line containing a base64-encoded JSON string literal of the body.
+type ProgRequest struct {
+ // ID is a unique number per process across all requests.
+ // It must be echoed in the ProgResponse from the child.
+ ID int64
+
+ // Command is the type of request.
+ // The cmd/go tool will only send commands that were declared
+ // as supported by the child.
+ Command ProgCmd
+
+ // ActionID is non-nil for get and puts.
+ ActionID []byte `json:",omitempty"` // or nil if not used
+
+ // ObjectID is set for Type "put" and "output-file".
+ ObjectID []byte `json:",omitempty"` // or nil if not used
+
+ // Body is the body for "put" requests. It's sent after the JSON object
+ // as a base64-encoded JSON string when BodySize is non-zero.
+ // It's sent as a separate JSON value instead of being a struct field
+ // sent in this JSON object so large values can be streamed in both directions.
+ // The base64 string body of a ProgRequest will always be written
+ // immediately after the JSON object and a newline.
+ Body io.Reader `json:"-"`
+
+ // BodySize is the number of bytes of Body. If zero, the body isn't written.
+ BodySize int64 `json:",omitempty"`
+}
+
+// ProgResponse is the JSON response from the child process to cmd/go.
+//
+// With the exception of the first protocol message that the child writes to its
+// stdout with ID==0 and KnownCommands populated, these are only sent in
+// response to a ProgRequest from cmd/go.
+//
+// ProgResponses can be sent in any order. The ID must match the request they're
+// replying to.
+type ProgResponse struct {
+ ID int64 // that corresponds to ProgRequest; they can be answered out of order
+ Err string `json:",omitempty"` // if non-empty, the error
+
+ // KnownCommands is included in the first message that the cache helper program
+ // writes to stdout on startup (with ID==0). It includes the
+ // ProgRequest.Command types that are supported by the program.
+ //
+ // This lets us extend the protocol gracefully over time (adding "get2",
+ // etc), or fail gracefully when needed. It also lets us verify the program
+ // wants to be a cache helper.
+ KnownCommands []ProgCmd `json:",omitempty"`
+
+ // For Get requests.
+
+ Miss bool `json:",omitempty"` // cache miss
+ OutputID []byte `json:",omitempty"`
+ Size int64 `json:",omitempty"` // in bytes
+ Time *time.Time `json:",omitempty"` // an Entry.Time; when the object was added to the cache
+
+ // DiskPath is the absolute path on disk of the ObjectID corresponding
+ // a "get" request's ActionID (on cache hit) or a "put" request's
+ // provided ObjectID.
+ DiskPath string `json:",omitempty"`
+}
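+
+// Putting the two message types together, a "put" exchange on the wire looks
+// roughly like this (illustrative; IDs, hashes and paths are shortened, and
+// []byte fields are base64-encoded by encoding/json):
+//
+//   -> {"ID":1,"Command":"put","ActionID":"qvG...","ObjectID":"LCa...","BodySize":5}
+//   -> "aGVsbG8="
+//   <- {"ID":1,"DiskPath":"/cache/objects/2c/2cf24d..."}
+//
+// A "get" miss is simply {"ID":2,"Miss":true} in response to a "get" request.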
+
+// startCacheProg starts the prog binary (with optional space-separated flags)
+// and returns a Cache implementation that talks to it.
+//
+// It blocks a few seconds to wait for the child process to successfully start
+// and advertise its capabilities.
+func startCacheProg(progAndArgs string, fuzzDirCache Cache) Cache {
+ if fuzzDirCache == nil {
+ panic("missing fuzzDirCache")
+ }
+ args, err := quoted.Split(progAndArgs)
+ if err != nil {
+ base.Fatalf("%s args: %v", envGolangciLintCacheProg, err)
+ }
+ var prog string
+ if len(args) > 0 {
+ prog = args[0]
+ args = args[1:]
+ }
+
+ ctx, ctxCancel := context.WithCancel(context.Background())
+
+ cmd := exec.CommandContext(ctx, prog, args...)
+ out, err := cmd.StdoutPipe()
+ if err != nil {
+ base.Fatalf("StdoutPipe to %s: %v", envGolangciLintCacheProg, err)
+ }
+ in, err := cmd.StdinPipe()
+ if err != nil {
+ base.Fatalf("StdinPipe to %s: %v", envGolangciLintCacheProg, err)
+ }
+ cmd.Stderr = os.Stderr
+ cmd.Cancel = in.Close
+
+ if err := cmd.Start(); err != nil {
+ base.Fatalf("error starting %s program %q: %v", envGolangciLintCacheProg, prog, err)
+ }
+
+ pc := &ProgCache{
+ ctx: ctx,
+ ctxCancel: ctxCancel,
+ fuzzDirCache: fuzzDirCache,
+ cmd: cmd,
+ stdout: out,
+ stdin: in,
+ bw: bufio.NewWriter(in),
+ inFlight: make(map[int64]chan<- *ProgResponse),
+ outputFile: make(map[OutputID]string),
+ readLoopDone: make(chan struct{}),
+ }
+
+ // Register our interest in the initial protocol message from the child to
+ // us, saying what it can do.
+ capResc := make(chan *ProgResponse, 1)
+ pc.inFlight[0] = capResc
+
+ pc.jenc = json.NewEncoder(pc.bw)
+ go pc.readLoop(pc.readLoopDone)
+
+ // Give the child process a few seconds to report its capabilities. This
+ // should be instant and not require any slow work by the program.
+ timer := time.NewTicker(5 * time.Second)
+ defer timer.Stop()
+ for {
+ select {
+ case <-timer.C:
+ log.Printf("# still waiting for %s %v ...", envGolangciLintCacheProg, prog)
+ case capRes := <-capResc:
+ can := map[ProgCmd]bool{}
+ for _, cmd := range capRes.KnownCommands {
+ can[cmd] = true
+ }
+ if len(can) == 0 {
+ base.Fatalf("%s %v declared no supported commands", envGolangciLintCacheProg, prog)
+ }
+ pc.can = can
+ return pc
+ }
+ }
+}
+
+func (c *ProgCache) readLoop(readLoopDone chan<- struct{}) {
+ defer close(readLoopDone)
+ jd := json.NewDecoder(c.stdout)
+ for {
+ res := new(ProgResponse)
+ if err := jd.Decode(res); err != nil {
+ if c.closing.Load() {
+ return // quietly
+ }
+ if err == io.EOF {
+ c.mu.Lock()
+ inFlight := len(c.inFlight)
+ c.mu.Unlock()
+ base.Fatalf("%s exited pre-Close with %v pending requests", envGolangciLintCacheProg, inFlight)
+ }
+ base.Fatalf("error reading JSON from %s: %v", envGolangciLintCacheProg, err)
+ }
+ c.mu.Lock()
+ ch, ok := c.inFlight[res.ID]
+ delete(c.inFlight, res.ID)
+ c.mu.Unlock()
+ if ok {
+ ch <- res
+ } else {
+ base.Fatalf("%s sent response for unknown request ID %v", envGolangciLintCacheProg, res.ID)
+ }
+ }
+}
+
+func (c *ProgCache) send(ctx context.Context, req *ProgRequest) (*ProgResponse, error) {
+ resc := make(chan *ProgResponse, 1)
+ if err := c.writeToChild(req, resc); err != nil {
+ return nil, err
+ }
+ select {
+ case res := <-resc:
+ if res.Err != "" {
+ return nil, errors.New(res.Err)
+ }
+ return res, nil
+ case <-ctx.Done():
+ return nil, ctx.Err()
+ }
+}
+
+func (c *ProgCache) writeToChild(req *ProgRequest, resc chan<- *ProgResponse) (err error) {
+ c.mu.Lock()
+ c.nextID++
+ req.ID = c.nextID
+ c.inFlight[req.ID] = resc
+ c.mu.Unlock()
+
+ defer func() {
+ if err != nil {
+ c.mu.Lock()
+ delete(c.inFlight, req.ID)
+ c.mu.Unlock()
+ }
+ }()
+
+ c.writeMu.Lock()
+ defer c.writeMu.Unlock()
+
+ if err := c.jenc.Encode(req); err != nil {
+ return err
+ }
+ if err := c.bw.WriteByte('\n'); err != nil {
+ return err
+ }
+ if req.Body != nil && req.BodySize > 0 {
+ if err := c.bw.WriteByte('"'); err != nil {
+ return err
+ }
+ e := base64.NewEncoder(base64.StdEncoding, c.bw)
+ wrote, err := io.Copy(e, req.Body)
+ if err != nil {
+ return err
+ }
+ if err := e.Close(); err != nil {
+ return err
+ }
+ if wrote != req.BodySize {
+ return fmt.Errorf("short write writing body to %s for action %x, object %x: wrote %v; expected %v",
+ envGolangciLintCacheProg, req.ActionID, req.ObjectID, wrote, req.BodySize)
+ }
+ if _, err := c.bw.WriteString("\"\n"); err != nil {
+ return err
+ }
+ }
+ if err := c.bw.Flush(); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (c *ProgCache) Get(a ActionID) (Entry, error) {
+ if !c.can[cmdGet] {
+ // They can't do a "get". Maybe they're a write-only cache.
+ //
+ // TODO(bradfitz,bcmills): figure out the proper error type here. Maybe
+ // errors.ErrUnsupported? Is entryNotFoundError even appropriate? There
+ // might be places where we rely on the fact that a recent Put can be
+ // read through a corresponding Get. Audit callers and check, and document
+ // error types on the Cache interface.
+ return Entry{}, &entryNotFoundError{}
+ }
+ res, err := c.send(c.ctx, &ProgRequest{
+ Command: cmdGet,
+ ActionID: a[:],
+ })
+ if err != nil {
+ return Entry{}, err // TODO(bradfitz): or entryNotFoundError? Audit callers.
+ }
+ if res.Miss {
+ return Entry{}, &entryNotFoundError{}
+ }
+ e := Entry{
+ Size: res.Size,
+ }
+ if res.Time != nil {
+ e.Time = *res.Time
+ } else {
+ e.Time = time.Now()
+ }
+ if res.DiskPath == "" {
+ return Entry{}, &entryNotFoundError{fmt.Errorf("%s didn't populate DiskPath on get hit", envGolangciLintCacheProg)}
+ }
+ if copy(e.OutputID[:], res.OutputID) != len(res.OutputID) {
+ return Entry{}, &entryNotFoundError{errors.New("incomplete ProgResponse OutputID")}
+ }
+ c.noteOutputFile(e.OutputID, res.DiskPath)
+ return e, nil
+}
+
+func (c *ProgCache) noteOutputFile(o OutputID, diskPath string) {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ c.outputFile[o] = diskPath
+}
+
+func (c *ProgCache) OutputFile(o OutputID) string {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ return c.outputFile[o]
+}
+
+func (c *ProgCache) Put(a ActionID, file io.ReadSeeker) (_ OutputID, size int64, _ error) {
+ // Compute output ID.
+ h := sha256.New()
+ if _, err := file.Seek(0, 0); err != nil {
+ return OutputID{}, 0, err
+ }
+ size, err := io.Copy(h, file)
+ if err != nil {
+ return OutputID{}, 0, err
+ }
+ var out OutputID
+ h.Sum(out[:0])
+
+ if _, err := file.Seek(0, 0); err != nil {
+ return OutputID{}, 0, err
+ }
+
+ if !c.can[cmdPut] {
+ // Child is a read-only cache. Do nothing.
+ return out, size, nil
+ }
+
+ res, err := c.send(c.ctx, &ProgRequest{
+ Command: cmdPut,
+ ActionID: a[:],
+ ObjectID: out[:],
+ Body: file,
+ BodySize: size,
+ })
+ if err != nil {
+ return OutputID{}, 0, err
+ }
+ if res.DiskPath == "" {
+ return OutputID{}, 0, fmt.Errorf("%s didn't return DiskPath in put response", envGolangciLintCacheProg)
+ }
+ c.noteOutputFile(out, res.DiskPath)
+ return out, size, err
+}
+
+func (c *ProgCache) Close() error {
+ c.closing.Store(true)
+ var err error
+
+ // First write a "close" message to the child so it can exit nicely
+ // and clean up if it wants. Only after that exchange do we cancel
+ // the context that kills the process.
+ if c.can[cmdClose] {
+ _, err = c.send(c.ctx, &ProgRequest{Command: cmdClose})
+ }
+ c.ctxCancel()
+ <-c.readLoopDone
+ return err
+}
+
+func (c *ProgCache) FuzzDir() string {
+ // TODO(bradfitz): figure out what to do here. For now just use the
+ // disk-based default.
+ return c.fuzzDirCache.FuzzDir()
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/readme.md b/vendor/github.com/golangci/golangci-lint/internal/go/cache/readme.md
new file mode 100644
index 0000000000..5be600e425
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/readme.md
@@ -0,0 +1,51 @@
+# cache
+
+Extracted from `go/src/cmd/go/internal/cache/`.
+
+The main modifications are:
+- Error handling:
+  - Some methods return errors.
+  - Some errors are returned instead of being ignored.
+- The names of the env vars (see the example below):
+ - `GOCACHE` -> `GOLANGCI_LINT_CACHE`
+ - `GOCACHEPROG` -> `GOLANGCI_LINT_CACHEPROG`
+
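+For example (illustrative), both are plain environment variables, so the cache
+location (which must be an absolute path) and an external cache program can be
+selected per invocation:
+`GOLANGCI_LINT_CACHE=/abs/path GOLANGCI_LINT_CACHEPROG="my-cacher --flag" golangci-lint run`.
+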
+## History
+
+- https://github.com/golangci/golangci-lint/pull/5100
+ - Move package from `internal/cache` to `internal/go/cache`
+- https://github.com/golangci/golangci-lint/pull/5098
+ - sync with go1.23.2
+ - sync with go1.22.8
+ - sync with go1.21.13
+ - sync with go1.20.14
+ - sync with go1.19.13
+ - sync with go1.18.10
+ - sync with go1.17.13
+ - sync with go1.16.15
+ - sync with go1.15.15
+ - sync with go1.14.15
+
+## Previous History
+
+Based on the initial PR/commit, the code was a mix between go1.12 and go1.13:
+- cache.go (go1.13)
+- cache_test.go (go1.12?)
+- default.go (go1.12?)
+- hash.go (go1.13 and go1.12 are identical)
+- hash_test.go (go1.12?)
+
+Adapted for golangci-lint:
+- https://github.com/golangci/golangci-lint/pull/699: initial code (contains modifications of the files)
+- https://github.com/golangci/golangci-lint/pull/779: just a nolint (`cache.go`)
+- https://github.com/golangci/golangci-lint/pull/788: only directory permissions changes (0777 -> 0744) (`cache.go`, `cache_test.go`, `default.go`)
+- https://github.com/golangci/golangci-lint/pull/808: mainly related to logs and errors (`cache.go`, `default.go`, `hash.go`, `hash_test.go`)
+- https://github.com/golangci/golangci-lint/pull/1063: `ioutil` -> `robustio` (`cache.go`)
+- https://github.com/golangci/golangci-lint/pull/1070: add `t.Parallel()` inside `cache_test.go`
+- https://github.com/golangci/golangci-lint/pull/1162: errors inside `cache.go`
+- https://github.com/golangci/golangci-lint/pull/2318: `ioutil` -> `os` (`cache.go`, `cache_test.go`, `default.go`, `hash_test.go`)
+- https://github.com/golangci/golangci-lint/pull/2352: Go doc typos
+- https://github.com/golangci/golangci-lint/pull/3012: errors inside `cache.go` (`cache.go`, `default.go`)
+- https://github.com/golangci/golangci-lint/pull/3196: constant for `GOLANGCI_LINT_CACHE` (`cache.go`)
+- https://github.com/golangci/golangci-lint/pull/3204: add this file and `%w` in `fmt.Errorf` (`cache.go`)
+- https://github.com/golangci/golangci-lint/pull/3604: remove `github.com/pkg/errors` (`cache.go`)
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap.go b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap.go
new file mode 100644
index 0000000000..fcbd3e08c1
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap.go
@@ -0,0 +1,31 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This package is a lightly modified version of the mmap code
+// in github.com/google/codesearch/index.
+
+// The mmap package provides an abstraction for memory mapping files
+// on different platforms.
+package mmap
+
+import (
+ "os"
+)
+
+// Data is mmap'ed read-only data from a file.
+// The backing file is never closed, so Data
+// remains valid for the lifetime of the process.
+type Data struct {
+ f *os.File
+ Data []byte
+}
+
+// Mmap maps the given file into memory.
+func Mmap(file string) (Data, error) {
+ f, err := os.Open(file)
+ if err != nil {
+ return Data{}, err
+ }
+ return mmapFile(f)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_other.go b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_other.go
new file mode 100644
index 0000000000..4d2844fc37
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_other.go
@@ -0,0 +1,21 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build (js && wasm) || wasip1 || plan9
+
+package mmap
+
+import (
+ "io"
+ "os"
+)
+
+// mmapFile on other systems doesn't mmap the file. It just reads everything.
+func mmapFile(f *os.File) (Data, error) {
+ b, err := io.ReadAll(f)
+ if err != nil {
+ return Data{}, err
+ }
+ return Data{f, b}, nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_unix.go b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_unix.go
new file mode 100644
index 0000000000..5dce872368
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_unix.go
@@ -0,0 +1,36 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build unix
+
+package mmap
+
+import (
+ "fmt"
+ "io/fs"
+ "os"
+ "syscall"
+)
+
+func mmapFile(f *os.File) (Data, error) {
+ st, err := f.Stat()
+ if err != nil {
+ return Data{}, err
+ }
+ size := st.Size()
+ pagesize := int64(os.Getpagesize())
+ if int64(int(size+(pagesize-1))) != size+(pagesize-1) {
+ return Data{}, fmt.Errorf("%s: too large for mmap", f.Name())
+ }
+ n := int(size)
+ if n == 0 {
+ return Data{f, nil}, nil
+ }
+ mmapLength := int(((size + pagesize - 1) / pagesize) * pagesize) // round up to page size
+ data, err := syscall.Mmap(int(f.Fd()), 0, mmapLength, syscall.PROT_READ, syscall.MAP_SHARED)
+ if err != nil {
+ return Data{}, &fs.PathError{Op: "mmap", Path: f.Name(), Err: err}
+ }
+ return Data{f, data[:n]}, nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_windows.go b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_windows.go
new file mode 100644
index 0000000000..479ee30754
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_windows.go
@@ -0,0 +1,41 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package mmap
+
+import (
+ "fmt"
+ "os"
+ "syscall"
+ "unsafe"
+
+ "golang.org/x/sys/windows"
+)
+
+func mmapFile(f *os.File) (Data, error) {
+ st, err := f.Stat()
+ if err != nil {
+ return Data{}, err
+ }
+ size := st.Size()
+ if size == 0 {
+ return Data{f, nil}, nil
+ }
+ h, err := syscall.CreateFileMapping(syscall.Handle(f.Fd()), nil, syscall.PAGE_READONLY, 0, 0, nil)
+ if err != nil {
+ return Data{}, fmt.Errorf("CreateFileMapping %s: %w", f.Name(), err)
+ }
+
+ addr, err := syscall.MapViewOfFile(h, syscall.FILE_MAP_READ, 0, 0, 0)
+ if err != nil {
+ return Data{}, fmt.Errorf("MapViewOfFile %s: %w", f.Name(), err)
+ }
+ var info windows.MemoryBasicInformation
+ err = windows.VirtualQuery(addr, &info, unsafe.Sizeof(info))
+ if err != nil {
+ return Data{}, fmt.Errorf("VirtualQuery %s: %w", f.Name(), err)
+ }
+ data := unsafe.Slice((*byte)(unsafe.Pointer(addr)), int(info.RegionSize))
+ return Data{f, data}, nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/mmap/readme.md b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/readme.md
new file mode 100644
index 0000000000..f68aef097c
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/readme.md
@@ -0,0 +1,15 @@
+# mmap
+
+Extracted from `go/src/cmd/go/internal/mmap/` (related to `cache`).
+This is just a copy of the Go code without any changes.
+
+## History
+
+- https://github.com/golangci/golangci-lint/pull/5100
+ - Move package from `internal/mmap` to `internal/go/mmap`
+- https://github.com/golangci/golangci-lint/pull/5098
+ - sync with go1.23.2
+ - sync with go1.22.8
+ - sync with go1.21.13
+ - sync with go1.20.14
+ - sync with go1.19.13
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/quoted/quoted.go b/vendor/github.com/golangci/golangci-lint/internal/go/quoted/quoted.go
new file mode 100644
index 0000000000..a812275073
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/quoted/quoted.go
@@ -0,0 +1,129 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package quoted provides string manipulation utilities.
+package quoted
+
+import (
+ "flag"
+ "fmt"
+ "strings"
+ "unicode"
+)
+
+func isSpaceByte(c byte) bool {
+ return c == ' ' || c == '\t' || c == '\n' || c == '\r'
+}
+
+// Split splits s into a list of fields,
+// allowing single or double quotes around elements.
+// There is no unescaping or other processing within
+// quoted fields.
+//
+// Keep in sync with cmd/dist/quoted.go
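+//
+// For example (illustrative), Split(`a 'b c' "d e"`) returns
+// ["a", "b c", "d e"], while an unterminated quote such as in `x 'y`
+// results in an error.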
+func Split(s string) ([]string, error) {
+ // Split fields allowing '' or "" around elements.
+ // Quotes further inside the string do not count.
+ var f []string
+ for len(s) > 0 {
+ for len(s) > 0 && isSpaceByte(s[0]) {
+ s = s[1:]
+ }
+ if len(s) == 0 {
+ break
+ }
+ // Accepted quoted string. No unescaping inside.
+ if s[0] == '"' || s[0] == '\'' {
+ quote := s[0]
+ s = s[1:]
+ i := 0
+ for i < len(s) && s[i] != quote {
+ i++
+ }
+ if i >= len(s) {
+ return nil, fmt.Errorf("unterminated %c string", quote)
+ }
+ f = append(f, s[:i])
+ s = s[i+1:]
+ continue
+ }
+ i := 0
+ for i < len(s) && !isSpaceByte(s[i]) {
+ i++
+ }
+ f = append(f, s[:i])
+ s = s[i:]
+ }
+ return f, nil
+}
+
+// Join joins a list of arguments into a string that can be parsed
+// with Split. Arguments are quoted only if necessary; arguments
+// without spaces or quotes are kept as-is. No argument may contain both
+// single and double quotes.
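+//
+// For example (illustrative), Join([]string{"a", "b c"}) returns `a 'b c'`,
+// and an argument containing both single and double quotes yields an error.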
+func Join(args []string) (string, error) {
+ var buf []byte
+ for i, arg := range args {
+ if i > 0 {
+ buf = append(buf, ' ')
+ }
+ var sawSpace, sawSingleQuote, sawDoubleQuote bool
+ for _, c := range arg {
+ switch {
+ case c > unicode.MaxASCII:
+ continue
+ case isSpaceByte(byte(c)):
+ sawSpace = true
+ case c == '\'':
+ sawSingleQuote = true
+ case c == '"':
+ sawDoubleQuote = true
+ }
+ }
+ switch {
+ case !sawSpace && !sawSingleQuote && !sawDoubleQuote:
+ buf = append(buf, arg...)
+
+ case !sawSingleQuote:
+ buf = append(buf, '\'')
+ buf = append(buf, arg...)
+ buf = append(buf, '\'')
+
+ case !sawDoubleQuote:
+ buf = append(buf, '"')
+ buf = append(buf, arg...)
+ buf = append(buf, '"')
+
+ default:
+ return "", fmt.Errorf("argument %q contains both single and double quotes and cannot be quoted", arg)
+ }
+ }
+ return string(buf), nil
+}
+
+// A Flag parses a list of string arguments encoded with Join.
+// It is useful for flags like cmd/link's -extldflags.
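+//
+// For example (illustrative): after flag.Var(&f, "extldflags", "linker flags"),
+// passing -extldflags="-L /opt/lib -lfoo" sets f to ["-L", "/opt/lib", "-lfoo"].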
+type Flag []string
+
+var _ flag.Value = (*Flag)(nil)
+
+func (f *Flag) Set(v string) error {
+ fs, err := Split(v)
+ if err != nil {
+ return err
+ }
+ *f = fs[:len(fs):len(fs)]
+ return nil
+}
+
+func (f *Flag) String() string {
+ if f == nil {
+ return ""
+ }
+ s, err := Join(*f)
+ if err != nil {
+ return strings.Join(*f, " ")
+ }
+ return s
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/quoted/readme.md b/vendor/github.com/golangci/golangci-lint/internal/go/quoted/readme.md
new file mode 100644
index 0000000000..a5e4c4bb3b
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/quoted/readme.md
@@ -0,0 +1,13 @@
+# quoted
+
+Extracted from `go/src/cmd/internal/quoted/` (related to `cache`).
+This is just a copy of the Go code without any changes.
+
+## History
+
+- https://github.com/golangci/golangci-lint/pull/5100
+ - Move package from `internal/quoted` to `internal/go/quoted`
+- https://github.com/golangci/golangci-lint/pull/5098
+ - sync go1.23.2
+ - sync go1.22.8
+ - sync go1.21.13
diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/readme.md b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/readme.md
similarity index 64%
rename from vendor/github.com/golangci/golangci-lint/internal/robustio/readme.md
rename to vendor/github.com/golangci/golangci-lint/internal/go/robustio/readme.md
index 7c7ba0483a..f4dbc16264 100644
--- a/vendor/github.com/golangci/golangci-lint/internal/robustio/readme.md
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/readme.md
@@ -4,3 +4,8 @@ Extracted from go1.19.1/src/cmd/go/internal/robustio
There is only one modification:
- ERROR_SHARING_VIOLATION extracted from go1.19.1/src/internal/syscall/windows/syscall_windows.go to remove the dependencies to `internal/syscall/windows`
+
+## History
+
+- https://github.com/golangci/golangci-lint/pull/5100
+ - Move package from `internal/robustio` to `internal/go/robustio`
diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio.go b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio.go
similarity index 100%
rename from vendor/github.com/golangci/golangci-lint/internal/robustio/robustio.go
rename to vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio.go
diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_darwin.go b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_darwin.go
similarity index 100%
rename from vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_darwin.go
rename to vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_darwin.go
diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_flaky.go
similarity index 100%
rename from vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go
rename to vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_flaky.go
diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_other.go b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_other.go
similarity index 100%
rename from vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_other.go
rename to vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_other.go
diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_windows.go b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_windows.go
similarity index 100%
rename from vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_windows.go
rename to vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_windows.go
diff --git a/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go b/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go
deleted file mode 100644
index 3b3422eb7a..0000000000
--- a/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go
+++ /dev/null
@@ -1,229 +0,0 @@
-package pkgcache
-
-import (
- "bytes"
- "encoding/gob"
- "encoding/hex"
- "errors"
- "fmt"
- "runtime"
- "sort"
- "sync"
-
- "golang.org/x/tools/go/packages"
-
- "github.com/golangci/golangci-lint/internal/cache"
- "github.com/golangci/golangci-lint/pkg/logutils"
- "github.com/golangci/golangci-lint/pkg/timeutils"
-)
-
-type HashMode int
-
-const (
- HashModeNeedOnlySelf HashMode = iota
- HashModeNeedDirectDeps
- HashModeNeedAllDeps
-)
-
-// Cache is a per-package data cache. A cached data is invalidated when
-// package, or it's dependencies change.
-type Cache struct {
- lowLevelCache *cache.Cache
- pkgHashes sync.Map
- sw *timeutils.Stopwatch
- log logutils.Log // not used now, but may be needed for future debugging purposes
- ioSem chan struct{} // semaphore limiting parallel IO
-}
-
-func NewCache(sw *timeutils.Stopwatch, log logutils.Log) (*Cache, error) {
- c, err := cache.Default()
- if err != nil {
- return nil, err
- }
- return &Cache{
- lowLevelCache: c,
- sw: sw,
- log: log,
- ioSem: make(chan struct{}, runtime.GOMAXPROCS(-1)),
- }, nil
-}
-
-func (c *Cache) Trim() {
- c.sw.TrackStage("trim", func() {
- c.lowLevelCache.Trim()
- })
-}
-
-func (c *Cache) Put(pkg *packages.Package, mode HashMode, key string, data any) error {
- var err error
- buf := &bytes.Buffer{}
- c.sw.TrackStage("gob", func() {
- err = gob.NewEncoder(buf).Encode(data)
- })
- if err != nil {
- return fmt.Errorf("failed to gob encode: %w", err)
- }
-
- var aID cache.ActionID
-
- c.sw.TrackStage("key build", func() {
- aID, err = c.pkgActionID(pkg, mode)
- if err == nil {
- subkey, subkeyErr := cache.Subkey(aID, key)
- if subkeyErr != nil {
- err = fmt.Errorf("failed to build subkey: %w", subkeyErr)
- }
- aID = subkey
- }
- })
- if err != nil {
- return fmt.Errorf("failed to calculate package %s action id: %w", pkg.Name, err)
- }
- c.ioSem <- struct{}{}
- c.sw.TrackStage("cache io", func() {
- err = c.lowLevelCache.PutBytes(aID, buf.Bytes())
- })
- <-c.ioSem
- if err != nil {
- return fmt.Errorf("failed to save data to low-level cache by key %s for package %s: %w", key, pkg.Name, err)
- }
-
- return nil
-}
-
-var ErrMissing = errors.New("missing data")
-
-func (c *Cache) Get(pkg *packages.Package, mode HashMode, key string, data any) error {
- var aID cache.ActionID
- var err error
- c.sw.TrackStage("key build", func() {
- aID, err = c.pkgActionID(pkg, mode)
- if err == nil {
- subkey, subkeyErr := cache.Subkey(aID, key)
- if subkeyErr != nil {
- err = fmt.Errorf("failed to build subkey: %w", subkeyErr)
- }
- aID = subkey
- }
- })
- if err != nil {
- return fmt.Errorf("failed to calculate package %s action id: %w", pkg.Name, err)
- }
-
- var b []byte
- c.ioSem <- struct{}{}
- c.sw.TrackStage("cache io", func() {
- b, _, err = c.lowLevelCache.GetBytes(aID)
- })
- <-c.ioSem
- if err != nil {
- if cache.IsErrMissing(err) {
- return ErrMissing
- }
- return fmt.Errorf("failed to get data from low-level cache by key %s for package %s: %w", key, pkg.Name, err)
- }
-
- c.sw.TrackStage("gob", func() {
- err = gob.NewDecoder(bytes.NewReader(b)).Decode(data)
- })
- if err != nil {
- return fmt.Errorf("failed to gob decode: %w", err)
- }
-
- return nil
-}
-
-func (c *Cache) pkgActionID(pkg *packages.Package, mode HashMode) (cache.ActionID, error) {
- hash, err := c.packageHash(pkg, mode)
- if err != nil {
- return cache.ActionID{}, fmt.Errorf("failed to get package hash: %w", err)
- }
-
- key, err := cache.NewHash("action ID")
- if err != nil {
- return cache.ActionID{}, fmt.Errorf("failed to make a hash: %w", err)
- }
- fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath)
- fmt.Fprintf(key, "pkghash %s\n", hash)
-
- return key.Sum(), nil
-}
-
-// packageHash computes a package's hash. The hash is based on all Go
-// files that make up the package, as well as the hashes of imported
-// packages.
-func (c *Cache) packageHash(pkg *packages.Package, mode HashMode) (string, error) {
- type hashResults map[HashMode]string
- hashResI, ok := c.pkgHashes.Load(pkg)
- if ok {
- hashRes := hashResI.(hashResults)
- if _, ok := hashRes[mode]; !ok {
- return "", fmt.Errorf("no mode %d in hash result", mode)
- }
- return hashRes[mode], nil
- }
-
- hashRes := hashResults{}
-
- key, err := cache.NewHash("package hash")
- if err != nil {
- return "", fmt.Errorf("failed to make a hash: %w", err)
- }
-
- fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath)
- for _, f := range pkg.CompiledGoFiles {
- c.ioSem <- struct{}{}
- h, fErr := cache.FileHash(f)
- <-c.ioSem
- if fErr != nil {
- return "", fmt.Errorf("failed to calculate file %s hash: %w", f, fErr)
- }
- fmt.Fprintf(key, "file %s %x\n", f, h)
- }
- curSum := key.Sum()
- hashRes[HashModeNeedOnlySelf] = hex.EncodeToString(curSum[:])
-
- imps := make([]*packages.Package, 0, len(pkg.Imports))
- for _, imp := range pkg.Imports {
- imps = append(imps, imp)
- }
- sort.Slice(imps, func(i, j int) bool {
- return imps[i].PkgPath < imps[j].PkgPath
- })
-
- calcDepsHash := func(depMode HashMode) error {
- for _, dep := range imps {
- if dep.PkgPath == "unsafe" {
- continue
- }
-
- depHash, depErr := c.packageHash(dep, depMode)
- if depErr != nil {
- return fmt.Errorf("failed to calculate hash for dependency %s with mode %d: %w", dep.Name, depMode, depErr)
- }
-
- fmt.Fprintf(key, "import %s %s\n", dep.PkgPath, depHash)
- }
- return nil
- }
-
- if err := calcDepsHash(HashModeNeedOnlySelf); err != nil {
- return "", err
- }
-
- curSum = key.Sum()
- hashRes[HashModeNeedDirectDeps] = hex.EncodeToString(curSum[:])
-
- if err := calcDepsHash(HashModeNeedAllDeps); err != nil {
- return "", err
- }
- curSum = key.Sum()
- hashRes[HashModeNeedAllDeps] = hex.EncodeToString(curSum[:])
-
- if _, ok := hashRes[mode]; !ok {
- return "", fmt.Errorf("invalid mode %d", mode)
- }
-
- c.pkgHashes.Store(pkg, hashRes)
- return hashRes[mode], nil
-}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/renameio/readme.md b/vendor/github.com/golangci/golangci-lint/internal/renameio/readme.md
deleted file mode 100644
index 36ec6ed499..0000000000
--- a/vendor/github.com/golangci/golangci-lint/internal/renameio/readme.md
+++ /dev/null
@@ -1,10 +0,0 @@
-# renameio
-
-Extracted from go/src/cmd/go/internal/renameio/
-I don't know what version of Go this package was pulled from.
-
-Adapted for golangci-lint:
-- https://github.com/golangci/golangci-lint/pull/699
-- https://github.com/golangci/golangci-lint/pull/808
-- https://github.com/golangci/golangci-lint/pull/1063
-- https://github.com/golangci/golangci-lint/pull/3204
diff --git a/vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go b/vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go
deleted file mode 100644
index 2f88f4f7cc..0000000000
--- a/vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go
+++ /dev/null
@@ -1,93 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package renameio writes files atomically by renaming temporary files.
-package renameio
-
-import (
- "bytes"
- "io"
- "math/rand"
- "os"
- "path/filepath"
- "strconv"
-
- "github.com/golangci/golangci-lint/internal/robustio"
-)
-
-const patternSuffix = ".tmp"
-
-// Pattern returns a glob pattern that matches the unrenamed temporary files
-// created when writing to filename.
-func Pattern(filename string) string {
- return filepath.Join(filepath.Dir(filename), filepath.Base(filename)+patternSuffix)
-}
-
-// WriteFile is like os.WriteFile, but first writes data to an arbitrary
-// file in the same directory as filename, then renames it atomically to the
-// final name.
-//
-// That ensures that the final location, if it exists, is always a complete file.
-func WriteFile(filename string, data []byte, perm os.FileMode) (err error) {
- return WriteToFile(filename, bytes.NewReader(data), perm)
-}
-
-// WriteToFile is a variant of WriteFile that accepts the data as an io.Reader
-// instead of a slice.
-func WriteToFile(filename string, data io.Reader, perm os.FileMode) (err error) {
- f, err := tempFile(filepath.Dir(filename), filepath.Base(filename), perm)
- if err != nil {
- return err
- }
- defer func() {
- // Only call os.Remove on f.Name() if we failed to rename it: otherwise,
- // some other process may have created a new file with the same name after
- // that.
- if err != nil {
- f.Close()
- os.Remove(f.Name())
- }
- }()
-
- if _, err := io.Copy(f, data); err != nil {
- return err
- }
- // Sync the file before renaming it: otherwise, after a crash the reader may
- // observe a 0-length file instead of the actual contents.
- // See https://golang.org/issue/22397#issuecomment-380831736.
- if err := f.Sync(); err != nil {
- return err
- }
- if err := f.Close(); err != nil {
- return err
- }
-
- return robustio.Rename(f.Name(), filename)
-}
-
-// tempFile creates a new temporary file with given permission bits.
-func tempFile(dir, prefix string, perm os.FileMode) (f *os.File, err error) {
- for i := 0; i < 10000; i++ {
- name := filepath.Join(dir, prefix+strconv.Itoa(rand.Intn(1000000000))+patternSuffix)
- f, err = os.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_EXCL, perm)
- if os.IsExist(err) {
- continue
- }
- break
- }
- return
-}
-
-// ReadFile is like os.ReadFile, but on Windows retries spurious errors that
-// may occur if the file is concurrently replaced.
-//
-// Errors are classified heuristically and retries are bounded, so even this
-// function may occasionally return a spurious error on Windows.
-// If so, the error will likely wrap one of:
-// - syscall.ERROR_ACCESS_DENIED
-// - syscall.ERROR_FILE_NOT_FOUND
-// - internal/syscall/windows.ERROR_SHARING_VIOLATION
-func ReadFile(filename string) ([]byte, error) {
- return robustio.ReadFile(filename)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/x/LICENSE b/vendor/github.com/golangci/golangci-lint/internal/x/LICENSE
new file mode 100644
index 0000000000..2a7cf70da6
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/x/LICENSE
@@ -0,0 +1,27 @@
+Copyright 2009 The Go Authors.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google LLC nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/golangci/golangci-lint/internal/x/tools/analysisflags/readme.md b/vendor/github.com/golangci/golangci-lint/internal/x/tools/analysisflags/readme.md
new file mode 100644
index 0000000000..4d221d4ca5
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/x/tools/analysisflags/readme.md
@@ -0,0 +1,8 @@
+# analysisflags
+
+Extracted from `/go/analysis/internal/analysisflags` (related to `checker`).
+This is just a copy of the code without any changes.
+
+## History
+
+- sync with https://github.com/golang/tools/blob/v0.28.0
diff --git a/vendor/github.com/golangci/golangci-lint/internal/x/tools/analysisflags/url.go b/vendor/github.com/golangci/golangci-lint/internal/x/tools/analysisflags/url.go
new file mode 100644
index 0000000000..26a917a991
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/x/tools/analysisflags/url.go
@@ -0,0 +1,33 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package analysisflags
+
+import (
+ "fmt"
+ "net/url"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+// ResolveURL resolves the URL field for a Diagnostic from an Analyzer
+// and returns the URL. See Diagnostic.URL for details.
+func ResolveURL(a *analysis.Analyzer, d analysis.Diagnostic) (string, error) {
+ if d.URL == "" && d.Category == "" && a.URL == "" {
+ return "", nil // do nothing
+ }
+ raw := d.URL
+ if d.URL == "" && d.Category != "" {
+ raw = "#" + d.Category
+ }
+ u, err := url.Parse(raw)
+ if err != nil {
+ return "", fmt.Errorf("invalid Diagnostic.URL %q: %s", raw, err)
+ }
+ base, err := url.Parse(a.URL)
+ if err != nil {
+ return "", fmt.Errorf("invalid Analyzer.URL %q: %s", a.URL, err)
+ }
+ return base.ResolveReference(u).String(), nil
+}
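
ResolveURL combines three optional inputs: an explicit Diagnostic.URL wins, otherwise a non-empty Category becomes a "#category" fragment, and either is resolved against Analyzer.URL as the base. A hedged, self-contained sketch of the same rule using only net/url (the helper name and the example URL are illustrative):

```go
package main

import (
	"fmt"
	"net/url"
)

// resolveDiagURL mirrors the logic above: an explicit diagnostic URL wins,
// otherwise a non-empty category becomes a "#category" fragment, and the
// result is resolved against the analyzer's base URL.
func resolveDiagURL(analyzerURL, diagURL, category string) (string, error) {
	if diagURL == "" && category == "" && analyzerURL == "" {
		return "", nil
	}
	raw := diagURL
	if raw == "" && category != "" {
		raw = "#" + category
	}
	u, err := url.Parse(raw)
	if err != nil {
		return "", err
	}
	base, err := url.Parse(analyzerURL)
	if err != nil {
		return "", err
	}
	return base.ResolveReference(u).String(), nil
}

func main() {
	// Prints the base URL with the category as a fragment.
	s, _ := resolveDiagURL("https://example.org/analyzers/printf", "", "nonconst")
	fmt.Println(s) // https://example.org/analyzers/printf#nonconst
}
```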
diff --git a/vendor/github.com/golangci/golangci-lint/internal/x/tools/analysisinternal/analysis.go b/vendor/github.com/golangci/golangci-lint/internal/x/tools/analysisinternal/analysis.go
new file mode 100644
index 0000000000..bb12600dac
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/x/tools/analysisinternal/analysis.go
@@ -0,0 +1,48 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package analysisinternal provides gopls' internal analyses with a
+// number of helper functions that operate on typed syntax trees.
+package analysisinternal
+
+import (
+ "fmt"
+ "os"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+// MakeReadFile returns a simple implementation of the Pass.ReadFile function.
+func MakeReadFile(pass *analysis.Pass) func(filename string) ([]byte, error) {
+ return func(filename string) ([]byte, error) {
+ if err := CheckReadable(pass, filename); err != nil {
+ return nil, err
+ }
+ return os.ReadFile(filename)
+ }
+}
+
+// CheckReadable enforces the access policy defined by the ReadFile field of [analysis.Pass].
+func CheckReadable(pass *analysis.Pass, filename string) error {
+ if slicesContains(pass.OtherFiles, filename) ||
+ slicesContains(pass.IgnoredFiles, filename) {
+ return nil
+ }
+ for _, f := range pass.Files {
+ if pass.Fset.File(f.FileStart).Name() == filename {
+ return nil
+ }
+ }
+ return fmt.Errorf("Pass.ReadFile: %s is not among OtherFiles, IgnoredFiles, or names of Files", filename)
+}
+
+// TODO(adonovan): use go1.21 slices.Contains.
+func slicesContains[S ~[]E, E comparable](slice S, x E) bool {
+ for _, elem := range slice {
+ if elem == x {
+ return true
+ }
+ }
+ return false
+}
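
CheckReadable restricts an analyzer's reads to files the pass already knows about: OtherFiles, IgnoredFiles, or the parsed Go files themselves. A small sketch of the same policy written with slices.Contains, which the TODO above notes becomes available on Go 1.21; the pass stand-in type here is illustrative, not the x/tools API:

```go
package main

import (
	"fmt"
	"slices"
)

// pass is a stand-in for the fields of analysis.Pass that the policy needs.
type pass struct {
	FileNames    []string // names of the parsed Go files
	OtherFiles   []string
	IgnoredFiles []string
}

// checkReadable allows a read only if the file is already known to the pass.
func checkReadable(p *pass, filename string) error {
	if slices.Contains(p.OtherFiles, filename) ||
		slices.Contains(p.IgnoredFiles, filename) ||
		slices.Contains(p.FileNames, filename) {
		return nil
	}
	return fmt.Errorf("%s is not among OtherFiles, IgnoredFiles, or names of Files", filename)
}

func main() {
	p := &pass{FileNames: []string{"main.go"}}
	fmt.Println(checkReadable(p, "main.go"))   // <nil>
	fmt.Println(checkReadable(p, "secret.go")) // error
}
```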
diff --git a/vendor/github.com/golangci/golangci-lint/internal/x/tools/analysisinternal/readme.md b/vendor/github.com/golangci/golangci-lint/internal/x/tools/analysisinternal/readme.md
new file mode 100644
index 0000000000..f301cdbebb
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/x/tools/analysisinternal/readme.md
@@ -0,0 +1,8 @@
+# analysisinternal
+
+Extracted from `/internal/analysisinternal/` (related to `checker`).
+This is just a copy of the code without any changes.
+
+## History
+
+- sync with https://github.com/golang/tools/blob/v0.28.0
diff --git a/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/diff.go b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/diff.go
new file mode 100644
index 0000000000..a13547b7a7
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/diff.go
@@ -0,0 +1,176 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package diff computes differences between text files or strings.
+package diff
+
+import (
+ "fmt"
+ "sort"
+ "strings"
+)
+
+// An Edit describes the replacement of a portion of a text file.
+type Edit struct {
+ Start, End int // byte offsets of the region to replace
+ New string // the replacement
+}
+
+func (e Edit) String() string {
+ return fmt.Sprintf("{Start:%d,End:%d,New:%q}", e.Start, e.End, e.New)
+}
+
+// Apply applies a sequence of edits to the src buffer and returns the
+// result. Edits are applied in order of start offset; edits with the
+// same start offset are applied in the order they were provided.
+//
+// Apply returns an error if any edit is out of bounds,
+// or if any pair of edits is overlapping.
+func Apply(src string, edits []Edit) (string, error) {
+ edits, size, err := validate(src, edits)
+ if err != nil {
+ return "", err
+ }
+
+ // Apply edits.
+ out := make([]byte, 0, size)
+ lastEnd := 0
+ for _, edit := range edits {
+ if lastEnd < edit.Start {
+ out = append(out, src[lastEnd:edit.Start]...)
+ }
+ out = append(out, edit.New...)
+ lastEnd = edit.End
+ }
+ out = append(out, src[lastEnd:]...)
+
+ if len(out) != size {
+ panic("wrong size")
+ }
+
+ return string(out), nil
+}
+
+// ApplyBytes is like Apply, but it accepts a byte slice.
+// The result is always a new array.
+func ApplyBytes(src []byte, edits []Edit) ([]byte, error) {
+ res, err := Apply(string(src), edits)
+ return []byte(res), err
+}
+
+// validate checks that edits are consistent with src,
+// and returns the size of the patched output.
+// It may return a different slice.
+func validate(src string, edits []Edit) ([]Edit, int, error) {
+ if !sort.IsSorted(editsSort(edits)) {
+ edits = append([]Edit(nil), edits...)
+ SortEdits(edits)
+ }
+
+ // Check validity of edits and compute final size.
+ size := len(src)
+ lastEnd := 0
+ for _, edit := range edits {
+ if !(0 <= edit.Start && edit.Start <= edit.End && edit.End <= len(src)) {
+ return nil, 0, fmt.Errorf("diff has out-of-bounds edits")
+ }
+ if edit.Start < lastEnd {
+ return nil, 0, fmt.Errorf("diff has overlapping edits")
+ }
+ size += len(edit.New) + edit.Start - edit.End
+ lastEnd = edit.End
+ }
+
+ return edits, size, nil
+}
+
+// SortEdits orders a slice of Edits by (start, end) offset.
+// This ordering puts insertions (end = start) before deletions
+// (end > start) at the same point, but uses a stable sort to preserve
+// the order of multiple insertions at the same point.
+// (Apply detects multiple deletions at the same point as an error.)
+func SortEdits(edits []Edit) {
+ sort.Stable(editsSort(edits))
+}
+
+type editsSort []Edit
+
+func (a editsSort) Len() int { return len(a) }
+func (a editsSort) Less(i, j int) bool {
+ if cmp := a[i].Start - a[j].Start; cmp != 0 {
+ return cmp < 0
+ }
+ return a[i].End < a[j].End
+}
+func (a editsSort) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+
+// lineEdits expands and merges a sequence of edits so that each
+// resulting edit replaces one or more complete lines.
+// See ApplyEdits for preconditions.
+func lineEdits(src string, edits []Edit) ([]Edit, error) {
+ edits, _, err := validate(src, edits)
+ if err != nil {
+ return nil, err
+ }
+
+ // Do all deletions begin and end at the start of a line,
+ // and all insertions end with a newline?
+ // (This is merely a fast path.)
+ for _, edit := range edits {
+ if edit.Start >= len(src) || // insertion at EOF
+ edit.Start > 0 && src[edit.Start-1] != '\n' || // not at line start
+ edit.End > 0 && src[edit.End-1] != '\n' || // not at line start
+ edit.New != "" && edit.New[len(edit.New)-1] != '\n' { // partial insert
+ goto expand // slow path
+ }
+ }
+ return edits, nil // aligned
+
+expand:
+ if len(edits) == 0 {
+ return edits, nil // no edits (unreachable due to fast path)
+ }
+ expanded := make([]Edit, 0, len(edits)) // a guess
+ prev := edits[0]
+ // TODO(adonovan): opt: start from the first misaligned edit.
+ // TODO(adonovan): opt: avoid quadratic cost of string += string.
+ for _, edit := range edits[1:] {
+ between := src[prev.End:edit.Start]
+ if !strings.Contains(between, "\n") {
+ // overlapping lines: combine with previous edit.
+ prev.New += between + edit.New
+ prev.End = edit.End
+ } else {
+ // non-overlapping lines: flush previous edit.
+ expanded = append(expanded, expandEdit(prev, src))
+ prev = edit
+ }
+ }
+ return append(expanded, expandEdit(prev, src)), nil // flush final edit
+}
+
+// expandEdit returns edit expanded to complete whole lines.
+func expandEdit(edit Edit, src string) Edit {
+ // Expand start left to start of line.
+ // (delta is the zero-based column number of start.)
+ start := edit.Start
+ if delta := start - 1 - strings.LastIndex(src[:start], "\n"); delta > 0 {
+ edit.Start -= delta
+ edit.New = src[start-delta:start] + edit.New
+ }
+
+ // Expand end right to end of line.
+ end := edit.End
+ if end > 0 && src[end-1] != '\n' ||
+ edit.New != "" && edit.New[len(edit.New)-1] != '\n' {
+ if nl := strings.IndexByte(src[end:], '\n'); nl < 0 {
+ edit.End = len(src) // extend to EOF
+ } else {
+ edit.End = end + nl + 1 // extend beyond \n
+ }
+ }
+ edit.New += src[end:edit.End]
+
+ return edit
+}
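
An Edit is a byte-offset replacement of src[Start:End] by New, and Apply expects the edits to be in-bounds, sorted, and non-overlapping. A self-contained sketch of those semantics on a toy edit type (the vendored package is internal, so this does not import it):

```go
package main

import (
	"fmt"
	"sort"
)

// edit mirrors the Edit type above: replace src[start:end] with repl.
type edit struct {
	start, end int
	repl       string
}

// apply assumes the edits are in-bounds and non-overlapping, as validate enforces.
func apply(src string, edits []edit) string {
	sort.Slice(edits, func(i, j int) bool { return edits[i].start < edits[j].start })
	var out []byte
	last := 0
	for _, e := range edits {
		out = append(out, src[last:e.start]...) // unchanged text before the edit
		out = append(out, e.repl...)            // the replacement
		last = e.end
	}
	return string(append(out, src[last:]...)) // unchanged tail
}

func main() {
	src := "hello world"
	edits := []edit{
		{start: 0, end: 5, repl: "goodbye"}, // replace "hello"
		{start: 5, end: 5, repl: ","},       // insertion at offset 5
	}
	fmt.Println(apply(src, edits)) // goodbye, world
}
```

Insertions are simply edits with end == start, which is why SortEdits orders by (Start, End) and keeps the sort stable.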
diff --git a/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/common.go b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/common.go
new file mode 100644
index 0000000000..c3e82dd268
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/common.go
@@ -0,0 +1,179 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package lcs
+
+import (
+ "log"
+ "sort"
+)
+
+// lcs is a longest common sequence
+type lcs []diag
+
+// A diag is a piece of the edit graph where A[X+i] == B[Y+i], for 0<=i<Len.
+// All computed diagonals are parts of a longest common subsequence.
+type diag struct {
+ X, Y int
+ Len int
+}
+
+// sort sorts in place, by lowest X, and if tied, inversely by Len
+func (l lcs) sort() lcs {
+ sort.Slice(l, func(i, j int) bool {
+ if l[i].X != l[j].X {
+ return l[i].X < l[j].X
+ }
+ return l[i].Len > l[j].Len
+ })
+ return l
+}
+
+// validate that the elements of the lcs do not overlap
+// (can only happen when the two-sided algorithm ends early)
+// expects the lcs to be sorted
+func (l lcs) valid() bool {
+ for i := 1; i < len(l); i++ {
+ if l[i-1].X+l[i-1].Len > l[i].X {
+ return false
+ }
+ if l[i-1].Y+l[i-1].Len > l[i].Y {
+ return false
+ }
+ }
+ return true
+}
+
+// repair overlapping lcs
+// only called if two-sided stops early
+func (l lcs) fix() lcs {
+ // from the set of diagonals in l, find a maximal non-conflicting set
+ // this problem may be NP-complete, but we use a greedy heuristic,
+ // which is quadratic, but with a better data structure, could be D log D.
+ // independent is not enough: {0,3,1} and {3,0,2} can't both occur in an lcs
+ // which has to have monotone x and y
+ if len(l) == 0 {
+ return nil
+ }
+ sort.Slice(l, func(i, j int) bool { return l[i].Len > l[j].Len })
+ tmp := make(lcs, 0, len(l))
+ tmp = append(tmp, l[0])
+ for i := 1; i < len(l); i++ {
+ var dir direction
+ nxt := l[i]
+ for _, in := range tmp {
+ if dir, nxt = overlap(in, nxt); dir == empty || dir == bad {
+ break
+ }
+ }
+ if nxt.Len > 0 && dir != bad {
+ tmp = append(tmp, nxt)
+ }
+ }
+ tmp.sort()
+ if false && !tmp.valid() { // debug checking
+ log.Fatalf("here %d", len(tmp))
+ }
+ return tmp
+}
+
+type direction int
+
+const (
+ empty direction = iota // diag is empty (so not in lcs)
+ leftdown // proposed acceptably to the left and below
+ rightup // proposed diag is acceptably to the right and above
+ bad // proposed diag is inconsistent with the lcs so far
+)
+
+// overlap trims the proposed diag prop so it doesn't overlap with
+// the existing diag that has already been added to the lcs.
+func overlap(exist, prop diag) (direction, diag) {
+ if prop.X <= exist.X && exist.X < prop.X+prop.Len {
+ // remove the end of prop where it overlaps with the X end of exist
+ delta := prop.X + prop.Len - exist.X
+ prop.Len -= delta
+ if prop.Len <= 0 {
+ return empty, prop
+ }
+ }
+ if exist.X <= prop.X && prop.X < exist.X+exist.Len {
+ // remove the beginning of prop where overlaps with exist
+ delta := exist.X + exist.Len - prop.X
+ prop.Len -= delta
+ if prop.Len <= 0 {
+ return empty, prop
+ }
+ prop.X += delta
+ prop.Y += delta
+ }
+ if prop.Y <= exist.Y && exist.Y < prop.Y+prop.Len {
+ // remove the end of prop that overlaps (in Y) with exist
+ delta := prop.Y + prop.Len - exist.Y
+ prop.Len -= delta
+ if prop.Len <= 0 {
+ return empty, prop
+ }
+ }
+ if exist.Y <= prop.Y && prop.Y < exist.Y+exist.Len {
+ // remove the beginning of prop that overlaps with exist
+ delta := exist.Y + exist.Len - prop.Y
+ prop.Len -= delta
+ if prop.Len <= 0 {
+ return empty, prop
+ }
+ prop.X += delta // no test reaches this code
+ prop.Y += delta
+ }
+ if prop.X+prop.Len <= exist.X && prop.Y+prop.Len <= exist.Y {
+ return leftdown, prop
+ }
+ if exist.X+exist.Len <= prop.X && exist.Y+exist.Len <= prop.Y {
+ return rightup, prop
+ }
+ // prop can't be in an lcs that contains exist
+ return bad, prop
+}
+
+// manipulating Diag and lcs
+
+// prepend a diagonal (x,y)-(x+1,y+1) segment either to an empty lcs
+// or to its first Diag. prepend is only called to extend diagonals
+// in the backward direction.
+func (lcs lcs) prepend(x, y int) lcs {
+ if len(lcs) > 0 {
+ d := &lcs[0]
+ if int(d.X) == x+1 && int(d.Y) == y+1 {
+ // extend the diagonal down and to the left
+ d.X, d.Y = int(x), int(y)
+ d.Len++
+ return lcs
+ }
+ }
+
+ r := diag{X: int(x), Y: int(y), Len: 1}
+ lcs = append([]diag{r}, lcs...)
+ return lcs
+}
+
+// append appends a diagonal, or extends the existing one,
+// by adding the edge (x,y)-(x+1,y+1). append is only called
+// to extend diagonals in the forward direction.
+func (lcs lcs) append(x, y int) lcs {
+ if len(lcs) > 0 {
+ last := &lcs[len(lcs)-1]
+ // Expand last element if adjoining.
+ if last.X+last.Len == x && last.Y+last.Len == y {
+ last.Len++
+ return lcs
+ }
+ }
+
+ return append(lcs, diag{X: x, Y: y, Len: 1})
+}
+
+// enforce constraint on d, k
+func ok(d, k int) bool {
+ return d >= 0 && -d <= k && k <= d
+}
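
The lcs type represents a longest common subsequence as a list of diagonals, and append merges a new match (x, y) into the last diagonal when it adjoins it. A tiny illustration with a standalone segment type, using matches picked by hand for the A="aabbaa", B="aacaba" example used in doc.go:

```go
package main

import "fmt"

// seg mirrors diag above: A[X+i] == B[Y+i] for 0 <= i < Len.
type seg struct{ X, Y, Len int }

// appendMatch extends the last segment when (x, y) adjoins it, as lcs.append
// does, and otherwise starts a new length-1 segment.
func appendMatch(s []seg, x, y int) []seg {
	if n := len(s); n > 0 {
		last := &s[n-1]
		if last.X+last.Len == x && last.Y+last.Len == y {
			last.Len++
			return s
		}
	}
	return append(s, seg{X: x, Y: y, Len: 1})
}

func main() {
	// Hand-picked matches between A="aabbaa" and B="aacaba".
	var s []seg
	for _, m := range [][2]int{{0, 0}, {1, 1}, {2, 4}, {4, 5}} {
		s = appendMatch(s, m[0], m[1])
	}
	total := 0
	for _, d := range s {
		total += d.Len
	}
	fmt.Println(s, "LCS length =", total) // [{0 0 2} {2 4 1} {4 5 1}] LCS length = 4
}
```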
diff --git a/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/doc.go b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/doc.go
new file mode 100644
index 0000000000..9029dd20b3
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/doc.go
@@ -0,0 +1,156 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package lcs contains code to find longest-common-subsequences
+// (and diffs)
+package lcs
+
+/*
+Compute longest-common-subsequences of two slices A, B using
+algorithms from Myers' paper. A longest-common-subsequence
+(LCS from now on) of A and B is a maximal set of lexically increasing
+pairs of subscripts (x,y) with A[x]==B[y]. There may be many LCS, but
+they all have the same length. An LCS determines a sequence of edits
+that changes A into B.
+
+The key concept is the edit graph of A and B.
+If A has length N and B has length M, then the edit graph has
+vertices v[i][j] for 0 <= i <= N, 0 <= j <= M. There is a
+horizontal edge from v[i][j] to v[i+1][j] whenever both are in
+the graph, and a vertical edge from v[i][j] to f[i][j+1] similarly.
+When A[i] == B[j] there is a diagonal edge from v[i][j] to v[i+1][j+1].
+
+A path in the graph between (0,0) and (N,M) determines a sequence
+of edits converting A into B: each horizontal edge corresponds to removing
+an element of A, and each vertical edge corresponds to inserting an
+element of B.
+
+A vertex (x,y) is on (forward) diagonal k if x-y=k. A path in the graph
+is of length D if it has D non-diagonal edges. The algorithms generate
+forward paths (in which at least one of x,y increases at each edge),
+or backward paths (in which at least one of x,y decreases at each edge),
+or a combination. (Note that the orientation is the traditional mathematical one,
+with the origin in the lower-left corner.)
+
+Here is the edit graph for A:"aabbaa", B:"aacaba". (I know the diagonals look weird.)
+ ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙
+ a | ___/‾‾‾ | ___/‾‾‾ | | | ___/‾‾‾ | ___/‾‾‾ |
+ ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙
+ b | | | ___/‾‾‾ | ___/‾‾‾ | | |
+ ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙
+ a | ___/‾‾‾ | ___/‾‾‾ | | | ___/‾‾‾ | ___/‾‾‾ |
+ ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙
+ c | | | | | | |
+ ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙
+ a | ___/‾‾‾ | ___/‾‾‾ | | | ___/‾‾‾ | ___/‾‾‾ |
+ ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙
+ a | ___/‾‾‾ | ___/‾‾‾ | | | ___/‾‾‾ | ___/‾‾‾ |
+ ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙
+ a a b b a a
+
+
+The algorithm labels a vertex (x,y) with D,k if it is on diagonal k and at
+the end of a maximal path of length D. (Because x-y=k it suffices to remember
+only the x coordinate of the vertex.)
+
+The forward algorithm: Find the longest diagonal starting at (0,0) and
+label its end with D=0,k=0. From that vertex take a vertical step and
+then follow the longest diagonal (up and to the right), and label that vertex
+with D=1,k=-1. From the D=0,k=0 point take a horizontal step and then follow
+the longest diagonal (up and to the right) and label that vertex
+D=1,k=1. In the same way, having labelled all the D vertices,
+from a vertex labelled D,k find two vertices
+tentatively labelled D+1,k-1 and D+1,k+1. There may be two on the same
+diagonal, in which case take the one with the larger x.
+
+Eventually the path gets to (N,M), and the diagonals on it are the LCS.
+
+Here is the edit graph with the ends of D-paths labelled. (So, for instance,
+0/2,2 indicates that x=2,y=2 is labelled with 0, as it should be, since the first
+step is to go up the longest diagonal from (0,0).)
+A:"aabbaa", B:"aacaba"
+ ⊙ ------- ⊙ ------- ⊙ -------(3/3,6)------- ⊙ -------(3/5,6)-------(4/6,6)
+ a | ___/‾‾‾ | ___/‾‾‾ | | | ___/‾‾‾ | ___/‾‾‾ |
+ ⊙ ------- ⊙ ------- ⊙ -------(2/3,5)------- ⊙ ------- ⊙ ------- ⊙
+ b | | | ___/‾‾‾ | ___/‾‾‾ | | |
+ ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ -------(3/5,4)------- ⊙
+ a | ___/‾‾‾ | ___/‾‾‾ | | | ___/‾‾‾ | ___/‾‾‾ |
+ ⊙ ------- ⊙ -------(1/2,3)-------(2/3,3)------- ⊙ ------- ⊙ ------- ⊙
+ c | | | | | | |
+ ⊙ ------- ⊙ -------(0/2,2)-------(1/3,2)-------(2/4,2)-------(3/5,2)-------(4/6,2)
+ a | ___/‾‾‾ | ___/‾‾‾ | | | ___/‾‾‾ | ___/‾‾‾ |
+ ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙
+ a | ___/‾‾‾ | ___/‾‾‾ | | | ___/‾‾‾ | ___/‾‾‾ |
+ ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙
+ a a b b a a
+
+The 4-path is reconstructed starting at (4/6,6), horizontal to (3/5,6), diagonal to (3,4), vertical
+to (2/3,3), horizontal to (1/2,3), vertical to (0/2,2), and diagonal to (0,0). As expected,
+there are 4 non-diagonal steps, and the diagonals form an LCS.
+
+There is a symmetric backward algorithm, which gives (backwards labels are prefixed with a colon):
+A:"aabbaa", B:"aacaba"
+ ⊙ -------- ⊙ -------- ⊙ -------- ⊙ -------- ⊙ -------- ⊙ -------- ⊙
+ a | ____/‾‾‾ | ____/‾‾‾ | | | ____/‾‾‾ | ____/‾‾‾ |
+ ⊙ -------- ⊙ -------- ⊙ -------- ⊙ -------- ⊙ --------(:0/5,5)-------- ⊙
+ b | | | ____/‾‾‾ | ____/‾‾‾ | | |
+ ⊙ -------- ⊙ -------- ⊙ --------(:1/3,4)-------- ⊙ -------- ⊙ -------- ⊙
+ a | ____/‾‾‾ | ____/‾‾‾ | | | ____/‾‾‾ | ____/‾‾‾ |
+ (:3/0,3)--------(:2/1,3)-------- ⊙ --------(:2/3,3)--------(:1/4,3)-------- ⊙ -------- ⊙
+ c | | | | | | |
+ ⊙ -------- ⊙ -------- ⊙ --------(:3/3,2)--------(:2/4,2)-------- ⊙ -------- ⊙
+ a | ____/‾‾‾ | ____/‾‾‾ | | | ____/‾‾‾ | ____/‾‾‾ |
+ (:3/0,1)-------- ⊙ -------- ⊙ -------- ⊙ --------(:3/4,1)-------- ⊙ -------- ⊙
+ a | ____/‾‾‾ | ____/‾‾‾ | | | ____/‾‾‾ | ____/‾‾‾ |
+ (:4/0,0)-------- ⊙ -------- ⊙ -------- ⊙ --------(:4/4,0)-------- ⊙ -------- ⊙
+ a a b b a a
+
+Neither of these is ideal for use in an editor, where it is undesirable to send very long diffs to the
+front end. It's tricky to decide exactly what 'very long diffs' means, as "replace A by B" is very short.
+We want to control how big D can be, by stopping when it gets too large. The forward algorithm then
+privileges common prefixes, and the backward algorithm privileges common suffixes. Either is an undesirable
+asymmetry.
+
+Fortunately there is a two-sided algorithm, implied by results in Myers' paper. Here's what the labels in
+the edit graph look like.
+A:"aabbaa", B:"aacaba"
+ ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙
+ a | ____/‾‾‾‾ | ____/‾‾‾‾ | | | ____/‾‾‾‾ | ____/‾‾‾‾ |
+ ⊙ --------- ⊙ --------- ⊙ --------- (2/3,5) --------- ⊙ --------- (:0/5,5)--------- ⊙
+ b | | | ____/‾‾‾‾ | ____/‾‾‾‾ | | |
+ ⊙ --------- ⊙ --------- ⊙ --------- (:1/3,4)--------- ⊙ --------- ⊙ --------- ⊙
+ a | ____/‾‾‾‾ | ____/‾‾‾‾ | | | ____/‾‾‾‾ | ____/‾‾‾‾ |
+ ⊙ --------- (:2/1,3)--------- (1/2,3) ---------(2:2/3,3)--------- (:1/4,3)--------- ⊙ --------- ⊙
+ c | | | | | | |
+ ⊙ --------- ⊙ --------- (0/2,2) --------- (1/3,2) ---------(2:2/4,2)--------- ⊙ --------- ⊙
+ a | ____/‾‾‾‾ | ____/‾‾‾‾ | | | ____/‾‾‾‾ | ____/‾‾‾‾ |
+ ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙
+ a | ____/‾‾‾‾ | ____/‾‾‾‾ | | | ____/‾‾‾‾ | ____/‾‾‾‾ |
+ ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙
+ a a b b a a
+
+The algorithm stopped when it saw the backwards 2-path ending at (1,3) and the forwards 2-path ending at (3,5). The criterion
+is a backwards path ending at (u,v) and a forward path ending at (x,y), where u <= x and the two points are on the same
+diagonal. (Here the edgegraph has a diagonal, but the criterion is x-y=u-v.) Myers proves there is a forward
+2-path from (0,0) to (1,3), and that together with the backwards 2-path ending at (1,3) gives the expected 4-path.
+Unfortunately the forward path has to be constructed by another run of the forward algorithm; it can't be found from the
+computed labels. That is the worst case. Had the code noticed (x,y)=(u,v)=(3,3) the whole path could be reconstructed
+from the edgegraph. The implementation looks for a number of special cases to try to avoid computing an extra forward path.
+
+If the two-sided algorithm has to stop early (because D has become too large) it will have found a forward LCS and a
+backwards LCS. Ideally these go with disjoint prefixes and suffixes of A and B, but disjointness may fail and the two
+computed LCS may conflict. (An easy example is where A is a suffix of B, and shares a short prefix. The backwards LCS
+is all of A, and the forward LCS is a prefix of A.) The algorithm combines the two
+to form a best-effort LCS. In the worst case the forward partial LCS may have to
+be recomputed.
+*/
+
+/* Eugene Myers' paper is titled
+"An O(ND) Difference Algorithm and Its Variations"
+and can be found at
+http://www.xmailserver.org/diff2.pdf
+
+(There is a generic implementation of the algorithm in the repository with git hash
+b9ad7e4ade3a686d608e44475390ad428e60e7fc)
+*/
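
The worked example above claims a shortest edit path of length D=4 for A="aabbaa", B="aacaba"; since only insertions and deletions count, this is equivalent to an LCS of length 4 because D = N + M - 2*LCS. A quick cross-check with a plain dynamic-programming LCS (a verification sketch only; the package itself implements Myers' O(ND) algorithm):

```go
package main

import "fmt"

// lcsLen is a plain O(N*M) dynamic-programming LCS, used only to cross-check
// the worked example in the package documentation.
func lcsLen(a, b string) int {
	prev := make([]int, len(b)+1)
	for i := 1; i <= len(a); i++ {
		cur := make([]int, len(b)+1)
		for j := 1; j <= len(b); j++ {
			if a[i-1] == b[j-1] {
				cur[j] = prev[j-1] + 1
			} else if prev[j] >= cur[j-1] {
				cur[j] = prev[j]
			} else {
				cur[j] = cur[j-1]
			}
		}
		prev = cur
	}
	return prev[len(b)]
}

func main() {
	a, b := "aabbaa", "aacaba"
	l := lcsLen(a, b)
	d := len(a) + len(b) - 2*l // non-diagonal edges on a shortest edit path
	fmt.Printf("LCS length = %d, D = %d\n", l, d) // LCS length = 4, D = 4
}
```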
diff --git a/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/git.sh b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/git.sh
new file mode 100644
index 0000000000..b25ba4aac7
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/git.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+#
+# Copyright 2022 The Go Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file.
+#
+# Creates a zip file containing all numbered versions
+# of the commit history of a large source file, for use
+# as input data for the tests of the diff algorithm.
+#
+# Run script from root of the x/tools repo.
+
+set -eu
+
+# WARNING: This script will install the latest version of $file
+# The largest real source file in the x/tools repo.
+# file=internal/golang/completion/completion.go
+# file=internal/golang/diagnostics.go
+file=internal/protocol/tsprotocol.go
+
+tmp=$(mktemp -d)
+git log $file |
+ awk '/^commit / {print $2}' |
+ nl -ba -nrz |
+ while read n hash; do
+ git checkout --quiet $hash $file
+ cp -f $file $tmp/$n
+ done
+(cd $tmp && zip -q - *) > testdata.zip
+rm -fr $tmp
+git restore --staged $file
+git restore $file
+echo "Created testdata.zip"
diff --git a/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/labels.go b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/labels.go
new file mode 100644
index 0000000000..504913d1da
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/labels.go
@@ -0,0 +1,55 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package lcs
+
+import (
+ "fmt"
+)
+
+// For each D, vec[D] has length D+1,
+// and the label for (D, k) is stored in vec[D][(D+k)/2].
+type label struct {
+ vec [][]int
+}
+
+// Temporary checking DO NOT COMMIT true TO PRODUCTION CODE
+const debug = false
+
+// debugging. check that the (d,k) pair is valid
+// (that is, -d<=k<=d and d+k even)
+func checkDK(D, k int) {
+ if k >= -D && k <= D && (D+k)%2 == 0 {
+ return
+ }
+ panic(fmt.Sprintf("out of range, d=%d,k=%d", D, k))
+}
+
+func (t *label) set(D, k, x int) {
+ if debug {
+ checkDK(D, k)
+ }
+ for len(t.vec) <= D {
+ t.vec = append(t.vec, nil)
+ }
+ if t.vec[D] == nil {
+ t.vec[D] = make([]int, D+1)
+ }
+ t.vec[D][(D+k)/2] = x // known that D+k is even
+}
+
+func (t *label) get(d, k int) int {
+ if debug {
+ checkDK(d, k)
+ }
+ return int(t.vec[d][(d+k)/2])
+}
+
+func newtriang(limit int) label {
+ if limit < 100 {
+ // Preallocate if limit is not large.
+ return label{vec: make([][]int, limit)}
+ }
+ return label{}
+}
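
For a given D only the diagonals k = -D, -D+2, ..., D are reachable, which is why vec[D] needs just D+1 slots and the label for (D, k) lives in column (D+k)/2. A tiny illustration of that mapping:

```go
package main

import "fmt"

func main() {
	// Print the triangular storage layout used by label above:
	// vec[D] has length D+1, and (D, k) maps to column (D+k)/2.
	for D := 0; D <= 3; D++ {
		for k := -D; k <= D; k += 2 {
			fmt.Printf("(D=%d, k=%+d) -> vec[%d][%d]\n", D, k, D, (D+k)/2)
		}
	}
}
```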
diff --git a/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/old.go b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/old.go
new file mode 100644
index 0000000000..4353da15ba
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/old.go
@@ -0,0 +1,480 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package lcs
+
+// TODO(adonovan): remove unclear references to "old" in this package.
+
+import (
+ "fmt"
+)
+
+// A Diff is a replacement of a portion of A by a portion of B.
+type Diff struct {
+ Start, End int // offsets of portion to delete in A
+ ReplStart, ReplEnd int // offset of replacement text in B
+}
+
+// DiffStrings returns the differences between two strings.
+// It does not respect rune boundaries.
+func DiffStrings(a, b string) []Diff { return diff(stringSeqs{a, b}) }
+
+// DiffBytes returns the differences between two byte sequences.
+// It does not respect rune boundaries.
+func DiffBytes(a, b []byte) []Diff { return diff(bytesSeqs{a, b}) }
+
+// DiffRunes returns the differences between two rune sequences.
+func DiffRunes(a, b []rune) []Diff { return diff(runesSeqs{a, b}) }
+
+func diff(seqs sequences) []Diff {
+ // A limit on how deeply the LCS algorithm should search. The value is just a guess.
+ const maxDiffs = 100
+ diff, _ := compute(seqs, twosided, maxDiffs/2)
+ return diff
+}
+
+// compute computes the list of differences between two sequences,
+// along with the LCS. It is exercised directly by tests.
+// The algorithm is one of {forward, backward, twosided}.
+func compute(seqs sequences, algo func(*editGraph) lcs, limit int) ([]Diff, lcs) {
+ if limit <= 0 {
+ limit = 1 << 25 // effectively infinity
+ }
+ alen, blen := seqs.lengths()
+ g := &editGraph{
+ seqs: seqs,
+ vf: newtriang(limit),
+ vb: newtriang(limit),
+ limit: limit,
+ ux: alen,
+ uy: blen,
+ delta: alen - blen,
+ }
+ lcs := algo(g)
+ diffs := lcs.toDiffs(alen, blen)
+ return diffs, lcs
+}
+
+// editGraph carries the information for computing the lcs of two sequences.
+type editGraph struct {
+ seqs sequences
+ vf, vb label // forward and backward labels
+
+ limit int // maximal value of D
+ // the bounding rectangle of the current edit graph
+ lx, ly, ux, uy int
+ delta int // common subexpression: (ux-lx)-(uy-ly)
+}
+
+// toDiffs converts an LCS to a list of edits.
+func (lcs lcs) toDiffs(alen, blen int) []Diff {
+ var diffs []Diff
+ var pa, pb int // offsets in a, b
+ for _, l := range lcs {
+ if pa < l.X || pb < l.Y {
+ diffs = append(diffs, Diff{pa, l.X, pb, l.Y})
+ }
+ pa = l.X + l.Len
+ pb = l.Y + l.Len
+ }
+ if pa < alen || pb < blen {
+ diffs = append(diffs, Diff{pa, alen, pb, blen})
+ }
+ return diffs
+}
+
+// --- FORWARD ---
+
+// fdone decides if the forward path has reached the upper right
+// corner of the rectangle. If so, it also returns the computed lcs.
+func (e *editGraph) fdone(D, k int) (bool, lcs) {
+ // x, y, k are relative to the rectangle
+ x := e.vf.get(D, k)
+ y := x - k
+ if x == e.ux && y == e.uy {
+ return true, e.forwardlcs(D, k)
+ }
+ return false, nil
+}
+
+// run the forward algorithm, until success or up to the limit on D.
+func forward(e *editGraph) lcs {
+ e.setForward(0, 0, e.lx)
+ if ok, ans := e.fdone(0, 0); ok {
+ return ans
+ }
+ // from D to D+1
+ for D := 0; D < e.limit; D++ {
+ e.setForward(D+1, -(D + 1), e.getForward(D, -D))
+ if ok, ans := e.fdone(D+1, -(D + 1)); ok {
+ return ans
+ }
+ e.setForward(D+1, D+1, e.getForward(D, D)+1)
+ if ok, ans := e.fdone(D+1, D+1); ok {
+ return ans
+ }
+ for k := -D + 1; k <= D-1; k += 2 {
+ // these are tricky and easy to get backwards
+ lookv := e.lookForward(k, e.getForward(D, k-1)+1)
+ lookh := e.lookForward(k, e.getForward(D, k+1))
+ if lookv > lookh {
+ e.setForward(D+1, k, lookv)
+ } else {
+ e.setForward(D+1, k, lookh)
+ }
+ if ok, ans := e.fdone(D+1, k); ok {
+ return ans
+ }
+ }
+ }
+ // D is too large
+ // find the D path with maximal x+y inside the rectangle and
+ // use that to compute the found part of the lcs
+ kmax := -e.limit - 1
+ diagmax := -1
+ for k := -e.limit; k <= e.limit; k += 2 {
+ x := e.getForward(e.limit, k)
+ y := x - k
+ if x+y > diagmax && x <= e.ux && y <= e.uy {
+ diagmax, kmax = x+y, k
+ }
+ }
+ return e.forwardlcs(e.limit, kmax)
+}
+
+// recover the lcs by backtracking from the farthest point reached
+func (e *editGraph) forwardlcs(D, k int) lcs {
+ var ans lcs
+ for x := e.getForward(D, k); x != 0 || x-k != 0; {
+ if ok(D-1, k-1) && x-1 == e.getForward(D-1, k-1) {
+ // if (x-1,y) is labelled D-1, x--,D--,k--,continue
+ D, k, x = D-1, k-1, x-1
+ continue
+ } else if ok(D-1, k+1) && x == e.getForward(D-1, k+1) {
+ // if (x,y-1) is labelled D-1, x, D--,k++, continue
+ D, k = D-1, k+1
+ continue
+ }
+ // if (x-1,y-1)--(x,y) is a diagonal, prepend,x--,y--, continue
+ y := x - k
+ ans = ans.prepend(x+e.lx-1, y+e.ly-1)
+ x--
+ }
+ return ans
+}
+
+// start at (x,y), go up the diagonal as far as possible,
+// and label the result with d
+func (e *editGraph) lookForward(k, relx int) int {
+ rely := relx - k
+ x, y := relx+e.lx, rely+e.ly
+ if x < e.ux && y < e.uy {
+ x += e.seqs.commonPrefixLen(x, e.ux, y, e.uy)
+ }
+ return x
+}
+
+func (e *editGraph) setForward(d, k, relx int) {
+ x := e.lookForward(k, relx)
+ e.vf.set(d, k, x-e.lx)
+}
+
+func (e *editGraph) getForward(d, k int) int {
+ x := e.vf.get(d, k)
+ return x
+}
+
+// --- BACKWARD ---
+
+// bdone decides if the backward path has reached the lower left corner
+func (e *editGraph) bdone(D, k int) (bool, lcs) {
+ // x, y, k are relative to the rectangle
+ x := e.vb.get(D, k)
+ y := x - (k + e.delta)
+ if x == 0 && y == 0 {
+ return true, e.backwardlcs(D, k)
+ }
+ return false, nil
+}
+
+// run the backward algorithm, until success or up to the limit on D.
+func backward(e *editGraph) lcs {
+ e.setBackward(0, 0, e.ux)
+ if ok, ans := e.bdone(0, 0); ok {
+ return ans
+ }
+ // from D to D+1
+ for D := 0; D < e.limit; D++ {
+ e.setBackward(D+1, -(D + 1), e.getBackward(D, -D)-1)
+ if ok, ans := e.bdone(D+1, -(D + 1)); ok {
+ return ans
+ }
+ e.setBackward(D+1, D+1, e.getBackward(D, D))
+ if ok, ans := e.bdone(D+1, D+1); ok {
+ return ans
+ }
+ for k := -D + 1; k <= D-1; k += 2 {
+ // these are tricky and easy to get wrong
+ lookv := e.lookBackward(k, e.getBackward(D, k-1))
+ lookh := e.lookBackward(k, e.getBackward(D, k+1)-1)
+ if lookv < lookh {
+ e.setBackward(D+1, k, lookv)
+ } else {
+ e.setBackward(D+1, k, lookh)
+ }
+ if ok, ans := e.bdone(D+1, k); ok {
+ return ans
+ }
+ }
+ }
+
+ // D is too large
+ // find the D path with minimal x+y inside the rectangle and
+ // use that to compute the part of the lcs found
+ kmax := -e.limit - 1
+ diagmin := 1 << 25
+ for k := -e.limit; k <= e.limit; k += 2 {
+ x := e.getBackward(e.limit, k)
+ y := x - (k + e.delta)
+ if x+y < diagmin && x >= 0 && y >= 0 {
+ diagmin, kmax = x+y, k
+ }
+ }
+ if kmax < -e.limit {
+ panic(fmt.Sprintf("no paths when limit=%d?", e.limit))
+ }
+ return e.backwardlcs(e.limit, kmax)
+}
+
+// recover the lcs by backtracking
+func (e *editGraph) backwardlcs(D, k int) lcs {
+ var ans lcs
+ for x := e.getBackward(D, k); x != e.ux || x-(k+e.delta) != e.uy; {
+ if ok(D-1, k-1) && x == e.getBackward(D-1, k-1) {
+ // D--, k--, x unchanged
+ D, k = D-1, k-1
+ continue
+ } else if ok(D-1, k+1) && x+1 == e.getBackward(D-1, k+1) {
+ // D--, k++, x++
+ D, k, x = D-1, k+1, x+1
+ continue
+ }
+ y := x - (k + e.delta)
+ ans = ans.append(x+e.lx, y+e.ly)
+ x++
+ }
+ return ans
+}
+
+// start at (x,y), go down the diagonal as far as possible,
+func (e *editGraph) lookBackward(k, relx int) int {
+ rely := relx - (k + e.delta) // forward k = k + e.delta
+ x, y := relx+e.lx, rely+e.ly
+ if x > 0 && y > 0 {
+ x -= e.seqs.commonSuffixLen(0, x, 0, y)
+ }
+ return x
+}
+
+// convert to rectangle, and label the result with d
+func (e *editGraph) setBackward(d, k, relx int) {
+ x := e.lookBackward(k, relx)
+ e.vb.set(d, k, x-e.lx)
+}
+
+func (e *editGraph) getBackward(d, k int) int {
+ x := e.vb.get(d, k)
+ return x
+}
+
+// -- TWOSIDED ---
+
+func twosided(e *editGraph) lcs {
+ // The termination condition could be improved, as either the forward
+ // or backward pass could succeed before Myers' Lemma applies.
+ // Aside from questions of efficiency (is the extra testing cost-effective)
+ // this is more likely to matter when e.limit is reached.
+ e.setForward(0, 0, e.lx)
+ e.setBackward(0, 0, e.ux)
+
+ // from D to D+1
+ for D := 0; D < e.limit; D++ {
+ // just finished a backwards pass, so check
+ if got, ok := e.twoDone(D, D); ok {
+ return e.twolcs(D, D, got)
+ }
+ // do a forwards pass (D to D+1)
+ e.setForward(D+1, -(D + 1), e.getForward(D, -D))
+ e.setForward(D+1, D+1, e.getForward(D, D)+1)
+ for k := -D + 1; k <= D-1; k += 2 {
+ // these are tricky and easy to get backwards
+ lookv := e.lookForward(k, e.getForward(D, k-1)+1)
+ lookh := e.lookForward(k, e.getForward(D, k+1))
+ if lookv > lookh {
+ e.setForward(D+1, k, lookv)
+ } else {
+ e.setForward(D+1, k, lookh)
+ }
+ }
+ // just did a forward pass, so check
+ if got, ok := e.twoDone(D+1, D); ok {
+ return e.twolcs(D+1, D, got)
+ }
+ // do a backward pass, D to D+1
+ e.setBackward(D+1, -(D + 1), e.getBackward(D, -D)-1)
+ e.setBackward(D+1, D+1, e.getBackward(D, D))
+ for k := -D + 1; k <= D-1; k += 2 {
+ // these are tricky and easy to get wrong
+ lookv := e.lookBackward(k, e.getBackward(D, k-1))
+ lookh := e.lookBackward(k, e.getBackward(D, k+1)-1)
+ if lookv < lookh {
+ e.setBackward(D+1, k, lookv)
+ } else {
+ e.setBackward(D+1, k, lookh)
+ }
+ }
+ }
+
+ // D too large. combine a forward and backward partial lcs
+ // first, a forward one
+ kmax := -e.limit - 1
+ diagmax := -1
+ for k := -e.limit; k <= e.limit; k += 2 {
+ x := e.getForward(e.limit, k)
+ y := x - k
+ if x+y > diagmax && x <= e.ux && y <= e.uy {
+ diagmax, kmax = x+y, k
+ }
+ }
+ if kmax < -e.limit {
+ panic(fmt.Sprintf("no forward paths when limit=%d?", e.limit))
+ }
+ lcs := e.forwardlcs(e.limit, kmax)
+ // now a backward one
+ // find the D path with minimal x+y inside the rectangle and
+ // use that to compute the lcs
+ diagmin := 1 << 25 // infinity
+ for k := -e.limit; k <= e.limit; k += 2 {
+ x := e.getBackward(e.limit, k)
+ y := x - (k + e.delta)
+ if x+y < diagmin && x >= 0 && y >= 0 {
+ diagmin, kmax = x+y, k
+ }
+ }
+ if kmax < -e.limit {
+ panic(fmt.Sprintf("no backward paths when limit=%d?", e.limit))
+ }
+ lcs = append(lcs, e.backwardlcs(e.limit, kmax)...)
+ // These may overlap (e.forwardlcs and e.backwardlcs return sorted lcs)
+ ans := lcs.fix()
+ return ans
+}
+
+// Does Myers' Lemma apply?
+func (e *editGraph) twoDone(df, db int) (int, bool) {
+ if (df+db+e.delta)%2 != 0 {
+ return 0, false // diagonals cannot overlap
+ }
+ kmin := -db + e.delta
+ if -df > kmin {
+ kmin = -df
+ }
+ kmax := db + e.delta
+ if df < kmax {
+ kmax = df
+ }
+ for k := kmin; k <= kmax; k += 2 {
+ x := e.vf.get(df, k)
+ u := e.vb.get(db, k-e.delta)
+ if u <= x {
+ // is it worth looking at all the other k?
+ for l := k; l <= kmax; l += 2 {
+ x := e.vf.get(df, l)
+ y := x - l
+ u := e.vb.get(db, l-e.delta)
+ v := u - l
+ if x == u || u == 0 || v == 0 || y == e.uy || x == e.ux {
+ return l, true
+ }
+ }
+ return k, true
+ }
+ }
+ return 0, false
+}
+
+func (e *editGraph) twolcs(df, db, kf int) lcs {
+ // db==df || db+1==df
+ x := e.vf.get(df, kf)
+ y := x - kf
+ kb := kf - e.delta
+ u := e.vb.get(db, kb)
+ v := u - kf
+
+ // Myers proved there is a df-path from (0,0) to (u,v)
+ // and a db-path from (x,y) to (N,M).
+ // In the first case the overall path is the forward path
+ // to (u,v) followed by the backward path to (N,M).
+ // In the second case the path is the backward path to (x,y)
+ // followed by the forward path to (x,y) from (0,0).
+
+ // Look for some special cases to avoid computing either of these paths.
+ if x == u {
+ // "babaab" "cccaba"
+ // already patched together
+ lcs := e.forwardlcs(df, kf)
+ lcs = append(lcs, e.backwardlcs(db, kb)...)
+ return lcs.sort()
+ }
+
+ // is (u-1,v) or (u,v-1) labelled df-1?
+ // if so, that forward df-1-path plus a horizontal or vertical edge
+ // is the df-path to (u,v), then plus the db-path to (N,M)
+ if u > 0 && ok(df-1, u-1-v) && e.vf.get(df-1, u-1-v) == u-1 {
+ // "aabbab" "cbcabc"
+ lcs := e.forwardlcs(df-1, u-1-v)
+ lcs = append(lcs, e.backwardlcs(db, kb)...)
+ return lcs.sort()
+ }
+ if v > 0 && ok(df-1, (u-(v-1))) && e.vf.get(df-1, u-(v-1)) == u {
+ // "abaabb" "bcacab"
+ lcs := e.forwardlcs(df-1, u-(v-1))
+ lcs = append(lcs, e.backwardlcs(db, kb)...)
+ return lcs.sort()
+ }
+
+ // The path can't possibly contribute to the lcs because it
+ // is all horizontal or vertical edges
+ if u == 0 || v == 0 || x == e.ux || y == e.uy {
+ // "abaabb" "abaaaa"
+ if u == 0 || v == 0 {
+ return e.backwardlcs(db, kb)
+ }
+ return e.forwardlcs(df, kf)
+ }
+
+ // is (x+1,y) or (x,y+1) labelled db-1?
+ if x+1 <= e.ux && ok(db-1, x+1-y-e.delta) && e.vb.get(db-1, x+1-y-e.delta) == x+1 {
+ // "bababb" "baaabb"
+ lcs := e.backwardlcs(db-1, kb+1)
+ lcs = append(lcs, e.forwardlcs(df, kf)...)
+ return lcs.sort()
+ }
+ if y+1 <= e.uy && ok(db-1, x-(y+1)-e.delta) && e.vb.get(db-1, x-(y+1)-e.delta) == x {
+ // "abbbaa" "cabacc"
+ lcs := e.backwardlcs(db-1, kb-1)
+ lcs = append(lcs, e.forwardlcs(df, kf)...)
+ return lcs.sort()
+ }
+
+ // need to compute another path
+ // "aabbaa" "aacaba"
+ lcs := e.backwardlcs(db, kb)
+ oldx, oldy := e.ux, e.uy
+ e.ux = u
+ e.uy = v
+ lcs = append(lcs, forward(e)...)
+ e.ux, e.uy = oldx, oldy
+ return lcs.sort()
+}
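
A Diff says: replace a[Start:End] with b[ReplStart:ReplEnd]. Given a sorted, non-overlapping list, which is what these functions return, B can be rebuilt from A by splicing. A self-contained sketch, with diffs written by hand for the doc.go example pair (DiffStrings may choose a different but equally valid set):

```go
package main

import "fmt"

// span mirrors Diff above: replace a[Start:End] with b[ReplStart:ReplEnd].
type span struct {
	Start, End         int // region deleted from a
	ReplStart, ReplEnd int // replacement text taken from b
}

// applyDiffs splices the replacements into a, assuming the spans are sorted
// and non-overlapping.
func applyDiffs(a, b string, diffs []span) string {
	var out []byte
	last := 0
	for _, d := range diffs {
		out = append(out, a[last:d.Start]...)          // common text
		out = append(out, b[d.ReplStart:d.ReplEnd]...) // replacement from b
		last = d.End
	}
	return string(append(out, a[last:]...))
}

func main() {
	a, b := "aabbaa", "aacaba"
	diffs := []span{
		{Start: 2, End: 2, ReplStart: 2, ReplEnd: 4}, // insert "ca" before a[2]
		{Start: 3, End: 4, ReplStart: 5, ReplEnd: 5}, // delete the second "b"
		{Start: 5, End: 6, ReplStart: 6, ReplEnd: 6}, // delete the final "a"
	}
	fmt.Println(applyDiffs(a, b, diffs), applyDiffs(a, b, diffs) == b) // aacaba true
}
```

The two inserted and two deleted characters add up to 4, matching the D=4 edit distance of the example in doc.go.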
diff --git a/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/sequence.go b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/sequence.go
new file mode 100644
index 0000000000..2d72d26304
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/lcs/sequence.go
@@ -0,0 +1,113 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package lcs
+
+// This file defines the abstract sequence over which the LCS algorithm operates.
+
+// sequences abstracts a pair of sequences, A and B.
+type sequences interface {
+ lengths() (int, int) // len(A), len(B)
+ commonPrefixLen(ai, aj, bi, bj int) int // len(commonPrefix(A[ai:aj], B[bi:bj]))
+ commonSuffixLen(ai, aj, bi, bj int) int // len(commonSuffix(A[ai:aj], B[bi:bj]))
+}
+
+type stringSeqs struct{ a, b string }
+
+func (s stringSeqs) lengths() (int, int) { return len(s.a), len(s.b) }
+func (s stringSeqs) commonPrefixLen(ai, aj, bi, bj int) int {
+ return commonPrefixLenString(s.a[ai:aj], s.b[bi:bj])
+}
+func (s stringSeqs) commonSuffixLen(ai, aj, bi, bj int) int {
+ return commonSuffixLenString(s.a[ai:aj], s.b[bi:bj])
+}
+
+// The explicit capacity in s[i:j:j] leads to more efficient code.
+
+type bytesSeqs struct{ a, b []byte }
+
+func (s bytesSeqs) lengths() (int, int) { return len(s.a), len(s.b) }
+func (s bytesSeqs) commonPrefixLen(ai, aj, bi, bj int) int {
+ return commonPrefixLenBytes(s.a[ai:aj:aj], s.b[bi:bj:bj])
+}
+func (s bytesSeqs) commonSuffixLen(ai, aj, bi, bj int) int {
+ return commonSuffixLenBytes(s.a[ai:aj:aj], s.b[bi:bj:bj])
+}
+
+type runesSeqs struct{ a, b []rune }
+
+func (s runesSeqs) lengths() (int, int) { return len(s.a), len(s.b) }
+func (s runesSeqs) commonPrefixLen(ai, aj, bi, bj int) int {
+ return commonPrefixLenRunes(s.a[ai:aj:aj], s.b[bi:bj:bj])
+}
+func (s runesSeqs) commonSuffixLen(ai, aj, bi, bj int) int {
+ return commonSuffixLenRunes(s.a[ai:aj:aj], s.b[bi:bj:bj])
+}
+
+// TODO(adonovan): optimize these functions using ideas from:
+// - https://go.dev/cl/408116 common.go
+// - https://go.dev/cl/421435 xor_generic.go
+
+// TODO(adonovan): factor using generics when available,
+// but measure performance impact.
+
+// commonPrefixLen* returns the length of the common prefix of a[ai:aj] and b[bi:bj].
+func commonPrefixLenBytes(a, b []byte) int {
+ n := min(len(a), len(b))
+ i := 0
+ for i < n && a[i] == b[i] {
+ i++
+ }
+ return i
+}
+func commonPrefixLenRunes(a, b []rune) int {
+ n := min(len(a), len(b))
+ i := 0
+ for i < n && a[i] == b[i] {
+ i++
+ }
+ return i
+}
+func commonPrefixLenString(a, b string) int {
+ n := min(len(a), len(b))
+ i := 0
+ for i < n && a[i] == b[i] {
+ i++
+ }
+ return i
+}
+
+// commonSuffixLen* returns the length of the common suffix of a[ai:aj] and b[bi:bj].
+func commonSuffixLenBytes(a, b []byte) int {
+ n := min(len(a), len(b))
+ i := 0
+ for i < n && a[len(a)-1-i] == b[len(b)-1-i] {
+ i++
+ }
+ return i
+}
+func commonSuffixLenRunes(a, b []rune) int {
+ n := min(len(a), len(b))
+ i := 0
+ for i < n && a[len(a)-1-i] == b[len(b)-1-i] {
+ i++
+ }
+ return i
+}
+func commonSuffixLenString(a, b string) int {
+ n := min(len(a), len(b))
+ i := 0
+ for i < n && a[len(a)-1-i] == b[len(b)-1-i] {
+ i++
+ }
+ return i
+}
+
+func min(x, y int) int {
+ if x < y {
+ return x
+ } else {
+ return y
+ }
+}
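
The three near-identical prefix/suffix helpers exist because this code predates generics; the TODO above notes the possible factoring and its performance caveat. A hedged sketch of the generic form, using the Go 1.21 built-in min:

```go
package main

import "fmt"

// commonPrefixLen is the generic form the TODO above alludes to; one
// instantiation covers []byte and []rune (strings would still need a
// conversion or their own copy).
func commonPrefixLen[E comparable](a, b []E) int {
	n := min(len(a), len(b))
	i := 0
	for i < n && a[i] == b[i] {
		i++
	}
	return i
}

func commonSuffixLen[E comparable](a, b []E) int {
	n := min(len(a), len(b))
	i := 0
	for i < n && a[len(a)-1-i] == b[len(b)-1-i] {
		i++
	}
	return i
}

func main() {
	fmt.Println(commonPrefixLen([]byte("aabbaa"), []byte("aacaba"))) // 2
	fmt.Println(commonSuffixLen([]rune("aabbaa"), []rune("aacaba"))) // 1
}
```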
diff --git a/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/ndiff.go b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/ndiff.go
new file mode 100644
index 0000000000..f7aa2b79f6
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/ndiff.go
@@ -0,0 +1,99 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package diff
+
+import (
+ "bytes"
+ "unicode/utf8"
+
+ "github.com/golangci/golangci-lint/internal/x/tools/diff/lcs"
+)
+
+// Strings computes the differences between two strings.
+// The resulting edits respect rune boundaries.
+func Strings(before, after string) []Edit {
+ if before == after {
+ return nil // common case
+ }
+
+ if isASCII(before) && isASCII(after) {
+ // TODO(adonovan): opt: specialize diffASCII for strings.
+ return diffASCII([]byte(before), []byte(after))
+ }
+ return diffRunes([]rune(before), []rune(after))
+}
+
+// Bytes computes the differences between two byte slices.
+// The resulting edits respect rune boundaries.
+func Bytes(before, after []byte) []Edit {
+ if bytes.Equal(before, after) {
+ return nil // common case
+ }
+
+ if isASCII(before) && isASCII(after) {
+ return diffASCII(before, after)
+ }
+ return diffRunes(runes(before), runes(after))
+}
+
+func diffASCII(before, after []byte) []Edit {
+ diffs := lcs.DiffBytes(before, after)
+
+ // Convert from LCS diffs.
+ res := make([]Edit, len(diffs))
+ for i, d := range diffs {
+ res[i] = Edit{d.Start, d.End, string(after[d.ReplStart:d.ReplEnd])}
+ }
+ return res
+}
+
+func diffRunes(before, after []rune) []Edit {
+ diffs := lcs.DiffRunes(before, after)
+
+ // The diffs returned by the lcs package use indexes
+ // into whatever slice was passed in.
+ // Convert rune offsets to byte offsets.
+ res := make([]Edit, len(diffs))
+ lastEnd := 0
+ utf8Len := 0
+ for i, d := range diffs {
+ utf8Len += runesLen(before[lastEnd:d.Start]) // text between edits
+ start := utf8Len
+ utf8Len += runesLen(before[d.Start:d.End]) // text deleted by this edit
+ res[i] = Edit{start, utf8Len, string(after[d.ReplStart:d.ReplEnd])}
+ lastEnd = d.End
+ }
+ return res
+}
+
+// runes is like []rune(string(bytes)) without the duplicate allocation.
+func runes(bytes []byte) []rune {
+ n := utf8.RuneCount(bytes)
+ runes := make([]rune, n)
+ for i := 0; i < n; i++ {
+ r, sz := utf8.DecodeRune(bytes)
+ bytes = bytes[sz:]
+ runes[i] = r
+ }
+ return runes
+}
+
+// runesLen returns the length in bytes of the UTF-8 encoding of runes.
+func runesLen(runes []rune) (len int) {
+ for _, r := range runes {
+ len += utf8.RuneLen(r)
+ }
+ return len
+}
+
+// isASCII reports whether s contains only ASCII.
+func isASCII[S string | []byte](s S) bool {
+ for i := 0; i < len(s); i++ {
+ if s[i] >= utf8.RuneSelf {
+ return false
+ }
+ }
+ return true
+}
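
The subtle part of diffRunes is translating rune indexes (what the LCS sees) into byte offsets (what Edit stores), by summing utf8.RuneLen over the preceding runes. A small standalone illustration, assuming a hypothetical edit covering rune range [1, 2) of a non-ASCII string:

```go
package main

import (
	"fmt"
	"unicode/utf8"
)

func main() {
	before := "héllo" // 'é' is 2 bytes in UTF-8
	runes := []rune(before)

	// Suppose an LCS over runes reports a change at rune indexes [1, 2).
	// The byte offsets into the original string are computed the same way
	// diffRunes does it: sum utf8.RuneLen over the runes before and inside
	// the edited range.
	byteStart := 0
	for _, r := range runes[:1] {
		byteStart += utf8.RuneLen(r)
	}
	byteEnd := byteStart
	for _, r := range runes[1:2] {
		byteEnd += utf8.RuneLen(r)
	}
	fmt.Println(byteStart, byteEnd)               // 1 3
	fmt.Printf("%q\n", before[byteStart:byteEnd]) // "é"
}
```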
diff --git a/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/readme.md b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/readme.md
new file mode 100644
index 0000000000..4b97984989
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/readme.md
@@ -0,0 +1,8 @@
+# diff
+
+Extracted from `/internal/diff/` (related to `fixer`).
+This is just a copy of the code without any changes.
+
+## History
+
+- sync with https://github.com/golang/tools/blob/v0.28.0
diff --git a/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/unified.go b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/unified.go
new file mode 100644
index 0000000000..cfbda61020
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/x/tools/diff/unified.go
@@ -0,0 +1,251 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package diff
+
+import (
+ "fmt"
+ "log"
+ "strings"
+)
+
+// DefaultContextLines is the number of unchanged lines of surrounding
+// context displayed by Unified. Use ToUnified to specify a different value.
+const DefaultContextLines = 3
+
+// Unified returns a unified diff of the old and new strings.
+// The old and new labels are the names of the old and new files.
+// If the strings are equal, it returns the empty string.
+func Unified(oldLabel, newLabel, old, new string) string {
+ edits := Strings(old, new)
+ unified, err := ToUnified(oldLabel, newLabel, old, edits, DefaultContextLines)
+ if err != nil {
+ // Can't happen: edits are consistent.
+ log.Fatalf("internal error in diff.Unified: %v", err)
+ }
+ return unified
+}
+
+// ToUnified applies the edits to content and returns a unified diff,
+// with contextLines lines of (unchanged) context around each diff hunk.
+// The old and new labels are the names of the content and result files.
+// It returns an error if the edits are inconsistent; see ApplyEdits.
+func ToUnified(oldLabel, newLabel, content string, edits []Edit, contextLines int) (string, error) {
+ u, err := toUnified(oldLabel, newLabel, content, edits, contextLines)
+ if err != nil {
+ return "", err
+ }
+ return u.String(), nil
+}
+
+// unified represents a set of edits as a unified diff.
+type unified struct {
+ // from is the name of the original file.
+ from string
+ // to is the name of the modified file.
+ to string
+ // hunks is the set of edit hunks needed to transform the file content.
+ hunks []*hunk
+}
+
+// Hunk represents a contiguous set of line edits to apply.
+type hunk struct {
+ // The line in the original source where the hunk starts.
+ fromLine int
+ // The line in the original source where the hunk finishes.
+ toLine int
+ // The set of line based edits to apply.
+ lines []line
+}
+
+// Line represents a single line operation to apply as part of a Hunk.
+type line struct {
+ // kind is the type of line this represents, deletion, insertion or copy.
+ kind opKind
+ // content is the content of this line.
+ // For deletion it is the line being removed, for all others it is the line
+ // to put in the output.
+ content string
+}
+
+// opKind is used to denote the type of operation a line represents.
+type opKind int
+
+const (
+ // opDelete is the operation kind for a line that is present in the input
+ // but not in the output.
+ opDelete opKind = iota
+ // opInsert is the operation kind for a line that is new in the output.
+ opInsert
+ // opEqual is the operation kind for a line that is the same in the input and
+ // output, often used to provide context around edited lines.
+ opEqual
+)
+
+// String returns a human readable representation of an OpKind. It is not
+// intended for machine processing.
+func (k opKind) String() string {
+ switch k {
+ case opDelete:
+ return "delete"
+ case opInsert:
+ return "insert"
+ case opEqual:
+ return "equal"
+ default:
+ panic("unknown operation kind")
+ }
+}
+
+// toUnified takes the contents of a file and a sequence of edits, and calculates
+// a unified diff that represents those edits.
+func toUnified(fromName, toName string, content string, edits []Edit, contextLines int) (unified, error) {
+ gap := contextLines * 2
+ u := unified{
+ from: fromName,
+ to: toName,
+ }
+ if len(edits) == 0 {
+ return u, nil
+ }
+ var err error
+ edits, err = lineEdits(content, edits) // expand to whole lines
+ if err != nil {
+ return u, err
+ }
+ lines := splitLines(content)
+ var h *hunk
+ last := 0
+ toLine := 0
+ for _, edit := range edits {
+ // Compute the zero-based line numbers of the edit start and end.
+ // TODO(adonovan): opt: compute incrementally, avoid O(n^2).
+ start := strings.Count(content[:edit.Start], "\n")
+ end := strings.Count(content[:edit.End], "\n")
+ if edit.End == len(content) && len(content) > 0 && content[len(content)-1] != '\n' {
+ end++ // EOF counts as an implicit newline
+ }
+
+ switch {
+ case h != nil && start == last:
+ //direct extension
+ case h != nil && start <= last+gap:
+ //within range of previous lines, add the joiners
+ addEqualLines(h, lines, last, start)
+ default:
+ //need to start a new hunk
+ if h != nil {
+ // add the edge to the previous hunk
+ addEqualLines(h, lines, last, last+contextLines)
+ u.hunks = append(u.hunks, h)
+ }
+ toLine += start - last
+ h = &hunk{
+ fromLine: start + 1,
+ toLine: toLine + 1,
+ }
+ // add the edge to the new hunk
+ delta := addEqualLines(h, lines, start-contextLines, start)
+ h.fromLine -= delta
+ h.toLine -= delta
+ }
+ last = start
+ for i := start; i < end; i++ {
+ h.lines = append(h.lines, line{kind: opDelete, content: lines[i]})
+ last++
+ }
+ if edit.New != "" {
+ for _, content := range splitLines(edit.New) {
+ h.lines = append(h.lines, line{kind: opInsert, content: content})
+ toLine++
+ }
+ }
+ }
+ if h != nil {
+ // add the edge to the final hunk
+ addEqualLines(h, lines, last, last+contextLines)
+ u.hunks = append(u.hunks, h)
+ }
+ return u, nil
+}
+
+func splitLines(text string) []string {
+ lines := strings.SplitAfter(text, "\n")
+ if lines[len(lines)-1] == "" {
+ lines = lines[:len(lines)-1]
+ }
+ return lines
+}
+
+func addEqualLines(h *hunk, lines []string, start, end int) int {
+ delta := 0
+ for i := start; i < end; i++ {
+ if i < 0 {
+ continue
+ }
+ if i >= len(lines) {
+ return delta
+ }
+ h.lines = append(h.lines, line{kind: opEqual, content: lines[i]})
+ delta++
+ }
+ return delta
+}
+
+// String converts a unified diff to the standard textual form for that diff.
+// The output of this function can be passed to tools like patch.
+func (u unified) String() string {
+ if len(u.hunks) == 0 {
+ return ""
+ }
+ b := new(strings.Builder)
+ fmt.Fprintf(b, "--- %s\n", u.from)
+ fmt.Fprintf(b, "+++ %s\n", u.to)
+ for _, hunk := range u.hunks {
+ fromCount, toCount := 0, 0
+ for _, l := range hunk.lines {
+ switch l.kind {
+ case opDelete:
+ fromCount++
+ case opInsert:
+ toCount++
+ default:
+ fromCount++
+ toCount++
+ }
+ }
+ fmt.Fprint(b, "@@")
+ if fromCount > 1 {
+ fmt.Fprintf(b, " -%d,%d", hunk.fromLine, fromCount)
+ } else if hunk.fromLine == 1 && fromCount == 0 {
+ // Match odd GNU diff -u behavior adding to empty file.
+ fmt.Fprintf(b, " -0,0")
+ } else {
+ fmt.Fprintf(b, " -%d", hunk.fromLine)
+ }
+ if toCount > 1 {
+ fmt.Fprintf(b, " +%d,%d", hunk.toLine, toCount)
+ } else if hunk.toLine == 1 && toCount == 0 {
+ // Match odd GNU diff -u behavior adding to empty file.
+ fmt.Fprintf(b, " +0,0")
+ } else {
+ fmt.Fprintf(b, " +%d", hunk.toLine)
+ }
+ fmt.Fprint(b, " @@\n")
+ for _, l := range hunk.lines {
+ switch l.kind {
+ case opDelete:
+ fmt.Fprintf(b, "-%s", l.content)
+ case opInsert:
+ fmt.Fprintf(b, "+%s", l.content)
+ default:
+ fmt.Fprintf(b, " %s", l.content)
+ }
+ if !strings.HasSuffix(l.content, "\n") {
+ fmt.Fprintf(b, "\n\\ No newline at end of file\n")
+ }
+ }
+ }
+ return b.String()
+}
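
Putting the pieces together, Unified("old.txt", "new.txt", "a\nb\nc\n", "a\nB\nc\n") should emit a conventional unified diff: one hunk whose header counts the context, deleted, and inserted lines. Traced by hand from the code above (not a captured run), the expected output is:

```
--- old.txt
+++ new.txt
@@ -1,3 +1,3 @@
 a
-b
+B
 c
```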
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go
index cc6c0eacd5..4f2c812dce 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go
@@ -62,6 +62,7 @@ func (*cacheCommand) executeClean(_ *cobra.Command, _ []string) error {
func (*cacheCommand) executeStatus(_ *cobra.Command, _ []string) {
cacheDir := cache.DefaultDir()
+
_, _ = fmt.Fprintf(logutils.StdOut, "Dir: %s\n", cacheDir)
cacheSizeBytes, err := dirSizeBytes(cacheDir)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/config_verify.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/config_verify.go
index a44050b593..76e09581ca 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/commands/config_verify.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/config_verify.go
@@ -1,18 +1,20 @@
package commands
import (
+ "context"
+ "encoding/json"
"errors"
"fmt"
"net/http"
"os"
"path/filepath"
+ "strconv"
"strings"
"time"
hcversion "github.com/hashicorp/go-version"
"github.com/pelletier/go-toml/v2"
- "github.com/santhosh-tekuri/jsonschema/v5"
- "github.com/santhosh-tekuri/jsonschema/v5/httploader"
+ "github.com/santhosh-tekuri/jsonschema/v6"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"gopkg.in/yaml.v3"
@@ -43,9 +45,7 @@ func (c *configCommand) executeVerify(cmd *cobra.Command, _ []string) error {
return fmt.Errorf("[%s] validate: %w", usedConfigFile, err)
}
- detail := v.DetailedOutput()
-
- printValidationDetail(cmd, &detail)
+ printValidationDetail(cmd, v.DetailedOutput())
return errors.New("the configuration contains invalid elements")
}
@@ -70,40 +70,67 @@ func createSchemaURL(flags *pflag.FlagSet, buildInfo BuildInfo) (string, error)
return "", fmt.Errorf("parse version: %w", err)
}
- schemaURL = fmt.Sprintf("https://golangci-lint.run/jsonschema/golangci.v%d.%d.jsonschema.json",
- version.Segments()[0], version.Segments()[1])
+ if version.Core().Equal(hcversion.Must(hcversion.NewVersion("v0.0.0"))) {
+ commit, err := extractCommitHash(buildInfo)
+ if err != nil {
+ return "", err
+ }
- case buildInfo.Commit != "" && buildInfo.Commit != "?":
- if buildInfo.Commit == "unknown" {
- return "", errors.New("unknown commit information")
+ return fmt.Sprintf("https://raw.githubusercontent.com/golangci/golangci-lint/%s/jsonschema/golangci.next.jsonschema.json",
+ commit), nil
}
- commit := buildInfo.Commit
-
- if strings.HasPrefix(commit, "(") {
- c, _, ok := strings.Cut(strings.TrimPrefix(commit, "("), ",")
- if !ok {
- return "", errors.New("commit information not found")
- }
+ return fmt.Sprintf("https://golangci-lint.run/jsonschema/golangci.v%d.%d.jsonschema.json",
+ version.Segments()[0], version.Segments()[1]), nil
- commit = c
+ case buildInfo.Commit != "" && buildInfo.Commit != "?":
+ commit, err := extractCommitHash(buildInfo)
+ if err != nil {
+ return "", err
}
- schemaURL = fmt.Sprintf("https://raw.githubusercontent.com/golangci/golangci-lint/%s/jsonschema/golangci.next.jsonschema.json",
- commit)
+ return fmt.Sprintf("https://raw.githubusercontent.com/golangci/golangci-lint/%s/jsonschema/golangci.next.jsonschema.json",
+ commit), nil
default:
return "", errors.New("version not found")
}
+}
+
+func extractCommitHash(buildInfo BuildInfo) (string, error) {
+ if buildInfo.Commit == "" || buildInfo.Commit == "?" {
+ return "", errors.New("empty commit information")
+ }
+
+ if buildInfo.Commit == "unknown" {
+ return "", errors.New("unknown commit information")
+ }
+
+ commit := buildInfo.Commit
+
+ if strings.HasPrefix(commit, "(") {
+ c, _, ok := strings.Cut(strings.TrimPrefix(commit, "("), ",")
+ if !ok {
+ return "", errors.New("commit information not found")
+ }
+
+ commit = c
+ }
+
+ if commit == "unknown" {
+ return "", errors.New("unknown commit information")
+ }
- return schemaURL, nil
+ return commit, nil
}
func validateConfiguration(schemaPath, targetFile string) error {
- httploader.Client = &http.Client{Timeout: 2 * time.Second}
-
compiler := jsonschema.NewCompiler()
- compiler.Draft = jsonschema.Draft7
+ compiler.UseLoader(jsonschema.SchemeURLLoader{
+ "file": jsonschema.FileLoader{},
+ "https": newJSONSchemaHTTPLoader(),
+ })
+ compiler.DefaultDraft(jsonschema.Draft7)
schema, err := compiler.Compile(schemaPath)
if err != nil {
@@ -133,14 +160,16 @@ func validateConfiguration(schemaPath, targetFile string) error {
return schema.Validate(m)
}
-func printValidationDetail(cmd *cobra.Command, detail *jsonschema.Detailed) {
- if detail.Error != "" {
+func printValidationDetail(cmd *cobra.Command, detail *jsonschema.OutputUnit) {
+ if detail.Error != nil {
+ data, _ := json.Marshal(detail.Error)
+ details, _ := strconv.Unquote(string(data))
+
cmd.PrintErrf("jsonschema: %q does not validate with %q: %s\n",
- strings.ReplaceAll(strings.TrimPrefix(detail.InstanceLocation, "/"), "/", "."), detail.KeywordLocation, detail.Error)
+ strings.ReplaceAll(strings.TrimPrefix(detail.InstanceLocation, "/"), "/", "."), detail.KeywordLocation, details)
}
for _, d := range detail.Errors {
- d := d
printValidationDetail(cmd, &d)
}
}
@@ -178,3 +207,33 @@ func decodeTomlFile(filename string) (any, error) {
return m, nil
}
+
+type jsonschemaHTTPLoader struct {
+ *http.Client
+}
+
+func newJSONSchemaHTTPLoader() *jsonschemaHTTPLoader {
+ return &jsonschemaHTTPLoader{Client: &http.Client{
+ Timeout: 2 * time.Second,
+ }}
+}
+
+func (l jsonschemaHTTPLoader) Load(url string) (any, error) {
+ req, err := http.NewRequestWithContext(context.Background(), http.MethodGet, url, http.NoBody)
+ if err != nil {
+ return nil, err
+ }
+
+ resp, err := l.Do(req)
+ if err != nil {
+ return nil, err
+ }
+
+ defer resp.Body.Close()
+
+ if resp.StatusCode != http.StatusOK {
+ return nil, fmt.Errorf("%s returned status code %d", url, resp.StatusCode)
+ }
+
+ return jsonschema.UnmarshalJSON(resp.Body)
+}
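
A hedged sketch of the commit-extraction behaviour introduced above (function name is illustrative only): a build commit recorded as "(abc1234, modified)" is reduced to "abc1234", while placeholder values are rejected.

package main

import (
	"errors"
	"fmt"
	"strings"
)

// commitHash mirrors the extraction logic added in extractCommitHash above.
func commitHash(raw string) (string, error) {
	if raw == "" || raw == "?" {
		return "", errors.New("empty commit information")
	}
	if raw == "unknown" {
		return "", errors.New("unknown commit information")
	}
	if strings.HasPrefix(raw, "(") {
		c, _, ok := strings.Cut(strings.TrimPrefix(raw, "("), ",")
		if !ok {
			return "", errors.New("commit information not found")
		}
		raw = c
	}
	if raw == "unknown" {
		return "", errors.New("unknown commit information")
	}
	return raw, nil
}

func main() {
	hash, err := commitHash("(abc1234, modified)")
	fmt.Println(hash, err) // abc1234 <nil>
}
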
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/flagsets.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/flagsets.go
index 608f6b9de5..d514f12718 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/commands/flagsets.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/flagsets.go
@@ -28,11 +28,11 @@ func setupLintersFlagSet(v *viper.Viper, fs *pflag.FlagSet) {
color.GreenString("Enable only fast linters from enabled linters set (first run won't be fast)"))
internal.AddHackedStringSliceP(fs, "presets", "p",
- color.GreenString(fmt.Sprintf("Enable presets (%s) of linters.\n"+
- "Run 'golangci-lint help linters' to see them.\n"+
+ formatList("Enable presets of linters:", lintersdb.AllPresets(),
+ "Run 'golangci-lint help linters' to see them.",
"This option implies option --disable-all",
- strings.Join(lintersdb.AllPresets(), "|"),
- )))
+ ),
+ )
fs.StringSlice("enable-only", nil,
color.GreenString("Override linters configuration section to only run the specific linter(s)")) // Flags only.
@@ -49,14 +49,13 @@ func setupRunFlagSet(v *viper.Viper, fs *pflag.FlagSet) {
internal.AddFlagAndBind(v, fs, fs.String, "go", "run.go", "", color.GreenString("Targeted Go version"))
internal.AddHackedStringSlice(fs, "build-tags", color.GreenString("Build tags"))
- internal.AddFlagAndBind(v, fs, fs.Duration, "timeout", "run.timeout", defaultTimeout, color.GreenString("Timeout for total work"))
+ internal.AddFlagAndBind(v, fs, fs.Duration, "timeout", "run.timeout", defaultTimeout,
+ color.GreenString("Timeout for total work. If <= 0, the timeout is disabled"))
internal.AddFlagAndBind(v, fs, fs.Bool, "tests", "run.tests", true, color.GreenString("Analyze tests (*_test.go)"))
internal.AddDeprecatedHackedStringSlice(fs, "skip-files", color.GreenString("Regexps of files to skip"))
internal.AddDeprecatedHackedStringSlice(fs, "skip-dirs", color.GreenString("Regexps of directories to skip"))
- internal.AddDeprecatedFlagAndBind(v, fs, fs.Bool, "skip-dirs-use-default", "run.skip-dirs-use-default", true,
- getDefaultDirectoryExcludeHelp())
const allowParallelDesc = "Allow multiple parallel golangci-lint instances running.\n" +
"If false (default) - golangci-lint acquires file lock on start."
@@ -69,13 +68,11 @@ func setupRunFlagSet(v *viper.Viper, fs *pflag.FlagSet) {
func setupOutputFlagSet(v *viper.Viper, fs *pflag.FlagSet) {
internal.AddFlagAndBind(v, fs, fs.String, "out-format", "output.formats", config.OutFormatColoredLineNumber,
- color.GreenString(fmt.Sprintf("Formats of output: %s", strings.Join(config.AllOutputFormats, "|"))))
+ formatList("Formats of output:", config.AllOutputFormats))
internal.AddFlagAndBind(v, fs, fs.Bool, "print-issued-lines", "output.print-issued-lines", true,
color.GreenString("Print lines of code with issue"))
internal.AddFlagAndBind(v, fs, fs.Bool, "print-linter-name", "output.print-linter-name", true,
color.GreenString("Print linter name in issue line"))
- internal.AddFlagAndBind(v, fs, fs.Bool, "uniq-by-line", "output.uniq-by-line", true,
- color.GreenString("Make issues output unique by line"))
internal.AddFlagAndBind(v, fs, fs.Bool, "sort-results", "output.sort-results", false,
color.GreenString("Sort linter results"))
internal.AddFlagAndBind(v, fs, fs.StringSlice, "sort-order", "output.sort-order", nil,
@@ -97,11 +94,13 @@ func setupIssuesFlagSet(v *viper.Viper, fs *pflag.FlagSet) {
color.GreenString("Maximum issues count per one linter. Set to 0 to disable"))
internal.AddFlagAndBind(v, fs, fs.Int, "max-same-issues", "issues.max-same-issues", 3,
color.GreenString("Maximum count of issues with the same text. Set to 0 to disable"))
+ internal.AddFlagAndBind(v, fs, fs.Bool, "uniq-by-line", "issues.uniq-by-line", true,
+ color.GreenString("Make issues output unique by line"))
internal.AddHackedStringSlice(fs, "exclude-files", color.GreenString("Regexps of files to exclude"))
internal.AddHackedStringSlice(fs, "exclude-dirs", color.GreenString("Regexps of directories to exclude"))
internal.AddFlagAndBind(v, fs, fs.Bool, "exclude-dirs-use-default", "issues.exclude-dirs-use-default", true,
- getDefaultDirectoryExcludeHelp())
+ formatList("Use or not use default excluded directories:", processors.StdExcludeDirRegexps))
internal.AddFlagAndBind(v, fs, fs.String, "exclude-generated", "issues.exclude-generated", processors.AutogeneratedModeLax,
color.GreenString("Mode of the generated files analysis"))
@@ -117,12 +116,31 @@ func setupIssuesFlagSet(v *viper.Viper, fs *pflag.FlagSet) {
color.GreenString("Show only new issues created after git revision `REV`"))
internal.AddFlagAndBind(v, fs, fs.String, "new-from-patch", "issues.new-from-patch", "",
color.GreenString("Show only new issues created in git patch with file path `PATH`"))
+ internal.AddFlagAndBind(v, fs, fs.String, "new-from-merge-base", "issues.new-from-merge-base", "",
+ color.GreenString("Show only new issues created after the best common ancestor (merge-base against HEAD)"))
internal.AddFlagAndBind(v, fs, fs.Bool, "whole-files", "issues.whole-files", false,
color.GreenString("Show issues in any part of update files (requires new-from-rev or new-from-patch)"))
internal.AddFlagAndBind(v, fs, fs.Bool, "fix", "issues.fix", false,
color.GreenString("Fix found issues (if it's supported by the linter)"))
}
+func formatList(head string, items []string, foot ...string) string {
+ parts := []string{color.GreenString(head)}
+ for _, p := range items {
+ parts = append(parts, fmt.Sprintf(" - %s", color.YellowString(p)))
+ }
+
+ for _, s := range foot {
+ parts = append(parts, color.GreenString(s))
+ }
+
+ if len(foot) == 0 {
+ parts = append(parts, "")
+ }
+
+ return strings.Join(parts, "\n")
+}
+
func getDefaultIssueExcludeHelp() string {
parts := []string{color.GreenString("Use or not use default excludes:")}
@@ -135,12 +153,3 @@ func getDefaultIssueExcludeHelp() string {
return strings.Join(parts, "\n")
}
-
-func getDefaultDirectoryExcludeHelp() string {
- parts := []string{color.GreenString("Use or not use default excluded directories:")}
- for _, dir := range processors.StdExcludeDirRegexps {
- parts = append(parts, fmt.Sprintf(" - %s", color.YellowString(dir)))
- }
- parts = append(parts, "")
- return strings.Join(parts, "\n")
-}
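
A small sketch of the list-style help text produced by the formatList helper added above, with the color wrappers omitted (assumed simplification): a heading line, one bullet per item, then any footer lines.

package main

import (
	"fmt"
	"strings"
)

// formatList builds a heading, one " - item" bullet per entry, and optional
// footer lines, joined with newlines, as the flag help text above does.
func formatList(head string, items []string, foot ...string) string {
	parts := []string{head}
	for _, p := range items {
		parts = append(parts, fmt.Sprintf(" - %s", p))
	}
	parts = append(parts, foot...)
	if len(foot) == 0 {
		parts = append(parts, "")
	}
	return strings.Join(parts, "\n")
}

func main() {
	fmt.Print(formatList("Formats of output:", []string{"colored-line-number", "json"}))
}
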
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go
index 094e5d1905..de4a9998fb 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go
@@ -1,10 +1,13 @@
package commands
import (
+ "encoding/json"
"fmt"
"slices"
"sort"
"strings"
+ "unicode"
+ "unicode/utf8"
"github.com/fatih/color"
"github.com/spf13/cobra"
@@ -15,9 +18,27 @@ import (
"github.com/golangci/golangci-lint/pkg/logutils"
)
+type linterHelp struct {
+ Name string `json:"name"`
+ Desc string `json:"description"`
+ Fast bool `json:"fast"`
+ AutoFix bool `json:"autoFix"`
+ Presets []string `json:"presets"`
+ EnabledByDefault bool `json:"enabledByDefault"`
+ Deprecated bool `json:"deprecated"`
+ Since string `json:"since"`
+ OriginalURL string `json:"originalURL,omitempty"`
+}
+
+type helpOptions struct {
+ JSON bool
+}
+
type helpCommand struct {
cmd *cobra.Command
+ opts helpOptions
+
dbManager *lintersdb.Manager
log logutils.Log
@@ -35,16 +56,21 @@ func newHelpCommand(logger logutils.Log) *helpCommand {
},
}
- helpCmd.AddCommand(
- &cobra.Command{
- Use: "linters",
- Short: "Help about linters",
- Args: cobra.NoArgs,
- ValidArgsFunction: cobra.NoFileCompletions,
- Run: c.execute,
- PreRunE: c.preRunE,
- },
- )
+ lintersCmd := &cobra.Command{
+ Use: "linters",
+ Short: "Help about linters",
+ Args: cobra.NoArgs,
+ ValidArgsFunction: cobra.NoFileCompletions,
+ RunE: c.execute,
+ PreRunE: c.preRunE,
+ }
+
+ helpCmd.AddCommand(lintersCmd)
+
+ fs := lintersCmd.Flags()
+ fs.SortFlags = false // sort them as they are defined here
+
+ fs.BoolVar(&c.opts.JSON, "json", false, color.GreenString("Display as JSON"))
c.cmd = helpCmd
@@ -64,7 +90,41 @@ func (c *helpCommand) preRunE(_ *cobra.Command, _ []string) error {
return nil
}
-func (c *helpCommand) execute(_ *cobra.Command, _ []string) {
+func (c *helpCommand) execute(_ *cobra.Command, _ []string) error {
+ if c.opts.JSON {
+ return c.printJSON()
+ }
+
+ c.print()
+
+ return nil
+}
+
+func (c *helpCommand) printJSON() error {
+ var linters []linterHelp
+
+ for _, lc := range c.dbManager.GetAllSupportedLinterConfigs() {
+ if lc.Internal {
+ continue
+ }
+
+ linters = append(linters, linterHelp{
+ Name: lc.Name(),
+ Desc: formatDescription(lc.Linter.Desc()),
+ Fast: !lc.IsSlowLinter(),
+ AutoFix: lc.CanAutoFix,
+ Presets: lc.InPresets,
+ EnabledByDefault: lc.EnabledByDefault,
+ Deprecated: lc.IsDeprecated(),
+ Since: lc.Since,
+ OriginalURL: lc.OriginalURL,
+ })
+ }
+
+ return json.NewEncoder(c.cmd.OutOrStdout()).Encode(linters)
+}
+
+func (c *helpCommand) print() {
var enabledLCs, disabledLCs []*linter.Config
for _, lc := range c.dbManager.GetAllSupportedLinterConfigs() {
if lc.Internal {
@@ -124,19 +184,52 @@ func printLinters(lcs []*linter.Config) {
})
for _, lc := range lcs {
- // If the linter description spans multiple lines, truncate everything following the first newline
- linterDescription := lc.Linter.Desc()
- firstNewline := strings.IndexRune(linterDescription, '\n')
- if firstNewline > 0 {
- linterDescription = linterDescription[:firstNewline]
- }
+ desc := formatDescription(lc.Linter.Desc())
deprecatedMark := ""
if lc.IsDeprecated() {
deprecatedMark = " [" + color.RedString("deprecated") + "]"
}
- _, _ = fmt.Fprintf(logutils.StdOut, "%s%s: %s [fast: %t, auto-fix: %t]\n",
- color.YellowString(lc.Name()), deprecatedMark, linterDescription, !lc.IsSlowLinter(), lc.CanAutoFix)
+ var capabilities []string
+ if !lc.IsSlowLinter() {
+ capabilities = append(capabilities, color.BlueString("fast"))
+ }
+ if lc.CanAutoFix {
+ capabilities = append(capabilities, color.GreenString("auto-fix"))
+ }
+
+ var capability string
+ if capabilities != nil {
+ capability = " [" + strings.Join(capabilities, ", ") + "]"
+ }
+
+ _, _ = fmt.Fprintf(logutils.StdOut, "%s%s: %s%s\n",
+ color.YellowString(lc.Name()), deprecatedMark, desc, capability)
}
}
+
+func formatDescription(desc string) string {
+ desc = strings.TrimSpace(desc)
+
+ if desc == "" {
+ return desc
+ }
+
+ // If the linter description spans multiple lines, truncate everything following the first newline
+ endFirstLine := strings.IndexRune(desc, '\n')
+ if endFirstLine > 0 {
+ desc = desc[:endFirstLine]
+ }
+
+ rawDesc := []rune(desc)
+
+ r, _ := utf8.DecodeRuneInString(desc)
+ rawDesc[0] = unicode.ToUpper(r)
+
+ if rawDesc[len(rawDesc)-1] != '.' {
+ rawDesc = append(rawDesc, '.')
+ }
+
+ return string(rawDesc)
+}
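
A hedged sketch of the description normalisation added above (function name is illustrative): keep only the first line of a linter description, capitalise its first rune, and make sure it ends with a period.

package main

import (
	"fmt"
	"strings"
	"unicode"
)

// normalizeDesc mirrors formatDescription above.
func normalizeDesc(desc string) string {
	desc = strings.TrimSpace(desc)
	if desc == "" {
		return desc
	}
	// Truncate everything after the first newline.
	if i := strings.IndexRune(desc, '\n'); i > 0 {
		desc = desc[:i]
	}
	runes := []rune(desc)
	runes[0] = unicode.ToUpper(runes[0])
	if runes[len(runes)-1] != '.' {
		runes = append(runes, '.')
	}
	return string(runes)
}

func main() {
	fmt.Println(normalizeDesc("checks whether code was gofmt-ed\nextra detail")) // Checks whether code was gofmt-ed.
}
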
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/internal/builder.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/internal/builder.go
index 7253615a45..f0e259fb02 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/commands/internal/builder.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/internal/builder.go
@@ -95,7 +95,7 @@ func (b Builder) clone(ctx context.Context) error {
output, err := cmd.CombinedOutput()
if err != nil {
- b.log.Infof(string(output))
+ b.log.Infof("%s", string(output))
return fmt.Errorf("%s: %w", strings.Join(cmd.Args, " "), err)
}
@@ -132,7 +132,7 @@ func (b Builder) goGet(ctx context.Context, plugin *Plugin) error {
output, err := cmd.CombinedOutput()
if err != nil {
- b.log.Warnf(string(output))
+ b.log.Warnf("%s", string(output))
return fmt.Errorf("%s: %w", strings.Join(cmd.Args, " "), err)
}
@@ -150,7 +150,7 @@ func (b Builder) addReplaceDirective(ctx context.Context, plugin *Plugin) error
output, err := cmd.CombinedOutput()
if err != nil {
- b.log.Warnf(string(output))
+ b.log.Warnf("%s", string(output))
return fmt.Errorf("%s: %w", strings.Join(cmd.Args, " "), err)
}
@@ -164,7 +164,7 @@ func (b Builder) goModTidy(ctx context.Context) error {
output, err := cmd.CombinedOutput()
if err != nil {
- b.log.Warnf(string(output))
+ b.log.Warnf("%s", string(output))
return fmt.Errorf("%s: %w", strings.Join(cmd.Args, " "), err)
}
@@ -187,7 +187,7 @@ func (b Builder) goBuild(ctx context.Context, binaryName string) error {
output, err := cmd.CombinedOutput()
if err != nil {
- b.log.Warnf(string(output))
+ b.log.Warnf("%s", string(output))
return fmt.Errorf("%s: %w", strings.Join(cmd.Args, " "), err)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go
index bc086bc3d7..57f3cdd993 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go
@@ -8,12 +8,13 @@ import (
"fmt"
"io"
"log"
+ "maps"
"os"
"path/filepath"
"runtime"
"runtime/pprof"
"runtime/trace"
- "sort"
+ "slices"
"strconv"
"strings"
"time"
@@ -24,11 +25,9 @@ import (
"github.com/spf13/pflag"
"github.com/spf13/viper"
"go.uber.org/automaxprocs/maxprocs"
- "golang.org/x/exp/maps"
"gopkg.in/yaml.v3"
"github.com/golangci/golangci-lint/internal/cache"
- "github.com/golangci/golangci-lint/internal/pkgcache"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/exitcodes"
"github.com/golangci/golangci-lint/pkg/fsutils"
@@ -152,7 +151,7 @@ func (c *runCommand) persistentPreRunE(cmd *cobra.Command, args []string) error
return err
}
- c.log.Infof(c.buildInfo.String())
+ c.log.Infof("%s", c.buildInfo.String())
loader := config.NewLoader(c.log.Child(logutils.DebugKeyConfigReader), c.viper, cmd.Flags(), c.opts.LoaderOptions, c.cfg, args)
@@ -187,6 +186,10 @@ func (c *runCommand) persistentPostRunE(_ *cobra.Command, _ []string) error {
}
func (c *runCommand) preRunE(_ *cobra.Command, args []string) error {
+ if c.cfg.GetConfigDir() != "" && c.cfg.Version != "" {
+ return errors.New("you are using a configuration file for golangci-lint v2 with golangci-lint v1: please use golangci-lint v2")
+ }
+
dbManager, err := lintersdb.NewManager(c.log.Child(logutils.DebugKeyLintersDB), c.cfg,
lintersdb.NewLinterBuilder(), lintersdb.NewPluginModuleBuilder(c.log), lintersdb.NewPluginGoBuilder(c.log))
if err != nil {
@@ -195,7 +198,7 @@ func (c *runCommand) preRunE(_ *cobra.Command, args []string) error {
c.dbManager = dbManager
- printer, err := printers.NewPrinter(c.log, &c.cfg.Output, c.reportData)
+ printer, err := printers.NewPrinter(c.log, &c.cfg.Output, c.reportData, c.cfg.GetBasePath())
if err != nil {
return err
}
@@ -209,7 +212,7 @@ func (c *runCommand) preRunE(_ *cobra.Command, args []string) error {
sw := timeutils.NewStopwatch("pkgcache", c.log.Child(logutils.DebugKeyStopwatch))
- pkgCache, err := pkgcache.NewCache(sw, c.log.Child(logutils.DebugKeyPkgCache))
+ pkgCache, err := cache.NewCache(sw, c.log.Child(logutils.DebugKeyPkgCache))
if err != nil {
return fmt.Errorf("failed to build packages cache: %w", err)
}
@@ -218,7 +221,7 @@ func (c *runCommand) preRunE(_ *cobra.Command, args []string) error {
pkgLoader := lint.NewPackageLoader(c.log.Child(logutils.DebugKeyLoader), c.cfg, args, c.goenv, guard)
- c.contextBuilder = lint.NewContextBuilder(c.cfg, pkgLoader, c.fileCache, pkgCache, guard)
+ c.contextBuilder = lint.NewContextBuilder(c.cfg, pkgLoader, pkgCache, guard)
if err = initHashSalt(c.buildInfo.Version, c.cfg); err != nil {
return fmt.Errorf("failed to init hash salt: %w", err)
@@ -239,14 +242,21 @@ func (c *runCommand) execute(_ *cobra.Command, args []string) {
needTrackResources := logutils.IsVerbose() || c.opts.PrintResourcesUsage
trackResourcesEndCh := make(chan struct{})
- defer func() { // XXX: this defer must be before ctx.cancel defer
- if needTrackResources { // wait until resource tracking finished to print properly
+
+ // Note: this defer must be before ctx.cancel defer
+ defer func() {
+ // wait until resource tracking finished to print properly
+ if needTrackResources {
<-trackResourcesEndCh
}
}()
- ctx, cancel := context.WithTimeout(context.Background(), c.cfg.Run.Timeout)
- defer cancel()
+ ctx := context.Background()
+ if c.cfg.Run.Timeout > 0 {
+ var cancel context.CancelFunc
+ ctx, cancel = context.WithTimeout(ctx, c.cfg.Run.Timeout)
+ defer cancel()
+ }
if needTrackResources {
go watchResources(ctx, trackResourcesEndCh, c.log, c.debugf)
@@ -446,8 +456,7 @@ func (c *runCommand) printStats(issues []result.Issue) {
c.cmd.Printf("%d issues:\n", len(issues))
- keys := maps.Keys(stats)
- sort.Strings(keys)
+ keys := slices.Sorted(maps.Keys(stats))
for _, key := range keys {
c.cmd.Printf("* %s: %d\n", key, stats[key])
@@ -640,7 +649,7 @@ func initHashSalt(version string, cfg *config.Config) error {
b := bytes.NewBuffer(binSalt)
b.Write(configSalt)
- cache.SetSalt(b.Bytes())
+ cache.SetSalt(b)
return nil
}
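
A minimal sketch of the "timeout <= 0 disables the deadline" pattern introduced in the run command above (helper name is illustrative): a deadline is attached only when the configured timeout is positive.

package main

import (
	"context"
	"fmt"
	"time"
)

// runContext attaches a deadline only for positive timeouts; otherwise it
// returns a plain background context and a no-op cancel.
func runContext(timeout time.Duration) (context.Context, context.CancelFunc) {
	if timeout > 0 {
		return context.WithTimeout(context.Background(), timeout)
	}
	return context.Background(), func() {}
}

func main() {
	ctx, cancel := runContext(0)
	defer cancel()
	_, hasDeadline := ctx.Deadline()
	fmt.Println(hasDeadline) // false: no deadline when the timeout is disabled
}
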
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/base_rule.go b/vendor/github.com/golangci/golangci-lint/pkg/config/base_rule.go
new file mode 100644
index 0000000000..780c60cd24
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/base_rule.go
@@ -0,0 +1,75 @@
+package config
+
+import (
+ "errors"
+ "fmt"
+ "regexp"
+)
+
+type BaseRule struct {
+ Linters []string
+ Path string
+ PathExcept string `mapstructure:"path-except"`
+ Text string
+ Source string
+
+ // For compatibility with exclude-use-default/include.
+ InternalReference string `mapstructure:"-"`
+}
+
+func (b *BaseRule) Validate(minConditionsCount int) error {
+ if err := validateOptionalRegex(b.Path); err != nil {
+ return fmt.Errorf("invalid path regex: %w", err)
+ }
+
+ if err := validateOptionalRegex(b.PathExcept); err != nil {
+ return fmt.Errorf("invalid path-except regex: %w", err)
+ }
+
+ if err := validateOptionalRegex(b.Text); err != nil {
+ return fmt.Errorf("invalid text regex: %w", err)
+ }
+
+ if err := validateOptionalRegex(b.Source); err != nil {
+ return fmt.Errorf("invalid source regex: %w", err)
+ }
+
+ if b.Path != "" && b.PathExcept != "" {
+ return errors.New("path and path-except should not be set at the same time")
+ }
+
+ nonBlank := 0
+ if len(b.Linters) > 0 {
+ nonBlank++
+ }
+
+ // Filtering by path counts as one condition, regardless of how it is done (one or both).
+ // Otherwise, a rule with Path and PathExcept set would pass validation
+ // whereas before the introduction of path-except that wouldn't have been precise enough.
+ if b.Path != "" || b.PathExcept != "" {
+ nonBlank++
+ }
+
+ if b.Text != "" {
+ nonBlank++
+ }
+
+ if b.Source != "" {
+ nonBlank++
+ }
+
+ if nonBlank < minConditionsCount {
+ return fmt.Errorf("at least %d of (text, source, path[-except], linters) should be set", minConditionsCount)
+ }
+
+ return nil
+}
+
+func validateOptionalRegex(value string) error {
+ if value == "" {
+ return nil
+ }
+
+ _, err := regexp.Compile(value)
+ return err
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/config.go b/vendor/github.com/golangci/golangci-lint/pkg/config/config.go
index 1dd064013a..ee7a62b7e2 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/config.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/config.go
@@ -1,17 +1,26 @@
package config
import (
+ "cmp"
+ "context"
+ "fmt"
"os"
- "regexp"
+ "path/filepath"
+ "slices"
"strings"
hcversion "github.com/hashicorp/go-version"
- "github.com/ldez/gomoddirectives"
+ "github.com/ldez/grignotin/goenv"
+ "github.com/ldez/grignotin/gomod"
+ "golang.org/x/mod/modfile"
)
// Config encapsulates the config data specified in the golangci-lint YAML config file.
type Config struct {
- cfgDir string // The directory containing the golangci-lint config file.
+ cfgDir string // Path to the directory containing the golangci-lint config file.
+ basePath string // Path to the root directory, as determined by [Run.RelativePathMode].
+
+ Version string `mapstructure:"version"` // From v2, to be able to detect v2 config file.
Run Run `mapstructure:"run"`
@@ -26,11 +35,15 @@ type Config struct {
InternalTest bool // Option is used only for testing golangci-lint code, don't use it
}
-// GetConfigDir returns the directory that contains golangci config file.
+// GetConfigDir returns the directory that contains the golangci-lint config file.
func (c *Config) GetConfigDir() string {
return c.cfgDir
}
+func (c *Config) GetBasePath() string {
+ return c.basePath
+}
+
func (c *Config) Validate() error {
validators := []func() error{
c.Run.Validate,
@@ -75,36 +88,94 @@ func IsGoGreaterThanOrEqual(current, limit string) bool {
return v1.GreaterThanOrEqual(l)
}
-func detectGoVersion() string {
- file, _ := gomoddirectives.GetModuleFile()
+func detectGoVersion(ctx context.Context) string {
+ return cmp.Or(detectGoVersionFromGoMod(ctx), "1.17")
+}
+
+// detectGoVersionFromGoMod tries to get Go version from go.mod.
+// It returns `toolchain` version if present,
+// else it returns `go` version if present,
+// else it returns `GOVERSION` version if present,
+// else it returns empty.
+func detectGoVersionFromGoMod(ctx context.Context) string {
+ values, err := goenv.Get(ctx, goenv.GOMOD, goenv.GOVERSION)
+ if err != nil {
+ values = map[string]string{
+ goenv.GOMOD: detectGoModFallback(ctx),
+ }
+ }
+
+ if values[goenv.GOMOD] == "" {
+ return parseGoVersion(values[goenv.GOVERSION])
+ }
+
+ file, err := parseGoMod(values[goenv.GOMOD])
+ if err != nil {
+ return parseGoVersion(values[goenv.GOVERSION])
+ }
+
+ // The toolchain exists only if 'toolchain' version > 'go' version.
+ // If 'toolchain' version <= 'go' version, `go mod tidy` will remove 'toolchain' version from go.mod.
+ if file.Toolchain != nil && file.Toolchain.Name != "" {
+ return parseGoVersion(file.Toolchain.Name)
+ }
- if file != nil && file.Go != nil && file.Go.Version != "" {
+ if file.Go != nil && file.Go.Version != "" {
return file.Go.Version
}
- v := os.Getenv("GOVERSION")
- if v != "" {
- return v
+ return parseGoVersion(values[goenv.GOVERSION])
+}
+
+func parseGoVersion(v string) string {
+ raw := strings.TrimPrefix(v, "go")
+
+ // prerelease version (ex: go1.24rc1)
+ idx := strings.IndexFunc(raw, func(r rune) bool {
+ return (r < '0' || r > '9') && r != '.'
+ })
+
+ if idx != -1 {
+ raw = raw[:idx]
+ }
+
+ return raw
+}
+
+func parseGoMod(goMod string) (*modfile.File, error) {
+ raw, err := os.ReadFile(filepath.Clean(goMod))
+ if err != nil {
+ return nil, fmt.Errorf("reading go.mod file: %w", err)
}
- return "1.17"
+ return modfile.Parse("go.mod", raw, nil)
}
-// Trims the Go version to keep only M.m.
-// Since Go 1.21 the version inside the go.mod can be a patched version (ex: 1.21.0).
-// The version can also include information which we want to remove (ex: 1.21alpha1)
-// https://go.dev/doc/toolchain#versions
-// This a problem with staticcheck and gocritic.
-func trimGoVersion(v string) string {
- if v == "" {
+func detectGoModFallback(ctx context.Context) string {
+ info, err := gomod.GetModuleInfo(ctx)
+ if err != nil {
+ return ""
+ }
+
+ wd, err := os.Getwd()
+ if err != nil {
return ""
}
- exp := regexp.MustCompile(`(\d\.\d+)(?:\.\d+|[a-z]+\d)`)
+ slices.SortFunc(info, func(a, b gomod.ModInfo) int {
+ return cmp.Compare(len(b.Path), len(a.Path))
+ })
+
+ goMod := info[0]
+ for _, m := range info {
+ if !strings.HasPrefix(wd, m.Dir) {
+ continue
+ }
+
+ goMod = m
- if exp.MatchString(v) {
- return exp.FindStringSubmatch(v)[1]
+ break
}
- return v
+ return goMod.GoMod
}
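
A standalone sketch (illustrative name) of the version trimming performed by parseGoVersion above: strip the "go" prefix and cut at the first rune that is neither a digit nor a dot, so pre-release suffixes such as "rc1" drop away.

package main

import (
	"fmt"
	"strings"
)

// goVersion reduces toolchain strings like "go1.24rc1" to "1.24" while
// leaving patched versions like "go1.23.4" as "1.23.4".
func goVersion(v string) string {
	raw := strings.TrimPrefix(v, "go")
	idx := strings.IndexFunc(raw, func(r rune) bool {
		return (r < '0' || r > '9') && r != '.'
	})
	if idx != -1 {
		raw = raw[:idx]
	}
	return raw
}

func main() {
	fmt.Println(goVersion("go1.24rc1")) // 1.24
	fmt.Println(goVersion("go1.23.4"))  // 1.23.4
}
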
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go b/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go
index 2ee9364aaa..d5b6650f95 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go
@@ -1,13 +1,9 @@
package config
import (
- "errors"
"fmt"
- "regexp"
)
-const excludeRuleMinConditionsCount = 2
-
var DefaultExcludePatterns = []ExcludePattern{
{
ID: "EXC0001",
@@ -117,17 +113,19 @@ type Issues struct {
UseDefaultExcludeDirs bool `mapstructure:"exclude-dirs-use-default"`
- MaxIssuesPerLinter int `mapstructure:"max-issues-per-linter"`
- MaxSameIssues int `mapstructure:"max-same-issues"`
+ MaxIssuesPerLinter int `mapstructure:"max-issues-per-linter"`
+ MaxSameIssues int `mapstructure:"max-same-issues"`
+ UniqByLine bool `mapstructure:"uniq-by-line"`
DiffFromRevision string `mapstructure:"new-from-rev"`
+ DiffFromMergeBase string `mapstructure:"new-from-merge-base"`
DiffPatchFilePath string `mapstructure:"new-from-patch"`
WholeFiles bool `mapstructure:"whole-files"`
Diff bool `mapstructure:"new"`
NeedFix bool `mapstructure:"fix"`
- ExcludeGeneratedStrict bool `mapstructure:"exclude-generated-strict"` // Deprecated: use ExcludeGenerated instead.
+ ExcludeGeneratedStrict *bool `mapstructure:"exclude-generated-strict"` // Deprecated: use ExcludeGenerated instead.
}
func (i *Issues) Validate() error {
@@ -140,79 +138,6 @@ func (i *Issues) Validate() error {
return nil
}
-type ExcludeRule struct {
- BaseRule `mapstructure:",squash"`
-}
-
-func (e *ExcludeRule) Validate() error {
- return e.BaseRule.Validate(excludeRuleMinConditionsCount)
-}
-
-type BaseRule struct {
- Linters []string
- Path string
- PathExcept string `mapstructure:"path-except"`
- Text string
- Source string
-}
-
-func (b *BaseRule) Validate(minConditionsCount int) error {
- if err := validateOptionalRegex(b.Path); err != nil {
- return fmt.Errorf("invalid path regex: %w", err)
- }
-
- if err := validateOptionalRegex(b.PathExcept); err != nil {
- return fmt.Errorf("invalid path-except regex: %w", err)
- }
-
- if err := validateOptionalRegex(b.Text); err != nil {
- return fmt.Errorf("invalid text regex: %w", err)
- }
-
- if err := validateOptionalRegex(b.Source); err != nil {
- return fmt.Errorf("invalid source regex: %w", err)
- }
-
- if b.Path != "" && b.PathExcept != "" {
- return errors.New("path and path-except should not be set at the same time")
- }
-
- nonBlank := 0
- if len(b.Linters) > 0 {
- nonBlank++
- }
-
- // Filtering by path counts as one condition, regardless how it is done (one or both).
- // Otherwise, a rule with Path and PathExcept set would pass validation
- // whereas before the introduction of path-except that wouldn't have been precise enough.
- if b.Path != "" || b.PathExcept != "" {
- nonBlank++
- }
-
- if b.Text != "" {
- nonBlank++
- }
-
- if b.Source != "" {
- nonBlank++
- }
-
- if nonBlank < minConditionsCount {
- return fmt.Errorf("at least %d of (text, source, path[-except], linters) should be set", minConditionsCount)
- }
-
- return nil
-}
-
-func validateOptionalRegex(value string) error {
- if value == "" {
- return nil
- }
-
- _, err := regexp.Compile(value)
- return err
-}
-
type ExcludePattern struct {
ID string
Pattern string
@@ -220,14 +145,6 @@ type ExcludePattern struct {
Why string
}
-func GetDefaultExcludePatternsStrings() []string {
- ret := make([]string, len(DefaultExcludePatterns))
- for i, p := range DefaultExcludePatterns {
- ret[i] = p.Pattern
- }
- return ret
-}
-
// TODO(ldez): this behavior must be changed in v2, because this is confusing.
func GetExcludePatterns(include []string) []ExcludePattern {
includeMap := make(map[string]struct{}, len(include))
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/linters.go b/vendor/github.com/golangci/golangci-lint/pkg/config/linters.go
index 5c2628272c..4814d1eb24 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/linters.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/linters.go
@@ -13,15 +13,21 @@ type Linters struct {
Fast bool
Presets []string
+
+ LinterExclusions LinterExclusions `mapstructure:"exclusions"`
}
func (l *Linters) Validate() error {
- if err := l.validateAllDisableEnableOptions(); err != nil {
- return err
+ validators := []func() error{
+ l.validateAllDisableEnableOptions,
+ l.validateDisabledAndEnabledAtOneMoment,
+ l.LinterExclusions.Validate,
}
- if err := l.validateDisabledAndEnabledAtOneMoment(); err != nil {
- return err
+ for _, v := range validators {
+ if err := v(); err != nil {
+ return err
+ }
}
return nil
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_exclusions.go b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_exclusions.go
new file mode 100644
index 0000000000..3bed6dfc15
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_exclusions.go
@@ -0,0 +1,55 @@
+package config
+
+import (
+ "fmt"
+ "slices"
+)
+
+const (
+ ExclusionPresetComments = "comments"
+ ExclusionPresetStdErrorHandling = "stdErrorHandling"
+ ExclusionPresetCommonFalsePositives = "commonFalsePositives"
+ ExclusionPresetLegacy = "legacy"
+)
+
+const excludeRuleMinConditionsCount = 2
+
+type LinterExclusions struct {
+ Generated string `mapstructure:"generated"`
+ WarnUnused bool `mapstructure:"warn-unused"`
+ Presets []string `mapstructure:"preset"`
+ Rules []ExcludeRule `mapstructure:"rules"`
+ Paths []string `mapstructure:"paths"`
+ PathsExcept []string `mapstructure:"paths-except"`
+}
+
+func (e *LinterExclusions) Validate() error {
+ for i, rule := range e.Rules {
+ if err := rule.Validate(); err != nil {
+ return fmt.Errorf("error in exclude rule #%d: %w", i, err)
+ }
+ }
+
+ allPresets := []string{
+ ExclusionPresetComments,
+ ExclusionPresetStdErrorHandling,
+ ExclusionPresetCommonFalsePositives,
+ ExclusionPresetLegacy,
+ }
+
+ for _, preset := range e.Presets {
+ if !slices.Contains(allPresets, preset) {
+ return fmt.Errorf("invalid preset: %s", preset)
+ }
+ }
+
+ return nil
+}
+
+type ExcludeRule struct {
+ BaseRule `mapstructure:",squash"`
+}
+
+func (e *ExcludeRule) Validate() error {
+ return e.BaseRule.Validate(excludeRuleMinConditionsCount)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go
index b2f4567d49..94650a66de 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go
@@ -47,6 +47,9 @@ var defaultLintersSettings = LintersSettings{
Sections: []string{"standard", "default"},
SkipGenerated: true,
},
+ GoChecksumType: GoChecksumTypeSettings{
+ DefaultSignifiesExhaustive: true,
+ },
Gocognit: GocognitSettings{
MinComplexity: 30,
},
@@ -102,6 +105,7 @@ var defaultLintersSettings = LintersSettings{
Kitlog: true,
Klog: true,
Logr: true,
+ Slog: true,
Zap: true,
RequireStringKey: false,
NoPrintfLike: false,
@@ -122,11 +126,16 @@ var defaultLintersSettings = LintersSettings{
AllowUnused: false,
},
PerfSprint: PerfSprintSettings{
+ IntegerFormat: true,
IntConversion: true,
+ ErrorFormat: true,
ErrError: false,
ErrorF: true,
+ StringFormat: true,
SprintF1: true,
StrConcat: true,
+ BoolFormat: true,
+ HexFormat: true,
},
Prealloc: PreallocSettings{
Simple: true,
@@ -165,7 +174,6 @@ var defaultLintersSettings = LintersSettings{
Unused: UnusedSettings{
FieldWritesAreUses: true,
PostStatementsAreReads: false,
- ExportedIsUsed: true,
ExportedFieldsAreUsed: true,
ParametersAreUsed: true,
LocalVariablesAreUsed: true,
@@ -175,6 +183,15 @@ var defaultLintersSettings = LintersSettings{
HTTPMethod: true,
HTTPStatusCode: true,
},
+ UseTesting: UseTestingSettings{
+ ContextBackground: true,
+ ContextTodo: true,
+ OSChdir: true,
+ OSMkdirTemp: true,
+ OSSetenv: true,
+ OSTempDir: false,
+ OSCreateTemp: true,
+ },
Varnamelen: VarnamelenSettings{
MaxDistance: 5,
MinNameLength: 3,
@@ -211,11 +228,13 @@ type LintersSettings struct {
ErrorLint ErrorLintSettings
Exhaustive ExhaustiveSettings
Exhaustruct ExhaustructSettings
+ Fatcontext FatcontextSettings
Forbidigo ForbidigoSettings
Funlen FunlenSettings
Gci GciSettings
GinkgoLinter GinkgoLinterSettings
Gocognit GocognitSettings
+ GoChecksumType GoChecksumTypeSettings
Goconst GoConstSettings
Gocritic GoCriticSettings
Gocyclo GoCycloSettings
@@ -225,7 +244,6 @@ type LintersSettings struct {
Gofumpt GofumptSettings
Goheader GoHeaderSettings
Goimports GoImportsSettings
- Gomnd GoMndSettings
GoModDirectives GoModDirectivesSettings
Gomodguard GoModGuardSettings
Gosec GoSecSettings
@@ -233,6 +251,7 @@ type LintersSettings struct {
Gosmopolitan GosmopolitanSettings
Govet GovetSettings
Grouper GrouperSettings
+ Iface IfaceSettings
ImportAs ImportAsSettings
Inamedparam INamedParamSettings
InterfaceBloat InterfaceBloatSettings
@@ -257,6 +276,7 @@ type LintersSettings struct {
Promlinter PromlinterSettings
ProtoGetter ProtoGetterSettings
Reassign ReassignSettings
+ Recvcheck RecvcheckSettings
Revive ReviveSettings
RowsErrCheck RowsErrCheckSettings
SlogLint SlogLintSettings
@@ -273,6 +293,7 @@ type LintersSettings struct {
Unparam UnparamSettings
Unused UnusedSettings
UseStdlibVars UseStdlibVarsSettings
+ UseTesting UseTestingSettings
Varnamelen VarnamelenSettings
Whitespace WhitespaceSettings
Wrapcheck WrapcheckSettings
@@ -314,8 +335,10 @@ type BiDiChkSettings struct {
}
type CopyLoopVarSettings struct {
- IgnoreAlias bool `mapstructure:"ignore-alias"` // Deprecated: use CheckAlias
- CheckAlias bool `mapstructure:"check-alias"`
+ CheckAlias bool `mapstructure:"check-alias"`
+
+ // Deprecated: use CheckAlias
+ IgnoreAlias *bool `mapstructure:"ignore-alias"`
}
type Cyclop struct {
@@ -413,6 +436,10 @@ type ExhaustructSettings struct {
Exclude []string `mapstructure:"exclude"`
}
+type FatcontextSettings struct {
+ CheckStructPointers bool `mapstructure:"check-struct-pointers"`
+}
+
type ForbidigoSettings struct {
Forbid []ForbidigoPattern `mapstructure:"forbid"`
ExcludeGodocExamples bool `mapstructure:"exclude-godoc-examples"`
@@ -462,9 +489,12 @@ type FunlenSettings struct {
}
type GciSettings struct {
- Sections []string `mapstructure:"sections"`
- SkipGenerated bool `mapstructure:"skip-generated"`
- CustomOrder bool `mapstructure:"custom-order"`
+ Sections []string `mapstructure:"sections"`
+ NoInlineComments bool `mapstructure:"no-inline-comments"`
+ NoPrefixComments bool `mapstructure:"no-prefix-comments"`
+ SkipGenerated bool `mapstructure:"skip-generated"`
+ CustomOrder bool `mapstructure:"custom-order"`
+ NoLexOrder bool `mapstructure:"no-lex-order"`
// Deprecated: use Sections instead.
LocalPrefixes string `mapstructure:"local-prefixes"`
@@ -482,6 +512,12 @@ type GinkgoLinterSettings struct {
ForceExpectTo bool `mapstructure:"force-expect-to"`
ValidateAsyncIntervals bool `mapstructure:"validate-async-intervals"`
ForbidSpecPollution bool `mapstructure:"forbid-spec-pollution"`
+ ForceSucceedForFuncs bool `mapstructure:"force-succeed"`
+}
+
+type GoChecksumTypeSettings struct {
+ DefaultSignifiesExhaustive bool `mapstructure:"default-signifies-exhaustive"`
+ IncludeSharedInterfaces bool `mapstructure:"include-shared-interfaces"`
}
type GocognitSettings struct {
@@ -524,7 +560,7 @@ type GodotSettings struct {
Period bool `mapstructure:"period"`
// Deprecated: use Scope instead
- CheckAll bool `mapstructure:"check-all"`
+ CheckAll *bool `mapstructure:"check-all"`
}
type GodoxSettings struct {
@@ -559,19 +595,16 @@ type GoImportsSettings struct {
LocalPrefixes string `mapstructure:"local-prefixes"`
}
-// Deprecated: use MndSettings.
-type GoMndSettings struct {
- MndSettings `mapstructure:",squash"`
-
- // Deprecated: use root level settings instead.
- Settings map[string]map[string]any
-}
-
type GoModDirectivesSettings struct {
ReplaceAllowList []string `mapstructure:"replace-allow-list"`
ReplaceLocal bool `mapstructure:"replace-local"`
ExcludeForbidden bool `mapstructure:"exclude-forbidden"`
RetractAllowNoExplanation bool `mapstructure:"retract-allow-no-explanation"`
+ ToolchainForbidden bool `mapstructure:"toolchain-forbidden"`
+ ToolchainPattern string `mapstructure:"toolchain-pattern"`
+ ToolForbidden bool `mapstructure:"tool-forbidden"`
+ GoDebugForbidden bool `mapstructure:"go-debug-forbidden"`
+ GoVersionPattern string `mapstructure:"go-version-pattern"`
}
type GoModGuardSettings struct {
@@ -620,7 +653,7 @@ type GovetSettings struct {
Settings map[string]map[string]any
// Deprecated: the linter should be enabled inside Enable.
- CheckShadowing bool `mapstructure:"check-shadowing"`
+ CheckShadowing *bool `mapstructure:"check-shadowing"`
}
func (cfg *GovetSettings) Validate() error {
@@ -647,6 +680,11 @@ type GrouperSettings struct {
VarRequireGrouping bool `mapstructure:"var-require-grouping"`
}
+type IfaceSettings struct {
+ Enable []string `mapstructure:"enable"`
+ Settings map[string]map[string]any `mapstructure:"settings"`
+}
+
type ImportAsSettings struct {
Alias []ImportAsAlias
NoUnaliased bool `mapstructure:"no-unaliased"`
@@ -680,6 +718,7 @@ type LoggerCheckSettings struct {
Kitlog bool `mapstructure:"kitlog"`
Klog bool `mapstructure:"klog"`
Logr bool `mapstructure:"logr"`
+ Slog bool `mapstructure:"slog"`
Zap bool `mapstructure:"zap"`
RequireStringKey bool `mapstructure:"require-string-key"`
NoPrintfLike bool `mapstructure:"no-printf-like"`
@@ -716,7 +755,7 @@ type MustTagSettings struct {
}
type NakedretSettings struct {
- MaxFuncLines int `mapstructure:"max-func-lines"`
+ MaxFuncLines uint `mapstructure:"max-func-lines"`
}
type NestifSettings struct {
@@ -724,7 +763,8 @@ type NestifSettings struct {
}
type NilNilSettings struct {
- CheckedTypes []string `mapstructure:"checked-types"`
+ DetectOpposite bool `mapstructure:"detect-opposite"`
+ CheckedTypes []string `mapstructure:"checked-types"`
}
type NlreturnSettings struct {
@@ -756,11 +796,19 @@ type ParallelTestSettings struct {
}
type PerfSprintSettings struct {
+ IntegerFormat bool `mapstructure:"integer-format"`
IntConversion bool `mapstructure:"int-conversion"`
- ErrError bool `mapstructure:"err-error"`
- ErrorF bool `mapstructure:"errorf"`
- SprintF1 bool `mapstructure:"sprintf1"`
- StrConcat bool `mapstructure:"strconcat"`
+
+ ErrorFormat bool `mapstructure:"error-format"`
+ ErrError bool `mapstructure:"err-error"`
+ ErrorF bool `mapstructure:"errorf"`
+
+ StringFormat bool `mapstructure:"string-format"`
+ SprintF1 bool `mapstructure:"sprintf1"`
+ StrConcat bool `mapstructure:"strconcat"`
+
+ BoolFormat bool `mapstructure:"bool-format"`
+ HexFormat bool `mapstructure:"hex-format"`
}
type PreallocSettings struct {
@@ -790,9 +838,15 @@ type ReassignSettings struct {
Patterns []string `mapstructure:"patterns"`
}
+type RecvcheckSettings struct {
+ DisableBuiltin bool `mapstructure:"disable-builtin"`
+ Exclusions []string `mapstructure:"exclusions"`
+}
+
type ReviveSettings struct {
- MaxOpenFiles int `mapstructure:"max-open-files"`
- IgnoreGeneratedHeader bool `mapstructure:"ignore-generated-header"`
+ Go string `mapstructure:"-"`
+ MaxOpenFiles int `mapstructure:"max-open-files"`
+ IgnoreGeneratedHeader bool `mapstructure:"ignore-generated-header"`
Confidence float64
Severity string
EnableAllRules bool `mapstructure:"enable-all-rules"`
@@ -828,7 +882,7 @@ type SlogLintSettings struct {
ArgsOnSepLines bool `mapstructure:"args-on-sep-lines"`
// Deprecated: use Context instead.
- ContextOnly bool `mapstructure:"context-only"`
+ ContextOnly *bool `mapstructure:"context-only"`
}
type SpancheckSettings struct {
@@ -859,10 +913,31 @@ type TagAlignSettings struct {
}
type TagliatelleSettings struct {
- Case struct {
- Rules map[string]string
- UseFieldName bool `mapstructure:"use-field-name"`
- }
+ Case TagliatelleCase
+}
+
+type TagliatelleCase struct {
+ TagliatelleBase `mapstructure:",squash"`
+ Overrides []TagliatelleOverrides
+}
+
+type TagliatelleOverrides struct {
+ TagliatelleBase `mapstructure:",squash"`
+ Package string `mapstructure:"pkg"`
+ Ignore bool `mapstructure:"ignore"`
+}
+
+type TagliatelleBase struct {
+ Rules map[string]string `mapstructure:"rules"`
+ ExtendedRules map[string]TagliatelleExtendedRule `mapstructure:"extended-rules"`
+ UseFieldName bool `mapstructure:"use-field-name"`
+ IgnoredFields []string `mapstructure:"ignored-fields"`
+}
+
+type TagliatelleExtendedRule struct {
+ Case string
+ ExtraInitialisms bool
+ InitialismOverrides map[string]bool
}
type TestifylintSettings struct {
@@ -879,6 +954,11 @@ type TestifylintSettings struct {
ExpVarPattern string `mapstructure:"pattern"`
} `mapstructure:"expected-actual"`
+ Formatter struct {
+ CheckFormatString *bool `mapstructure:"check-format-string"`
+ RequireFFuncs bool `mapstructure:"require-f-funcs"`
+ } `mapstructure:"formatter"`
+
GoRequire struct {
IgnoreHTTPHandlers bool `mapstructure:"ignore-http-handlers"`
} `mapstructure:"go-require"`
@@ -922,11 +1002,24 @@ type UseStdlibVarsSettings struct {
TimeLayout bool `mapstructure:"time-layout"`
CryptoHash bool `mapstructure:"crypto-hash"`
DefaultRPCPath bool `mapstructure:"default-rpc-path"`
- OSDevNull bool `mapstructure:"os-dev-null"` // Deprecated
SQLIsolationLevel bool `mapstructure:"sql-isolation-level"`
TLSSignatureScheme bool `mapstructure:"tls-signature-scheme"`
ConstantKind bool `mapstructure:"constant-kind"`
- SyslogPriority bool `mapstructure:"syslog-priority"` // Deprecated
+
+ // Deprecated
+ OSDevNull *bool `mapstructure:"os-dev-null"`
+ // Deprecated
+ SyslogPriority *bool `mapstructure:"syslog-priority"`
+}
+
+type UseTestingSettings struct {
+ ContextBackground bool `mapstructure:"context-background"`
+ ContextTodo bool `mapstructure:"context-todo"`
+ OSChdir bool `mapstructure:"os-chdir"`
+ OSMkdirTemp bool `mapstructure:"os-mkdir-temp"`
+ OSSetenv bool `mapstructure:"os-setenv"`
+ OSTempDir bool `mapstructure:"os-temp-dir"`
+ OSCreateTemp bool `mapstructure:"os-create-temp"`
}
type UnconvertSettings struct {
@@ -942,11 +1035,13 @@ type UnparamSettings struct {
type UnusedSettings struct {
FieldWritesAreUses bool `mapstructure:"field-writes-are-uses"`
PostStatementsAreReads bool `mapstructure:"post-statements-are-reads"`
- ExportedIsUsed bool `mapstructure:"exported-is-used"`
ExportedFieldsAreUsed bool `mapstructure:"exported-fields-are-used"`
ParametersAreUsed bool `mapstructure:"parameters-are-used"`
LocalVariablesAreUsed bool `mapstructure:"local-variables-are-used"`
GeneratedIsUsed bool `mapstructure:"generated-is-used"`
+
+ // Deprecated
+ ExportedIsUsed *bool `mapstructure:"exported-is-used"`
}
type VarnamelenSettings struct {
@@ -968,6 +1063,7 @@ type WhitespaceSettings struct {
}
type WrapcheckSettings struct {
+ ExtraIgnoreSigs []string `mapstructure:"extra-ignore-sigs"`
// TODO(ldez): v2 the options must be renamed to use hyphen.
IgnoreSigs []string `mapstructure:"ignoreSigs"`
IgnoreSigRegexps []string `mapstructure:"ignoreSigRegexps"`
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/loader.go b/vendor/github.com/golangci/golangci-lint/pkg/config/loader.go
index ea4cae2d55..dc9ceeadd1 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/loader.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/loader.go
@@ -1,6 +1,8 @@
package config
import (
+ "cmp"
+ "context"
"errors"
"fmt"
"os"
@@ -14,6 +16,7 @@ import (
"github.com/golangci/golangci-lint/pkg/exitcodes"
"github.com/golangci/golangci-lint/pkg/fsutils"
+ "github.com/golangci/golangci-lint/pkg/goutil"
"github.com/golangci/golangci-lint/pkg/logutils"
)
@@ -65,6 +68,26 @@ func (l *Loader) Load(opts LoadOptions) error {
l.applyStringSliceHack()
+ if l.cfg.Linters.LinterExclusions.Generated == "" {
+ // `l.cfg.Issues.ExcludeGenerated` is always non-empty because of the flag default value.
+ l.cfg.Linters.LinterExclusions.Generated = cmp.Or(l.cfg.Issues.ExcludeGenerated, "strict")
+ }
+
+ // Compatibility layer with v1.
+ // TODO(ldez): should be removed in v2.
+ if l.cfg.Issues.UseDefaultExcludes {
+ l.cfg.Linters.LinterExclusions.Presets = []string{
+ ExclusionPresetComments,
+ ExclusionPresetStdErrorHandling,
+ ExclusionPresetCommonFalsePositives,
+ ExclusionPresetLegacy,
+ }
+ }
+
+ if len(l.cfg.Issues.ExcludeRules) > 0 {
+ l.cfg.Linters.LinterExclusions.Rules = append(l.cfg.Linters.LinterExclusions.Rules, l.cfg.Issues.ExcludeRules...)
+ }
+
if opts.CheckDeprecation {
err = l.handleDeprecation()
if err != nil {
@@ -74,6 +97,16 @@ func (l *Loader) Load(opts LoadOptions) error {
l.handleGoVersion()
+ err = goutil.CheckGoVersion(l.cfg.Run.Go)
+ if err != nil {
+ return err
+ }
+
+ l.cfg.basePath, err = fsutils.GetBasePath(context.Background(), l.cfg.Run.RelativePathMode, l.cfg.cfgDir)
+ if err != nil {
+ return fmt.Errorf("get base path: %w", err)
+ }
+
err = l.handleEnableOnlyOption()
if err != nil {
return err
@@ -279,31 +312,20 @@ func (l *Loader) appendStringSlice(name string, current *[]string) {
func (l *Loader) handleGoVersion() {
if l.cfg.Run.Go == "" {
- l.cfg.Run.Go = detectGoVersion()
+ l.cfg.Run.Go = detectGoVersion(context.Background())
}
l.cfg.LintersSettings.Govet.Go = l.cfg.Run.Go
l.cfg.LintersSettings.ParallelTest.Go = l.cfg.Run.Go
- if l.cfg.LintersSettings.Gofumpt.LangVersion == "" {
- l.cfg.LintersSettings.Gofumpt.LangVersion = l.cfg.Run.Go
- }
+ l.cfg.LintersSettings.Gofumpt.LangVersion = cmp.Or(l.cfg.LintersSettings.Gofumpt.LangVersion, l.cfg.Run.Go)
- trimmedGoVersion := trimGoVersion(l.cfg.Run.Go)
+ trimmedGoVersion := goutil.TrimGoVersion(l.cfg.Run.Go)
- l.cfg.LintersSettings.Gocritic.Go = trimmedGoVersion
+ l.cfg.LintersSettings.Revive.Go = trimmedGoVersion
- // staticcheck related linters.
- if l.cfg.LintersSettings.Staticcheck.GoVersion == "" {
- l.cfg.LintersSettings.Staticcheck.GoVersion = trimmedGoVersion
- }
- if l.cfg.LintersSettings.Gosimple.GoVersion == "" {
- l.cfg.LintersSettings.Gosimple.GoVersion = trimmedGoVersion
- }
- if l.cfg.LintersSettings.Stylecheck.GoVersion == "" {
- l.cfg.LintersSettings.Stylecheck.GoVersion = trimmedGoVersion
- }
+ l.cfg.LintersSettings.Gocritic.Go = trimmedGoVersion
os.Setenv("GOSECGOVERSION", l.cfg.Run.Go)
}
@@ -325,19 +347,23 @@ func (l *Loader) handleDeprecation() error {
l.cfg.Issues.ExcludeDirs = l.cfg.Run.SkipDirs
}
- // The 2 options are true by default.
// Deprecated since v1.57.0
- if !l.cfg.Run.UseDefaultSkipDirs {
+ if l.cfg.Run.UseDefaultSkipDirs != nil {
l.log.Warnf("The configuration option `run.skip-dirs-use-default` is deprecated, please use `issues.exclude-dirs-use-default`.")
+ l.cfg.Issues.UseDefaultExcludeDirs = *l.cfg.Run.UseDefaultSkipDirs
}
- l.cfg.Issues.UseDefaultExcludeDirs = l.cfg.Run.UseDefaultSkipDirs && l.cfg.Issues.UseDefaultExcludeDirs
- // The 2 options are false by default.
// Deprecated since v1.57.0
- if l.cfg.Run.ShowStats {
+ if l.cfg.Run.ShowStats != nil {
l.log.Warnf("The configuration option `run.show-stats` is deprecated, please use `output.show-stats`")
+ l.cfg.Output.ShowStats = *l.cfg.Run.ShowStats
+ }
+
+ // Deprecated since v1.63.0
+ if l.cfg.Output.UniqByLine != nil {
+ l.log.Warnf("The configuration option `output.uniq-by-line` is deprecated, please use `issues.uniq-by-line`")
+ l.cfg.Issues.UniqByLine = *l.cfg.Output.UniqByLine
}
- l.cfg.Output.ShowStats = l.cfg.Run.ShowStats || l.cfg.Output.ShowStats
// Deprecated since v1.57.0
if l.cfg.Output.Format != "" {
@@ -360,9 +386,11 @@ func (l *Loader) handleDeprecation() error {
}
// Deprecated since v1.59.0
- if l.cfg.Issues.ExcludeGeneratedStrict {
+ if l.cfg.Issues.ExcludeGeneratedStrict != nil {
l.log.Warnf("The configuration option `issues.exclude-generated-strict` is deprecated, please use `issues.exclude-generated`")
- l.cfg.Issues.ExcludeGenerated = "strict" // Don't use the constants to avoid cyclic dependencies.
+ if !*l.cfg.Issues.ExcludeGeneratedStrict {
+ l.cfg.Issues.ExcludeGenerated = "strict" // Don't use the constants to avoid cyclic dependencies.
+ }
}
l.handleLinterOptionDeprecations()
@@ -370,16 +398,15 @@ func (l *Loader) handleDeprecation() error {
return nil
}
-//nolint:gocyclo // the complexity cannot be reduced.
func (l *Loader) handleLinterOptionDeprecations() {
// Deprecated since v1.57.0,
// but it was unofficially deprecated since v1.19 (2019) (https://github.com/golangci/golangci-lint/pull/697).
- if l.cfg.LintersSettings.Govet.CheckShadowing {
+ if l.cfg.LintersSettings.Govet.CheckShadowing != nil {
l.log.Warnf("The configuration option `linters.govet.check-shadowing` is deprecated. " +
"Please enable `shadow` instead, if you are not using `enable-all`.")
}
- if l.cfg.LintersSettings.CopyLoopVar.IgnoreAlias {
+ if l.cfg.LintersSettings.CopyLoopVar.IgnoreAlias != nil {
l.log.Warnf("The configuration option `linters.copyloopvar.ignore-alias` is deprecated and ignored," +
"please use `linters.copyloopvar.check-alias`.")
}
@@ -401,16 +428,10 @@ func (l *Loader) handleLinterOptionDeprecations() {
}
// Deprecated since v1.33.0.
- if l.cfg.LintersSettings.Godot.CheckAll {
+ if l.cfg.LintersSettings.Godot.CheckAll != nil {
l.log.Warnf("The configuration option `linters.godot.check-all` is deprecated, please use `linters.godot.scope: all`.")
}
- // Deprecated since v1.44.0.
- if len(l.cfg.LintersSettings.Gomnd.Settings) > 0 {
- l.log.Warnf("The configuration option `linters.gomnd.settings` is deprecated. Please use the options " +
- "`linters.gomnd.checks`,`linters.gomnd.ignored-numbers`,`linters.gomnd.ignored-files`,`linters.gomnd.ignored-functions`.")
- }
-
// Deprecated since v1.47.0
if l.cfg.LintersSettings.Gofumpt.LangVersion != "" {
l.log.Warnf("The configuration option `linters.gofumpt.lang-version` is deprecated, please use global `run.go`.")
@@ -431,21 +452,24 @@ func (l *Loader) handleLinterOptionDeprecations() {
l.log.Warnf("The configuration option `linters.stylecheck.go` is deprecated, please use global `run.go`.")
}
+ // Deprecated since v1.60.0
+ if l.cfg.LintersSettings.Unused.ExportedIsUsed != nil {
+ l.log.Warnf("The configuration option `linters.unused.exported-is-used` is deprecated.")
+ }
+
// Deprecated since v1.58.0
- if l.cfg.LintersSettings.SlogLint.ContextOnly {
+ if l.cfg.LintersSettings.SlogLint.ContextOnly != nil {
l.log.Warnf("The configuration option `linters.sloglint.context-only` is deprecated, please use `linters.sloglint.context`.")
- if l.cfg.LintersSettings.SlogLint.Context == "" {
- l.cfg.LintersSettings.SlogLint.Context = "all"
- }
+ l.cfg.LintersSettings.SlogLint.Context = cmp.Or(l.cfg.LintersSettings.SlogLint.Context, "all")
}
// Deprecated since v1.51.0
- if l.cfg.LintersSettings.UseStdlibVars.OSDevNull {
+ if l.cfg.LintersSettings.UseStdlibVars.OSDevNull != nil {
l.log.Warnf("The configuration option `linters.usestdlibvars.os-dev-null` is deprecated.")
}
// Deprecated since v1.51.0
- if l.cfg.LintersSettings.UseStdlibVars.SyslogPriority {
+ if l.cfg.LintersSettings.UseStdlibVars.SyslogPriority != nil {
l.log.Warnf("The configuration option `linters.usestdlibvars.syslog-priority` is deprecated.")
}
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/output.go b/vendor/github.com/golangci/golangci-lint/pkg/config/output.go
index 592e293e0b..caddb095c9 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/output.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/output.go
@@ -16,7 +16,8 @@ const (
OutFormatCheckstyle = "checkstyle"
OutFormatCodeClimate = "code-climate"
OutFormatHTML = "html"
- OutFormatJunitXML = "junit-xml"
+ OutFormatJUnitXML = "junit-xml"
+ OutFormatJUnitXMLExtended = "junit-xml-extended"
OutFormatGithubActions = "github-actions" // Deprecated
OutFormatTeamCity = "teamcity"
OutFormatSarif = "sarif"
@@ -31,7 +32,8 @@ var AllOutputFormats = []string{
OutFormatCheckstyle,
OutFormatCodeClimate,
OutFormatHTML,
- OutFormatJunitXML,
+ OutFormatJUnitXML,
+ OutFormatJUnitXMLExtended,
OutFormatGithubActions,
OutFormatTeamCity,
OutFormatSarif,
@@ -41,7 +43,6 @@ type Output struct {
Formats OutputFormats `mapstructure:"formats"`
PrintIssuedLine bool `mapstructure:"print-issued-lines"`
PrintLinterName bool `mapstructure:"print-linter-name"`
- UniqByLine bool `mapstructure:"uniq-by-line"`
SortResults bool `mapstructure:"sort-results"`
SortOrder []string `mapstructure:"sort-order"`
PathPrefix string `mapstructure:"path-prefix"`
@@ -49,6 +50,9 @@ type Output struct {
// Deprecated: use Formats instead.
Format string `mapstructure:"format"`
+
+ // Deprecated: use [Issues.UniqByLine] instead.
+ UniqByLine *bool `mapstructure:"uniq-by-line"`
}
func (o *Output) Validate() error {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/run.go b/vendor/github.com/golangci/golangci-lint/pkg/config/run.go
index 2f6523c0b9..8e00f1e6dc 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/run.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/run.go
@@ -5,6 +5,8 @@ import (
"slices"
"strings"
"time"
+
+ "github.com/golangci/golangci-lint/pkg/fsutils"
)
// Run encapsulates the config options for running the linter analysis.
@@ -15,6 +17,8 @@ type Run struct {
Go string `mapstructure:"go"`
+ RelativePathMode string `mapstructure:"relative-path-mode"`
+
BuildTags []string `mapstructure:"build-tags"`
ModulesDownloadMode string `mapstructure:"modules-download-mode"`
@@ -29,18 +33,24 @@ type Run struct {
// Deprecated: use Issues.ExcludeDirs instead.
SkipDirs []string `mapstructure:"skip-dirs"`
// Deprecated: use Issues.UseDefaultExcludeDirs instead.
- UseDefaultSkipDirs bool `mapstructure:"skip-dirs-use-default"`
+ UseDefaultSkipDirs *bool `mapstructure:"skip-dirs-use-default"`
// Deprecated: use Output.ShowStats instead.
- ShowStats bool `mapstructure:"show-stats"`
+ ShowStats *bool `mapstructure:"show-stats"`
}
func (r *Run) Validate() error {
// go help modules
- allowedMods := []string{"mod", "readonly", "vendor"}
+ allowedModes := []string{"mod", "readonly", "vendor"}
+
+ if r.ModulesDownloadMode != "" && !slices.Contains(allowedModes, r.ModulesDownloadMode) {
+ return fmt.Errorf("invalid modules download path %s, only (%s) allowed", r.ModulesDownloadMode, strings.Join(allowedModes, "|"))
+ }
+
+ pathRelativeToModes := fsutils.AllRelativePathModes()
- if r.ModulesDownloadMode != "" && !slices.Contains(allowedMods, r.ModulesDownloadMode) {
- return fmt.Errorf("invalid modules download path %s, only (%s) allowed", r.ModulesDownloadMode, strings.Join(allowedMods, "|"))
+ if r.RelativePathMode != "" && !slices.Contains(pathRelativeToModes, r.RelativePathMode) {
+ return fmt.Errorf("invalid relative path mode %s, only (%s) allowed", r.RelativePathMode, strings.Join(pathRelativeToModes, "|"))
}
return nil
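
The reworked Run.Validate checks both the modules download mode and the new relative-path-mode against an allow-list. A short sketch mirroring the shape of that check (the function and message here are illustrative, not the vendored code):

package main

import (
	"fmt"
	"slices"
	"strings"
)

// validateMode accepts an empty value and otherwise requires membership
// in the allow-list, producing the same style of error message.
func validateMode(value string, allowed []string) error {
	if value != "" && !slices.Contains(allowed, value) {
		return fmt.Errorf("invalid relative path mode %s, only (%s) allowed", value, strings.Join(allowed, "|"))
	}
	return nil
}

func main() {
	allowed := []string{"gomod", "gitroot", "cfg", "wd"}
	fmt.Println(validateMode("gitroot", allowed)) // <nil>
	fmt.Println(validateMode("repo", allowed))    // invalid relative path mode repo, only (gomod|gitroot|cfg|wd) allowed
}
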
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/basepath.go b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/basepath.go
new file mode 100644
index 0000000000..97d6aced11
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/basepath.go
@@ -0,0 +1,77 @@
+package fsutils
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "fmt"
+ "os/exec"
+ "path/filepath"
+
+ "github.com/ldez/grignotin/goenv"
+)
+
+// Relative path modes.
+const (
+ RelativePathModeGoMod = "gomod"
+ RelativePathModeGitRoot = "gitroot"
+ RelativePathModeCfg = "cfg"
+ RelativePathModeWd = "wd"
+)
+
+func AllRelativePathModes() []string {
+ return []string{RelativePathModeGoMod, RelativePathModeGitRoot, RelativePathModeCfg, RelativePathModeWd}
+}
+
+func GetBasePath(ctx context.Context, mode, cfgDir string) (string, error) {
+ if mode == "" {
+ // TODO(ldez): v2 the default should be cfg or gomod.
+ mode = RelativePathModeWd
+ }
+
+ switch mode {
+ case RelativePathModeCfg:
+ if cfgDir == "" {
+ return GetBasePath(ctx, RelativePathModeWd, cfgDir)
+ }
+
+ return cfgDir, nil
+
+ case RelativePathModeGoMod:
+ goMod, err := goenv.GetOne(ctx, goenv.GOMOD)
+ if err != nil {
+ return "", fmt.Errorf("get go.mod path: %w", err)
+ }
+
+ return filepath.Dir(goMod), nil
+
+ case RelativePathModeGitRoot:
+ root, err := gitRoot(ctx)
+ if err != nil {
+ return "", fmt.Errorf("get git root: %w", err)
+ }
+
+ return root, nil
+
+ case RelativePathModeWd:
+ wd, err := Getwd()
+ if err != nil {
+ return "", fmt.Errorf("get wd: %w", err)
+ }
+
+ return wd, nil
+
+ default:
+ return "", errors.New("unknown relative path mode")
+ }
+}
+
+func gitRoot(ctx context.Context) (string, error) {
+ cmd := exec.CommandContext(ctx, "git", "rev-parse", "--show-toplevel")
+ out, err := cmd.Output()
+ if err != nil {
+ return "", err
+ }
+
+ return string(bytes.TrimSpace(out)), nil
+}
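
GetBasePath in the new basepath.go resolves the directory that reported paths are made relative to. For the "gomod" mode it asks the go tool for the module file and takes its directory; a rough standalone approximation (shelling out to `go env GOMOD` instead of the vendored goenv helper) might look like this:

package main

import (
	"bytes"
	"fmt"
	"os/exec"
	"path/filepath"
)

// goModDir approximates the "gomod" relative-path mode: ask the go tool
// for the module file path and return its directory.
func goModDir() (string, error) {
	out, err := exec.Command("go", "env", "GOMOD").Output()
	if err != nil {
		return "", fmt.Errorf("get go.mod path: %w", err)
	}

	goMod := string(bytes.TrimSpace(out))
	if goMod == "" || goMod == "/dev/null" || goMod == "NUL" {
		return "", fmt.Errorf("not inside a module")
	}

	return filepath.Dir(goMod), nil
}

func main() {
	dir, err := goModDir()
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println("base path:", dir)
}
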
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils.go b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils.go
index 80bb9c5b44..ead18a5378 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils.go
@@ -34,13 +34,13 @@ func Getwd() (string, error) {
return
}
- evaledWd, err := EvalSymlinks(cachedWd)
+ evaluatedWd, err := EvalSymlinks(cachedWd)
if err != nil {
cachedWd, cachedWdError = "", fmt.Errorf("can't eval symlinks on wd %s: %w", cachedWd, err)
return
}
- cachedWd = evaledWd
+ cachedWd = evaluatedWd
})
return cachedWd, cachedWdError
@@ -61,7 +61,7 @@ func EvalSymlinks(path string) (string, error) {
}
var er evalSymlinkRes
- er.path, er.err = filepath.EvalSymlinks(path)
+ er.path, er.err = evalSymlinks(path)
evalSymlinkCache.Store(path, er)
return er.path, er.err
@@ -76,15 +76,15 @@ func ShortestRelPath(path, wd string) (string, error) {
}
}
- evaledPath, err := EvalSymlinks(path)
+ evaluatedPath, err := EvalSymlinks(path)
if err != nil {
return "", fmt.Errorf("can't eval symlinks for path %s: %w", path, err)
}
- path = evaledPath
+ path = evaluatedPath
// make path absolute and then relative to be able to fix this case:
- // we are in /test dir, we want to normalize ../test, and have file file.go in this dir;
- // it must have normalized path file.go, not ../test/file.go,
+ // we are in `/test` dir, we want to normalize `../test`, and have file `file.go` in this dir;
+ // it must have normalized path `file.go`, not `../test/file.go`,
var absPath string
if filepath.IsAbs(path) {
absPath = path
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils_unix.go b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils_unix.go
new file mode 100644
index 0000000000..68e762cf4b
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils_unix.go
@@ -0,0 +1,9 @@
+//go:build !windows
+
+package fsutils
+
+import "path/filepath"
+
+func evalSymlinks(path string) (string, error) {
+ return filepath.EvalSymlinks(path)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils_windows.go b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils_windows.go
new file mode 100644
index 0000000000..19efb1cfc2
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils_windows.go
@@ -0,0 +1,39 @@
+//go:build windows
+
+package fsutils
+
+import (
+ "errors"
+ "os"
+ "path/filepath"
+ "syscall"
+)
+
+// This is a workaround for the behavior of [filepath.EvalSymlinks],
+// which fails with [syscall.ENOTDIR] if the specified path contains a junction on Windows.
+// Junctions can occur, for example, when a volume is mounted as a subdirectory inside another drive.
+// This can usually happen when using the Dev Drives feature and replacing existing directories.
+// See: https://github.com/golang/go/issues/40180
+//
+// Since [syscall.ENOTDIR] is only returned when calling [filepath.EvalSymlinks] on Windows
+// if part of the presented path is a junction and nothing before was a symlink,
+// we simply treat this as NOT a symlink,
+// because a symlink over the junction makes no sense at all.
+func evalSymlinks(path string) (string, error) {
+ resolved, err := filepath.EvalSymlinks(path)
+ if err == nil {
+ return resolved, nil
+ }
+
+ if !errors.Is(err, syscall.ENOTDIR) {
+ return "", err
+ }
+
+ _, err = os.Stat(path)
+ if err != nil {
+ return "", err
+ }
+
+ // If exists, we make the path absolute, to be sure...
+ return filepath.Abs(path)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/issue.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/issue.go
index 15d8dd2b33..854e7d15f0 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/issue.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/issue.go
@@ -26,7 +26,7 @@ type EncodingIssue struct {
Severity string
Pos token.Position
LineRange *result.Range
- Replacement *result.Replacement
+ SuggestedFixes []analysis.SuggestedFix
ExpectNoLint bool
ExpectedNoLintLinter string
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors/errors.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors/errors.go
index 91f6dd39d7..7da659e803 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors/errors.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors/errors.go
@@ -25,8 +25,6 @@ func BuildIssuesFromIllTypedError(errs []error, lintCtx *linter.Context) ([]resu
var other error
for _, err := range errs {
- err := err
-
var ill *IllTypedError
if !errors.As(err, &ill) {
if other == nil {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/position.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/position.go
new file mode 100644
index 0000000000..28441b341a
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/position.go
@@ -0,0 +1,50 @@
+package goanalysis
+
+import (
+ "go/ast"
+ "go/token"
+ "path/filepath"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+func GetGoFilePosition(pass *analysis.Pass, f *ast.File) (token.Position, bool) {
+ position := GetFilePositionFor(pass.Fset, f.Pos())
+
+ if filepath.Ext(position.Filename) == ".go" {
+ return position, true
+ }
+
+ return position, false
+}
+
+func GetFilePositionFor(fset *token.FileSet, p token.Pos) token.Position {
+ pos := fset.PositionFor(p, true)
+
+ ext := filepath.Ext(pos.Filename)
+ if ext != ".go" {
+ // position has been adjusted to a non-go file, revert to original file
+ return fset.PositionFor(p, false)
+ }
+
+ return pos
+}
+
+func EndOfLinePos(f *token.File, line int) token.Pos {
+ var end token.Pos
+
+ if line >= f.LineCount() {
+ // missing newline at the end of the file
+ end = f.Pos(f.Size())
+ } else {
+ end = f.LineStart(line+1) - token.Pos(1)
+ }
+
+ return end
+}
+
+// AdjustPos is a hack to get the right line to display.
+// It should not be used outside some specific cases.
+func AdjustPos(line, nonAdjLine, adjLine int) int {
+ return line + nonAdjLine - adjLine
+}
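
EndOfLinePos above returns the position just before the next line's start, falling back to the end of the file when the requested line is the last one (possibly missing its trailing newline). A self-contained sketch of the same arithmetic on a throwaway token.FileSet:

package main

import (
	"fmt"
	"go/token"
)

func endOfLinePos(f *token.File, line int) token.Pos {
	if line >= f.LineCount() {
		// Last line, possibly without a trailing newline: end of file.
		return f.Pos(f.Size())
	}
	// Otherwise: one position before the start of the next line.
	return f.LineStart(line+1) - 1
}

func main() {
	src := []byte("package p\n\nfunc f() {}\n")

	fset := token.NewFileSet()
	f := fset.AddFile("p.go", -1, len(src))
	f.SetLinesForContent(src)

	for line := 1; line <= f.LineCount(); line++ {
		pos := endOfLinePos(f, line)
		fmt.Printf("line %d ends at offset %d\n", line, f.Offset(pos))
	}
}
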
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner.go
index c1274ec09a..7cff0149a4 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner.go
@@ -1,8 +1,3 @@
-// checker is a partial copy of https://github.com/golang/tools/blob/master/go/analysis/internal/checker
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
// Package goanalysis defines the implementation of the checker commands.
// The same code drives the multi-analysis driver, the single-analysis
// driver that is conventionally provided for convenience along with
@@ -13,16 +8,16 @@ import (
"encoding/gob"
"fmt"
"go/token"
+ "maps"
"runtime"
- "sort"
+ "slices"
"sync"
- "golang.org/x/exp/maps"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages"
+ "github.com/golangci/golangci-lint/internal/cache"
"github.com/golangci/golangci-lint/internal/errorutil"
- "github.com/golangci/golangci-lint/internal/pkgcache"
"github.com/golangci/golangci-lint/pkg/goanalysis/load"
"github.com/golangci/golangci-lint/pkg/logutils"
"github.com/golangci/golangci-lint/pkg/timeutils"
@@ -47,12 +42,13 @@ type Diagnostic struct {
Analyzer *analysis.Analyzer
Position token.Position
Pkg *packages.Package
+ File *token.File
}
type runner struct {
log logutils.Log
prefix string // ensure unique analyzer names
- pkgCache *pkgcache.Cache
+ pkgCache *cache.Cache
loadGuard *load.Guard
loadMode LoadMode
passToPkg map[*analysis.Pass]*packages.Package
@@ -60,7 +56,7 @@ type runner struct {
sw *timeutils.Stopwatch
}
-func newRunner(prefix string, logger logutils.Log, pkgCache *pkgcache.Cache, loadGuard *load.Guard,
+func newRunner(prefix string, logger logutils.Log, pkgCache *cache.Cache, loadGuard *load.Guard,
loadMode LoadMode, sw *timeutils.Stopwatch,
) *runner {
return &runner{
@@ -84,7 +80,6 @@ func (r *runner) run(analyzers []*analysis.Analyzer, initialPackages []*packages
[]error, map[*analysis.Pass]*packages.Package,
) {
debugf("Analyzing %d packages on load mode %s", len(initialPackages), r.loadMode)
- defer r.pkgCache.Trim()
roots := r.analyze(initialPackages, analyzers)
@@ -127,9 +122,9 @@ func (r *runner) makeAction(a *analysis.Analyzer, pkg *packages.Package,
}
act = actAlloc.alloc()
- act.a = a
- act.pkg = pkg
- act.r = r
+ act.Analyzer = a
+ act.Package = pkg
+ act.runner = r
act.isInitialPkg = initialPkgs[pkg]
act.needAnalyzeSource = initialPkgs[pkg]
act.analysisDoneCh = make(chan struct{})
@@ -138,11 +133,11 @@ func (r *runner) makeAction(a *analysis.Analyzer, pkg *packages.Package,
if len(a.FactTypes) > 0 {
depsCount += len(pkg.Imports)
}
- act.deps = make([]*action, 0, depsCount)
+ act.Deps = make([]*action, 0, depsCount)
// Add a dependency on each required analyzers.
for _, req := range a.Requires {
- act.deps = append(act.deps, r.makeAction(req, pkg, initialPkgs, actions, actAlloc))
+ act.Deps = append(act.Deps, r.makeAction(req, pkg, initialPkgs, actions, actAlloc))
}
r.buildActionFactDeps(act, a, pkg, initialPkgs, actions, actAlloc)
@@ -164,11 +159,11 @@ func (r *runner) buildActionFactDeps(act *action, a *analysis.Analyzer, pkg *pac
act.objectFacts = make(map[objectFactKey]analysis.Fact)
act.packageFacts = make(map[packageFactKey]analysis.Fact)
- paths := maps.Keys(pkg.Imports)
- sort.Strings(paths) // for determinism
+ paths := slices.Sorted(maps.Keys(pkg.Imports)) // for determinism
+
for _, path := range paths {
dep := r.makeAction(a, pkg.Imports[path], initialPkgs, actions, actAlloc)
- act.deps = append(act.deps, dep)
+ act.Deps = append(act.Deps, dep)
}
// Need to register fact types for pkgcache proper gob encoding.
@@ -209,12 +204,12 @@ func (r *runner) prepareAnalysis(pkgs []*packages.Package,
for _, a := range analyzers {
for _, pkg := range pkgs {
root := r.makeAction(a, pkg, initialPkgs, actions, actAlloc)
- root.isroot = true
+ root.IsRoot = true
roots = append(roots, root)
}
}
- allActions = maps.Values(actions)
+ allActions = slices.Collect(maps.Values(actions))
debugf("Built %d actions", len(actions))
@@ -226,7 +221,7 @@ func (r *runner) analyze(pkgs []*packages.Package, analyzers []*analysis.Analyze
actionPerPkg := map[*packages.Package][]*action{}
for _, act := range actions {
- actionPerPkg[act.pkg] = append(actionPerPkg[act.pkg], act)
+ actionPerPkg[act.Package] = append(actionPerPkg[act.Package], act)
}
// Fill Imports field.
@@ -256,7 +251,7 @@ func (r *runner) analyze(pkgs []*packages.Package, analyzers []*analysis.Analyze
}
}
for _, act := range actions {
- dfs(act.pkg)
+ dfs(act.Package)
}
// Limit memory and IO usage.
@@ -288,7 +283,7 @@ func extractDiagnostics(roots []*action) (retDiags []Diagnostic, retErrors []err
for _, act := range actions {
if !extracted[act] {
extracted[act] = true
- visitAll(act.deps)
+ visitAll(act.Deps)
extract(act)
}
}
@@ -305,31 +300,34 @@ func extractDiagnostics(roots []*action) (retDiags []Diagnostic, retErrors []err
seen := make(map[key]bool)
extract = func(act *action) {
- if act.err != nil {
- if pe, ok := act.err.(*errorutil.PanicError); ok {
+ if act.Err != nil {
+ if pe, ok := act.Err.(*errorutil.PanicError); ok {
panic(pe)
}
- retErrors = append(retErrors, fmt.Errorf("%s: %w", act.a.Name, act.err))
+ retErrors = append(retErrors, fmt.Errorf("%s: %w", act.Analyzer.Name, act.Err))
return
}
- if act.isroot {
- for _, diag := range act.diagnostics {
+ if act.IsRoot {
+ for _, diag := range act.Diagnostics {
// We don't display a.Name/f.Category
// as most users don't care.
- posn := act.pkg.Fset.Position(diag.Pos)
- k := key{posn, act.a, diag.Message}
+ position := GetFilePositionFor(act.Package.Fset, diag.Pos)
+ file := act.Package.Fset.File(diag.Pos)
+
+ k := key{Position: position, Analyzer: act.Analyzer, message: diag.Message}
if seen[k] {
continue // duplicate
}
seen[k] = true
retDiag := Diagnostic{
+ File: file,
Diagnostic: diag,
- Analyzer: act.a,
- Position: posn,
- Pkg: act.pkg,
+ Analyzer: act.Analyzer,
+ Position: position,
+ Pkg: act.Package,
}
retDiags = append(retDiags, retDiag)
}
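
extractDiagnostics above de-duplicates diagnostics with a seen map keyed by position, analyzer and message. A minimal sketch of that comparable-struct-key pattern, with simplified fields instead of the real Diagnostic type:

package main

import "fmt"

// key mimics the dedup key: every field is comparable, so the struct
// can be used directly as a map key.
type key struct {
	file    string
	line    int
	message string
}

func dedup(diags []key) []key {
	seen := make(map[key]bool)
	var out []key
	for _, d := range diags {
		if seen[d] {
			continue // duplicate
		}
		seen[d] = true
		out = append(out, d)
	}
	return out
}

func main() {
	diags := []key{
		{"a.go", 10, "unused variable"},
		{"a.go", 10, "unused variable"}, // reported by two root actions
		{"b.go", 3, "shadowed variable"},
	}
	fmt.Println(len(dedup(diags))) // 2
}
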
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action.go
index 58ea297ea9..2e1c414228 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action.go
@@ -1,21 +1,10 @@
package goanalysis
import (
- "errors"
"fmt"
- "go/types"
- "io"
- "reflect"
"runtime/debug"
- "time"
-
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/packages"
- "golang.org/x/tools/go/types/objectpath"
"github.com/golangci/golangci-lint/internal/errorutil"
- "github.com/golangci/golangci-lint/internal/pkgcache"
- "github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors"
)
type actionAllocator struct {
@@ -39,57 +28,9 @@ func (actAlloc *actionAllocator) alloc() *action {
return act
}
-// An action represents one unit of analysis work: the application of
-// one analysis to one package. Actions form a DAG, both within a
-// package (as different analyzers are applied, either in sequence or
-// parallel), and across packages (as dependencies are analyzed).
-type action struct {
- a *analysis.Analyzer
- pkg *packages.Package
- pass *analysis.Pass
- deps []*action
- objectFacts map[objectFactKey]analysis.Fact
- packageFacts map[packageFactKey]analysis.Fact
- result any
- diagnostics []analysis.Diagnostic
- err error
- r *runner
- analysisDoneCh chan struct{}
- loadCachedFactsDone bool
- loadCachedFactsOk bool
- isroot bool
- isInitialPkg bool
- needAnalyzeSource bool
-}
-
-func (act *action) String() string {
- return fmt.Sprintf("%s@%s", act.a, act.pkg)
-}
-
-func (act *action) loadCachedFacts() bool {
- if act.loadCachedFactsDone { // can't be set in parallel
- return act.loadCachedFactsOk
- }
-
- res := func() bool {
- if act.isInitialPkg {
- return true // load cached facts only for non-initial packages
- }
-
- if len(act.a.FactTypes) == 0 {
- return true // no need to load facts
- }
-
- return act.loadPersistedFacts()
- }()
- act.loadCachedFactsDone = true
- act.loadCachedFactsOk = res
- return res
-}
-
func (act *action) waitUntilDependingAnalyzersWorked() {
- for _, dep := range act.deps {
- if dep.pkg == act.pkg {
+ for _, dep := range act.Deps {
+ if dep.Package == act.Package {
<-dep.analysisDoneCh
}
}
@@ -98,286 +39,26 @@ func (act *action) waitUntilDependingAnalyzersWorked() {
func (act *action) analyzeSafe() {
defer func() {
if p := recover(); p != nil {
- if !act.isroot {
+ if !act.IsRoot {
// This line allows to display "hidden" panic with analyzers like buildssa.
// Some linters are dependent of sub-analyzers but when a sub-analyzer fails the linter is not aware of that,
// this results to another panic (ex: "interface conversion: interface {} is nil, not *buildssa.SSA").
- act.r.log.Errorf("%s: panic during analysis: %v, %s", act.a.Name, p, string(debug.Stack()))
+ act.runner.log.Errorf("%s: panic during analysis: %v, %s", act.Analyzer.Name, p, string(debug.Stack()))
}
- act.err = errorutil.NewPanicError(fmt.Sprintf("%s: package %q (isInitialPkg: %t, needAnalyzeSource: %t): %s",
- act.a.Name, act.pkg.Name, act.isInitialPkg, act.needAnalyzeSource, p), debug.Stack())
+ act.Err = errorutil.NewPanicError(fmt.Sprintf("%s: package %q (isInitialPkg: %t, needAnalyzeSource: %t): %s",
+ act.Analyzer.Name, act.Package.Name, act.isInitialPkg, act.needAnalyzeSource, p), debug.Stack())
}
}()
- act.r.sw.TrackStage(act.a.Name, func() {
- act.analyze()
- })
-}
-
-func (act *action) analyze() {
- defer close(act.analysisDoneCh) // unblock actions depending on this action
-
- if !act.needAnalyzeSource {
- return
- }
-
- defer func(now time.Time) {
- analyzeDebugf("go/analysis: %s: %s: analyzed package %q in %s", act.r.prefix, act.a.Name, act.pkg.Name, time.Since(now))
- }(time.Now())
-
- // Report an error if any dependency failures.
- var depErrors error
- for _, dep := range act.deps {
- if dep.err == nil {
- continue
- }
-
- depErrors = errors.Join(depErrors, errors.Unwrap(dep.err))
- }
- if depErrors != nil {
- act.err = fmt.Errorf("failed prerequisites: %w", depErrors)
- return
- }
-
- // Plumb the output values of the dependencies
- // into the inputs of this action. Also facts.
- inputs := make(map[*analysis.Analyzer]any)
- startedAt := time.Now()
- for _, dep := range act.deps {
- if dep.pkg == act.pkg {
- // Same package, different analysis (horizontal edge):
- // in-memory outputs of prerequisite analyzers
- // become inputs to this analysis pass.
- inputs[dep.a] = dep.result
- } else if dep.a == act.a { // (always true)
- // Same analysis, different package (vertical edge):
- // serialized facts produced by prerequisite analysis
- // become available to this analysis pass.
- inheritFacts(act, dep)
- }
- }
- factsDebugf("%s: Inherited facts in %s", act, time.Since(startedAt))
-
- // Run the analysis.
- pass := &analysis.Pass{
- Analyzer: act.a,
- Fset: act.pkg.Fset,
- Files: act.pkg.Syntax,
- OtherFiles: act.pkg.OtherFiles,
- Pkg: act.pkg.Types,
- TypesInfo: act.pkg.TypesInfo,
- TypesSizes: act.pkg.TypesSizes,
- ResultOf: inputs,
- Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
- ImportObjectFact: act.importObjectFact,
- ExportObjectFact: act.exportObjectFact,
- ImportPackageFact: act.importPackageFact,
- ExportPackageFact: act.exportPackageFact,
- AllObjectFacts: act.allObjectFacts,
- AllPackageFacts: act.allPackageFacts,
- }
- act.pass = pass
- act.r.passToPkgGuard.Lock()
- act.r.passToPkg[pass] = act.pkg
- act.r.passToPkgGuard.Unlock()
-
- if act.pkg.IllTyped {
- // It looks like there should be !pass.Analyzer.RunDespiteErrors
- // but govet's cgocall crashes on it. Govet itself contains !pass.Analyzer.RunDespiteErrors condition here,
- // but it exits before it if packages.Load have failed.
- act.err = fmt.Errorf("analysis skipped: %w", &pkgerrors.IllTypedError{Pkg: act.pkg})
- } else {
- startedAt = time.Now()
- act.result, act.err = pass.Analyzer.Run(pass)
- analyzedIn := time.Since(startedAt)
- if analyzedIn > time.Millisecond*10 {
- debugf("%s: run analyzer in %s", act, analyzedIn)
- }
- }
-
- // disallow calls after Run
- pass.ExportObjectFact = nil
- pass.ExportPackageFact = nil
-
- if err := act.persistFactsToCache(); err != nil {
- act.r.log.Warnf("Failed to persist facts to cache: %s", err)
- }
-}
-
-// importObjectFact implements Pass.ImportObjectFact.
-// Given a non-nil pointer ptr of type *T, where *T satisfies Fact,
-// importObjectFact copies the fact value to *ptr.
-func (act *action) importObjectFact(obj types.Object, ptr analysis.Fact) bool {
- if obj == nil {
- panic("nil object")
- }
- key := objectFactKey{obj, act.factType(ptr)}
- if v, ok := act.objectFacts[key]; ok {
- reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
- return true
- }
- return false
-}
-
-// exportObjectFact implements Pass.ExportObjectFact.
-func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) {
- if obj.Pkg() != act.pkg.Types {
- act.r.log.Panicf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package",
- act.a, act.pkg, obj, fact)
- }
-
- key := objectFactKey{obj, act.factType(fact)}
- act.objectFacts[key] = fact // clobber any existing entry
- if isFactsExportDebug {
- objstr := types.ObjectString(obj, (*types.Package).Name)
- factsExportDebugf("%s: object %s has fact %s\n",
- act.pkg.Fset.Position(obj.Pos()), objstr, fact)
- }
-}
-
-func (act *action) allObjectFacts() []analysis.ObjectFact {
- out := make([]analysis.ObjectFact, 0, len(act.objectFacts))
- for key, fact := range act.objectFacts {
- out = append(out, analysis.ObjectFact{
- Object: key.obj,
- Fact: fact,
- })
- }
- return out
-}
-
-// importPackageFact implements Pass.ImportPackageFact.
-// Given a non-nil pointer ptr of type *T, where *T satisfies Fact,
-// fact copies the fact value to *ptr.
-func (act *action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool {
- if pkg == nil {
- panic("nil package")
- }
- key := packageFactKey{pkg, act.factType(ptr)}
- if v, ok := act.packageFacts[key]; ok {
- reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
- return true
- }
- return false
-}
-
-// exportPackageFact implements Pass.ExportPackageFact.
-func (act *action) exportPackageFact(fact analysis.Fact) {
- key := packageFactKey{act.pass.Pkg, act.factType(fact)}
- act.packageFacts[key] = fact // clobber any existing entry
- factsDebugf("%s: package %s has fact %s\n",
- act.pkg.Fset.Position(act.pass.Files[0].Pos()), act.pass.Pkg.Path(), fact)
-}
-
-func (act *action) allPackageFacts() []analysis.PackageFact {
- out := make([]analysis.PackageFact, 0, len(act.packageFacts))
- for key, fact := range act.packageFacts {
- out = append(out, analysis.PackageFact{
- Package: key.pkg,
- Fact: fact,
- })
- }
- return out
-}
-
-func (act *action) factType(fact analysis.Fact) reflect.Type {
- t := reflect.TypeOf(fact)
- if t.Kind() != reflect.Ptr {
- act.r.log.Fatalf("invalid Fact type: got %T, want pointer", t)
- }
- return t
-}
-
-func (act *action) persistFactsToCache() error {
- analyzer := act.a
- if len(analyzer.FactTypes) == 0 {
- return nil
- }
-
- // Merge new facts into the package and persist them.
- var facts []Fact
- for key, fact := range act.packageFacts {
- if key.pkg != act.pkg.Types {
- // The fact is from inherited facts from another package
- continue
- }
- facts = append(facts, Fact{
- Path: "",
- Fact: fact,
- })
- }
- for key, fact := range act.objectFacts {
- obj := key.obj
- if obj.Pkg() != act.pkg.Types {
- // The fact is from inherited facts from another package
- continue
- }
-
- path, err := objectpath.For(obj)
- if err != nil {
- // The object is not globally addressable
- continue
- }
-
- facts = append(facts, Fact{
- Path: string(path),
- Fact: fact,
- })
- }
-
- factsCacheDebugf("Caching %d facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name)
-
- key := fmt.Sprintf("%s/facts", analyzer.Name)
- return act.r.pkgCache.Put(act.pkg, pkgcache.HashModeNeedAllDeps, key, facts)
-}
-
-func (act *action) loadPersistedFacts() bool {
- var facts []Fact
- key := fmt.Sprintf("%s/facts", act.a.Name)
- if err := act.r.pkgCache.Get(act.pkg, pkgcache.HashModeNeedAllDeps, key, &facts); err != nil {
- if !errors.Is(err, pkgcache.ErrMissing) && !errors.Is(err, io.EOF) {
- act.r.log.Warnf("Failed to get persisted facts: %s", err)
- }
-
- factsCacheDebugf("No cached facts for package %q and analyzer %s", act.pkg.Name, act.a.Name)
- return false
- }
-
- factsCacheDebugf("Loaded %d cached facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name)
-
- for _, f := range facts {
- if f.Path == "" { // this is a package fact
- key := packageFactKey{act.pkg.Types, act.factType(f.Fact)}
- act.packageFacts[key] = f.Fact
- continue
- }
- obj, err := objectpath.Object(act.pkg.Types, objectpath.Path(f.Path))
- if err != nil {
- // Be lenient about these errors. For example, when
- // analyzing io/ioutil from source, we may get a fact
- // for methods on the devNull type, and objectpath
- // will happily create a path for them. However, when
- // we later load io/ioutil from export data, the path
- // no longer resolves.
- //
- // If an exported type embeds the unexported type,
- // then (part of) the unexported type will become part
- // of the type information and our path will resolve
- // again.
- continue
- }
- factKey := objectFactKey{obj, act.factType(f.Fact)}
- act.objectFacts[factKey] = f.Fact
- }
- return true
+ act.runner.sw.TrackStage(act.Analyzer.Name, act.analyze)
}
func (act *action) markDepsForAnalyzingSource() {
// Horizontal deps (analyzer.Requires) must be loaded from source and analyzed before analyzing
// this action.
- for _, dep := range act.deps {
- if dep.pkg == act.pkg {
+ for _, dep := range act.Deps {
+ if dep.Package == act.Package {
// Analyze source only for horizontal dependencies, e.g. from "buildssa".
dep.needAnalyzeSource = true // can't be set in parallel
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action_cache.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action_cache.go
new file mode 100644
index 0000000000..e06ea2979c
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action_cache.go
@@ -0,0 +1,127 @@
+package goanalysis
+
+import (
+ "errors"
+ "fmt"
+ "io"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/types/objectpath"
+
+ "github.com/golangci/golangci-lint/internal/cache"
+)
+
+type Fact struct {
+ Path string // non-empty only for object facts
+ Fact analysis.Fact
+}
+
+func (act *action) loadCachedFacts() bool {
+ if act.loadCachedFactsDone { // can't be set in parallel
+ return act.loadCachedFactsOk
+ }
+
+ res := func() bool {
+ if act.isInitialPkg {
+ return true // load cached facts only for non-initial packages
+ }
+
+ if len(act.Analyzer.FactTypes) == 0 {
+ return true // no need to load facts
+ }
+
+ return act.loadPersistedFacts()
+ }()
+ act.loadCachedFactsDone = true
+ act.loadCachedFactsOk = res
+ return res
+}
+
+func (act *action) persistFactsToCache() error {
+ analyzer := act.Analyzer
+ if len(analyzer.FactTypes) == 0 {
+ return nil
+ }
+
+ // Merge new facts into the package and persist them.
+ var facts []Fact
+ for key, fact := range act.packageFacts {
+ if key.pkg != act.Package.Types {
+ // The fact is from inherited facts from another package
+ continue
+ }
+ facts = append(facts, Fact{
+ Path: "",
+ Fact: fact,
+ })
+ }
+ for key, fact := range act.objectFacts {
+ obj := key.obj
+ if obj.Pkg() != act.Package.Types {
+ // The fact is from inherited facts from another package
+ continue
+ }
+
+ path, err := objectpath.For(obj)
+ if err != nil {
+ // The object is not globally addressable
+ continue
+ }
+
+ facts = append(facts, Fact{
+ Path: string(path),
+ Fact: fact,
+ })
+ }
+
+ factsCacheDebugf("Caching %d facts for package %q and analyzer %s", len(facts), act.Package.Name, act.Analyzer.Name)
+
+ return act.runner.pkgCache.Put(act.Package, cache.HashModeNeedAllDeps, factCacheKey(analyzer), facts)
+}
+
+func (act *action) loadPersistedFacts() bool {
+ var facts []Fact
+
+ err := act.runner.pkgCache.Get(act.Package, cache.HashModeNeedAllDeps, factCacheKey(act.Analyzer), &facts)
+ if err != nil {
+ if !errors.Is(err, cache.ErrMissing) && !errors.Is(err, io.EOF) {
+ act.runner.log.Warnf("Failed to get persisted facts: %s", err)
+ }
+
+ factsCacheDebugf("No cached facts for package %q and analyzer %s", act.Package.Name, act.Analyzer.Name)
+ return false
+ }
+
+ factsCacheDebugf("Loaded %d cached facts for package %q and analyzer %s", len(facts), act.Package.Name, act.Analyzer.Name)
+
+ for _, f := range facts {
+ if f.Path == "" { // this is a package fact
+ key := packageFactKey{act.Package.Types, act.factType(f.Fact)}
+ act.packageFacts[key] = f.Fact
+ continue
+ }
+ obj, err := objectpath.Object(act.Package.Types, objectpath.Path(f.Path))
+ if err != nil {
+ // Be lenient about these errors. For example, when
+ // analyzing io/ioutil from source, we may get a fact
+ // for methods on the devNull type, and objectpath
+ // will happily create a path for them. However, when
+ // we later load io/ioutil from export data, the path
+ // no longer resolves.
+ //
+ // If an exported type embeds the unexported type,
+ // then (part of) the unexported type will become part
+ // of the type information and our path will resolve
+ // again.
+ continue
+ }
+ factKey := objectFactKey{obj, act.factType(f.Fact)}
+ act.objectFacts[factKey] = f.Fact
+ }
+
+ return true
+}
+
+func factCacheKey(a *analysis.Analyzer) string {
+ return fmt.Sprintf("%s/facts", a.Name)
+}
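
The fact cache serializes []Fact with gob, and each fact's concrete type has to be registered before the round-trip works, since Fact holds an interface value. A small self-contained sketch of that encode/decode cycle, with a made-up fact type standing in for analysis.Fact:

package main

import (
	"bytes"
	"encoding/gob"
	"fmt"
)

// fact stands in for analysis.Fact: an interface whose concrete types
// must be registered with gob before a []cachedFact can round-trip.
type fact interface{ AFact() }

type isDeprecated struct{ Msg string }

func (isDeprecated) AFact() {}

type cachedFact struct {
	Path string // empty for package facts, object path otherwise
	Fact fact
}

func main() {
	gob.Register(isDeprecated{})

	in := []cachedFact{{Path: "T.M", Fact: isDeprecated{Msg: "use U instead"}}}

	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(in); err != nil {
		panic(err)
	}

	var out []cachedFact
	if err := gob.NewDecoder(&buf).Decode(&out); err != nil {
		panic(err)
	}

	fmt.Printf("%+v\n", out[0]) // {Path:T.M Fact:{Msg:use U instead}}
}
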
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_checker.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_checker.go
new file mode 100644
index 0000000000..376a37f039
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_checker.go
@@ -0,0 +1,446 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+//
+// Altered copy of https://github.com/golang/tools/blob/v0.28.0/go/analysis/internal/checker/checker.go
+
+package goanalysis
+
+import (
+ "bytes"
+ "encoding/gob"
+ "errors"
+ "fmt"
+ "go/types"
+ "reflect"
+ "time"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/packages"
+
+ "github.com/golangci/golangci-lint/internal/x/tools/analysisflags"
+ "github.com/golangci/golangci-lint/internal/x/tools/analysisinternal"
+ "github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors"
+)
+
+// NOTE(ldez) altered: custom fields; remove 'once' and 'duration'.
+// An action represents one unit of analysis work: the application of
+// one analysis to one package. Actions form a DAG, both within a
+// package (as different analyzers are applied, either in sequence or
+// parallel), and across packages (as dependencies are analyzed).
+type action struct {
+ Analyzer *analysis.Analyzer
+ Package *packages.Package
+ IsRoot bool // whether this is a root node of the graph
+ Deps []*action
+ Result any // computed result of Analyzer.run, if any (and if IsRoot)
+ Err error // error result of Analyzer.run
+ Diagnostics []analysis.Diagnostic
+ Duration time.Duration // execution time of this step
+
+ pass *analysis.Pass
+ objectFacts map[objectFactKey]analysis.Fact
+ packageFacts map[packageFactKey]analysis.Fact
+
+ // NOTE(ldez) custom fields.
+ runner *runner
+ analysisDoneCh chan struct{}
+ loadCachedFactsDone bool
+ loadCachedFactsOk bool
+ isInitialPkg bool
+ needAnalyzeSource bool
+}
+
+// NOTE(ldez) no alteration.
+type objectFactKey struct {
+ obj types.Object
+ typ reflect.Type
+}
+
+// NOTE(ldez) no alteration.
+type packageFactKey struct {
+ pkg *types.Package
+ typ reflect.Type
+}
+
+// NOTE(ldez) no alteration.
+func (act *action) String() string {
+ return fmt.Sprintf("%s@%s", act.Analyzer, act.Package)
+}
+
+// NOTE(ldez) altered version of `func (act *action) execOnce()`.
+func (act *action) analyze() {
+ defer close(act.analysisDoneCh) // unblock actions depending on this action
+
+ if !act.needAnalyzeSource {
+ return
+ }
+
+ // Record time spent in this node but not its dependencies.
+ // In parallel mode, due to GC/scheduler contention, the
+ // time is 5x higher than in sequential mode, even with a
+ // semaphore limiting the number of threads here.
+ // So use -debug=tp.
+ t0 := time.Now()
+ defer func() {
+ act.Duration = time.Since(t0)
+ analyzeDebugf("go/analysis: %s: %s: analyzed package %q in %s", act.runner.prefix, act.Analyzer.Name, act.Package.Name, time.Since(t0))
+ }()
+
+ // Report an error if any dependency failures.
+ var depErrors error
+ for _, dep := range act.Deps {
+ if dep.Err != nil {
+ depErrors = errors.Join(depErrors, errors.Unwrap(dep.Err))
+ }
+ }
+ if depErrors != nil {
+ act.Err = fmt.Errorf("failed prerequisites: %w", depErrors)
+ return
+ }
+
+ // Plumb the output values of the dependencies
+ // into the inputs of this action. Also facts.
+ inputs := make(map[*analysis.Analyzer]any)
+ act.objectFacts = make(map[objectFactKey]analysis.Fact)
+ act.packageFacts = make(map[packageFactKey]analysis.Fact)
+ for _, dep := range act.Deps {
+ if dep.Package == act.Package {
+ // Same package, different analysis (horizontal edge):
+ // in-memory outputs of prerequisite analyzers
+ // become inputs to this analysis pass.
+ inputs[dep.Analyzer] = dep.Result
+
+ } else if dep.Analyzer == act.Analyzer { // (always true)
+ // Same analysis, different package (vertical edge):
+ // serialized facts produced by prerequisite analysis
+ // become available to this analysis pass.
+ inheritFacts(act, dep)
+ }
+ }
+
+ // NOTE(ldez) this is not compatible with our implementation.
+ // Quick (nonexhaustive) check that the correct go/packages mode bits were used.
+ // (If there were errors, all bets are off.)
+ // if pkg := act.Package; pkg.Errors == nil {
+ // if pkg.Name == "" || pkg.PkgPath == "" || pkg.Types == nil || pkg.Fset == nil || pkg.TypesSizes == nil {
+ // panic(fmt.Sprintf("packages must be loaded with packages.LoadSyntax mode: Name: %v, PkgPath: %v, Types: %v, Fset: %v, TypesSizes: %v",
+ // pkg.Name == "", pkg.PkgPath == "", pkg.Types == nil, pkg.Fset == nil, pkg.TypesSizes == nil))
+ // }
+ // }
+
+ factsDebugf("%s: Inherited facts in %s", act, time.Since(t0))
+
+ module := &analysis.Module{} // possibly empty (non nil) in go/analysis drivers.
+ if mod := act.Package.Module; mod != nil {
+ module.Path = mod.Path
+ module.Version = mod.Version
+ module.GoVersion = mod.GoVersion
+ }
+
+ // Run the analysis.
+ pass := &analysis.Pass{
+ Analyzer: act.Analyzer,
+ Fset: act.Package.Fset,
+ Files: act.Package.Syntax,
+ OtherFiles: act.Package.OtherFiles,
+ IgnoredFiles: act.Package.IgnoredFiles,
+ Pkg: act.Package.Types,
+ TypesInfo: act.Package.TypesInfo,
+ TypesSizes: act.Package.TypesSizes,
+ TypeErrors: act.Package.TypeErrors,
+ Module: module,
+
+ ResultOf: inputs,
+ Report: func(d analysis.Diagnostic) { act.Diagnostics = append(act.Diagnostics, d) },
+ ImportObjectFact: act.ObjectFact,
+ ExportObjectFact: act.exportObjectFact,
+ ImportPackageFact: act.PackageFact,
+ ExportPackageFact: act.exportPackageFact,
+ AllObjectFacts: act.AllObjectFacts,
+ AllPackageFacts: act.AllPackageFacts,
+ }
+ pass.ReadFile = analysisinternal.MakeReadFile(pass)
+ act.pass = pass
+
+ act.runner.passToPkgGuard.Lock()
+ act.runner.passToPkg[pass] = act.Package
+ act.runner.passToPkgGuard.Unlock()
+
+ act.Result, act.Err = func() (any, error) {
+ // NOTE(golangci-lint):
+ // It looks like there should be !pass.Analyzer.RunDespiteErrors
+ // but govet's cgocall crashes on it.
+ // Govet itself contains !pass.Analyzer.RunDespiteErrors condition here,
+ // but it exits before it if packages.Load have failed.
+ if act.Package.IllTyped {
+ return nil, fmt.Errorf("analysis skipped: %w", &pkgerrors.IllTypedError{Pkg: act.Package})
+ }
+
+ t1 := time.Now()
+
+ result, err := pass.Analyzer.Run(pass)
+ if err != nil {
+ return nil, err
+ }
+
+ analyzedIn := time.Since(t1)
+ if analyzedIn > 10*time.Millisecond {
+ debugf("%s: run analyzer in %s", act, analyzedIn)
+ }
+
+ // correct result type?
+ if got, want := reflect.TypeOf(result), pass.Analyzer.ResultType; got != want {
+ return nil, fmt.Errorf(
+ "internal error: on package %s, analyzer %s returned a result of type %v, but declared ResultType %v",
+ pass.Pkg.Path(), pass.Analyzer, got, want)
+ }
+
+ // resolve diagnostic URLs
+ for i := range act.Diagnostics {
+ url, err := analysisflags.ResolveURL(act.Analyzer, act.Diagnostics[i])
+ if err != nil {
+ return nil, err
+ }
+ act.Diagnostics[i].URL = url
+ }
+ return result, nil
+ }()
+
+ // Help detect (disallowed) calls after Run.
+ pass.ExportObjectFact = nil
+ pass.ExportPackageFact = nil
+
+ err := act.persistFactsToCache()
+ if err != nil {
+ act.runner.log.Warnf("Failed to persist facts to cache: %s", err)
+ }
+}
+
+// NOTE(ldez) altered: logger; sanityCheck.
+// inheritFacts populates act.facts with
+// those it obtains from its dependency, dep.
+func inheritFacts(act, dep *action) {
+ const sanityCheck = false
+
+ for key, fact := range dep.objectFacts {
+ // Filter out facts related to objects
+ // that are irrelevant downstream
+ // (equivalently: not in the compiler export data).
+ if !exportedFrom(key.obj, dep.Package.Types) {
+ factsInheritDebugf("%v: discarding %T fact from %s for %s: %s", act, fact, dep, key.obj, fact)
+ continue
+ }
+
+ // Optionally serialize/deserialize fact
+ // to verify that it works across address spaces.
+ if sanityCheck {
+ encodedFact, err := codeFact(fact)
+ if err != nil {
+ act.runner.log.Panicf("internal error: encoding of %T fact failed in %v: %v", fact, act, err)
+ }
+ fact = encodedFact
+ }
+
+ factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.obj, fact)
+
+ act.objectFacts[key] = fact
+ }
+
+ for key, fact := range dep.packageFacts {
+ // TODO: filter out facts that belong to
+ // packages not mentioned in the export data
+ // to prevent side channels.
+ //
+ // The Pass.All{Object,Package}Facts accessors expose too much:
+ // all facts, of all types, for all dependencies in the action
+ // graph. Not only does the representation grow quadratically,
+ // but it violates the separate compilation paradigm, allowing
+ // analysis implementations to communicate with indirect
+ // dependencies that are not mentioned in the export data.
+ //
+ // It's not clear how to fix this short of a rather expensive
+ // filtering step after each action that enumerates all the
+ // objects that would appear in export data, and deletes
+ // facts associated with objects not in this set.
+
+ // Optionally serialize/deserialize fact
+ // to verify that it works across address spaces
+ // and is deterministic.
+ if sanityCheck {
+ encodedFact, err := codeFact(fact)
+ if err != nil {
+ act.runner.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act)
+ }
+ fact = encodedFact
+ }
+
+ factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.pkg.Path(), fact)
+
+ act.packageFacts[key] = fact
+ }
+}
+
+// NOTE(ldez) altered: `new` is renamed to `newFact`.
+// codeFact encodes then decodes a fact,
+// just to exercise that logic.
+func codeFact(fact analysis.Fact) (analysis.Fact, error) {
+ // We encode facts one at a time.
+ // A real modular driver would emit all facts
+ // into one encoder to improve gob efficiency.
+ var buf bytes.Buffer
+ if err := gob.NewEncoder(&buf).Encode(fact); err != nil {
+ return nil, err
+ }
+
+ // Encode it twice and assert that we get the same bits.
+ // This helps detect nondeterministic Gob encoding (e.g. of maps).
+ var buf2 bytes.Buffer
+ if err := gob.NewEncoder(&buf2).Encode(fact); err != nil {
+ return nil, err
+ }
+ if !bytes.Equal(buf.Bytes(), buf2.Bytes()) {
+ return nil, fmt.Errorf("encoding of %T fact is nondeterministic", fact)
+ }
+
+ newFact := reflect.New(reflect.TypeOf(fact).Elem()).Interface().(analysis.Fact)
+ if err := gob.NewDecoder(&buf).Decode(newFact); err != nil {
+ return nil, err
+ }
+ return newFact, nil
+}
+
+// NOTE(ldez) no alteration.
+// exportedFrom reports whether obj may be visible to a package that imports pkg.
+// This includes not just the exported members of pkg, but also unexported
+// constants, types, fields, and methods, perhaps belonging to other packages,
+// that find their way into the API.
+// This is an overapproximation of the more accurate approach used by
+// gc export data, which walks the type graph, but it's much simpler.
+//
+// TODO(adonovan): do more accurate filtering by walking the type graph.
+func exportedFrom(obj types.Object, pkg *types.Package) bool {
+ switch obj := obj.(type) {
+ case *types.Func:
+ return obj.Exported() && obj.Pkg() == pkg ||
+ obj.Type().(*types.Signature).Recv() != nil
+ case *types.Var:
+ if obj.IsField() {
+ return true
+ }
+ // we can't filter more aggressively than this because we need
+ // to consider function parameters exported, but have no way
+ // of telling apart function parameters from local variables.
+ return obj.Pkg() == pkg
+ case *types.TypeName, *types.Const:
+ return true
+ }
+ return false // Nil, Builtin, Label, or PkgName
+}
+
+// NOTE(ldez) altered: logger; `act.factType`.
+// ObjectFact retrieves a fact associated with obj,
+// and returns true if one was found.
+// Given a value ptr of type *T, where *T satisfies Fact,
+// ObjectFact copies the value to *ptr.
+//
+// See documentation at ImportObjectFact field of [analysis.Pass].
+func (act *action) ObjectFact(obj types.Object, ptr analysis.Fact) bool {
+ if obj == nil {
+ panic("nil object")
+ }
+ key := objectFactKey{obj, act.factType(ptr)}
+ if v, ok := act.objectFacts[key]; ok {
+ reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
+ return true
+ }
+ return false
+}
+
+// NOTE(ldez) altered: logger; `act.factType`.
+// exportObjectFact implements Pass.ExportObjectFact.
+func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) {
+ if act.pass.ExportObjectFact == nil {
+ act.runner.log.Panicf("%s: Pass.ExportObjectFact(%s, %T) called after Run", act, obj, fact)
+ }
+
+ if obj.Pkg() != act.Package.Types {
+ act.runner.log.Panicf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package",
+ act.Analyzer, act.Package, obj, fact)
+ }
+
+ key := objectFactKey{obj, act.factType(fact)}
+ act.objectFacts[key] = fact // clobber any existing entry
+ if isFactsExportDebug {
+ objstr := types.ObjectString(obj, (*types.Package).Name)
+
+ factsExportDebugf("%s: object %s has fact %s\n",
+ act.Package.Fset.Position(obj.Pos()), objstr, fact)
+ }
+}
+
+// NOTE(ldez) no alteration.
+// AllObjectFacts returns a new slice containing all object facts of
+// the analysis's FactTypes in unspecified order.
+//
+// See documentation at AllObjectFacts field of [analysis.Pass].
+func (act *action) AllObjectFacts() []analysis.ObjectFact {
+ facts := make([]analysis.ObjectFact, 0, len(act.objectFacts))
+ for k := range act.objectFacts {
+ facts = append(facts, analysis.ObjectFact{Object: k.obj, Fact: act.objectFacts[k]})
+ }
+ return facts
+}
+
+// NOTE(ldez) altered: `act.factType`.
+// PackageFact retrieves a fact associated with package pkg,
+// which must be this package or one of its dependencies.
+//
+// See documentation at ImportObjectFact field of [analysis.Pass].
+func (act *action) PackageFact(pkg *types.Package, ptr analysis.Fact) bool {
+ if pkg == nil {
+ panic("nil package")
+ }
+ key := packageFactKey{pkg, act.factType(ptr)}
+ if v, ok := act.packageFacts[key]; ok {
+ reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
+ return true
+ }
+ return false
+}
+
+// NOTE(ldez) altered: logger; `act.factType`.
+// exportPackageFact implements Pass.ExportPackageFact.
+func (act *action) exportPackageFact(fact analysis.Fact) {
+ if act.pass.ExportPackageFact == nil {
+ act.runner.log.Panicf("%s: Pass.ExportPackageFact(%T) called after Run", act, fact)
+ }
+
+ key := packageFactKey{act.pass.Pkg, act.factType(fact)}
+ act.packageFacts[key] = fact // clobber any existing entry
+
+ factsDebugf("%s: package %s has fact %s\n",
+ act.Package.Fset.Position(act.pass.Files[0].Pos()), act.pass.Pkg.Path(), fact)
+}
+
+// NOTE(ldez) altered: add receiver to handle logs.
+func (act *action) factType(fact analysis.Fact) reflect.Type {
+ t := reflect.TypeOf(fact)
+ if t.Kind() != reflect.Ptr {
+ act.runner.log.Fatalf("invalid Fact type: got %T, want pointer", fact)
+ }
+ return t
+}
+
+// NOTE(ldez) no alteration.
+// AllPackageFacts returns a new slice containing all package
+// facts of the analysis's FactTypes in unspecified order.
+//
+// See documentation at AllPackageFacts field of [analysis.Pass].
+func (act *action) AllPackageFacts() []analysis.PackageFact {
+ facts := make([]analysis.PackageFact, 0, len(act.packageFacts))
+ for k, fact := range act.packageFacts {
+ facts = append(facts, analysis.PackageFact{Package: k.pkg, Fact: fact})
+ }
+ return facts
+}
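
The fact accessors above store facts keyed by the pointer type of the fact and copy them back into the caller's pointer via reflection (see ObjectFact and factType). A minimal sketch of that lookup-and-copy pattern with a hypothetical fact type, not the real analysis types:

package main

import (
	"fmt"
	"reflect"
)

// fact mimics analysis.Fact.
type fact interface{ AFact() }

type ownerFact struct{ Owner string }

func (*ownerFact) AFact() {}

// store keeps facts keyed by their pointer type, the same way the action
// keys objectFacts by reflect.Type.
type store map[reflect.Type]fact

func (s store) importFact(ptr fact) bool {
	key := reflect.TypeOf(ptr)
	if key.Kind() != reflect.Ptr {
		panic(fmt.Sprintf("invalid Fact type: got %T, want pointer", ptr))
	}
	v, ok := s[key]
	if !ok {
		return false
	}
	// Copy the stored fact into the caller's pointer, as ObjectFact does.
	reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
	return true
}

func main() {
	s := store{reflect.TypeOf(&ownerFact{}): &ownerFact{Owner: "pkg a"}}

	var f ownerFact
	if s.importFact(&f) {
		fmt.Println(f.Owner) // pkg a
	}
}
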
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_facts.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_facts.go
deleted file mode 100644
index 1d0fb974e7..0000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_facts.go
+++ /dev/null
@@ -1,125 +0,0 @@
-package goanalysis
-
-import (
- "bytes"
- "encoding/gob"
- "fmt"
- "go/types"
- "reflect"
-
- "golang.org/x/tools/go/analysis"
-)
-
-type objectFactKey struct {
- obj types.Object
- typ reflect.Type
-}
-
-type packageFactKey struct {
- pkg *types.Package
- typ reflect.Type
-}
-
-type Fact struct {
- Path string // non-empty only for object facts
- Fact analysis.Fact
-}
-
-// inheritFacts populates act.facts with
-// those it obtains from its dependency, dep.
-func inheritFacts(act, dep *action) {
- serialize := false
-
- for key, fact := range dep.objectFacts {
- // Filter out facts related to objects
- // that are irrelevant downstream
- // (equivalently: not in the compiler export data).
- if !exportedFrom(key.obj, dep.pkg.Types) {
- factsInheritDebugf("%v: discarding %T fact from %s for %s: %s", act, fact, dep, key.obj, fact)
- continue
- }
-
- // Optionally serialize/deserialize fact
- // to verify that it works across address spaces.
- if serialize {
- var err error
- fact, err = codeFact(fact)
- if err != nil {
- act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act)
- }
- }
-
- factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.obj, fact)
- act.objectFacts[key] = fact
- }
-
- for key, fact := range dep.packageFacts {
- // TODO: filter out facts that belong to
- // packages not mentioned in the export data
- // to prevent side channels.
-
- // Optionally serialize/deserialize fact
- // to verify that it works across address spaces
- // and is deterministic.
- if serialize {
- var err error
- fact, err = codeFact(fact)
- if err != nil {
- act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act)
- }
- }
-
- factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.pkg.Path(), fact)
- act.packageFacts[key] = fact
- }
-}
-
-// codeFact encodes then decodes a fact,
-// just to exercise that logic.
-func codeFact(fact analysis.Fact) (analysis.Fact, error) {
- // We encode facts one at a time.
- // A real modular driver would emit all facts
- // into one encoder to improve gob efficiency.
- var buf bytes.Buffer
- if err := gob.NewEncoder(&buf).Encode(fact); err != nil {
- return nil, err
- }
-
- // Encode it twice and assert that we get the same bits.
- // This helps detect nondeterministic Gob encoding (e.g. of maps).
- var buf2 bytes.Buffer
- if err := gob.NewEncoder(&buf2).Encode(fact); err != nil {
- return nil, err
- }
- if !bytes.Equal(buf.Bytes(), buf2.Bytes()) {
- return nil, fmt.Errorf("encoding of %T fact is nondeterministic", fact)
- }
-
- newFact := reflect.New(reflect.TypeOf(fact).Elem()).Interface().(analysis.Fact)
- if err := gob.NewDecoder(&buf).Decode(newFact); err != nil {
- return nil, err
- }
- return newFact, nil
-}
-
-// exportedFrom reports whether obj may be visible to a package that imports pkg.
-// This includes not just the exported members of pkg, but also unexported
-// constants, types, fields, and methods, perhaps belonging to other packages,
-// that find there way into the API.
-// This is an over-approximation of the more accurate approach used by
-// gc export data, which walks the type graph, but it's much simpler.
-//
-// TODO(adonovan): do more accurate filtering by walking the type graph.
-func exportedFrom(obj types.Object, pkg *types.Package) bool {
- switch obj := obj.(type) {
- case *types.Func:
- return obj.Exported() && obj.Pkg() == pkg ||
- obj.Type().(*types.Signature).Recv() != nil
- case *types.Var:
- return obj.Exported() && obj.Pkg() == pkg ||
- obj.IsField()
- case *types.TypeName, *types.Const:
- return true
- }
- return false // Nil, Builtin, Label, or PkgName
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_loadingpackage.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_loadingpackage.go
index c54357eb67..fca4b8c3ad 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_loadingpackage.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_loadingpackage.go
@@ -4,11 +4,13 @@ import (
"errors"
"fmt"
"go/ast"
+ "go/build"
"go/parser"
"go/scanner"
"go/types"
"os"
"reflect"
+ "strings"
"sync"
"sync/atomic"
@@ -16,6 +18,7 @@ import (
"golang.org/x/tools/go/packages"
"github.com/golangci/golangci-lint/pkg/goanalysis/load"
+ "github.com/golangci/golangci-lint/pkg/goutil"
"github.com/golangci/golangci-lint/pkg/logutils"
)
@@ -64,7 +67,7 @@ func (lp *loadingPackage) analyze(loadMode LoadMode, loadSem chan struct{}) {
// Unblock depending on actions and propagate error.
for _, act := range lp.actions {
close(act.analysisDoneCh)
- act.err = werr
+ act.Err = werr
}
return
}
@@ -122,13 +125,14 @@ func (lp *loadingPackage) loadFromSource(loadMode LoadMode) error {
pkg.IllTyped = true
pkg.TypesInfo = &types.Info{
- Types: make(map[ast.Expr]types.TypeAndValue),
- Instances: make(map[*ast.Ident]types.Instance),
- Defs: make(map[*ast.Ident]types.Object),
- Uses: make(map[*ast.Ident]types.Object),
- Implicits: make(map[ast.Node]types.Object),
- Scopes: make(map[ast.Node]*types.Scope),
- Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Instances: make(map[*ast.Ident]types.Instance),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ Scopes: make(map[ast.Node]*types.Scope),
+ FileVersions: make(map[*ast.File]string),
}
importer := func(path string) (*types.Package, error) {
@@ -150,12 +154,27 @@ func (lp *loadingPackage) loadFromSource(loadMode LoadMode) error {
}
return imp.Types, nil
}
+
+ var goVersion string
+ if pkg.Module != nil && pkg.Module.GoVersion != "" {
+ goVersion = "go" + strings.TrimPrefix(pkg.Module.GoVersion, "go")
+ } else {
+ var err error
+ goVersion, err = goutil.CleanRuntimeVersion()
+ if err != nil {
+ return err
+ }
+ }
+
tc := &types.Config{
Importer: importerFunc(importer),
Error: func(err error) {
pkg.Errors = append(pkg.Errors, lp.convertError(err)...)
},
+ GoVersion: goVersion,
+ Sizes: types.SizesFor(build.Default.Compiler, build.Default.GOARCH),
}
+
_ = types.NewChecker(tc, pkg.Fset, pkg.Types, pkg.TypesInfo).Files(pkg.Syntax)
// Don't handle error here: errors are adding by tc.Error function.
@@ -345,12 +364,12 @@ func (lp *loadingPackage) decUse(canClearTypes bool) {
pass.ImportPackageFact = nil
pass.ExportPackageFact = nil
act.pass = nil
- act.deps = nil
- if act.result != nil {
+ act.Deps = nil
+ if act.Result != nil {
if isMemoryDebug {
- debugf("%s: decUse: nilling act result of size %d bytes", act, sizeOfValueTreeBytes(act.result))
+ debugf("%s: decUse: nilling act result of size %d bytes", act, sizeOfValueTreeBytes(act.Result))
}
- act.result = nil
+ act.Result = nil
}
}
@@ -381,7 +400,7 @@ func (lp *loadingPackage) decUse(canClearTypes bool) {
for _, act := range lp.actions {
if !lp.isInitial {
- act.pkg = nil
+ act.Package = nil
}
act.packageFacts = nil
act.objectFacts = nil
@@ -470,7 +489,7 @@ func sizeOfReflectValueTreeBytes(rv reflect.Value, visitedPtrs map[uintptr]struc
return sizeOfReflectValueTreeBytes(rv.Elem(), visitedPtrs)
case reflect.Struct:
ret := 0
- for i := 0; i < rv.NumField(); i++ {
+ for i := range rv.NumField() {
ret += sizeOfReflectValueTreeBytes(rv.Field(i), visitedPtrs)
}
return ret
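
loadFromSource now pins the language version on the type-checker, taking it from the module's go directive when present and falling back to the runtime version. A standalone sketch of type-checking a snippet with an explicit GoVersion (the version string here is only an illustration):

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
)

func main() {
	src := `package p

func Max(a, b int) int {
	if a > b {
		return a
	}
	return b
}
`
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}

	conf := types.Config{
		Importer:  importer.Default(),
		GoVersion: "go1.21", // like the loader: "go" + the module's go directive
		Error: func(err error) {
			fmt.Println("type error:", err)
		},
	}

	pkg, err := conf.Check("p", fset, []*ast.File{file}, nil)
	if err != nil {
		fmt.Println("check failed:", err)
		return
	}
	fmt.Println("type-checked package:", pkg.Name())
}
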
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners.go
index c02d33b797..3a9a35dec1 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners.go
@@ -2,17 +2,12 @@ package goanalysis
import (
"fmt"
- "runtime"
- "sort"
+ "go/token"
"strings"
- "sync"
- "sync/atomic"
- "time"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages"
- "github.com/golangci/golangci-lint/internal/pkgcache"
"github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors"
"github.com/golangci/golangci-lint/pkg/lint/linter"
"github.com/golangci/golangci-lint/pkg/logutils"
@@ -88,6 +83,7 @@ func runAnalyzers(cfg runAnalyzersConfig, lintCtx *linter.Context) ([]result.Iss
func buildIssues(diags []Diagnostic, linterNameBuilder func(diag *Diagnostic) string) []result.Issue {
var issues []result.Issue
+
for i := range diags {
diag := &diags[i]
linterName := linterNameBuilder(diag)
@@ -99,176 +95,55 @@ func buildIssues(diags []Diagnostic, linterNameBuilder func(diag *Diagnostic) st
text = fmt.Sprintf("%s: %s", diag.Analyzer.Name, diag.Message)
}
- issues = append(issues, result.Issue{
- FromLinter: linterName,
- Text: text,
- Pos: diag.Position,
- Pkg: diag.Pkg,
- })
+ var suggestedFixes []analysis.SuggestedFix
- if len(diag.Related) > 0 {
- for _, info := range diag.Related {
- issues = append(issues, result.Issue{
- FromLinter: linterName,
- Text: fmt.Sprintf("%s(related information): %s", diag.Analyzer.Name, info.Message),
- Pos: diag.Pkg.Fset.Position(info.Pos),
- Pkg: diag.Pkg,
- })
+ for _, sf := range diag.SuggestedFixes {
+ // Skip suggested fixes on cgo files.
+ // The related error is: "diff has out-of-bounds edits"
+ // This is a temporary workaround.
+ if !strings.HasSuffix(diag.File.Name(), ".go") {
+ continue
}
- }
- }
- return issues
-}
-
-func getIssuesCacheKey(analyzers []*analysis.Analyzer) string {
- return "lint/result:" + analyzersHashID(analyzers)
-}
-func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.Package]bool,
- issues []result.Issue, lintCtx *linter.Context, analyzers []*analysis.Analyzer,
-) {
- startedAt := time.Now()
- perPkgIssues := map[*packages.Package][]result.Issue{}
- for ind := range issues {
- i := &issues[ind]
- perPkgIssues[i.Pkg] = append(perPkgIssues[i.Pkg], *i)
- }
+ nsf := analysis.SuggestedFix{Message: sf.Message}
- savedIssuesCount := int32(0)
- lintResKey := getIssuesCacheKey(analyzers)
+ for _, edit := range sf.TextEdits {
+ end := edit.End
- workerCount := runtime.GOMAXPROCS(-1)
- var wg sync.WaitGroup
- wg.Add(workerCount)
-
- pkgCh := make(chan *packages.Package, len(allPkgs))
- for i := 0; i < workerCount; i++ {
- go func() {
- defer wg.Done()
- for pkg := range pkgCh {
- pkgIssues := perPkgIssues[pkg]
- encodedIssues := make([]EncodingIssue, 0, len(pkgIssues))
- for ind := range pkgIssues {
- i := &pkgIssues[ind]
- encodedIssues = append(encodedIssues, EncodingIssue{
- FromLinter: i.FromLinter,
- Text: i.Text,
- Severity: i.Severity,
- Pos: i.Pos,
- LineRange: i.LineRange,
- Replacement: i.Replacement,
- ExpectNoLint: i.ExpectNoLint,
- ExpectedNoLintLinter: i.ExpectedNoLintLinter,
- })
+ if !end.IsValid() {
+ end = edit.Pos
}
- atomic.AddInt32(&savedIssuesCount, int32(len(encodedIssues)))
- if err := lintCtx.PkgCache.Put(pkg, pkgcache.HashModeNeedAllDeps, lintResKey, encodedIssues); err != nil {
- lintCtx.Log.Infof("Failed to save package %s issues (%d) to cache: %s", pkg, len(pkgIssues), err)
- } else {
- issuesCacheDebugf("Saved package %s issues (%d) to cache", pkg, len(pkgIssues))
- }
+ // To be applied, the positions need to be "adjusted" based on the file.
+ // This is the difference between the "displayed" positions and the "effective" positions.
+ nsf.TextEdits = append(nsf.TextEdits, analysis.TextEdit{
+ Pos: token.Pos(diag.File.Offset(edit.Pos)),
+ End: token.Pos(diag.File.Offset(end)),
+ NewText: edit.NewText,
+ })
}
- }()
- }
- for _, pkg := range allPkgs {
- if pkgsFromCache[pkg] {
- continue
+ suggestedFixes = append(suggestedFixes, nsf)
}
- pkgCh <- pkg
- }
- close(pkgCh)
- wg.Wait()
-
- issuesCacheDebugf("Saved %d issues from %d packages to cache in %s", savedIssuesCount, len(allPkgs), time.Since(startedAt))
-}
-
-func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context,
- analyzers []*analysis.Analyzer,
-) (issuesFromCache []result.Issue, pkgsFromCache map[*packages.Package]bool) {
- startedAt := time.Now()
-
- lintResKey := getIssuesCacheKey(analyzers)
- type cacheRes struct {
- issues []result.Issue
- loadErr error
- }
- pkgToCacheRes := make(map[*packages.Package]*cacheRes, len(pkgs))
- for _, pkg := range pkgs {
- pkgToCacheRes[pkg] = &cacheRes{}
- }
-
- workerCount := runtime.GOMAXPROCS(-1)
- var wg sync.WaitGroup
- wg.Add(workerCount)
-
- pkgCh := make(chan *packages.Package, len(pkgs))
- for i := 0; i < workerCount; i++ {
- go func() {
- defer wg.Done()
- for pkg := range pkgCh {
- var pkgIssues []EncodingIssue
- err := lintCtx.PkgCache.Get(pkg, pkgcache.HashModeNeedAllDeps, lintResKey, &pkgIssues)
- cacheRes := pkgToCacheRes[pkg]
- cacheRes.loadErr = err
- if err != nil {
- continue
- }
- if len(pkgIssues) == 0 {
- continue
- }
+ issues = append(issues, result.Issue{
+ FromLinter: linterName,
+ Text: text,
+ Pos: diag.Position,
+ Pkg: diag.Pkg,
+ SuggestedFixes: suggestedFixes,
+ })
- issues := make([]result.Issue, 0, len(pkgIssues))
- for i := range pkgIssues {
- issue := &pkgIssues[i]
- issues = append(issues, result.Issue{
- FromLinter: issue.FromLinter,
- Text: issue.Text,
- Severity: issue.Severity,
- Pos: issue.Pos,
- LineRange: issue.LineRange,
- Replacement: issue.Replacement,
- Pkg: pkg,
- ExpectNoLint: issue.ExpectNoLint,
- ExpectedNoLintLinter: issue.ExpectedNoLintLinter,
- })
- }
- cacheRes.issues = issues
+ if len(diag.Related) > 0 {
+ for _, info := range diag.Related {
+ issues = append(issues, result.Issue{
+ FromLinter: linterName,
+ Text: fmt.Sprintf("%s(related information): %s", diag.Analyzer.Name, info.Message),
+ Pos: diag.Pkg.Fset.Position(info.Pos),
+ Pkg: diag.Pkg,
+ })
}
- }()
- }
-
- for _, pkg := range pkgs {
- pkgCh <- pkg
- }
- close(pkgCh)
- wg.Wait()
-
- loadedIssuesCount := 0
- pkgsFromCache = map[*packages.Package]bool{}
- for pkg, cacheRes := range pkgToCacheRes {
- if cacheRes.loadErr == nil {
- loadedIssuesCount += len(cacheRes.issues)
- pkgsFromCache[pkg] = true
- issuesFromCache = append(issuesFromCache, cacheRes.issues...)
- issuesCacheDebugf("Loaded package %s issues (%d) from cache", pkg, len(cacheRes.issues))
- } else {
- issuesCacheDebugf("Didn't load package %s issues from cache: %s", pkg, cacheRes.loadErr)
}
}
- issuesCacheDebugf("Loaded %d issues from cache in %s, analyzing %d/%d packages",
- loadedIssuesCount, time.Since(startedAt), len(pkgs)-len(pkgsFromCache), len(pkgs))
- return issuesFromCache, pkgsFromCache
-}
-
-func analyzersHashID(analyzers []*analysis.Analyzer) string {
- names := make([]string, 0, len(analyzers))
- for _, a := range analyzers {
- names = append(names, a.Name)
- }
-
- sort.Strings(names)
- return strings.Join(names, ",")
+ return issues
}
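
The buildIssues change above converts each suggested edit's displayed positions into byte offsets via diag.File.Offset before storing them back as token.Pos values. The following standalone sketch (not part of the vendored diff; the file contents and names are made up for illustration) shows the go/token offset lookup that conversion relies on:

package main

import (
	"fmt"
	"go/token"
)

func main() {
	src := "package main\n\nfunc main() {}\n"

	fset := token.NewFileSet()
	f := fset.AddFile("main.go", -1, len(src))
	f.SetLinesForContent([]byte(src))

	// Start of line 3 ("func main() {}") as a token.Pos.
	pos := f.LineStart(3)

	// Offset converts the token.Pos into a 0-based byte offset within the file,
	// which is the kind of "effective" position the suggested-fix edits use.
	offset := f.Offset(pos)
	fmt.Printf("pos=%v offset=%d byte=%q\n", fset.Position(pos), offset, src[offset])
}
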
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners_cache.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners_cache.go
new file mode 100644
index 0000000000..4366155b02
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners_cache.go
@@ -0,0 +1,172 @@
+package goanalysis
+
+import (
+ "runtime"
+ "sort"
+ "strings"
+ "sync"
+ "sync/atomic"
+ "time"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/packages"
+
+ "github.com/golangci/golangci-lint/internal/cache"
+ "github.com/golangci/golangci-lint/pkg/lint/linter"
+ "github.com/golangci/golangci-lint/pkg/result"
+)
+
+func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.Package]bool,
+ issues []result.Issue, lintCtx *linter.Context, analyzers []*analysis.Analyzer,
+) {
+ startedAt := time.Now()
+ perPkgIssues := map[*packages.Package][]result.Issue{}
+ for ind := range issues {
+ i := &issues[ind]
+ perPkgIssues[i.Pkg] = append(perPkgIssues[i.Pkg], *i)
+ }
+
+ var savedIssuesCount int64 = 0
+ lintResKey := getIssuesCacheKey(analyzers)
+
+ workerCount := runtime.GOMAXPROCS(-1)
+ var wg sync.WaitGroup
+ wg.Add(workerCount)
+
+ pkgCh := make(chan *packages.Package, len(allPkgs))
+ for i := 0; i < workerCount; i++ {
+ go func() {
+ defer wg.Done()
+ for pkg := range pkgCh {
+ pkgIssues := perPkgIssues[pkg]
+ encodedIssues := make([]EncodingIssue, 0, len(pkgIssues))
+ for ind := range pkgIssues {
+ i := &pkgIssues[ind]
+ encodedIssues = append(encodedIssues, EncodingIssue{
+ FromLinter: i.FromLinter,
+ Text: i.Text,
+ Severity: i.Severity,
+ Pos: i.Pos,
+ LineRange: i.LineRange,
+ SuggestedFixes: i.SuggestedFixes,
+ ExpectNoLint: i.ExpectNoLint,
+ ExpectedNoLintLinter: i.ExpectedNoLintLinter,
+ })
+ }
+
+ atomic.AddInt64(&savedIssuesCount, int64(len(encodedIssues)))
+ if err := lintCtx.PkgCache.Put(pkg, cache.HashModeNeedAllDeps, lintResKey, encodedIssues); err != nil {
+ lintCtx.Log.Infof("Failed to save package %s issues (%d) to cache: %s", pkg, len(pkgIssues), err)
+ } else {
+ issuesCacheDebugf("Saved package %s issues (%d) to cache", pkg, len(pkgIssues))
+ }
+ }
+ }()
+ }
+
+ for _, pkg := range allPkgs {
+ if pkgsFromCache[pkg] {
+ continue
+ }
+
+ pkgCh <- pkg
+ }
+ close(pkgCh)
+ wg.Wait()
+
+ lintCtx.PkgCache.Close()
+
+ issuesCacheDebugf("Saved %d issues from %d packages to cache in %s", savedIssuesCount, len(allPkgs), time.Since(startedAt))
+}
+
+func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context,
+ analyzers []*analysis.Analyzer,
+) (issuesFromCache []result.Issue, pkgsFromCache map[*packages.Package]bool) {
+ startedAt := time.Now()
+
+ lintResKey := getIssuesCacheKey(analyzers)
+ type cacheRes struct {
+ issues []result.Issue
+ loadErr error
+ }
+ pkgToCacheRes := make(map[*packages.Package]*cacheRes, len(pkgs))
+ for _, pkg := range pkgs {
+ pkgToCacheRes[pkg] = &cacheRes{}
+ }
+
+ workerCount := runtime.GOMAXPROCS(-1)
+ var wg sync.WaitGroup
+ wg.Add(workerCount)
+
+ pkgCh := make(chan *packages.Package, len(pkgs))
+ for range workerCount {
+ go func() {
+ defer wg.Done()
+ for pkg := range pkgCh {
+ var pkgIssues []EncodingIssue
+ err := lintCtx.PkgCache.Get(pkg, cache.HashModeNeedAllDeps, lintResKey, &pkgIssues)
+ cacheRes := pkgToCacheRes[pkg]
+ cacheRes.loadErr = err
+ if err != nil {
+ continue
+ }
+ if len(pkgIssues) == 0 {
+ continue
+ }
+
+ issues := make([]result.Issue, 0, len(pkgIssues))
+ for i := range pkgIssues {
+ issue := &pkgIssues[i]
+ issues = append(issues, result.Issue{
+ FromLinter: issue.FromLinter,
+ Text: issue.Text,
+ Severity: issue.Severity,
+ Pos: issue.Pos,
+ LineRange: issue.LineRange,
+ SuggestedFixes: issue.SuggestedFixes,
+ Pkg: pkg,
+ ExpectNoLint: issue.ExpectNoLint,
+ ExpectedNoLintLinter: issue.ExpectedNoLintLinter,
+ })
+ }
+ cacheRes.issues = issues
+ }
+ }()
+ }
+
+ for _, pkg := range pkgs {
+ pkgCh <- pkg
+ }
+ close(pkgCh)
+ wg.Wait()
+
+ loadedIssuesCount := 0
+ pkgsFromCache = map[*packages.Package]bool{}
+ for pkg, cacheRes := range pkgToCacheRes {
+ if cacheRes.loadErr == nil {
+ loadedIssuesCount += len(cacheRes.issues)
+ pkgsFromCache[pkg] = true
+ issuesFromCache = append(issuesFromCache, cacheRes.issues...)
+ issuesCacheDebugf("Loaded package %s issues (%d) from cache", pkg, len(cacheRes.issues))
+ } else {
+ issuesCacheDebugf("Didn't load package %s issues from cache: %s", pkg, cacheRes.loadErr)
+ }
+ }
+ issuesCacheDebugf("Loaded %d issues from cache in %s, analyzing %d/%d packages",
+ loadedIssuesCount, time.Since(startedAt), len(pkgs)-len(pkgsFromCache), len(pkgs))
+ return issuesFromCache, pkgsFromCache
+}
+
+func getIssuesCacheKey(analyzers []*analysis.Analyzer) string {
+ return "lint/result:" + analyzersHashID(analyzers)
+}
+
+func analyzersHashID(analyzers []*analysis.Analyzer) string {
+ names := make([]string, 0, len(analyzers))
+ for _, a := range analyzers {
+ names = append(names, a.Name)
+ }
+
+ sort.Strings(names)
+ return strings.Join(names, ",")
+}
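
saveIssuesToCache and loadIssuesFromCache above fan work out to GOMAXPROCS workers that drain a buffered channel of packages and join on a WaitGroup. Below is a minimal standalone sketch of that worker-pool pattern, with a no-op body standing in for the cache calls (illustrative only, not part of the vendored diff):

package main

import (
	"fmt"
	"runtime"
	"sync"
	"sync/atomic"
)

func main() {
	pkgs := []string{"pkg/a", "pkg/b", "pkg/c", "pkg/d"}

	workerCount := runtime.GOMAXPROCS(-1)
	var wg sync.WaitGroup
	wg.Add(workerCount)

	// Buffered so every package can be queued before any worker finishes.
	pkgCh := make(chan string, len(pkgs))

	var processed int64
	for i := 0; i < workerCount; i++ {
		go func() {
			defer wg.Done()
			for pkg := range pkgCh {
				// Stand-in for a cache Put/Get on the package.
				_ = pkg
				atomic.AddInt64(&processed, 1)
			}
		}()
	}

	for _, pkg := range pkgs {
		pkgCh <- pkg
	}
	close(pkgCh)
	wg.Wait()

	fmt.Printf("processed %d packages with %d workers\n", processed, workerCount)
}
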
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goformatters/analyzer.go b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/analyzer.go
new file mode 100644
index 0000000000..c0ea66e7e6
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/analyzer.go
@@ -0,0 +1,55 @@
+package goformatters
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "path/filepath"
+
+ "github.com/rogpeppe/go-internal/diff"
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/golangci/golangci-lint/pkg/goanalysis"
+ "github.com/golangci/golangci-lint/pkg/goformatters/internal"
+ "github.com/golangci/golangci-lint/pkg/logutils"
+)
+
+// NewAnalyzer converts a [Formatter] to an [analysis.Analyzer].
+func NewAnalyzer(logger logutils.Log, doc string, formatter Formatter) *analysis.Analyzer {
+ return &analysis.Analyzer{
+ Name: formatter.Name(),
+ Doc: doc,
+ Run: func(pass *analysis.Pass) (any, error) {
+ for _, file := range pass.Files {
+ position, isGoFile := goanalysis.GetGoFilePosition(pass, file)
+ if !isGoFile {
+ continue
+ }
+
+ input, err := os.ReadFile(position.Filename)
+ if err != nil {
+ return nil, fmt.Errorf("unable to open file %s: %w", position.Filename, err)
+ }
+
+ output, err := formatter.Format(position.Filename, input)
+ if err != nil {
+ return nil, fmt.Errorf("error while running %s: %w", formatter.Name(), err)
+ }
+
+ if !bytes.Equal(input, output) {
+ newName := filepath.ToSlash(position.Filename)
+ oldName := newName + ".orig"
+
+ patch := diff.Diff(oldName, input, newName, output)
+
+ err = internal.ExtractDiagnosticFromPatch(pass, file, patch, logger)
+ if err != nil {
+ return nil, fmt.Errorf("can't extract issues from %s diff output %q: %w", formatter.Name(), patch, err)
+ }
+ }
+ }
+
+ return nil, nil
+ },
+ }
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goformatters/formatters.go b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/formatters.go
new file mode 100644
index 0000000000..c8953ad3bf
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/formatters.go
@@ -0,0 +1,6 @@
+package goformatters
+
+type Formatter interface {
+ Name() string
+ Format(filename string, src []byte) ([]byte, error)
+}
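
This small Formatter interface is the seam that the gofmt, gofumpt, goimports, and gci formatters below plug into. Here is a hedged standalone sketch of a trivial implementation, with the interface re-declared locally purely for illustration (the toy formatter is not part of the vendored code):

package main

import (
	"bytes"
	"fmt"
)

// Local re-declaration of the vendored interface, for a standalone example only.
type Formatter interface {
	Name() string
	Format(filename string, src []byte) ([]byte, error)
}

// trimTrailingSpace is a toy formatter that strips trailing blanks from each line.
type trimTrailingSpace struct{}

func (trimTrailingSpace) Name() string { return "trimtrailingspace" }

func (trimTrailingSpace) Format(_ string, src []byte) ([]byte, error) {
	lines := bytes.Split(src, []byte("\n"))
	for i, line := range lines {
		lines[i] = bytes.TrimRight(line, " \t")
	}
	return bytes.Join(lines, []byte("\n")), nil
}

func main() {
	var f Formatter = trimTrailingSpace{}

	out, err := f.Format("example.go", []byte("package main \t\n"))
	if err != nil {
		panic(err)
	}
	fmt.Printf("%s -> %q\n", f.Name(), out)
}
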
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/gci.go b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/gci.go
new file mode 100644
index 0000000000..f28b5b98a9
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/gci.go
@@ -0,0 +1,71 @@
+package gci
+
+import (
+ "context"
+ "fmt"
+
+ gcicfg "github.com/daixiang0/gci/pkg/config"
+ "github.com/daixiang0/gci/pkg/gci"
+ "github.com/daixiang0/gci/pkg/log"
+ "github.com/ldez/grignotin/gomod"
+
+ "github.com/golangci/golangci-lint/pkg/config"
+ gcicfgi "github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/config"
+ "github.com/golangci/golangci-lint/pkg/goformatters/internal"
+)
+
+const Name = "gci"
+
+type Formatter struct {
+ config *gcicfg.Config
+}
+
+func New(settings *config.GciSettings) (*Formatter, error) {
+ log.InitLogger()
+ _ = log.L().Sync()
+
+ modPath, err := gomod.GetModulePath(context.Background())
+ if err != nil {
+ internal.FormatterLogger.Errorf("gci: %v", err)
+ }
+
+ cfg := gcicfgi.YamlConfig{
+ Cfg: gcicfg.BoolConfig{
+ NoInlineComments: settings.NoInlineComments,
+ NoPrefixComments: settings.NoPrefixComments,
+ SkipGenerated: settings.SkipGenerated,
+ CustomOrder: settings.CustomOrder,
+ NoLexOrder: settings.NoLexOrder,
+ },
+ SectionStrings: settings.Sections,
+ ModPath: modPath,
+ }
+
+ if settings.LocalPrefixes != "" {
+ cfg.SectionStrings = []string{
+ "standard",
+ "default",
+ fmt.Sprintf("prefix(%s)", settings.LocalPrefixes),
+ }
+ }
+
+ parsedCfg, err := cfg.Parse()
+ if err != nil {
+ return nil, err
+ }
+
+ return &Formatter{config: &gcicfg.Config{
+ BoolConfig: parsedCfg.BoolConfig,
+ Sections: parsedCfg.Sections,
+ SectionSeparators: parsedCfg.SectionSeparators,
+ }}, nil
+}
+
+func (*Formatter) Name() string {
+ return Name
+}
+
+func (f *Formatter) Format(filename string, src []byte) ([]byte, error) {
+ _, formatted, err := gci.LoadFormat(src, filename, *f.config)
+ return formatted, err
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/LICENSE b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/LICENSE
new file mode 100644
index 0000000000..e1292f7389
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/LICENSE
@@ -0,0 +1,29 @@
+BSD 3-Clause License
+
+Copyright (c) 2020, Xiang Dai
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/config/config.go b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/config/config.go
new file mode 100644
index 0000000000..8140d96bf8
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/config/config.go
@@ -0,0 +1,107 @@
+package config
+
+import (
+ "sort"
+ "strings"
+
+ "gopkg.in/yaml.v3"
+
+ "github.com/daixiang0/gci/pkg/config"
+ "github.com/daixiang0/gci/pkg/section"
+ sectioni "github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/section"
+)
+
+var defaultOrder = map[string]int{
+ section.StandardType: 0,
+ section.DefaultType: 1,
+ section.CustomType: 2,
+ section.BlankType: 3,
+ section.DotType: 4,
+ section.AliasType: 5,
+ section.LocalModuleType: 6,
+}
+
+type Config struct {
+ config.BoolConfig
+ Sections section.SectionList
+ SectionSeparators section.SectionList
+}
+
+type YamlConfig struct {
+ Cfg config.BoolConfig `yaml:",inline"`
+ SectionStrings []string `yaml:"sections"`
+ SectionSeparatorStrings []string `yaml:"sectionseparators"`
+
+ // For historical reasons, golangci-lint needs an Analyzer to run, and gci adds an Analyzer layer to integrate.
+ // The ModPath param is only set from analyzer.go; there is no need to set it anywhere else.
+ ModPath string `yaml:"-"`
+}
+
+func (g YamlConfig) Parse() (*Config, error) {
+ var err error
+
+ sections, err := sectioni.Parse(g.SectionStrings)
+ if err != nil {
+ return nil, err
+ }
+ if sections == nil {
+ sections = sectioni.DefaultSections()
+ }
+ if err := configureSections(sections, g.ModPath); err != nil {
+ return nil, err
+ }
+
+ // if the custom order is not enabled, sort sections into the default order
+ if !g.Cfg.CustomOrder {
+ sort.Slice(sections, func(i, j int) bool {
+ sectionI, sectionJ := sections[i].Type(), sections[j].Type()
+
+ if g.Cfg.NoLexOrder || strings.Compare(sectionI, sectionJ) != 0 {
+ return defaultOrder[sectionI] < defaultOrder[sectionJ]
+ }
+
+ return strings.Compare(sections[i].String(), sections[j].String()) < 0
+ })
+ }
+
+ sectionSeparators, err := sectioni.Parse(g.SectionSeparatorStrings)
+ if err != nil {
+ return nil, err
+ }
+ if sectionSeparators == nil {
+ sectionSeparators = section.DefaultSectionSeparators()
+ }
+
+ return &Config{g.Cfg, sections, sectionSeparators}, nil
+}
+
+func ParseConfig(in string) (*Config, error) {
+ config := YamlConfig{}
+
+ err := yaml.Unmarshal([]byte(in), &config)
+ if err != nil {
+ return nil, err
+ }
+
+ gciCfg, err := config.Parse()
+ if err != nil {
+ return nil, err
+ }
+
+ return gciCfg, nil
+}
+
+// configureSections currently only resolves the Go module path.
+// For historical reasons, golangci-lint needs an Analyzer to run, and gci adds an Analyzer layer to integrate.
+// The path param comes from analyzer.go; all other callers should pass an empty string.
+func configureSections(sections section.SectionList, path string) error {
+ for _, sec := range sections {
+ switch s := sec.(type) {
+ case *section.LocalModule:
+ if err := s.Configure(path); err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/section/parser.go b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/section/parser.go
new file mode 100644
index 0000000000..9662cbd1a7
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/section/parser.go
@@ -0,0 +1,51 @@
+package section
+
+import (
+ "errors"
+ "fmt"
+ "strings"
+
+ "github.com/daixiang0/gci/pkg/section"
+)
+
+func Parse(data []string) (section.SectionList, error) {
+ if len(data) == 0 {
+ return nil, nil
+ }
+
+ var list section.SectionList
+ var errString string
+ for _, d := range data {
+ s := strings.ToLower(d)
+ if len(s) == 0 {
+ return nil, nil
+ }
+
+ if s == "default" {
+ list = append(list, section.Default{})
+ } else if s == "standard" {
+ list = append(list, Standard{})
+ } else if s == "newline" {
+ list = append(list, section.NewLine{})
+ } else if strings.HasPrefix(s, "prefix(") && len(d) > 8 {
+ list = append(list, section.Custom{Prefix: d[7 : len(d)-1]})
+ } else if strings.HasPrefix(s, "commentline(") && len(d) > 13 {
+ list = append(list, section.Custom{Prefix: d[12 : len(d)-1]})
+ } else if s == "dot" {
+ list = append(list, section.Dot{})
+ } else if s == "blank" {
+ list = append(list, section.Blank{})
+ } else if s == "alias" {
+ list = append(list, section.Alias{})
+ } else if s == "localmodule" {
+ // pointer because we need to mutate the section at configuration time
+ list = append(list, &section.LocalModule{})
+ } else {
+ errString += fmt.Sprintf(" %s", s)
+ }
+ }
+ if errString != "" {
+ return nil, errors.New(fmt.Sprintf("invalid params:%s", errString))
+ }
+ return list, nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/section/section.go b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/section/section.go
new file mode 100644
index 0000000000..e9c6632225
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/section/section.go
@@ -0,0 +1,7 @@
+package section
+
+import "github.com/daixiang0/gci/pkg/section"
+
+func DefaultSections() section.SectionList {
+ return section.SectionList{Standard{}, section.Default{}}
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/section/standard.go b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/section/standard.go
new file mode 100644
index 0000000000..26c7e9dc7d
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/section/standard.go
@@ -0,0 +1,30 @@
+package section
+
+import (
+ "github.com/daixiang0/gci/pkg/parse"
+ "github.com/daixiang0/gci/pkg/specificity"
+)
+
+const StandardType = "standard"
+
+type Standard struct{}
+
+func (s Standard) MatchSpecificity(spec *parse.GciImports) specificity.MatchSpecificity {
+ if isStandard(spec.Path) {
+ return specificity.StandardMatch{}
+ }
+ return specificity.MisMatch{}
+}
+
+func (s Standard) String() string {
+ return StandardType
+}
+
+func (s Standard) Type() string {
+ return StandardType
+}
+
+func isStandard(pkg string) bool {
+ _, ok := standardPackages[pkg]
+ return ok
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/section/standard_list.go b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/section/standard_list.go
new file mode 100644
index 0000000000..2fddded70c
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gci/internal/section/standard_list.go
@@ -0,0 +1,182 @@
+package section
+
+// Code generated based on go1.24.0 X:boringcrypto,arenas,synctest. DO NOT EDIT.
+
+var standardPackages = map[string]struct{}{
+ "archive/tar": {},
+ "archive/zip": {},
+ "arena": {},
+ "bufio": {},
+ "bytes": {},
+ "cmp": {},
+ "compress/bzip2": {},
+ "compress/flate": {},
+ "compress/gzip": {},
+ "compress/lzw": {},
+ "compress/zlib": {},
+ "container/heap": {},
+ "container/list": {},
+ "container/ring": {},
+ "context": {},
+ "crypto": {},
+ "crypto/aes": {},
+ "crypto/boring": {},
+ "crypto/cipher": {},
+ "crypto/des": {},
+ "crypto/dsa": {},
+ "crypto/ecdh": {},
+ "crypto/ecdsa": {},
+ "crypto/ed25519": {},
+ "crypto/elliptic": {},
+ "crypto/fips140": {},
+ "crypto/hkdf": {},
+ "crypto/hmac": {},
+ "crypto/md5": {},
+ "crypto/mlkem": {},
+ "crypto/pbkdf2": {},
+ "crypto/rand": {},
+ "crypto/rc4": {},
+ "crypto/rsa": {},
+ "crypto/sha1": {},
+ "crypto/sha256": {},
+ "crypto/sha3": {},
+ "crypto/sha512": {},
+ "crypto/subtle": {},
+ "crypto/tls": {},
+ "crypto/tls/fipsonly": {},
+ "crypto/x509": {},
+ "crypto/x509/pkix": {},
+ "database/sql": {},
+ "database/sql/driver": {},
+ "debug/buildinfo": {},
+ "debug/dwarf": {},
+ "debug/elf": {},
+ "debug/gosym": {},
+ "debug/macho": {},
+ "debug/pe": {},
+ "debug/plan9obj": {},
+ "embed": {},
+ "encoding": {},
+ "encoding/ascii85": {},
+ "encoding/asn1": {},
+ "encoding/base32": {},
+ "encoding/base64": {},
+ "encoding/binary": {},
+ "encoding/csv": {},
+ "encoding/gob": {},
+ "encoding/hex": {},
+ "encoding/json": {},
+ "encoding/pem": {},
+ "encoding/xml": {},
+ "errors": {},
+ "expvar": {},
+ "flag": {},
+ "fmt": {},
+ "go/ast": {},
+ "go/build": {},
+ "go/build/constraint": {},
+ "go/constant": {},
+ "go/doc": {},
+ "go/doc/comment": {},
+ "go/format": {},
+ "go/importer": {},
+ "go/parser": {},
+ "go/printer": {},
+ "go/scanner": {},
+ "go/token": {},
+ "go/types": {},
+ "go/version": {},
+ "hash": {},
+ "hash/adler32": {},
+ "hash/crc32": {},
+ "hash/crc64": {},
+ "hash/fnv": {},
+ "hash/maphash": {},
+ "html": {},
+ "html/template": {},
+ "image": {},
+ "image/color": {},
+ "image/color/palette": {},
+ "image/draw": {},
+ "image/gif": {},
+ "image/jpeg": {},
+ "image/png": {},
+ "index/suffixarray": {},
+ "io": {},
+ "io/fs": {},
+ "io/ioutil": {},
+ "iter": {},
+ "log": {},
+ "log/slog": {},
+ "log/syslog": {},
+ "maps": {},
+ "math": {},
+ "math/big": {},
+ "math/bits": {},
+ "math/cmplx": {},
+ "math/rand": {},
+ "math/rand/v2": {},
+ "mime": {},
+ "mime/multipart": {},
+ "mime/quotedprintable": {},
+ "net": {},
+ "net/http": {},
+ "net/http/cgi": {},
+ "net/http/cookiejar": {},
+ "net/http/fcgi": {},
+ "net/http/httptest": {},
+ "net/http/httptrace": {},
+ "net/http/httputil": {},
+ "net/http/pprof": {},
+ "net/mail": {},
+ "net/netip": {},
+ "net/rpc": {},
+ "net/rpc/jsonrpc": {},
+ "net/smtp": {},
+ "net/textproto": {},
+ "net/url": {},
+ "os": {},
+ "os/exec": {},
+ "os/signal": {},
+ "os/user": {},
+ "path": {},
+ "path/filepath": {},
+ "plugin": {},
+ "reflect": {},
+ "regexp": {},
+ "regexp/syntax": {},
+ "runtime": {},
+ "runtime/cgo": {},
+ "runtime/coverage": {},
+ "runtime/debug": {},
+ "runtime/metrics": {},
+ "runtime/pprof": {},
+ "runtime/race": {},
+ "runtime/trace": {},
+ "slices": {},
+ "sort": {},
+ "strconv": {},
+ "strings": {},
+ "structs": {},
+ "sync": {},
+ "sync/atomic": {},
+ "syscall": {},
+ "testing": {},
+ "testing/fstest": {},
+ "testing/iotest": {},
+ "testing/quick": {},
+ "testing/slogtest": {},
+ "testing/synctest": {},
+ "text/scanner": {},
+ "text/tabwriter": {},
+ "text/template": {},
+ "text/template/parse": {},
+ "time": {},
+ "time/tzdata": {},
+ "unicode": {},
+ "unicode/utf16": {},
+ "unicode/utf8": {},
+ "unique": {},
+ "unsafe": {},
+ "weak": {},
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gofmt/gofmt.go b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gofmt/gofmt.go
new file mode 100644
index 0000000000..9005c751d2
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gofmt/gofmt.go
@@ -0,0 +1,35 @@
+package gofmt
+
+import (
+ "github.com/golangci/gofmt/gofmt"
+
+ "github.com/golangci/golangci-lint/pkg/config"
+)
+
+const Name = "gofmt"
+
+type Formatter struct {
+ options gofmt.Options
+}
+
+func New(settings *config.GoFmtSettings) *Formatter {
+ options := gofmt.Options{}
+
+ if settings != nil {
+ options.NeedSimplify = settings.Simplify
+
+ for _, rule := range settings.RewriteRules {
+ options.RewriteRules = append(options.RewriteRules, gofmt.RewriteRule(rule))
+ }
+ }
+
+ return &Formatter{options: options}
+}
+
+func (*Formatter) Name() string {
+ return Name
+}
+
+func (f *Formatter) Format(filename string, src []byte) ([]byte, error) {
+ return gofmt.Source(filename, src, f.options)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gofumpt/gofumpt.go b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gofumpt/gofumpt.go
new file mode 100644
index 0000000000..7c548a2afd
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/gofumpt/gofumpt.go
@@ -0,0 +1,46 @@
+package gofumpt
+
+import (
+ "strings"
+
+ gofumpt "mvdan.cc/gofumpt/format"
+
+ "github.com/golangci/golangci-lint/pkg/config"
+)
+
+const Name = "gofumpt"
+
+type Formatter struct {
+ options gofumpt.Options
+}
+
+func New(settings *config.GofumptSettings, goVersion string) *Formatter {
+ var options gofumpt.Options
+
+ if settings != nil {
+ options = gofumpt.Options{
+ LangVersion: getLangVersion(goVersion),
+ ModulePath: settings.ModulePath,
+ ExtraRules: settings.ExtraRules,
+ }
+ }
+
+ return &Formatter{options: options}
+}
+
+func (*Formatter) Name() string {
+ return Name
+}
+
+func (f *Formatter) Format(_ string, src []byte) ([]byte, error) {
+ return gofumpt.Source(src, f.options)
+}
+
+func getLangVersion(v string) string {
+ if v == "" {
+ // TODO: defaults to "1.15"; this fallback must be removed in the future (v2).
+ return "go1.15"
+ }
+
+ return "go" + strings.TrimPrefix(v, "go")
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goformatters/goimports/goimports.go b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/goimports/goimports.go
new file mode 100644
index 0000000000..fa0f1fc4f3
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/goimports/goimports.go
@@ -0,0 +1,28 @@
+package goimports
+
+import (
+ "golang.org/x/tools/imports"
+
+ "github.com/golangci/golangci-lint/pkg/config"
+)
+
+const Name = "goimports"
+
+type Formatter struct{}
+
+func New(settings *config.GoImportsSettings) *Formatter {
+ if settings != nil {
+ imports.LocalPrefix = settings.LocalPrefixes
+ }
+
+ return &Formatter{}
+}
+
+func (*Formatter) Name() string {
+ return Name
+}
+
+func (*Formatter) Format(filename string, src []byte) ([]byte, error) {
+ // The `imports.LocalPrefix` (`settings.LocalPrefixes`) is a global var.
+ return imports.Process(filename, src, nil)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goformatters/internal/commons.go b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/internal/commons.go
new file mode 100644
index 0000000000..5320e786b5
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/internal/commons.go
@@ -0,0 +1,6 @@
+package internal
+
+import "github.com/golangci/golangci-lint/pkg/logutils"
+
+// FormatterLogger must be used only when the context logger is not available.
+var FormatterLogger = logutils.NewStderrLog(logutils.DebugKeyFormatter)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goformatters/internal/diff.go b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/internal/diff.go
new file mode 100644
index 0000000000..75d65b73ad
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/internal/diff.go
@@ -0,0 +1,274 @@
+package internal
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "slices"
+ "strings"
+
+ diffpkg "github.com/sourcegraph/go-diff/diff"
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/golangci/golangci-lint/pkg/goanalysis"
+ "github.com/golangci/golangci-lint/pkg/logutils"
+)
+
+type Change struct {
+ From, To int
+ NewLines []string
+}
+
+type diffLineType string
+
+const (
+ diffLineAdded diffLineType = "added"
+ diffLineOriginal diffLineType = "original"
+ diffLineDeleted diffLineType = "deleted"
+)
+
+type diffLine struct {
+ originalNumber int // 1-based original line number
+ typ diffLineType
+ data string // "+" or "-" stripped line
+}
+
+type hunkChangesParser struct {
+ // needed because we merge currently added lines with the last original line
+ lastOriginalLine *diffLine
+
+ // if the first line of the diff is an addition, we save all additions to replacementLinesToPrepend
+ replacementLinesToPrepend []string
+
+ log logutils.Log
+
+ changes []Change
+}
+
+func (p *hunkChangesParser) parse(h *diffpkg.Hunk) []Change {
+ lines := parseDiffLines(h)
+
+ for i := 0; i < len(lines); {
+ line := lines[i]
+
+ if line.typ == diffLineOriginal {
+ p.handleOriginalLine(lines, line, &i)
+ continue
+ }
+
+ var deletedLines []diffLine
+ for ; i < len(lines) && lines[i].typ == diffLineDeleted; i++ {
+ deletedLines = append(deletedLines, lines[i])
+ }
+
+ var addedLines []string
+ for ; i < len(lines) && lines[i].typ == diffLineAdded; i++ {
+ addedLines = append(addedLines, lines[i].data)
+ }
+
+ if len(deletedLines) != 0 {
+ p.handleDeletedLines(deletedLines, addedLines)
+ continue
+ }
+
+ // no deletions, only additions
+ p.handleAddedOnlyLines(addedLines)
+ }
+
+ if len(p.replacementLinesToPrepend) != 0 {
+ p.log.Infof("The diff contains only additions: no original or deleted lines: %#v", lines)
+ return nil
+ }
+
+ return p.changes
+}
+
+func (p *hunkChangesParser) handleOriginalLine(lines []diffLine, line diffLine, i *int) {
+ if len(p.replacementLinesToPrepend) == 0 {
+ p.lastOriginalLine = &line
+ *i++
+ return
+ }
+
+ // check following added lines for the case:
+ // + added line 1
+ // original line
+ // + added line 2
+
+ *i++
+ var followingAddedLines []string
+ for ; *i < len(lines) && lines[*i].typ == diffLineAdded; *i++ {
+ followingAddedLines = append(followingAddedLines, lines[*i].data)
+ }
+
+ change := Change{
+ From: line.originalNumber,
+ To: line.originalNumber,
+ NewLines: slices.Concat(p.replacementLinesToPrepend, []string{line.data}, followingAddedLines),
+ }
+ p.changes = append(p.changes, change)
+
+ p.replacementLinesToPrepend = nil
+ p.lastOriginalLine = &line
+}
+
+func (p *hunkChangesParser) handleDeletedLines(deletedLines []diffLine, addedLines []string) {
+ change := Change{
+ From: deletedLines[0].originalNumber,
+ To: deletedLines[len(deletedLines)-1].originalNumber,
+ }
+
+ switch {
+ case len(addedLines) != 0:
+ change.NewLines = slices.Concat(p.replacementLinesToPrepend, addedLines)
+ p.replacementLinesToPrepend = nil
+
+ case len(p.replacementLinesToPrepend) != 0:
+ // delete-only change with possible prepending
+ change.NewLines = slices.Clone(p.replacementLinesToPrepend)
+ p.replacementLinesToPrepend = nil
+ }
+
+ p.changes = append(p.changes, change)
+}
+
+func (p *hunkChangesParser) handleAddedOnlyLines(addedLines []string) {
+ if p.lastOriginalLine == nil {
+ // the first line is added; the diff looks like:
+ // 1. + ...
+ // 2. - ...
+ // or
+ // 1. + ...
+ // 2. ...
+
+ p.replacementLinesToPrepend = addedLines
+
+ return
+ }
+
+ // add-only change merged into the last original line with possible prepending
+ change := Change{
+ From: p.lastOriginalLine.originalNumber,
+ To: p.lastOriginalLine.originalNumber,
+ NewLines: slices.Concat(p.replacementLinesToPrepend, []string{p.lastOriginalLine.data}, addedLines),
+ }
+
+ p.changes = append(p.changes, change)
+
+ p.replacementLinesToPrepend = nil
+}
+
+func parseDiffLines(h *diffpkg.Hunk) []diffLine {
+ lines := bytes.Split(h.Body, []byte{'\n'})
+
+ currentOriginalLineNumber := int(h.OrigStartLine)
+
+ var diffLines []diffLine
+
+ for i, line := range lines {
+ dl := diffLine{
+ originalNumber: currentOriginalLineNumber,
+ }
+
+ if i == len(lines)-1 && len(line) == 0 {
+ // handle last \n: don't add an empty original line
+ break
+ }
+
+ lineStr := string(line)
+
+ switch {
+ case strings.HasPrefix(lineStr, "-"):
+ dl.typ = diffLineDeleted
+ dl.data = strings.TrimPrefix(lineStr, "-")
+ currentOriginalLineNumber++
+
+ case strings.HasPrefix(lineStr, "+"):
+ dl.typ = diffLineAdded
+ dl.data = strings.TrimPrefix(lineStr, "+")
+
+ default:
+ dl.typ = diffLineOriginal
+ dl.data = strings.TrimPrefix(lineStr, " ")
+ currentOriginalLineNumber++
+ }
+
+ diffLines = append(diffLines, dl)
+ }
+
+ // if > 0, then the original file had a 'No newline at end of file' mark
+ if h.OrigNoNewlineAt > 0 {
+ dl := diffLine{
+ originalNumber: currentOriginalLineNumber + 1,
+ typ: diffLineAdded,
+ data: "",
+ }
+ diffLines = append(diffLines, dl)
+ }
+
+ return diffLines
+}
+
+func ExtractDiagnosticFromPatch(
+ pass *analysis.Pass,
+ file *ast.File,
+ patch []byte,
+ logger logutils.Log,
+) error {
+ diffs, err := diffpkg.ParseMultiFileDiff(patch)
+ if err != nil {
+ return fmt.Errorf("can't parse patch: %w", err)
+ }
+
+ if len(diffs) == 0 {
+ return fmt.Errorf("got no diffs from patch parser: %s", patch)
+ }
+
+ ft := pass.Fset.File(file.Pos())
+
+ adjLine := pass.Fset.PositionFor(file.Pos(), false).Line - pass.Fset.PositionFor(file.Pos(), true).Line
+
+ for _, d := range diffs {
+ if len(d.Hunks) == 0 {
+ logger.Warnf("Got no hunks in diff %+v", d)
+ continue
+ }
+
+ for _, hunk := range d.Hunks {
+ p := hunkChangesParser{log: logger}
+
+ changes := p.parse(hunk)
+
+ for _, change := range changes {
+ pass.Report(toDiagnostic(ft, change, adjLine))
+ }
+ }
+ }
+
+ return nil
+}
+
+func toDiagnostic(ft *token.File, change Change, adjLine int) analysis.Diagnostic {
+ from := change.From + adjLine
+ if from > ft.LineCount() {
+ from = ft.LineCount()
+ }
+
+ start := ft.LineStart(from)
+
+ end := goanalysis.EndOfLinePos(ft, change.To+adjLine)
+
+ return analysis.Diagnostic{
+ Pos: start,
+ End: end,
+ Message: "File is not properly formatted",
+ SuggestedFixes: []analysis.SuggestedFix{{
+ TextEdits: []analysis.TextEdit{{
+ Pos: start,
+ End: end,
+ NewText: []byte(strings.Join(change.NewLines, "\n")),
+ }},
+ }},
+ }
+}
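
parseDiffLines above classifies each hunk body line by its leading '-', '+', or ' ' prefix and advances the original line counter only for deleted and context lines. A simplified standalone sketch of that classification step follows (it ignores the trailing-newline and no-newline-marker handling in the vendored code; the hunk body is invented for illustration):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// A tiny hunk body: one context line, one deletion, one addition, one context line.
	hunkBody := " a\n-b\n+B\n c"
	origLine := 10 // 1-based original line number at which the hunk starts

	for _, raw := range strings.Split(hunkBody, "\n") {
		var typ, data string
		switch {
		case strings.HasPrefix(raw, "-"):
			typ, data = "deleted", strings.TrimPrefix(raw, "-")
		case strings.HasPrefix(raw, "+"):
			typ, data = "added", strings.TrimPrefix(raw, "+")
		default:
			typ, data = "original", strings.TrimPrefix(raw, " ")
		}

		fmt.Printf("orig=%d type=%-8s data=%q\n", origLine, typ, data)

		// Only deleted and context lines consume an original line number.
		if typ != "added" {
			origLine++
		}
	}
}
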
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goformatters/meta_formatter.go b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/meta_formatter.go
new file mode 100644
index 0000000000..d66878c7ab
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goformatters/meta_formatter.go
@@ -0,0 +1,74 @@
+package goformatters
+
+import (
+ "bytes"
+ "fmt"
+ "go/format"
+
+ "github.com/golangci/golangci-lint/pkg/config"
+ "github.com/golangci/golangci-lint/pkg/goformatters/gci"
+ "github.com/golangci/golangci-lint/pkg/goformatters/gofmt"
+ "github.com/golangci/golangci-lint/pkg/goformatters/gofumpt"
+ "github.com/golangci/golangci-lint/pkg/goformatters/goimports"
+ "github.com/golangci/golangci-lint/pkg/lint/linter"
+ "github.com/golangci/golangci-lint/pkg/logutils"
+)
+
+type MetaFormatter struct {
+ log logutils.Log
+ formatters []Formatter
+}
+
+func NewMetaFormatter(log logutils.Log, cfg *config.Config, enabledLinters map[string]*linter.Config) (*MetaFormatter, error) {
+ m := &MetaFormatter{log: log}
+
+ if _, ok := enabledLinters[gofmt.Name]; ok {
+ m.formatters = append(m.formatters, gofmt.New(&cfg.LintersSettings.Gofmt))
+ }
+
+ if _, ok := enabledLinters[gofumpt.Name]; ok {
+ m.formatters = append(m.formatters, gofumpt.New(&cfg.LintersSettings.Gofumpt, cfg.Run.Go))
+ }
+
+ if _, ok := enabledLinters[goimports.Name]; ok {
+ m.formatters = append(m.formatters, goimports.New(&cfg.LintersSettings.Goimports))
+ }
+
+ // gci is last because its only goal is to handle imports.
+ if _, ok := enabledLinters[gci.Name]; ok {
+ formatter, err := gci.New(&cfg.LintersSettings.Gci)
+ if err != nil {
+ return nil, fmt.Errorf("gci: creating formatter: %w", err)
+ }
+
+ m.formatters = append(m.formatters, formatter)
+ }
+
+ return m, nil
+}
+
+func (m *MetaFormatter) Format(filename string, src []byte) []byte {
+ if len(m.formatters) == 0 {
+ data, err := format.Source(src)
+ if err != nil {
+ m.log.Warnf("(fmt) formatting file %s: %v", filename, err)
+ return src
+ }
+
+ return data
+ }
+
+ data := bytes.Clone(src)
+
+ for _, formatter := range m.formatters {
+ formatted, err := formatter.Format(filename, data)
+ if err != nil {
+ m.log.Warnf("(%s) formatting file %s: %v", formatter.Name(), filename, err)
+ continue
+ }
+
+ data = formatted
+ }
+
+ return data
+}
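
MetaFormatter.Format above runs the file through each enabled formatter in order, keeping the previous output whenever a formatter fails and falling back to go/format when none are enabled. A hedged standalone sketch of that chaining behaviour, using plain functions in place of the vendored Formatter type (names and steps are invented for illustration):

package main

import (
	"fmt"
	"strings"
)

// step stands in for a Formatter: it either rewrites the source or fails.
type step func(src string) (string, error)

// chain applies each step in order; when a step fails it is skipped and the
// previous output is kept, mirroring how MetaFormatter.Format behaves.
func chain(src string, steps []step) string {
	out := src
	for i, s := range steps {
		next, err := s(out)
		if err != nil {
			fmt.Printf("step %d failed, keeping previous output: %v\n", i, err)
			continue
		}
		out = next
	}
	return out
}

func main() {
	steps := []step{
		func(s string) (string, error) { return strings.TrimSpace(s), nil },
		func(string) (string, error) { return "", fmt.Errorf("simulated formatter error") },
		func(s string) (string, error) { return s + "\n", nil },
	}

	fmt.Printf("%q\n", chain("  package main  ", steps))
}
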
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/asasalint/asasalint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/asasalint/asasalint.go
index 653a2d5142..ccc58fee40 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/asasalint/asasalint.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/asasalint/asasalint.go
@@ -9,12 +9,12 @@ import (
"github.com/golangci/golangci-lint/pkg/golinters/internal"
)
-func New(setting *config.AsasalintSettings) *goanalysis.Linter {
+func New(settings *config.AsasalintSettings) *goanalysis.Linter {
cfg := asasalint.LinterSetting{}
- if setting != nil {
- cfg.Exclude = setting.Exclude
- cfg.NoBuiltinExclusions = !setting.UseBuiltinExclusions
- cfg.IgnoreTest = setting.IgnoreTest
+ if settings != nil {
+ cfg.Exclude = settings.Exclude
+ cfg.NoBuiltinExclusions = !settings.UseBuiltinExclusions
+ cfg.IgnoreTest = settings.IgnoreTest
}
a, err := asasalint.NewAnalyzer(cfg)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk/bidichk.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk/bidichk.go
index 4ced901e8f..c6315965c4 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk/bidichk.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk/bidichk.go
@@ -10,42 +10,42 @@ import (
"github.com/golangci/golangci-lint/pkg/goanalysis"
)
-func New(cfg *config.BiDiChkSettings) *goanalysis.Linter {
+func New(settings *config.BiDiChkSettings) *goanalysis.Linter {
a := bidichk.NewAnalyzer()
- cfgMap := map[string]map[string]any{}
- if cfg != nil {
+ cfg := map[string]map[string]any{}
+ if settings != nil {
var opts []string
- if cfg.LeftToRightEmbedding {
+ if settings.LeftToRightEmbedding {
opts = append(opts, "LEFT-TO-RIGHT-EMBEDDING")
}
- if cfg.RightToLeftEmbedding {
+ if settings.RightToLeftEmbedding {
opts = append(opts, "RIGHT-TO-LEFT-EMBEDDING")
}
- if cfg.PopDirectionalFormatting {
+ if settings.PopDirectionalFormatting {
opts = append(opts, "POP-DIRECTIONAL-FORMATTING")
}
- if cfg.LeftToRightOverride {
+ if settings.LeftToRightOverride {
opts = append(opts, "LEFT-TO-RIGHT-OVERRIDE")
}
- if cfg.RightToLeftOverride {
+ if settings.RightToLeftOverride {
opts = append(opts, "RIGHT-TO-LEFT-OVERRIDE")
}
- if cfg.LeftToRightIsolate {
+ if settings.LeftToRightIsolate {
opts = append(opts, "LEFT-TO-RIGHT-ISOLATE")
}
- if cfg.RightToLeftIsolate {
+ if settings.RightToLeftIsolate {
opts = append(opts, "RIGHT-TO-LEFT-ISOLATE")
}
- if cfg.FirstStrongIsolate {
+ if settings.FirstStrongIsolate {
opts = append(opts, "FIRST-STRONG-ISOLATE")
}
- if cfg.PopDirectionalIsolate {
+ if settings.PopDirectionalIsolate {
opts = append(opts, "POP-DIRECTIONAL-ISOLATE")
}
- cfgMap[a.Name] = map[string]any{
+ cfg[a.Name] = map[string]any{
"disallowed-runes": strings.Join(opts, ","),
}
}
@@ -54,6 +54,6 @@ func New(cfg *config.BiDiChkSettings) *goanalysis.Linter {
a.Name,
"Checks for dangerous unicode character sequences",
[]*analysis.Analyzer{a},
- cfgMap,
+ cfg,
).WithLoadMode(goanalysis.LoadModeSyntax)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose/bodyclose.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose/bodyclose.go
index f39814edc5..c520e88db3 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose/bodyclose.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose/bodyclose.go
@@ -12,7 +12,7 @@ func New() *goanalysis.Linter {
return goanalysis.NewLinter(
a.Name,
- "checks whether HTTP response body is closed successfully",
+ a.Doc,
[]*analysis.Analyzer{a},
nil,
).WithLoadMode(goanalysis.LoadModeTypesInfo)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop/cyclop.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop/cyclop.go
index eb8c0577a5..772b5601ca 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop/cyclop.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop/cyclop.go
@@ -30,8 +30,8 @@ func New(settings *config.Cyclop) *goanalysis.Linter {
return goanalysis.NewLinter(
a.Name,
- "checks function and package cyclomatic complexity",
+ a.Doc,
[]*analysis.Analyzer{a},
cfg,
- ).WithLoadMode(goanalysis.LoadModeTypesInfo)
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard/depguard.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard/depguard.go
index d2aedf2524..1944059996 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard/depguard.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard/depguard.go
@@ -1,22 +1,30 @@
package depguard
import (
+ "strings"
+
"github.com/OpenPeeDeeP/depguard/v2"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
+ "github.com/golangci/golangci-lint/pkg/golinters/internal"
"github.com/golangci/golangci-lint/pkg/lint/linter"
)
-func New(settings *config.DepGuardSettings) *goanalysis.Linter {
+func New(settings *config.DepGuardSettings, basePath string) *goanalysis.Linter {
conf := depguard.LinterSettings{}
if settings != nil {
for s, rule := range settings.Rules {
+ var extendedPatterns []string
+ for _, file := range rule.Files {
+ extendedPatterns = append(extendedPatterns, strings.ReplaceAll(file, internal.PlaceholderBasePath, basePath))
+ }
+
list := &depguard.List{
ListMode: rule.ListMode,
- Files: rule.Files,
+ Files: extendedPatterns,
Allow: rule.Allow,
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled/dogsled.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled/dogsled.go
index 49108f4f1f..afa8152fac 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled/dogsled.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled/dogsled.go
@@ -1,41 +1,26 @@
package dogsled
import (
- "fmt"
"go/ast"
- "go/token"
- "sync"
"golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
)
const linterName = "dogsled"
func New(settings *config.DogsledSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
analyzer := &analysis.Analyzer{
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (any, error) {
- issues := runDogsled(pass, settings)
-
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
- return nil, nil
+ return run(pass, settings.MaxBlankIdentifiers)
},
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
}
return goanalysis.NewLinter(
@@ -43,68 +28,51 @@ func New(settings *config.DogsledSettings) *goanalysis.Linter {
"Checks assignments with too many blank identifiers (e.g. x, _, _, _, := f())",
[]*analysis.Analyzer{analyzer},
nil,
- ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeSyntax)
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
-func runDogsled(pass *analysis.Pass, settings *config.DogsledSettings) []goanalysis.Issue {
- var reports []goanalysis.Issue
- for _, f := range pass.Files {
- v := &returnsVisitor{
- maxBlanks: settings.MaxBlankIdentifiers,
- f: pass.Fset,
- }
-
- ast.Walk(v, f)
-
- for i := range v.issues {
- reports = append(reports, goanalysis.NewIssue(&v.issues[i], pass))
- }
- }
-
- return reports
-}
-
-type returnsVisitor struct {
- f *token.FileSet
- maxBlanks int
- issues []result.Issue
-}
-
-func (v *returnsVisitor) Visit(node ast.Node) ast.Visitor {
- funcDecl, ok := node.(*ast.FuncDecl)
+func run(pass *analysis.Pass, maxBlanks int) (any, error) {
+ insp, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
if !ok {
- return v
+ return nil, nil
}
- if funcDecl.Body == nil {
- return v
+
+ nodeFilter := []ast.Node{
+ (*ast.FuncDecl)(nil),
}
- for _, expr := range funcDecl.Body.List {
- assgnStmt, ok := expr.(*ast.AssignStmt)
+ insp.Preorder(nodeFilter, func(node ast.Node) {
+ funcDecl, ok := node.(*ast.FuncDecl)
if !ok {
- continue
+ return
+ }
+
+ if funcDecl.Body == nil {
+ return
}
- numBlank := 0
- for _, left := range assgnStmt.Lhs {
- ident, ok := left.(*ast.Ident)
+ for _, expr := range funcDecl.Body.List {
+ assgnStmt, ok := expr.(*ast.AssignStmt)
if !ok {
continue
}
- if ident.Name == "_" {
- numBlank++
+
+ numBlank := 0
+ for _, left := range assgnStmt.Lhs {
+ ident, ok := left.(*ast.Ident)
+ if !ok {
+ continue
+ }
+ if ident.Name == "_" {
+ numBlank++
+ }
}
- }
- if numBlank > v.maxBlanks {
- v.issues = append(v.issues, result.Issue{
- FromLinter: linterName,
- Text: fmt.Sprintf("declaration has %v blank identifiers", numBlank),
- Pos: v.f.Position(assgnStmt.Pos()),
- })
+ if numBlank > maxBlanks {
+ pass.Reportf(assgnStmt.Pos(), "declaration has %v blank identifiers", numBlank)
+ }
}
- }
- return v
+ })
+
+ return nil, nil
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl/dupl.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl/dupl.go
index 7abcb4c4f4..6d1a9809c7 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl/dupl.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl/dupl.go
@@ -5,7 +5,7 @@ import (
"go/token"
"sync"
- duplAPI "github.com/golangci/dupl"
+ duplAPI "github.com/golangci/dupl/lib"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
@@ -45,7 +45,7 @@ func New(settings *config.DuplSettings) *goanalysis.Linter {
return goanalysis.NewLinter(
linterName,
- "Tool for code clone detection",
+ "Detects duplicate fragments of code.",
[]*analysis.Analyzer{analyzer},
nil,
).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
@@ -54,9 +54,7 @@ func New(settings *config.DuplSettings) *goanalysis.Linter {
}
func runDupl(pass *analysis.Pass, settings *config.DuplSettings) ([]goanalysis.Issue, error) {
- fileNames := internal.GetFileNames(pass)
-
- issues, err := duplAPI.Run(fileNames, settings.Threshold)
+ issues, err := duplAPI.Run(internal.GetGoFileNames(pass), settings.Threshold)
if err != nil {
return nil, err
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword/dupword.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword/dupword.go
index bba4fc9e19..a2bcc34d40 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword/dupword.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword/dupword.go
@@ -10,14 +10,14 @@ import (
"github.com/golangci/golangci-lint/pkg/goanalysis"
)
-func New(setting *config.DupWordSettings) *goanalysis.Linter {
+func New(settings *config.DupWordSettings) *goanalysis.Linter {
a := dupword.NewAnalyzer()
- cfgMap := map[string]map[string]any{}
- if setting != nil {
- cfgMap[a.Name] = map[string]any{
- "keyword": strings.Join(setting.Keywords, ","),
- "ignore": strings.Join(setting.Ignore, ","),
+ cfg := map[string]map[string]any{}
+ if settings != nil {
+ cfg[a.Name] = map[string]any{
+ "keyword": strings.Join(settings.Keywords, ","),
+ "ignore": strings.Join(settings.Ignore, ","),
}
}
@@ -25,6 +25,6 @@ func New(setting *config.DupWordSettings) *goanalysis.Linter {
a.Name,
"checks for duplicate words in the source code",
[]*analysis.Analyzer{a},
- cfgMap,
+ cfg,
).WithLoadMode(goanalysis.LoadModeSyntax)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck/errcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck/errcheck.go
index 9a8a2aa876..67a1b2ca8d 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck/errcheck.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck/errcheck.go
@@ -2,6 +2,7 @@ package errcheck
import (
"bufio"
+ "cmp"
"fmt"
"os"
"os/user"
@@ -90,10 +91,7 @@ func runErrCheck(lintCtx *linter.Context, pass *analysis.Pass, checker *errcheck
text := "Error return value is not checked"
if err.FuncName != "" {
- code := err.SelectorName
- if err.SelectorName == "" {
- code = err.FuncName
- }
+ code := cmp.Or(err.SelectorName, err.FuncName)
text = fmt.Sprintf("Error return value of %s is not checked", internal.FormatCode(code, lintCtx.Cfg))
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson/errchkjson.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson/errchkjson.go
index 8389a750c6..506113d6d5 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson/errchkjson.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson/errchkjson.go
@@ -8,17 +8,17 @@ import (
"github.com/golangci/golangci-lint/pkg/goanalysis"
)
-func New(cfg *config.ErrChkJSONSettings) *goanalysis.Linter {
+func New(settings *config.ErrChkJSONSettings) *goanalysis.Linter {
a := errchkjson.NewAnalyzer()
- cfgMap := map[string]map[string]any{}
- cfgMap[a.Name] = map[string]any{
+ cfg := map[string]map[string]any{}
+ cfg[a.Name] = map[string]any{
"omit-safe": true,
}
- if cfg != nil {
- cfgMap[a.Name] = map[string]any{
- "omit-safe": !cfg.CheckErrorFreeEncoding,
- "report-no-exported": cfg.ReportNoExported,
+ if settings != nil {
+ cfg[a.Name] = map[string]any{
+ "omit-safe": !settings.CheckErrorFreeEncoding,
+ "report-no-exported": settings.ReportNoExported,
}
}
@@ -26,6 +26,6 @@ func New(cfg *config.ErrChkJSONSettings) *goanalysis.Linter {
a.Name,
a.Doc,
[]*analysis.Analyzer{a},
- cfgMap,
+ cfg,
).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint/errorlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint/errorlint.go
index 86db8552d0..14851adc28 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint/errorlint.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint/errorlint.go
@@ -8,16 +8,16 @@ import (
"github.com/golangci/golangci-lint/pkg/goanalysis"
)
-func New(cfg *config.ErrorLintSettings) *goanalysis.Linter {
+func New(settings *config.ErrorLintSettings) *goanalysis.Linter {
var opts []errorlint.Option
- if cfg != nil {
- ae := toAllowPairs(cfg.AllowedErrors)
+ if settings != nil {
+ ae := toAllowPairs(settings.AllowedErrors)
if len(ae) > 0 {
opts = append(opts, errorlint.WithAllowedErrors(ae))
}
- aew := toAllowPairs(cfg.AllowedErrorsWildcard)
+ aew := toAllowPairs(settings.AllowedErrorsWildcard)
if len(aew) > 0 {
opts = append(opts, errorlint.WithAllowedWildcard(aew))
}
@@ -25,14 +25,14 @@ func New(cfg *config.ErrorLintSettings) *goanalysis.Linter {
a := errorlint.NewAnalyzer(opts...)
- cfgMap := map[string]map[string]any{}
+ cfg := map[string]map[string]any{}
- if cfg != nil {
- cfgMap[a.Name] = map[string]any{
- "errorf": cfg.Errorf,
- "errorf-multi": cfg.ErrorfMulti,
- "asserts": cfg.Asserts,
- "comparison": cfg.Comparison,
+ if settings != nil {
+ cfg[a.Name] = map[string]any{
+ "errorf": settings.Errorf,
+ "errorf-multi": settings.ErrorfMulti,
+ "asserts": settings.Asserts,
+ "comparison": settings.Comparison,
}
}
@@ -41,7 +41,7 @@ func New(cfg *config.ErrorLintSettings) *goanalysis.Linter {
"errorlint is a linter for that can be used to find code "+
"that will cause problems with the error wrapping scheme introduced in Go 1.13.",
[]*analysis.Analyzer{a},
- cfgMap,
+ cfg,
).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref/exportloopref.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref/exportloopref.go
deleted file mode 100644
index e232f8045d..0000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref/exportloopref.go
+++ /dev/null
@@ -1,19 +0,0 @@
-package exportloopref
-
-import (
- "github.com/kyoh86/exportloopref"
- "golang.org/x/tools/go/analysis"
-
- "github.com/golangci/golangci-lint/pkg/goanalysis"
-)
-
-func New() *goanalysis.Linter {
- a := exportloopref.Analyzer
-
- return goanalysis.NewLinter(
- a.Name,
- a.Doc,
- []*analysis.Analyzer{a},
- nil,
- ).WithLoadMode(goanalysis.LoadModeTypesInfo)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery/execinquery.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exptostd/exptostd.go
similarity index 76%
rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery/execinquery.go
rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/exptostd/exptostd.go
index 3832873c63..2de8ea98c2 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery/execinquery.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exptostd/exptostd.go
@@ -1,14 +1,14 @@
-package execinquery
+package exptostd
import (
- "github.com/lufeee/execinquery"
+ "github.com/ldez/exptostd"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/goanalysis"
)
func New() *goanalysis.Linter {
- a := execinquery.Analyzer
+ a := exptostd.NewAnalyzer()
return goanalysis.NewLinter(
a.Name,
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/fatcontext/fatcontext.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/fatcontext/fatcontext.go
index 378025a8cc..2ffacacd39 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/fatcontext/fatcontext.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/fatcontext/fatcontext.go
@@ -4,16 +4,25 @@ import (
"github.com/Crocmagnon/fatcontext/pkg/analyzer"
"golang.org/x/tools/go/analysis"
+ "github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
)
-func New() *goanalysis.Linter {
- a := analyzer.Analyzer
+func New(settings *config.FatcontextSettings) *goanalysis.Linter {
+ a := analyzer.NewAnalyzer()
+
+ cfg := map[string]map[string]any{}
+
+ if settings != nil {
+ cfg[a.Name] = map[string]any{
+ analyzer.FlagCheckStructPointers: settings.CheckStructPointers,
+ }
+ }
return goanalysis.NewLinter(
a.Name,
a.Doc,
[]*analysis.Analyzer{a},
- nil,
+ cfg,
).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo/forbidigo.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo/forbidigo.go
index 3572b60c23..3b410359d0 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo/forbidigo.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo/forbidigo.go
@@ -2,40 +2,27 @@ package forbidigo
import (
"fmt"
- "sync"
"github.com/ashanbrown/forbidigo/forbidigo"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
"github.com/golangci/golangci-lint/pkg/logutils"
- "github.com/golangci/golangci-lint/pkg/result"
)
const linterName = "forbidigo"
func New(settings *config.ForbidigoSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
analyzer := &analysis.Analyzer{
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (any, error) {
- issues, err := runForbidigo(pass, settings)
+ err := runForbidigo(pass, settings)
if err != nil {
return nil, err
}
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
return nil, nil
},
}
@@ -48,12 +35,10 @@ func New(settings *config.ForbidigoSettings) *goanalysis.Linter {
"Forbids identifiers",
[]*analysis.Analyzer{analyzer},
nil,
- ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeTypesInfo)
+ ).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
-func runForbidigo(pass *analysis.Pass, settings *config.ForbidigoSettings) ([]goanalysis.Issue, error) {
+func runForbidigo(pass *analysis.Pass, settings *config.ForbidigoSettings) error {
options := []forbidigo.Option{
forbidigo.OptionExcludeGodocExamples(settings.ExcludeGodocExamples),
// disable "//permit" directives so only "//nolint" directives matters within golangci-lint
@@ -66,38 +51,39 @@ func runForbidigo(pass *analysis.Pass, settings *config.ForbidigoSettings) ([]go
for _, pattern := range settings.Forbid {
buffer, err := pattern.MarshalString()
if err != nil {
- return nil, err
+ return err
}
+
patterns = append(patterns, string(buffer))
}
forbid, err := forbidigo.NewLinter(patterns, options...)
if err != nil {
- return nil, fmt.Errorf("failed to create linter %q: %w", linterName, err)
+ return fmt.Errorf("failed to create linter %q: %w", linterName, err)
}
- var issues []goanalysis.Issue
for _, file := range pass.Files {
runConfig := forbidigo.RunConfig{
Fset: pass.Fset,
DebugLog: logutils.Debug(logutils.DebugKeyForbidigo),
}
- if settings != nil && settings.AnalyzeTypes {
+
+ if settings.AnalyzeTypes {
runConfig.TypesInfo = pass.TypesInfo
}
+
hints, err := forbid.RunWithConfig(runConfig, file)
if err != nil {
- return nil, fmt.Errorf("forbidigo linter failed on file %q: %w", file.Name.String(), err)
+ return fmt.Errorf("forbidigo linter failed on file %q: %w", file.Name.String(), err)
}
for _, hint := range hints {
- issues = append(issues, goanalysis.NewIssue(&result.Issue{
- Pos: hint.Position(),
- Text: hint.Details(),
- FromLinter: linterName,
- }, pass))
+ pass.Report(analysis.Diagnostic{
+ Pos: hint.Pos(),
+ Message: hint.Details(),
+ })
}
}
- return issues, nil
+ return nil
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert/forcetypeassert.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert/forcetypeassert.go
index 741b57ceac..98abad7e0d 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert/forcetypeassert.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert/forcetypeassert.go
@@ -15,5 +15,5 @@ func New() *goanalysis.Linter {
"finds forced type assertions",
[]*analysis.Analyzer{a},
nil,
- ).WithLoadMode(goanalysis.LoadModeSyntax)
+ ).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen/funlen.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen/funlen.go
index e43339394d..bdadcece46 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen/funlen.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen/funlen.go
@@ -1,75 +1,33 @@
package funlen
import (
- "go/token"
- "strings"
- "sync"
-
"github.com/ultraware/funlen"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
)
-const linterName = "funlen"
+type Config struct {
+ lineLimit int
+ stmtLimit int
+ ignoreComments bool
+}
func New(settings *config.FunlenSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
- analyzer := &analysis.Analyzer{
- Name: linterName,
- Doc: goanalysis.TheOnlyanalyzerDoc,
- Run: func(pass *analysis.Pass) (any, error) {
- issues := runFunlen(pass, settings)
-
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
- return nil, nil
- },
+ cfg := Config{}
+ if settings != nil {
+ cfg.lineLimit = settings.Lines
+ cfg.stmtLimit = settings.Statements
+ cfg.ignoreComments = !settings.IgnoreComments
}
+ a := funlen.NewAnalyzer(cfg.lineLimit, cfg.stmtLimit, cfg.ignoreComments)
+
return goanalysis.NewLinter(
- linterName,
- "Tool for detection of long functions",
- []*analysis.Analyzer{analyzer},
+ a.Name,
+ a.Doc,
+ []*analysis.Analyzer{a},
nil,
- ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeSyntax)
-}
-
-func runFunlen(pass *analysis.Pass, settings *config.FunlenSettings) []goanalysis.Issue {
- var lintIssues []funlen.Message
- for _, file := range pass.Files {
- fileIssues := funlen.Run(file, pass.Fset, settings.Lines, settings.Statements, settings.IgnoreComments)
- lintIssues = append(lintIssues, fileIssues...)
- }
-
- if len(lintIssues) == 0 {
- return nil
- }
-
- issues := make([]goanalysis.Issue, len(lintIssues))
- for k, i := range lintIssues {
- issues[k] = goanalysis.NewIssue(&result.Issue{
- Pos: token.Position{
- Filename: i.Pos.Filename,
- Line: i.Pos.Line,
- },
- Text: strings.TrimRight(i.Message, "\n"),
- FromLinter: linterName,
- }, pass)
- }
-
- return issues
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci/gci.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci/gci.go
index 38ed2a0330..b79f1a370c 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci/gci.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci/gci.go
@@ -1,248 +1,33 @@
package gci
import (
- "fmt"
- "sort"
- "strings"
- "sync"
-
- gcicfg "github.com/daixiang0/gci/pkg/config"
- "github.com/daixiang0/gci/pkg/gci"
- "github.com/daixiang0/gci/pkg/io"
- "github.com/daixiang0/gci/pkg/log"
- "github.com/daixiang0/gci/pkg/section"
- "github.com/golangci/modinfo"
- "github.com/hexops/gotextdiff"
- "github.com/hexops/gotextdiff/myers"
- "github.com/hexops/gotextdiff/span"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
+ "github.com/golangci/golangci-lint/pkg/goformatters"
+ gcibase "github.com/golangci/golangci-lint/pkg/goformatters/gci"
"github.com/golangci/golangci-lint/pkg/golinters/internal"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
)
const linterName = "gci"
func New(settings *config.GciSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
- analyzer := &analysis.Analyzer{
- Name: linterName,
- Doc: goanalysis.TheOnlyanalyzerDoc,
- Run: goanalysis.DummyRun,
- Requires: []*analysis.Analyzer{
- modinfo.Analyzer,
- },
- }
-
- var cfg *gcicfg.Config
- if settings != nil {
- rawCfg := gcicfg.YamlConfig{
- Cfg: gcicfg.BoolConfig{
- SkipGenerated: settings.SkipGenerated,
- CustomOrder: settings.CustomOrder,
- },
- SectionStrings: settings.Sections,
- }
-
- if settings.LocalPrefixes != "" {
- prefix := []string{"standard", "default", fmt.Sprintf("prefix(%s)", settings.LocalPrefixes)}
- rawCfg.SectionStrings = prefix
- }
-
- var err error
- cfg, err = YamlConfig{origin: rawCfg}.Parse()
- if err != nil {
- internal.LinterLogger.Fatalf("gci: configuration parsing: %v", err)
- }
+ formatter, err := gcibase.New(settings)
+ if err != nil {
+ internal.LinterLogger.Fatalf("%s: create analyzer: %v", linterName, err)
}
- var lock sync.Mutex
+ a := goformatters.NewAnalyzer(
+ internal.LinterLogger.Child(linterName),
+ "Checks if code and import statements are formatted, with additional rules.",
+ formatter,
+ )
return goanalysis.NewLinter(
- linterName,
- "Gci controls Go package import order and makes it always deterministic.",
- []*analysis.Analyzer{analyzer},
+ a.Name,
+ a.Doc,
+ []*analysis.Analyzer{a},
nil,
- ).WithContextSetter(func(lintCtx *linter.Context) {
- analyzer.Run = func(pass *analysis.Pass) (any, error) {
- var err error
- cfg.Sections, err = hackSectionList(pass, cfg)
- if err != nil {
- return nil, err
- }
-
- issues, err := runGci(pass, lintCtx, cfg, &lock)
- if err != nil {
- return nil, err
- }
-
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
- return nil, nil
- }
- }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeSyntax)
-}
-
-func runGci(pass *analysis.Pass, lintCtx *linter.Context, cfg *gcicfg.Config, lock *sync.Mutex) ([]goanalysis.Issue, error) {
- fileNames := internal.GetFileNames(pass)
-
- var diffs []string
- err := diffFormattedFilesToArray(fileNames, *cfg, &diffs, lock)
- if err != nil {
- return nil, err
- }
-
- var issues []goanalysis.Issue
-
- for _, diff := range diffs {
- if diff == "" {
- continue
- }
-
- is, err := internal.ExtractIssuesFromPatch(diff, lintCtx, linterName, getIssuedTextGci)
- if err != nil {
- return nil, fmt.Errorf("can't extract issues from gci diff output %s: %w", diff, err)
- }
-
- for i := range is {
- issues = append(issues, goanalysis.NewIssue(&is[i], pass))
- }
- }
-
- return issues, nil
-}
-
-func getIssuedTextGci(settings *config.LintersSettings) string {
- text := "File is not `gci`-ed"
-
- hasOptions := settings.Gci.SkipGenerated || len(settings.Gci.Sections) > 0
- if !hasOptions {
- return text
- }
-
- text += " with"
-
- if settings.Gci.SkipGenerated {
- text += " --skip-generated"
- }
-
- if len(settings.Gci.Sections) > 0 {
- for _, sect := range settings.Gci.Sections {
- text += " -s " + sect
- }
- }
-
- if settings.Gci.CustomOrder {
- text += " --custom-order"
- }
-
- return text
-}
-
-func hackSectionList(pass *analysis.Pass, cfg *gcicfg.Config) (section.SectionList, error) {
- var sections section.SectionList
-
- for _, sect := range cfg.Sections {
- // local module hack
- if v, ok := sect.(*section.LocalModule); ok {
- info, err := modinfo.FindModuleFromPass(pass)
- if err != nil {
- return nil, err
- }
-
- if info.Path == "" {
- continue
- }
-
- v.Path = info.Path
- }
-
- sections = append(sections, sect)
- }
-
- return sections, nil
-}
-
-// diffFormattedFilesToArray is a copy of gci.DiffFormattedFilesToArray without io.StdInGenerator.
-// gci.DiffFormattedFilesToArray uses gci.processStdInAndGoFilesInPaths that uses io.StdInGenerator but stdin is not active on CI.
-// https://github.com/daixiang0/gci/blob/6f5cb16718ba07f0342a58de9b830ec5a6d58790/pkg/gci/gci.go#L63-L75
-// https://github.com/daixiang0/gci/blob/6f5cb16718ba07f0342a58de9b830ec5a6d58790/pkg/gci/gci.go#L80
-func diffFormattedFilesToArray(paths []string, cfg gcicfg.Config, diffs *[]string, lock *sync.Mutex) error {
- log.InitLogger()
- defer func() { _ = log.L().Sync() }()
-
- return gci.ProcessFiles(io.GoFilesInPathsGenerator(paths, true), cfg, func(filePath string, unmodifiedFile, formattedFile []byte) error {
- fileURI := span.URIFromPath(filePath)
- edits := myers.ComputeEdits(fileURI, string(unmodifiedFile), string(formattedFile))
- unifiedEdits := gotextdiff.ToUnified(filePath, filePath, string(unmodifiedFile), edits)
- lock.Lock()
- *diffs = append(*diffs, fmt.Sprint(unifiedEdits))
- lock.Unlock()
- return nil
- })
-}
-
-// Code below this comment is borrowed and modified from gci.
-// https://github.com/daixiang0/gci/blob/4725b0c101801e7449530eee2ddb0c72592e3405/pkg/config/config.go
-
-var defaultOrder = map[string]int{
- section.StandardType: 0,
- section.DefaultType: 1,
- section.CustomType: 2,
- section.BlankType: 3,
- section.DotType: 4,
- section.AliasType: 5,
- section.LocalModuleType: 6,
-}
-
-type YamlConfig struct {
- origin gcicfg.YamlConfig
-}
-
-//nolint:gocritic // code borrowed from gci and modified to fix LocalModule section behavior.
-func (g YamlConfig) Parse() (*gcicfg.Config, error) {
- var err error
-
- sections, err := section.Parse(g.origin.SectionStrings)
- if err != nil {
- return nil, err
- }
-
- if sections == nil {
- sections = section.DefaultSections()
- }
-
- // if default order sorted sections
- if !g.origin.Cfg.CustomOrder {
- sort.Slice(sections, func(i, j int) bool {
- sectionI, sectionJ := sections[i].Type(), sections[j].Type()
-
- if strings.Compare(sectionI, sectionJ) == 0 {
- return strings.Compare(sections[i].String(), sections[j].String()) < 0
- }
- return defaultOrder[sectionI] < defaultOrder[sectionJ]
- })
- }
-
- sectionSeparators, err := section.Parse(g.origin.SectionSeparatorStrings)
- if err != nil {
- return nil, err
- }
- if sectionSeparators == nil {
- sectionSeparators = section.DefaultSectionSeparators()
- }
-
- return &gcicfg.Config{BoolConfig: g.origin.Cfg, Sections: sections, SectionSeparators: sectionSeparators}, nil
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter/ginkgolinter.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter/ginkgolinter.go
index 54d2072570..6826b77b6b 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter/ginkgolinter.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter/ginkgolinter.go
@@ -14,17 +14,18 @@ func New(settings *config.GinkgoLinterSettings) *goanalysis.Linter {
if settings != nil {
cfg = &types.Config{
- SuppressLen: types.Boolean(settings.SuppressLenAssertion),
- SuppressNil: types.Boolean(settings.SuppressNilAssertion),
- SuppressErr: types.Boolean(settings.SuppressErrAssertion),
- SuppressCompare: types.Boolean(settings.SuppressCompareAssertion),
- SuppressAsync: types.Boolean(settings.SuppressAsyncAssertion),
- ForbidFocus: types.Boolean(settings.ForbidFocusContainer),
- SuppressTypeCompare: types.Boolean(settings.SuppressTypeCompareWarning),
- AllowHaveLen0: types.Boolean(settings.AllowHaveLenZero),
- ForceExpectTo: types.Boolean(settings.ForceExpectTo),
- ValidateAsyncIntervals: types.Boolean(settings.ForbidSpecPollution),
- ForbidSpecPollution: types.Boolean(settings.ValidateAsyncIntervals),
+ SuppressLen: settings.SuppressLenAssertion,
+ SuppressNil: settings.SuppressNilAssertion,
+ SuppressErr: settings.SuppressErrAssertion,
+ SuppressCompare: settings.SuppressCompareAssertion,
+ SuppressAsync: settings.SuppressAsyncAssertion,
+ ForbidFocus: settings.ForbidFocusContainer,
+ SuppressTypeCompare: settings.SuppressTypeCompareWarning,
+ AllowHaveLen0: settings.AllowHaveLenZero,
+ ForceExpectTo: settings.ForceExpectTo,
+ ValidateAsyncIntervals: settings.ValidateAsyncIntervals,
+ ForbidSpecPollution: settings.ForbidSpecPollution,
+ ForceSucceedForFuncs: settings.ForceSucceedForFuncs,
}
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals/gochecknoglobals.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals/gochecknoglobals.go
index af22b2f8e9..e893dfcbb6 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals/gochecknoglobals.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals/gochecknoglobals.go
@@ -10,17 +10,10 @@ import (
func New() *goanalysis.Linter {
a := checknoglobals.Analyzer()
- // gochecknoglobals only lints test files if the `-t` flag is passed,
- // so we pass the `t` flag as true to the analyzer before running it.
- // This can be turned off by using the regular golangci-lint flags such as `--tests` or `--exclude-files`.
- linterConfig := map[string]map[string]any{
- a.Name: {"t": true},
- }
-
return goanalysis.NewLinter(
a.Name,
"Check that no global variables exist.",
[]*analysis.Analyzer{a},
- linterConfig,
+ nil,
).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits/gochecknoinits.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits/gochecknoinits.go
index 1345eb8c29..510a06c91d 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits/gochecknoinits.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits/gochecknoinits.go
@@ -1,46 +1,24 @@
package gochecknoinits
import (
- "fmt"
"go/ast"
- "go/token"
- "sync"
"golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
"github.com/golangci/golangci-lint/pkg/goanalysis"
"github.com/golangci/golangci-lint/pkg/golinters/internal"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
)
const linterName = "gochecknoinits"
func New() *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
analyzer := &analysis.Analyzer{
- Name: linterName,
- Doc: goanalysis.TheOnlyanalyzerDoc,
- Run: func(pass *analysis.Pass) (any, error) {
- var res []goanalysis.Issue
- for _, file := range pass.Files {
- fileIssues := checkFileForInits(file, pass.Fset)
- for i := range fileIssues {
- res = append(res, goanalysis.NewIssue(&fileIssues[i], pass))
- }
- }
- if len(res) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, res...)
- mu.Unlock()
-
- return nil, nil
- },
+ Name: linterName,
+ Doc: goanalysis.TheOnlyanalyzerDoc,
+ Run: run,
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
}
return goanalysis.NewLinter(
@@ -48,28 +26,30 @@ func New() *goanalysis.Linter {
"Checks that no init functions are present in Go code",
[]*analysis.Analyzer{analyzer},
nil,
- ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeSyntax)
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
-func checkFileForInits(f *ast.File, fset *token.FileSet) []result.Issue {
- var res []result.Issue
- for _, decl := range f.Decls {
+func run(pass *analysis.Pass) (any, error) {
+ insp, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+ if !ok {
+ return nil, nil
+ }
+
+ nodeFilter := []ast.Node{
+ (*ast.FuncDecl)(nil),
+ }
+
+ insp.Preorder(nodeFilter, func(decl ast.Node) {
funcDecl, ok := decl.(*ast.FuncDecl)
if !ok {
- continue
+ return
}
fnName := funcDecl.Name.Name
if fnName == "init" && funcDecl.Recv.NumFields() == 0 {
- res = append(res, result.Issue{
- Pos: fset.Position(funcDecl.Pos()),
- Text: fmt.Sprintf("don't use %s function", internal.FormatCode(fnName, nil)),
- FromLinter: linterName,
- })
+ pass.Reportf(funcDecl.Pos(), "don't use %s function", internal.FormatCode(fnName, nil))
}
- }
+ })
- return res
+ return nil, nil
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype/gochecksumtype.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype/gochecksumtype.go
index 446f0e564f..cbc5873126 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype/gochecksumtype.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype/gochecksumtype.go
@@ -8,6 +8,7 @@ import (
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages"
+ "github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
"github.com/golangci/golangci-lint/pkg/lint/linter"
"github.com/golangci/golangci-lint/pkg/result"
@@ -15,7 +16,7 @@ import (
const linterName = "gochecksumtype"
-func New() *goanalysis.Linter {
+func New(settings *config.GoChecksumTypeSettings) *goanalysis.Linter {
var mu sync.Mutex
var resIssues []goanalysis.Issue
@@ -23,7 +24,7 @@ func New() *goanalysis.Linter {
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (any, error) {
- issues, err := runGoCheckSumType(pass)
+ issues, err := runGoCheckSumType(pass, settings)
if err != nil {
return nil, err
}
@@ -50,7 +51,7 @@ func New() *goanalysis.Linter {
}).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
-func runGoCheckSumType(pass *analysis.Pass) ([]goanalysis.Issue, error) {
+func runGoCheckSumType(pass *analysis.Pass, settings *config.GoChecksumTypeSettings) ([]goanalysis.Issue, error) {
var resIssues []goanalysis.Issue
pkg := &packages.Package{
@@ -60,8 +61,13 @@ func runGoCheckSumType(pass *analysis.Pass) ([]goanalysis.Issue, error) {
TypesInfo: pass.TypesInfo,
}
+ cfg := gochecksumtype.Config{
+ DefaultSignifiesExhaustive: settings.DefaultSignifiesExhaustive,
+ IncludeSharedInterfaces: settings.IncludeSharedInterfaces,
+ }
+
var unknownError error
- errors := gochecksumtype.Run([]*packages.Package{pkg})
+ errors := gochecksumtype.Run([]*packages.Package{pkg}, cfg)
for _, err := range errors {
err, ok := err.(gochecksumtype.Error)
if !ok {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic/gocritic.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic/gocritic.go
index 68cc338e43..0fa4c63d1a 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic/gocritic.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic/gocritic.go
@@ -5,25 +5,23 @@ import (
"fmt"
"go/ast"
"go/types"
- "path/filepath"
+ "maps"
"reflect"
"runtime"
- "sort"
+ "slices"
"strings"
"sync"
"github.com/go-critic/go-critic/checkers"
gocriticlinter "github.com/go-critic/go-critic/linter"
_ "github.com/quasilyte/go-ruleguard/dsl"
- "golang.org/x/exp/maps"
- "golang.org/x/exp/slices"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
+ "github.com/golangci/golangci-lint/pkg/golinters/internal"
"github.com/golangci/golangci-lint/pkg/lint/linter"
"github.com/golangci/golangci-lint/pkg/logutils"
- "github.com/golangci/golangci-lint/pkg/result"
)
const linterName = "gocritic"
@@ -34,9 +32,6 @@ var (
)
func New(settings *config.GoCriticSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
wrapper := &goCriticWrapper{
sizes: types.SizesFor("gc", runtime.GOARCH),
}
@@ -45,19 +40,11 @@ func New(settings *config.GoCriticSettings) *goanalysis.Linter {
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (any, error) {
- issues, err := wrapper.run(pass)
+ err := wrapper.run(pass)
if err != nil {
return nil, err
}
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
return nil, nil
},
}
@@ -71,19 +58,19 @@ Dynamic rules are written declaratively with AST patterns, filters, report messa
nil,
).
WithContextSetter(func(context *linter.Context) {
- wrapper.configDir = context.Cfg.GetConfigDir()
+ wrapper.replacer = strings.NewReplacer(
+ internal.PlaceholderBasePath, context.Cfg.GetBasePath(),
+ internal.PlaceholderConfigDir, context.Cfg.GetConfigDir(), //nolint:staticcheck // It must be removed in v2.
+ )
wrapper.init(context.Log, settings)
}).
- WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).
WithLoadMode(goanalysis.LoadModeTypesInfo)
}
type goCriticWrapper struct {
settingsWrapper *settingsWrapper
- configDir string
+ replacer *strings.Replacer
sizes types.Sizes
once sync.Once
}
@@ -111,9 +98,9 @@ func (w *goCriticWrapper) init(logger logutils.Log, settings *config.GoCriticSet
w.settingsWrapper = settingsWrapper
}
-func (w *goCriticWrapper) run(pass *analysis.Pass) ([]goanalysis.Issue, error) {
+func (w *goCriticWrapper) run(pass *analysis.Pass) error {
if w.settingsWrapper == nil {
- return nil, errors.New("the settings wrapper is nil")
+ return errors.New("the settings wrapper is nil")
}
linterCtx := gocriticlinter.NewContext(pass.Fset, w.sizes)
@@ -122,19 +109,14 @@ func (w *goCriticWrapper) run(pass *analysis.Pass) ([]goanalysis.Issue, error) {
enabledCheckers, err := w.buildEnabledCheckers(linterCtx)
if err != nil {
- return nil, err
+ return err
}
linterCtx.SetPackageInfo(pass.TypesInfo, pass.Pkg)
- pkgIssues := runOnPackage(linterCtx, enabledCheckers, pass.Files)
+ runOnPackage(pass, enabledCheckers, pass.Files)
- issues := make([]goanalysis.Issue, 0, len(pkgIssues))
- for i := range pkgIssues {
- issues = append(issues, goanalysis.NewIssue(&pkgIssues[i], pass))
- }
-
- return issues, nil
+ return nil
}
func (w *goCriticWrapper) buildEnabledCheckers(linterCtx *gocriticlinter.Context) ([]*gocriticlinter.Checker, error) {
@@ -154,6 +136,7 @@ func (w *goCriticWrapper) buildEnabledCheckers(linterCtx *gocriticlinter.Context
if err != nil {
return nil, err
}
+
enabledCheckers = append(enabledCheckers, c)
}
@@ -184,8 +167,7 @@ func (w *goCriticWrapper) configureCheckerInfo(
info.Name, k)
}
- supportedKeys := maps.Keys(info.Params)
- sort.Strings(supportedKeys)
+ supportedKeys := slices.Sorted(maps.Keys(info.Params))
return fmt.Errorf("checker %s config param %s doesn't exist, all existing: %s",
info.Name, k, supportedKeys)
@@ -208,53 +190,42 @@ func (w *goCriticWrapper) normalizeCheckerParamsValue(p any) any {
return rv.Bool()
case reflect.String:
// Perform variable substitution.
- return strings.ReplaceAll(rv.String(), "${configDir}", w.configDir)
+ return w.replacer.Replace(rv.String())
default:
return p
}
}
-func runOnPackage(linterCtx *gocriticlinter.Context, checks []*gocriticlinter.Checker, files []*ast.File) []result.Issue {
- var res []result.Issue
+func runOnPackage(pass *analysis.Pass, checks []*gocriticlinter.Checker, files []*ast.File) {
for _, f := range files {
- filename := filepath.Base(linterCtx.FileSet.Position(f.Pos()).Filename)
- linterCtx.SetFileInfo(filename, f)
-
- issues := runOnFile(linterCtx, f, checks)
- res = append(res, issues...)
+ runOnFile(pass, f, checks)
}
- return res
}
-func runOnFile(linterCtx *gocriticlinter.Context, f *ast.File, checks []*gocriticlinter.Checker) []result.Issue {
- var res []result.Issue
-
+func runOnFile(pass *analysis.Pass, f *ast.File, checks []*gocriticlinter.Checker) {
for _, c := range checks {
// All checkers are expected to use *lint.Context
// as read-only structure, so no copying is required.
for _, warn := range c.Check(f) {
- pos := linterCtx.FileSet.Position(warn.Pos)
- issue := result.Issue{
- Pos: pos,
- Text: fmt.Sprintf("%s: %s", c.Info.Name, warn.Text),
- FromLinter: linterName,
+ diag := analysis.Diagnostic{
+ Pos: warn.Pos,
+ Category: c.Info.Name,
+ Message: fmt.Sprintf("%s: %s", c.Info.Name, warn.Text),
}
if warn.HasQuickFix() {
- issue.Replacement = &result.Replacement{
- Inline: &result.InlineFix{
- StartCol: pos.Column - 1,
- Length: int(warn.Suggestion.To - warn.Suggestion.From),
- NewString: string(warn.Suggestion.Replacement),
- },
- }
+ diag.SuggestedFixes = []analysis.SuggestedFix{{
+ TextEdits: []analysis.TextEdit{{
+ Pos: warn.Suggestion.From,
+ End: warn.Suggestion.To,
+ NewText: warn.Suggestion.Replacement,
+ }},
+ }}
}
- res = append(res, issue)
+ pass.Report(diag)
}
}
-
- return res
}
type goCriticChecks[T any] map[string]T
@@ -297,8 +268,7 @@ func newSettingsWrapper(settings *config.GoCriticSettings, logger logutils.Log)
}
}
- allTagsSorted := maps.Keys(allChecksByTag)
- sort.Strings(allTagsSorted)
+ allTagsSorted := slices.Sorted(maps.Keys(allChecksByTag))
return &settingsWrapper{
GoCriticSettings: settings,
@@ -326,6 +296,7 @@ func (s *settingsWrapper) InferEnabledChecks() {
s.debugChecksInitialState()
enabledByDefaultChecks, disabledByDefaultChecks := s.buildEnabledAndDisabledByDefaultChecks()
+
debugChecksListf(enabledByDefaultChecks, "Enabled by default")
debugChecksListf(disabledByDefaultChecks, "Disabled by default")
@@ -346,7 +317,8 @@ func (s *settingsWrapper) InferEnabledChecks() {
if len(s.EnabledTags) != 0 {
enabledFromTags := s.expandTagsToChecks(s.EnabledTags)
- debugChecksListf(enabledFromTags, "Enabled by config tags %s", sprintSortedStrings(s.EnabledTags))
+
+ debugChecksListf(enabledFromTags, "Enabled by config tags %s", s.EnabledTags)
for _, check := range enabledFromTags {
enabledChecks[check] = struct{}{}
@@ -367,7 +339,8 @@ func (s *settingsWrapper) InferEnabledChecks() {
if len(s.DisabledTags) != 0 {
disabledFromTags := s.expandTagsToChecks(s.DisabledTags)
- debugChecksListf(disabledFromTags, "Disabled by config tags %s", sprintSortedStrings(s.DisabledTags))
+
+ debugChecksListf(disabledFromTags, "Disabled by config tags %s", s.DisabledTags)
for _, check := range disabledFromTags {
delete(enabledChecks, check)
@@ -388,6 +361,7 @@ func (s *settingsWrapper) InferEnabledChecks() {
s.inferredEnabledChecks = enabledChecks
s.inferredEnabledChecksLowerCased = normalizeMap(s.inferredEnabledChecks)
+
s.debugChecksFinalState()
}
@@ -581,10 +555,7 @@ func debugChecksListf(checks []string, format string, args ...any) {
return
}
- debugf("%s checks (%d): %s", fmt.Sprintf(format, args...), len(checks), sprintSortedStrings(checks))
-}
+ v := slices.Sorted(slices.Values(checks))
-func sprintSortedStrings(v []string) string {
- sort.Strings(slices.Clone(v))
- return fmt.Sprint(v)
+ debugf("%s checks (%d): %s", fmt.Sprintf(format, args...), len(checks), strings.Join(v, ", "))
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot/godot.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot/godot.go
index fc51b5bb8c..3194b3d3ac 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot/godot.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot/godot.go
@@ -1,23 +1,18 @@
package godot
import (
- "sync"
+ "cmp"
"github.com/tetafro/godot"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
)
const linterName = "godot"
func New(settings *config.GodotSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
var dotSettings godot.Settings
if settings != nil {
@@ -29,32 +24,22 @@ func New(settings *config.GodotSettings) *goanalysis.Linter {
}
// Convert deprecated setting
- if settings.CheckAll {
+ if settings.CheckAll != nil && *settings.CheckAll {
dotSettings.Scope = godot.AllScope
}
}
- if dotSettings.Scope == "" {
- dotSettings.Scope = godot.DeclScope
- }
+ dotSettings.Scope = cmp.Or(dotSettings.Scope, godot.DeclScope)
analyzer := &analysis.Analyzer{
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (any, error) {
- issues, err := runGodot(pass, dotSettings)
+ err := runGodot(pass, dotSettings)
if err != nil {
return nil, err
}
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
return nil, nil
},
}
@@ -64,38 +49,40 @@ func New(settings *config.GodotSettings) *goanalysis.Linter {
"Check if comments end in a period",
[]*analysis.Analyzer{analyzer},
nil,
- ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeSyntax)
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
-func runGodot(pass *analysis.Pass, settings godot.Settings) ([]goanalysis.Issue, error) {
- var lintIssues []godot.Issue
+func runGodot(pass *analysis.Pass, settings godot.Settings) error {
for _, file := range pass.Files {
iss, err := godot.Run(file, pass.Fset, settings)
if err != nil {
- return nil, err
+ return err
}
- lintIssues = append(lintIssues, iss...)
- }
-
- if len(lintIssues) == 0 {
- return nil, nil
- }
- issues := make([]goanalysis.Issue, len(lintIssues))
- for k, i := range lintIssues {
- issue := result.Issue{
- Pos: i.Pos,
- Text: i.Message,
- FromLinter: linterName,
- Replacement: &result.Replacement{
- NewLines: []string{i.Replacement},
- },
+ if len(iss) == 0 {
+ continue
}
- issues[k] = goanalysis.NewIssue(&issue, pass)
+ f := pass.Fset.File(file.Pos())
+
+ for _, i := range iss {
+ start := f.Pos(i.Pos.Offset)
+ end := goanalysis.EndOfLinePos(f, i.Pos.Line)
+
+ pass.Report(analysis.Diagnostic{
+ Pos: start,
+ End: end,
+ Message: i.Message,
+ SuggestedFixes: []analysis.SuggestedFix{{
+ TextEdits: []analysis.TextEdit{{
+ Pos: start,
+ End: end,
+ NewText: []byte(i.Replacement),
+ }},
+ }},
+ })
+ }
}
- return issues, nil
+ return nil
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox/godox.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox/godox.go
index d8de026baf..589789d146 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox/godox.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox/godox.go
@@ -3,73 +3,60 @@ package godox
import (
"go/token"
"strings"
- "sync"
"github.com/matoous/godox"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
)
const linterName = "godox"
func New(settings *config.GodoxSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
analyzer := &analysis.Analyzer{
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (any, error) {
- issues := runGodox(pass, settings)
-
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
- return nil, nil
+ return run(pass, settings), nil
},
}
return goanalysis.NewLinter(
linterName,
- "Tool for detection of FIXME, TODO and other comment keywords",
+ "Detects usage of FIXME, TODO and other keywords inside comments",
[]*analysis.Analyzer{analyzer},
nil,
- ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeSyntax)
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
-func runGodox(pass *analysis.Pass, settings *config.GodoxSettings) []goanalysis.Issue {
- var messages []godox.Message
+func run(pass *analysis.Pass, settings *config.GodoxSettings) error {
for _, file := range pass.Files {
- messages = append(messages, godox.Run(file, pass.Fset, settings.Keywords...)...)
- }
-
- if len(messages) == 0 {
- return nil
- }
-
- issues := make([]goanalysis.Issue, len(messages))
-
- for k, i := range messages {
- issues[k] = goanalysis.NewIssue(&result.Issue{
- Pos: token.Position{
- Filename: i.Pos.Filename,
- Line: i.Pos.Line,
- },
- Text: strings.TrimRight(i.Message, "\n"),
- FromLinter: linterName,
- }, pass)
+ position, isGoFile := goanalysis.GetGoFilePosition(pass, file)
+ if !isGoFile {
+ continue
+ }
+
+ messages, err := godox.Run(file, pass.Fset, settings.Keywords...)
+ if err != nil {
+ return err
+ }
+
+ if len(messages) == 0 {
+ continue
+ }
+
+ nonAdjPosition := pass.Fset.PositionFor(file.Pos(), false)
+
+ ft := pass.Fset.File(file.Pos())
+
+ for _, i := range messages {
+ pass.Report(analysis.Diagnostic{
+ Pos: ft.LineStart(goanalysis.AdjustPos(i.Pos.Line, nonAdjPosition.Line, position.Line)) + token.Pos(i.Pos.Column),
+ Message: strings.TrimRight(i.Message, "\n"),
+ })
+ }
}
- return issues
+ return nil
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt/gofmt.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt/gofmt.go
index 289ceab8ae..2212242949 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt/gofmt.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt/gofmt.go
@@ -1,98 +1,28 @@
package gofmt
import (
- "fmt"
- "sync"
-
- gofmtAPI "github.com/golangci/gofmt/gofmt"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
+ "github.com/golangci/golangci-lint/pkg/goformatters"
+ gofmtbase "github.com/golangci/golangci-lint/pkg/goformatters/gofmt"
"github.com/golangci/golangci-lint/pkg/golinters/internal"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
)
const linterName = "gofmt"
func New(settings *config.GoFmtSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
- analyzer := &analysis.Analyzer{
- Name: linterName,
- Doc: goanalysis.TheOnlyanalyzerDoc,
- Run: goanalysis.DummyRun,
- }
+ a := goformatters.NewAnalyzer(
+ internal.LinterLogger.Child(linterName),
+ "Checks if the code is formatted according to 'gofmt' command.",
+ gofmtbase.New(settings),
+ )
return goanalysis.NewLinter(
- linterName,
- "Gofmt checks whether code was gofmt-ed. By default "+
- "this tool runs with -s option to check for code simplification",
- []*analysis.Analyzer{analyzer},
+ a.Name,
+ a.Doc,
+ []*analysis.Analyzer{a},
nil,
- ).WithContextSetter(func(lintCtx *linter.Context) {
- analyzer.Run = func(pass *analysis.Pass) (any, error) {
- issues, err := runGofmt(lintCtx, pass, settings)
- if err != nil {
- return nil, err
- }
-
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
- return nil, nil
- }
- }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeSyntax)
-}
-
-func runGofmt(lintCtx *linter.Context, pass *analysis.Pass, settings *config.GoFmtSettings) ([]goanalysis.Issue, error) {
- fileNames := internal.GetFileNames(pass)
-
- var rewriteRules []gofmtAPI.RewriteRule
- for _, rule := range settings.RewriteRules {
- rewriteRules = append(rewriteRules, gofmtAPI.RewriteRule(rule))
- }
-
- var issues []goanalysis.Issue
-
- for _, f := range fileNames {
- diff, err := gofmtAPI.RunRewrite(f, settings.Simplify, rewriteRules)
- if err != nil { // TODO: skip
- return nil, err
- }
- if diff == nil {
- continue
- }
-
- is, err := internal.ExtractIssuesFromPatch(string(diff), lintCtx, linterName, getIssuedTextGoFmt)
- if err != nil {
- return nil, fmt.Errorf("can't extract issues from gofmt diff output %q: %w", string(diff), err)
- }
-
- for i := range is {
- issues = append(issues, goanalysis.NewIssue(&is[i], pass))
- }
- }
-
- return issues, nil
-}
-
-func getIssuedTextGoFmt(settings *config.LintersSettings) string {
- text := "File is not `gofmt`-ed"
- if settings.Gofmt.Simplify {
- text += " with `-s`"
- }
- for _, rule := range settings.Gofmt.RewriteRules {
- text += fmt.Sprintf(" `-r '%s -> %s'`", rule.Pattern, rule.Replacement)
- }
-
- return text
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt/gofumpt.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt/gofumpt.go
index 9a0bef84aa..878a5c79b0 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt/gofumpt.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt/gofumpt.go
@@ -1,130 +1,28 @@
package gofumpt
import (
- "bytes"
- "fmt"
- "io"
- "os"
- "sync"
-
- "github.com/shazow/go-diff/difflib"
"golang.org/x/tools/go/analysis"
- "mvdan.cc/gofumpt/format"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
+ "github.com/golangci/golangci-lint/pkg/goformatters"
+ gofumptbase "github.com/golangci/golangci-lint/pkg/goformatters/gofumpt"
"github.com/golangci/golangci-lint/pkg/golinters/internal"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
)
const linterName = "gofumpt"
-type differ interface {
- Diff(out io.Writer, a io.ReadSeeker, b io.ReadSeeker) error
-}
-
func New(settings *config.GofumptSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
- diff := difflib.New()
-
- var options format.Options
-
- if settings != nil {
- options = format.Options{
- LangVersion: getLangVersion(settings),
- ModulePath: settings.ModulePath,
- ExtraRules: settings.ExtraRules,
- }
- }
-
- analyzer := &analysis.Analyzer{
- Name: linterName,
- Doc: goanalysis.TheOnlyanalyzerDoc,
- Run: goanalysis.DummyRun,
- }
+ a := goformatters.NewAnalyzer(
+ internal.LinterLogger.Child(linterName),
+ "Checks if code and import statements are formatted, with additional rules.",
+ gofumptbase.New(settings, settings.LangVersion),
+ )
return goanalysis.NewLinter(
- linterName,
- "Gofumpt checks whether code was gofumpt-ed.",
- []*analysis.Analyzer{analyzer},
+ a.Name,
+ a.Doc,
+ []*analysis.Analyzer{a},
nil,
- ).WithContextSetter(func(lintCtx *linter.Context) {
- analyzer.Run = func(pass *analysis.Pass) (any, error) {
- issues, err := runGofumpt(lintCtx, pass, diff, options)
- if err != nil {
- return nil, err
- }
-
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
- return nil, nil
- }
- }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeSyntax)
-}
-
-func runGofumpt(lintCtx *linter.Context, pass *analysis.Pass, diff differ, options format.Options) ([]goanalysis.Issue, error) {
- fileNames := internal.GetFileNames(pass)
-
- var issues []goanalysis.Issue
-
- for _, f := range fileNames {
- input, err := os.ReadFile(f)
- if err != nil {
- return nil, fmt.Errorf("unable to open file %s: %w", f, err)
- }
-
- output, err := format.Source(input, options)
- if err != nil {
- return nil, fmt.Errorf("error while running gofumpt: %w", err)
- }
-
- if !bytes.Equal(input, output) {
- out := bytes.NewBufferString(fmt.Sprintf("--- %[1]s\n+++ %[1]s\n", f))
-
- err := diff.Diff(out, bytes.NewReader(input), bytes.NewReader(output))
- if err != nil {
- return nil, fmt.Errorf("error while running gofumpt: %w", err)
- }
-
- diff := out.String()
- is, err := internal.ExtractIssuesFromPatch(diff, lintCtx, linterName, getIssuedTextGoFumpt)
- if err != nil {
- return nil, fmt.Errorf("can't extract issues from gofumpt diff output %q: %w", diff, err)
- }
-
- for i := range is {
- issues = append(issues, goanalysis.NewIssue(&is[i], pass))
- }
- }
- }
-
- return issues, nil
-}
-
-func getLangVersion(settings *config.GofumptSettings) string {
- if settings == nil || settings.LangVersion == "" {
- // TODO: defaults to "1.15", in the future (v2) must be set by using build.Default.ReleaseTags like staticcheck.
- return "1.15"
- }
- return settings.LangVersion
-}
-
-func getIssuedTextGoFumpt(settings *config.LintersSettings) string {
- text := "File is not `gofumpt`-ed"
-
- if settings.Gofumpt.ExtraRules {
- text += " with `-extra`"
- }
-
- return text
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader/goheader.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader/goheader.go
index 14d517fb30..d24ad453e0 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader/goheader.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader/goheader.go
@@ -2,29 +2,25 @@ package goheader
import (
"go/token"
- "sync"
+ "strings"
goheader "github.com/denis-tingaikin/go-header"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
+ "github.com/golangci/golangci-lint/pkg/golinters/internal"
)
const linterName = "goheader"
-func New(settings *config.GoHeaderSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
+func New(settings *config.GoHeaderSettings, basePath string) *goanalysis.Linter {
conf := &goheader.Configuration{}
if settings != nil {
conf = &goheader.Configuration{
Values: settings.Values,
Template: settings.Template,
- TemplatePath: settings.TemplatePath,
+ TemplatePath: strings.ReplaceAll(settings.TemplatePath, internal.PlaceholderBasePath, basePath),
}
}
@@ -32,84 +28,105 @@ func New(settings *config.GoHeaderSettings) *goanalysis.Linter {
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (any, error) {
- issues, err := runGoHeader(pass, conf)
+ err := runGoHeader(pass, conf)
if err != nil {
return nil, err
}
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
return nil, nil
},
}
return goanalysis.NewLinter(
linterName,
- "Checks is file header matches to pattern",
+ "Checks if file header matches to pattern",
[]*analysis.Analyzer{analyzer},
nil,
- ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeSyntax)
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
-func runGoHeader(pass *analysis.Pass, conf *goheader.Configuration) ([]goanalysis.Issue, error) {
+func runGoHeader(pass *analysis.Pass, conf *goheader.Configuration) error {
if conf.TemplatePath == "" && conf.Template == "" {
// User did not pass template, so then do not run go-header linter
- return nil, nil
+ return nil
}
template, err := conf.GetTemplate()
if err != nil {
- return nil, err
+ return err
}
values, err := conf.GetValues()
if err != nil {
- return nil, err
+ return err
}
a := goheader.New(goheader.WithTemplate(template), goheader.WithValues(values))
- var issues []goanalysis.Issue
for _, file := range pass.Files {
- path := pass.Fset.Position(file.Pos()).Filename
+ position, isGoFile := goanalysis.GetGoFilePosition(pass, file)
+ if !isGoFile {
+ continue
+ }
+
+ issue := a.Analyze(&goheader.Target{File: file, Path: position.Filename})
+ if issue == nil {
+ continue
+ }
- i := a.Analyze(&goheader.Target{File: file, Path: path})
+ f := pass.Fset.File(file.Pos())
- if i == nil {
+ commentLine := 1
+ var offset int
+
+ // Inspired by https://github.com/denis-tingaikin/go-header/blob/4c75a6a2332f025705325d6c71fff4616aedf48f/analyzer.go#L85-L92
+ if len(file.Comments) > 0 && file.Comments[0].Pos() < file.Package {
+ if !strings.HasPrefix(file.Comments[0].List[0].Text, "/*") {
+ // When the comment is "//" there is a one character offset.
+ offset = 1
+ }
+ commentLine = goanalysis.GetFilePositionFor(pass.Fset, file.Comments[0].Pos()).Line
+ }
+
+ // Skip issues related to build directives.
+ // https://github.com/denis-tingaikin/go-header/issues/18
+ if issue.Location().Position-offset < 0 {
continue
}
- issue := result.Issue{
- Pos: token.Position{
- Line: i.Location().Line + 1,
- Column: i.Location().Position,
- Filename: path,
- },
- Text: i.Message(),
- FromLinter: linterName,
+ diag := analysis.Diagnostic{
+ Pos: f.LineStart(issue.Location().Line+1) + token.Pos(issue.Location().Position-offset), // The position of the first divergence.
+ Message: issue.Message(),
}
- if fix := i.Fix(); fix != nil {
- issue.LineRange = &result.Range{
- From: issue.Line(),
- To: issue.Line() + len(fix.Actual) - 1,
+ if fix := issue.Fix(); fix != nil {
+ current := len(fix.Actual)
+ for _, s := range fix.Actual {
+ current += len(s)
}
- issue.Replacement = &result.Replacement{
- NeedOnlyDelete: len(fix.Expected) == 0,
- NewLines: fix.Expected,
+
+ start := f.LineStart(commentLine)
+
+ end := start + token.Pos(current)
+
+ header := strings.Join(fix.Expected, "\n") + "\n"
+
+ // Adds an extra line between the package and the header.
+ if end == file.Package {
+ header += "\n"
}
+
+ diag.SuggestedFixes = []analysis.SuggestedFix{{
+ TextEdits: []analysis.TextEdit{{
+ Pos: start,
+ End: end,
+ NewText: []byte(header),
+ }},
+ }}
}
- issues = append(issues, goanalysis.NewIssue(&issue, pass))
+ pass.Report(diag)
}
- return issues, nil
+ return nil
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports/goimports.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports/goimports.go
index de965d5c85..d7ba98559a 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports/goimports.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports/goimports.go
@@ -1,94 +1,28 @@
package goimports
import (
- "fmt"
- "sync"
-
- goimportsAPI "github.com/golangci/gofmt/goimports"
"golang.org/x/tools/go/analysis"
- "golang.org/x/tools/imports"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
+ "github.com/golangci/golangci-lint/pkg/goformatters"
+ goimportsbase "github.com/golangci/golangci-lint/pkg/goformatters/goimports"
"github.com/golangci/golangci-lint/pkg/golinters/internal"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
)
const linterName = "goimports"
func New(settings *config.GoImportsSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
- analyzer := &analysis.Analyzer{
- Name: linterName,
- Doc: goanalysis.TheOnlyanalyzerDoc,
- Run: goanalysis.DummyRun,
- }
+ a := goformatters.NewAnalyzer(
+ internal.LinterLogger.Child(linterName),
+ "Checks if the code and import statements are formatted according to the 'goimports' command.",
+ goimportsbase.New(settings),
+ )
return goanalysis.NewLinter(
- linterName,
- "Check import statements are formatted according to the 'goimport' command. "+
- "Reformat imports in autofix mode.",
- []*analysis.Analyzer{analyzer},
+ a.Name,
+ a.Doc,
+ []*analysis.Analyzer{a},
nil,
- ).WithContextSetter(func(lintCtx *linter.Context) {
- imports.LocalPrefix = settings.LocalPrefixes
-
- analyzer.Run = func(pass *analysis.Pass) (any, error) {
- issues, err := runGoImports(lintCtx, pass)
- if err != nil {
- return nil, err
- }
-
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
- return nil, nil
- }
- }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeSyntax)
-}
-
-func runGoImports(lintCtx *linter.Context, pass *analysis.Pass) ([]goanalysis.Issue, error) {
- fileNames := internal.GetFileNames(pass)
-
- var issues []goanalysis.Issue
-
- for _, f := range fileNames {
- diff, err := goimportsAPI.Run(f)
- if err != nil { // TODO: skip
- return nil, err
- }
- if diff == nil {
- continue
- }
-
- is, err := internal.ExtractIssuesFromPatch(string(diff), lintCtx, linterName, getIssuedTextGoImports)
- if err != nil {
- return nil, fmt.Errorf("can't extract issues from gofmt diff output %q: %w", string(diff), err)
- }
-
- for i := range is {
- issues = append(issues, goanalysis.NewIssue(&is[i], pass))
- }
- }
-
- return issues, nil
-}
-
-func getIssuedTextGoImports(settings *config.LintersSettings) string {
- text := "File is not `goimports`-ed"
-
- if settings.Goimports.LocalPrefixes != "" {
- text += " with -local " + settings.Goimports.LocalPrefixes
- }
-
- return text
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives/gomoddirectives.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives/gomoddirectives.go
index 9cde7e26c6..f8f47ba2b4 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives/gomoddirectives.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives/gomoddirectives.go
@@ -1,6 +1,7 @@
package gomoddirectives
import (
+ "regexp"
"sync"
"github.com/ldez/gomoddirectives"
@@ -8,6 +9,7 @@ import (
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
+ "github.com/golangci/golangci-lint/pkg/golinters/internal"
"github.com/golangci/golangci-lint/pkg/lint/linter"
"github.com/golangci/golangci-lint/pkg/result"
)
@@ -24,6 +26,27 @@ func New(settings *config.GoModDirectivesSettings) *goanalysis.Linter {
opts.ReplaceAllowList = settings.ReplaceAllowList
opts.RetractAllowNoExplanation = settings.RetractAllowNoExplanation
opts.ExcludeForbidden = settings.ExcludeForbidden
+ opts.ToolchainForbidden = settings.ToolchainForbidden
+ opts.ToolForbidden = settings.ToolForbidden
+ opts.GoDebugForbidden = settings.GoDebugForbidden
+
+ if settings.ToolchainPattern != "" {
+ exp, err := regexp.Compile(settings.ToolchainPattern)
+ if err != nil {
+ internal.LinterLogger.Fatalf("%s: invalid toolchain pattern: %v", linterName, err)
+ } else {
+ opts.ToolchainPattern = exp
+ }
+ }
+
+ if settings.GoVersionPattern != "" {
+ exp, err := regexp.Compile(settings.GoVersionPattern)
+ if err != nil {
+ internal.LinterLogger.Fatalf("%s: invalid Go version pattern: %v", linterName, err)
+ } else {
+ opts.GoVersionPattern = exp
+ }
+ }
}
analyzer := &analysis.Analyzer{
@@ -40,7 +63,7 @@ func New(settings *config.GoModDirectivesSettings) *goanalysis.Linter {
).WithContextSetter(func(lintCtx *linter.Context) {
analyzer.Run = func(pass *analysis.Pass) (any, error) {
once.Do(func() {
- results, err := gomoddirectives.Analyze(opts)
+ results, err := gomoddirectives.AnalyzePass(pass, opts)
if err != nil {
lintCtx.Log.Warnf("running %s failed: %s: "+
"if you are not using go modules it is suggested to disable this linter", linterName, err)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard/gomodguard.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard/gomodguard.go
index 8f1036b0f1..8bddebc162 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard/gomodguard.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard/gomodguard.go
@@ -73,7 +73,7 @@ func New(settings *config.GoModGuardSettings) *goanalysis.Linter {
}
analyzer.Run = func(pass *analysis.Pass) (any, error) {
- gomodguardIssues := processor.ProcessFiles(internal.GetFileNames(pass))
+ gomodguardIssues := processor.ProcessFiles(internal.GetGoFileNames(pass))
mu.Lock()
defer mu.Unlock()
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname/goprintffuncname.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname/goprintffuncname.go
index 85154a9b38..c206ffaa3e 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname/goprintffuncname.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname/goprintffuncname.go
@@ -1,7 +1,7 @@
package goprintffuncname
import (
- "github.com/jirfag/go-printf-func-name/pkg/analyzer"
+ "github.com/golangci/go-printf-func-name/pkg/analyzer"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/goanalysis"
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec/gosec.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec/gosec.go
index c333152e69..6b46beaccf 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec/gosec.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec/gosec.go
@@ -10,6 +10,7 @@ import (
"sync"
"github.com/securego/gosec/v2"
+ "github.com/securego/gosec/v2/analyzers"
"github.com/securego/gosec/v2/issue"
"github.com/securego/gosec/v2/rules"
"golang.org/x/tools/go/analysis"
@@ -27,16 +28,25 @@ func New(settings *config.GoSecSettings) *goanalysis.Linter {
var mu sync.Mutex
var resIssues []goanalysis.Issue
- var filters []rules.RuleFilter
conf := gosec.NewConfig()
+
+ var ruleFilters []rules.RuleFilter
+ var analyzerFilters []analyzers.AnalyzerFilter
if settings != nil {
- filters = gosecRuleFilters(settings.Includes, settings.Excludes)
+ // TODO(ldez) to remove when the problem will be fixed by gosec.
+ // https://github.com/securego/gosec/issues/1211
+ // https://github.com/securego/gosec/issues/1209
+ settings.Excludes = append(settings.Excludes, "G407")
+
+ ruleFilters = createRuleFilters(settings.Includes, settings.Excludes)
+ analyzerFilters = createAnalyzerFilters(settings.Includes, settings.Excludes)
conf = toGosecConfig(settings)
}
logger := log.New(io.Discard, "", 0)
- ruleDefinitions := rules.Generate(false, filters...)
+ ruleDefinitions := rules.Generate(false, ruleFilters...)
+ analyzerDefinitions := analyzers.Generate(false, analyzerFilters...)
analyzer := &analysis.Analyzer{
Name: linterName,
@@ -53,7 +63,9 @@ func New(settings *config.GoSecSettings) *goanalysis.Linter {
analyzer.Run = func(pass *analysis.Pass) (any, error) {
// The `gosecAnalyzer` is here because of concurrency issue.
gosecAnalyzer := gosec.NewAnalyzer(conf, true, settings.ExcludeGenerated, false, settings.Concurrency, logger)
+
gosecAnalyzer.LoadRules(ruleDefinitions.RulesInfo())
+ gosecAnalyzer.LoadAnalyzers(analyzerDefinitions.AnalyzersInfo())
issues := runGoSec(lintCtx, pass, settings, gosecAnalyzer)
@@ -77,6 +89,7 @@ func runGoSec(lintCtx *linter.Context, pass *analysis.Pass, settings *config.GoS
}
analyzer.CheckRules(pkg)
+ analyzer.CheckAnalyzers(pkg)
secIssues, _, _ := analyzer.Report()
if len(secIssues) == 0 {
@@ -171,12 +184,35 @@ func convertGosecGlobals(globalOptionFromConfig any, conf gosec.Config) {
}
for k, v := range globalOptionMap {
- conf.SetGlobal(gosec.GlobalOption(k), fmt.Sprintf("%v", v))
+ option := gosec.GlobalOption(k)
+
+ // Set nosec global option only if the value is true
+ // https://github.com/securego/gosec/blob/v2.21.4/analyzer.go#L572
+ if option == gosec.Nosec && v == false {
+ continue
+ }
+
+ conf.SetGlobal(option, fmt.Sprintf("%v", v))
+ }
+}
+
+// based on https://github.com/securego/gosec/blob/81cda2f91fbe1bf4735feb55febcae03e697a92b/cmd/gosec/main.go#L258-L275
+func createAnalyzerFilters(includes, excludes []string) []analyzers.AnalyzerFilter {
+ var filters []analyzers.AnalyzerFilter
+
+ if len(includes) > 0 {
+ filters = append(filters, analyzers.NewAnalyzerFilter(false, includes...))
}
+
+ if len(excludes) > 0 {
+ filters = append(filters, analyzers.NewAnalyzerFilter(true, excludes...))
+ }
+
+ return filters
}
// based on https://github.com/securego/gosec/blob/569328eade2ccbad4ce2d0f21ee158ab5356a5cf/cmd/gosec/main.go#L170-L188
-func gosecRuleFilters(includes, excludes []string) []rules.RuleFilter {
+func createRuleFilters(includes, excludes []string) []rules.RuleFilter {
var filters []rules.RuleFilter
if len(includes) > 0 {
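The gosec hunk above splits filtering into rule filters and analyzer filters and force-excludes G407 while the linked upstream issues stay open. A minimal standalone sketch of that include/exclude pattern, using a plain struct in place of the real rules.RuleFilter and analyzers.AnalyzerFilter types:

package main

import (
	"fmt"
	"slices"
)

// filter mirrors the include/exclude idea from the hunk above: an exclude
// filter drops the listed IDs, an include filter keeps only the listed IDs.
type filter struct {
	exclude bool
	ids     []string
}

func buildFilters(includes, excludes []string) []filter {
	// Work around the G407 problem by always excluding it, as the vendored
	// wrapper now does.
	excludes = append(excludes, "G407")

	var filters []filter
	if len(includes) > 0 {
		filters = append(filters, filter{exclude: false, ids: includes})
	}
	if len(excludes) > 0 {
		filters = append(filters, filter{exclude: true, ids: excludes})
	}

	return filters
}

func keep(id string, filters []filter) bool {
	for _, f := range filters {
		listed := slices.Contains(f.ids, id)
		if f.exclude && listed {
			return false
		}
		if !f.exclude && !listed {
			return false
		}
	}

	return true
}

func main() {
	filters := buildFilters(nil, []string{"G101"})
	for _, id := range []string{"G101", "G204", "G407"} {
		// G101 and G407 are dropped, G204 survives.
		fmt.Printf("%s kept: %v\n", id, keep(id, filters))
	}
}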
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple/gosimple.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple/gosimple.go
index 6a0d967232..c03871adf9 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple/gosimple.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple/gosimple.go
@@ -11,7 +11,7 @@ import (
func New(settings *config.StaticCheckSettings) *goanalysis.Linter {
cfg := internal.StaticCheckConfig(settings)
- analyzers := internal.SetupStaticCheckAnalyzers(simple.Analyzers, internal.GetGoVersion(settings), cfg.Checks)
+ analyzers := internal.SetupStaticCheckAnalyzers(simple.Analyzers, cfg.Checks)
return goanalysis.NewLinter(
"gosimple",
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan/gosmopolitan.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan/gosmopolitan.go
index 4f6fb80358..bf9b19f129 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan/gosmopolitan.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan/gosmopolitan.go
@@ -10,16 +10,16 @@ import (
"github.com/golangci/golangci-lint/pkg/goanalysis"
)
-func New(s *config.GosmopolitanSettings) *goanalysis.Linter {
+func New(settings *config.GosmopolitanSettings) *goanalysis.Linter {
a := gosmopolitan.NewAnalyzer()
- cfgMap := map[string]map[string]any{}
- if s != nil {
- cfgMap[a.Name] = map[string]any{
- "allowtimelocal": s.AllowTimeLocal,
- "escapehatches": strings.Join(s.EscapeHatches, ","),
- "lookattests": !s.IgnoreTests,
- "watchforscripts": strings.Join(s.WatchForScripts, ","),
+ cfg := map[string]map[string]any{}
+ if settings != nil {
+ cfg[a.Name] = map[string]any{
+ "allowtimelocal": settings.AllowTimeLocal,
+ "escapehatches": strings.Join(settings.EscapeHatches, ","),
+ "lookattests": !settings.IgnoreTests,
+ "watchforscripts": strings.Join(settings.WatchForScripts, ","),
}
}
@@ -27,6 +27,6 @@ func New(s *config.GosmopolitanSettings) *goanalysis.Linter {
a.Name,
a.Doc,
[]*analysis.Analyzer{a},
- cfgMap,
+ cfg,
).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet/govet.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet/govet.go
index 1211a8833b..b970e40392 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet/govet.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet/govet.go
@@ -2,7 +2,7 @@ package govet
import (
"slices"
- "sort"
+ "strings"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/appends"
@@ -40,6 +40,7 @@ import (
"golang.org/x/tools/go/analysis/passes/slog"
"golang.org/x/tools/go/analysis/passes/sortslice"
"golang.org/x/tools/go/analysis/passes/stdmethods"
+ "golang.org/x/tools/go/analysis/passes/stdversion"
"golang.org/x/tools/go/analysis/passes/stringintconv"
"golang.org/x/tools/go/analysis/passes/structtag"
"golang.org/x/tools/go/analysis/passes/testinggoroutine"
@@ -50,6 +51,7 @@ import (
"golang.org/x/tools/go/analysis/passes/unsafeptr"
"golang.org/x/tools/go/analysis/passes/unusedresult"
"golang.org/x/tools/go/analysis/passes/unusedwrite"
+ "golang.org/x/tools/go/analysis/passes/waitgroup"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
@@ -89,6 +91,7 @@ var (
slog.Analyzer,
sortslice.Analyzer,
stdmethods.Analyzer,
+ stdversion.Analyzer,
stringintconv.Analyzer,
structtag.Analyzer,
testinggoroutine.Analyzer,
@@ -99,9 +102,10 @@ var (
unsafeptr.Analyzer,
unusedresult.Analyzer,
unusedwrite.Analyzer,
+ waitgroup.Analyzer,
}
- // https://github.com/golang/go/blob/b56645a87b28840a180d64077877cb46570b4176/src/cmd/vet/main.go#L49-L81
+ // https://github.com/golang/go/blob/go1.23.0/src/cmd/vet/main.go#L55-L87
defaultAnalyzers = []*analysis.Analyzer{
appends.Analyzer,
asmdecl.Analyzer,
@@ -126,6 +130,7 @@ var (
sigchanyzer.Analyzer,
slog.Analyzer,
stdmethods.Analyzer,
+ stdversion.Analyzer,
stringintconv.Analyzer,
structtag.Analyzer,
testinggoroutine.Analyzer,
@@ -159,8 +164,8 @@ func New(settings *config.GovetSettings) *goanalysis.Linter {
}
func analyzersFromConfig(settings *config.GovetSettings) []*analysis.Analyzer {
- debugAnalyzersListf(allAnalyzers, "All available analyzers")
- debugAnalyzersListf(defaultAnalyzers, "Default analyzers")
+ logAnalyzers("All available analyzers", allAnalyzers)
+ logAnalyzers("Default analyzers", defaultAnalyzers)
if settings == nil {
return defaultAnalyzers
@@ -173,19 +178,19 @@ func analyzersFromConfig(settings *config.GovetSettings) []*analysis.Analyzer {
}
}
- debugAnalyzersListf(enabledAnalyzers, "Enabled by config analyzers")
+ logAnalyzers("Enabled by config analyzers", enabledAnalyzers)
return enabledAnalyzers
}
func isAnalyzerEnabled(name string, cfg *config.GovetSettings, defaultAnalyzers []*analysis.Analyzer) bool {
- // TODO(ldez) remove loopclosure when go1.23
+ // TODO(ldez) remove loopclosure when go1.24
if name == loopclosure.Analyzer.Name && config.IsGoGreaterThanOrEqual(cfg.Go, "1.22") {
return false
}
// Keeping for backward compatibility.
- if cfg.CheckShadowing && name == shadow.Analyzer.Name {
+ if cfg.CheckShadowing != nil && *cfg.CheckShadowing && name == shadow.Analyzer.Name {
return true
}
@@ -207,7 +212,7 @@ func isAnalyzerEnabled(name string, cfg *config.GovetSettings, defaultAnalyzers
}
}
-func debugAnalyzersListf(analyzers []*analysis.Analyzer, message string) {
+func logAnalyzers(message string, analyzers []*analysis.Analyzer) {
if !isDebug {
return
}
@@ -217,7 +222,7 @@ func debugAnalyzersListf(analyzers []*analysis.Analyzer, message string) {
analyzerNames = append(analyzerNames, a.Name)
}
- sort.Strings(analyzerNames)
+ slices.Sort(analyzerNames)
- debugf("%s (%d): %s", message, len(analyzerNames), analyzerNames)
+ debugf("%s (%d): %s", message, len(analyzerNames), strings.Join(analyzerNames, ", "))
}
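The govet changes above add the stdversion and waitgroup passes and switch the debug output from sort.Strings to slices.Sort plus strings.Join. The logging helper, reduced to a self-contained stand-in (logAnalyzers here is local, not the vendored symbol):

package main

import (
	"fmt"
	"slices"
	"strings"
)

// logAnalyzers prints a sorted, comma-separated list of analyzer names in
// the same shape as the debugf call in the hunk above.
func logAnalyzers(message string, names []string) {
	slices.Sort(names)
	fmt.Printf("%s (%d): %s\n", message, len(names), strings.Join(names, ", "))
}

func main() {
	logAnalyzers("Default analyzers", []string{"stdversion", "appends", "waitgroup"})
	// Default analyzers (3): appends, stdversion, waitgroup
}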
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper/grouper.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper/grouper.go
index aa6ce1cebb..e0a3f794a7 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper/grouper.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper/grouper.go
@@ -11,9 +11,9 @@ import (
func New(settings *config.GrouperSettings) *goanalysis.Linter {
a := grouper.New()
- linterCfg := map[string]map[string]any{}
+ cfg := map[string]map[string]any{}
if settings != nil {
- linterCfg[a.Name] = map[string]any{
+ cfg[a.Name] = map[string]any{
"const-require-single-const": settings.ConstRequireSingleConst,
"const-require-grouping": settings.ConstRequireGrouping,
"import-require-single-import": settings.ImportRequireSingleImport,
@@ -29,6 +29,6 @@ func New(settings *config.GrouperSettings) *goanalysis.Linter {
a.Name,
"Analyze expression groups.",
[]*analysis.Analyzer{a},
- linterCfg,
+ cfg,
).WithLoadMode(goanalysis.LoadModeSyntax)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/iface/iface.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/iface/iface.go
new file mode 100644
index 0000000000..31f88160ea
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/iface/iface.go
@@ -0,0 +1,57 @@
+package iface
+
+import (
+ "slices"
+
+ "github.com/uudashr/iface/identical"
+ "github.com/uudashr/iface/opaque"
+ "github.com/uudashr/iface/unused"
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/golangci/golangci-lint/pkg/config"
+ "github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(settings *config.IfaceSettings) *goanalysis.Linter {
+ var conf map[string]map[string]any
+ if settings != nil {
+ conf = settings.Settings
+ }
+
+ return goanalysis.NewLinter(
+ "iface",
+ "Detect the incorrect use of interfaces, helping developers avoid interface pollution.",
+ analyzersFromSettings(settings),
+ conf,
+ ).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
+
+func analyzersFromSettings(settings *config.IfaceSettings) []*analysis.Analyzer {
+ allAnalyzers := map[string]*analysis.Analyzer{
+ "identical": identical.Analyzer,
+ "unused": unused.Analyzer,
+ "opaque": opaque.Analyzer,
+ }
+
+ if settings == nil || len(settings.Enable) == 0 {
+ // Default enable `identical` analyzer only
+ return []*analysis.Analyzer{identical.Analyzer}
+ }
+
+ var analyzers []*analysis.Analyzer
+ for _, name := range uniqueNames(settings.Enable) {
+ if _, ok := allAnalyzers[name]; !ok {
+ // skip unknown analyzer
+ continue
+ }
+
+ analyzers = append(analyzers, allAnalyzers[name])
+ }
+
+ return analyzers
+}
+
+func uniqueNames(names []string) []string {
+ slices.Sort(names)
+ return slices.Compact(names)
+}
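The new iface wrapper defaults to the identical analyzer and otherwise maps a deduplicated enable list onto the analyzers it knows about. The selection logic in isolation, with placeholder names standing in for the real *analysis.Analyzer values:

package main

import (
	"fmt"
	"slices"
)

// pickAnalyzers mirrors analyzersFromSettings: duplicates collapse, unknown
// names are skipped, and an empty list falls back to "identical".
func pickAnalyzers(enable []string) []string {
	known := map[string]bool{"identical": true, "unused": true, "opaque": true}

	if len(enable) == 0 {
		return []string{"identical"}
	}

	slices.Sort(enable)
	enable = slices.Compact(enable)

	var picked []string
	for _, name := range enable {
		if !known[name] {
			continue // skip unknown analyzer
		}
		picked = append(picked, name)
	}

	return picked
}

func main() {
	fmt.Println(pickAnalyzers(nil))                                   // [identical]
	fmt.Println(pickAnalyzers([]string{"unused", "unused", "bogus"})) // [unused]
}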
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas/importas.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas/importas.go
index 45117c9a48..b7c6c35aea 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas/importas.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas/importas.go
@@ -51,8 +51,11 @@ func New(settings *config.ImportAsSettings) *goanalysis.Linter {
uniqPackages[a.Pkg] = a
}
- // skip the duplication check when the alias is a regular expression replacement pattern (ie. contains `$`).
- if v, ok := uniqAliases[a.Alias]; ok && !strings.Contains(a.Alias, "$") {
+ // Skips the duplication check when:
+ // - the alias is empty.
+ // - the alias is a regular expression replacement pattern (ie. contains `$`).
+ v, ok := uniqAliases[a.Alias]
+ if ok && a.Alias != "" && !strings.Contains(a.Alias, "$") {
lintCtx.Log.Errorf("invalid configuration, multiple packages with the same alias: alias=%s packages=[%s,%s]", a.Alias, a.Pkg, v.Pkg)
} else {
uniqAliases[a.Alias] = a
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/commons.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/commons.go
index c21dd00927..ebb0b13a0d 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/commons.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/commons.go
@@ -4,3 +4,12 @@ import "github.com/golangci/golangci-lint/pkg/logutils"
// LinterLogger must be use only when the context logger is not available.
var LinterLogger = logutils.NewStderrLog(logutils.DebugKeyLinter)
+
+// Placeholders used inside linters to evaluate relative paths.
+const (
+ PlaceholderBasePath = "${base-path}"
+ // Deprecated: it must be removed in v2.
+ // [PlaceholderBasePath] will be the only one placeholder as it is a dynamic value based on
+ // [github.com/golangci/golangci-lint/pkg/config.Run.RelativePathMode].
+ PlaceholderConfigDir = "${configDir}"
+)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/diff.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/diff.go
deleted file mode 100644
index b20230dfa9..0000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/diff.go
+++ /dev/null
@@ -1,265 +0,0 @@
-package internal
-
-import (
- "bytes"
- "fmt"
- "go/token"
- "strings"
-
- diffpkg "github.com/sourcegraph/go-diff/diff"
-
- "github.com/golangci/golangci-lint/pkg/config"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/logutils"
- "github.com/golangci/golangci-lint/pkg/result"
-)
-
-type Change struct {
- LineRange result.Range
- Replacement result.Replacement
-}
-
-type diffLineType string
-
-const (
- diffLineAdded diffLineType = "added"
- diffLineOriginal diffLineType = "original"
- diffLineDeleted diffLineType = "deleted"
-)
-
-type fmtTextFormatter func(settings *config.LintersSettings) string
-
-type diffLine struct {
- originalNumber int // 1-based original line number
- typ diffLineType
- data string // "+" or "-" stripped line
-}
-
-type hunkChangesParser struct {
- // needed because we merge currently added lines with the last original line
- lastOriginalLine *diffLine
-
- // if the first line of diff is an adding we save all additions to replacementLinesToPrepend
- replacementLinesToPrepend []string
-
- log logutils.Log
-
- lines []diffLine
-
- ret []Change
-}
-
-func (p *hunkChangesParser) parseDiffLines(h *diffpkg.Hunk) {
- lines := bytes.Split(h.Body, []byte{'\n'})
- currentOriginalLineNumber := int(h.OrigStartLine)
- var ret []diffLine
-
- for i, line := range lines {
- dl := diffLine{
- originalNumber: currentOriginalLineNumber,
- }
-
- lineStr := string(line)
-
- if strings.HasPrefix(lineStr, "-") {
- dl.typ = diffLineDeleted
- dl.data = strings.TrimPrefix(lineStr, "-")
- currentOriginalLineNumber++
- } else if strings.HasPrefix(lineStr, "+") {
- dl.typ = diffLineAdded
- dl.data = strings.TrimPrefix(lineStr, "+")
- } else {
- if i == len(lines)-1 && lineStr == "" {
- // handle last \n: don't add an empty original line
- break
- }
-
- dl.typ = diffLineOriginal
- dl.data = strings.TrimPrefix(lineStr, " ")
- currentOriginalLineNumber++
- }
-
- ret = append(ret, dl)
- }
-
- // if > 0, then the original file had a 'No newline at end of file' mark
- if h.OrigNoNewlineAt > 0 {
- dl := diffLine{
- originalNumber: currentOriginalLineNumber + 1,
- typ: diffLineAdded,
- data: "",
- }
- ret = append(ret, dl)
- }
-
- p.lines = ret
-}
-
-func (p *hunkChangesParser) handleOriginalLine(line diffLine, i *int) {
- if len(p.replacementLinesToPrepend) == 0 {
- p.lastOriginalLine = &line
- *i++
- return
- }
-
- // check following added lines for the case:
- // + added line 1
- // original line
- // + added line 2
-
- *i++
- var followingAddedLines []string
- for ; *i < len(p.lines) && p.lines[*i].typ == diffLineAdded; *i++ {
- followingAddedLines = append(followingAddedLines, p.lines[*i].data)
- }
-
- p.ret = append(p.ret, Change{
- LineRange: result.Range{
- From: line.originalNumber,
- To: line.originalNumber,
- },
- Replacement: result.Replacement{
- NewLines: append(p.replacementLinesToPrepend, append([]string{line.data}, followingAddedLines...)...),
- },
- })
- p.replacementLinesToPrepend = nil
- p.lastOriginalLine = &line
-}
-
-func (p *hunkChangesParser) handleDeletedLines(deletedLines []diffLine, addedLines []string) {
- change := Change{
- LineRange: result.Range{
- From: deletedLines[0].originalNumber,
- To: deletedLines[len(deletedLines)-1].originalNumber,
- },
- }
-
- if len(addedLines) != 0 {
- change.Replacement.NewLines = append([]string{}, p.replacementLinesToPrepend...)
- change.Replacement.NewLines = append(change.Replacement.NewLines, addedLines...)
- if len(p.replacementLinesToPrepend) != 0 {
- p.replacementLinesToPrepend = nil
- }
-
- p.ret = append(p.ret, change)
- return
- }
-
- // delete-only change with possible prepending
- if len(p.replacementLinesToPrepend) != 0 {
- change.Replacement.NewLines = p.replacementLinesToPrepend
- p.replacementLinesToPrepend = nil
- } else {
- change.Replacement.NeedOnlyDelete = true
- }
-
- p.ret = append(p.ret, change)
-}
-
-func (p *hunkChangesParser) handleAddedOnlyLines(addedLines []string) {
- if p.lastOriginalLine == nil {
- // the first line is added; the diff looks like:
- // 1. + ...
- // 2. - ...
- // or
- // 1. + ...
- // 2. ...
-
- p.replacementLinesToPrepend = addedLines
- return
- }
-
- // add-only change merged into the last original line with possible prepending
- p.ret = append(p.ret, Change{
- LineRange: result.Range{
- From: p.lastOriginalLine.originalNumber,
- To: p.lastOriginalLine.originalNumber,
- },
- Replacement: result.Replacement{
- NewLines: append(p.replacementLinesToPrepend, append([]string{p.lastOriginalLine.data}, addedLines...)...),
- },
- })
- p.replacementLinesToPrepend = nil
-}
-
-func (p *hunkChangesParser) parse(h *diffpkg.Hunk) []Change {
- p.parseDiffLines(h)
-
- for i := 0; i < len(p.lines); {
- line := p.lines[i]
- if line.typ == diffLineOriginal {
- p.handleOriginalLine(line, &i)
- continue
- }
-
- var deletedLines []diffLine
- for ; i < len(p.lines) && p.lines[i].typ == diffLineDeleted; i++ {
- deletedLines = append(deletedLines, p.lines[i])
- }
-
- var addedLines []string
- for ; i < len(p.lines) && p.lines[i].typ == diffLineAdded; i++ {
- addedLines = append(addedLines, p.lines[i].data)
- }
-
- if len(deletedLines) != 0 {
- p.handleDeletedLines(deletedLines, addedLines)
- continue
- }
-
- // no deletions, only additions
- p.handleAddedOnlyLines(addedLines)
- }
-
- if len(p.replacementLinesToPrepend) != 0 {
- p.log.Infof("The diff contains only additions: no original or deleted lines: %#v", p.lines)
- return nil
- }
-
- return p.ret
-}
-
-func ExtractIssuesFromPatch(patch string, lintCtx *linter.Context, linterName string, formatter fmtTextFormatter) ([]result.Issue, error) {
- diffs, err := diffpkg.ParseMultiFileDiff([]byte(patch))
- if err != nil {
- return nil, fmt.Errorf("can't parse patch: %w", err)
- }
-
- if len(diffs) == 0 {
- return nil, fmt.Errorf("got no diffs from patch parser: %v", patch)
- }
-
- var issues []result.Issue
- for _, d := range diffs {
- if len(d.Hunks) == 0 {
- lintCtx.Log.Warnf("Got no hunks in diff %+v", d)
- continue
- }
-
- for _, hunk := range d.Hunks {
- p := hunkChangesParser{log: lintCtx.Log}
-
- changes := p.parse(hunk)
-
- for _, change := range changes {
- change := change // fix scope
- i := result.Issue{
- FromLinter: linterName,
- Pos: token.Position{
- Filename: d.NewName,
- Line: change.LineRange.From,
- },
- Text: formatter(lintCtx.Settings()),
- Replacement: &change.Replacement,
- }
- if change.LineRange.From != change.LineRange.To {
- i.LineRange = &change.LineRange
- }
-
- issues = append(issues, i)
- }
- }
- }
-
- return issues, nil
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/staticcheck_common.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/staticcheck_common.go
index 5b5812c318..e5a0e33b7d 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/staticcheck_common.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/staticcheck_common.go
@@ -9,25 +9,9 @@ import (
scconfig "honnef.co/go/tools/config"
"github.com/golangci/golangci-lint/pkg/config"
- "github.com/golangci/golangci-lint/pkg/logutils"
)
-var debugf = logutils.Debug(logutils.DebugKeyMegacheck)
-
-func GetGoVersion(settings *config.StaticCheckSettings) string {
- var goVersion string
- if settings != nil {
- goVersion = settings.GoVersion
- }
-
- if goVersion != "" {
- return goVersion
- }
-
- return "1.17"
-}
-
-func SetupStaticCheckAnalyzers(src []*lint.Analyzer, goVersion string, checks []string) []*analysis.Analyzer {
+func SetupStaticCheckAnalyzers(src []*lint.Analyzer, checks []string) []*analysis.Analyzer {
var names []string
for _, a := range src {
names = append(names, a.Analyzer.Name)
@@ -38,7 +22,6 @@ func SetupStaticCheckAnalyzers(src []*lint.Analyzer, goVersion string, checks []
var ret []*analysis.Analyzer
for _, a := range src {
if filter[a.Analyzer.Name] {
- SetAnalyzerGoVersion(a.Analyzer, goVersion)
ret = append(ret, a.Analyzer)
}
}
@@ -46,14 +29,6 @@ func SetupStaticCheckAnalyzers(src []*lint.Analyzer, goVersion string, checks []
return ret
}
-func SetAnalyzerGoVersion(a *analysis.Analyzer, goVersion string) {
- if v := a.Flags.Lookup("go"); v != nil {
- if err := v.Value.Set(goVersion); err != nil {
- debugf("Failed to set go version: %s", err)
- }
- }
-}
-
func StaticCheckConfig(settings *config.StaticCheckSettings) *scconfig.Config {
var cfg *scconfig.Config
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/util.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/util.go
index 80b194dd26..7525f2f2c5 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/util.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/util.go
@@ -2,12 +2,12 @@ package internal
import (
"fmt"
- "path/filepath"
"strings"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
+ "github.com/golangci/golangci-lint/pkg/goanalysis"
)
func FormatCode(code string, _ *config.Config) string {
@@ -18,16 +18,17 @@ func FormatCode(code string, _ *config.Config) string {
return fmt.Sprintf("`%s`", code)
}
-func GetFileNames(pass *analysis.Pass) []string {
- var fileNames []string
+func GetGoFileNames(pass *analysis.Pass) []string {
+ var filenames []string
+
for _, f := range pass.Files {
- fileName := pass.Fset.PositionFor(f.Pos(), true).Filename
- ext := filepath.Ext(fileName)
- if ext != "" && ext != ".go" {
- // position has been adjusted to a non-go file, revert to original file
- fileName = pass.Fset.PositionFor(f.Pos(), false).Filename
+ position, b := goanalysis.GetGoFilePosition(pass, f)
+ if !b {
+ continue
}
- fileNames = append(fileNames, fileName)
+
+ filenames = append(filenames, position.Filename)
}
- return fileNames
+
+ return filenames
}
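GetGoFileNames now keeps only files whose adjusted position still points at a Go file, relying on goanalysis.GetGoFilePosition (whose exact behavior is only inferred from this patch) to tell adjusted from unadjusted positions, e.g. for cgo-generated sources. The go/token mechanics behind that distinction, shown with the standard library alone:

package main

import (
	"fmt"
	"go/token"
)

func main() {
	fset := token.NewFileSet()
	src := "package main\n\nfunc main() {}\n"

	f := fset.AddFile("gen.go", -1, len(src))
	f.SetLinesForContent([]byte(src))

	// Simulate a //line directive: from offset 14 on, adjusted positions are
	// reported as coming from a non-Go file.
	f.AddLineInfo(14, "original.c", 1)

	pos := f.Pos(20)
	fmt.Println(fset.PositionFor(pos, true))  // adjusted:   original.c:1:7
	fmt.Println(fset.PositionFor(pos, false)) // unadjusted: gen.go:3:7
}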
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/intrange/intrange.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/intrange/intrange.go
index a27569ebbc..d5ffd43453 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/intrange/intrange.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/intrange/intrange.go
@@ -15,5 +15,5 @@ func New() *goanalysis.Linter {
a.Doc,
[]*analysis.Analyzer{a},
nil,
- ).WithLoadMode(goanalysis.LoadModeSyntax)
+ ).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll/lll.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll/lll.go
index 15edcccad4..bad3b0c4e2 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll/lll.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll/lll.go
@@ -4,19 +4,15 @@ import (
"bufio"
"errors"
"fmt"
- "go/token"
+ "go/ast"
"os"
"strings"
- "sync"
"unicode/utf8"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
- "github.com/golangci/golangci-lint/pkg/golinters/internal"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
)
const linterName = "lll"
@@ -24,26 +20,15 @@ const linterName = "lll"
const goCommentDirectivePrefix = "//go:"
func New(settings *config.LllSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
analyzer := &analysis.Analyzer{
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (any, error) {
- issues, err := runLll(pass, settings)
+ err := runLll(pass, settings)
if err != nil {
return nil, err
}
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
return nil, nil
},
}
@@ -53,40 +38,39 @@ func New(settings *config.LllSettings) *goanalysis.Linter {
"Reports long lines",
[]*analysis.Analyzer{analyzer},
nil,
- ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeSyntax)
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
-func runLll(pass *analysis.Pass, settings *config.LllSettings) ([]goanalysis.Issue, error) {
- fileNames := internal.GetFileNames(pass)
-
+func runLll(pass *analysis.Pass, settings *config.LllSettings) error {
spaces := strings.Repeat(" ", settings.TabWidth)
- var issues []goanalysis.Issue
- for _, f := range fileNames {
- lintIssues, err := getLLLIssuesForFile(f, settings.LineLength, spaces)
+ for _, file := range pass.Files {
+ err := getLLLIssuesForFile(pass, file, settings.LineLength, spaces)
if err != nil {
- return nil, err
- }
-
- for i := range lintIssues {
- issues = append(issues, goanalysis.NewIssue(&lintIssues[i], pass))
+ return err
}
}
- return issues, nil
+ return nil
}
-func getLLLIssuesForFile(filename string, maxLineLen int, tabSpaces string) ([]result.Issue, error) {
- var res []result.Issue
+func getLLLIssuesForFile(pass *analysis.Pass, file *ast.File, maxLineLen int, tabSpaces string) error {
+ position, isGoFile := goanalysis.GetGoFilePosition(pass, file)
+ if !isGoFile {
+ return nil
+ }
+
+ nonAdjPosition := pass.Fset.PositionFor(file.Pos(), false)
- f, err := os.Open(filename)
+ f, err := os.Open(position.Filename)
if err != nil {
- return nil, fmt.Errorf("can't open file %s: %w", filename, err)
+ return fmt.Errorf("can't open file %s: %w", position.Filename, err)
}
+
defer f.Close()
+ ft := pass.Fset.File(file.Pos())
+
lineNumber := 0
multiImportEnabled := false
@@ -116,42 +100,34 @@ func getLLLIssuesForFile(filename string, maxLineLen int, tabSpaces string) ([]r
lineLen := utf8.RuneCountInString(line)
if lineLen > maxLineLen {
- res = append(res, result.Issue{
- Pos: token.Position{
- Filename: filename,
- Line: lineNumber,
- },
- Text: fmt.Sprintf("line is %d characters", lineLen),
- FromLinter: linterName,
+ pass.Report(analysis.Diagnostic{
+ Pos: ft.LineStart(goanalysis.AdjustPos(lineNumber, nonAdjPosition.Line, position.Line)),
+ Message: fmt.Sprintf("The line is %d characters long, which exceeds the maximum of %d characters.",
+ lineLen, maxLineLen),
})
}
}
if err := scanner.Err(); err != nil {
+ // scanner.Scan() might fail if the line is longer than bufio.MaxScanTokenSize
+ // In the case where the specified maxLineLen is smaller than bufio.MaxScanTokenSize
+ // we can return this line as a long line instead of returning an error.
+ // The reason for this change is that this case might happen with autogenerated files
+ // The go-bindata tool for instance might generate a file with a very long line.
+ // In this case, as it's an auto generated file, the warning returned by lll will
+ // be ignored.
+ // But if we return a linter error here, and this error happens for an autogenerated
+ // file the error will be discarded (fine), but all the subsequent errors for lll will
+ // be discarded for other files, and we'll miss legit error.
if errors.Is(err, bufio.ErrTooLong) && maxLineLen < bufio.MaxScanTokenSize {
- // scanner.Scan() might fail if the line is longer than bufio.MaxScanTokenSize
- // In the case where the specified maxLineLen is smaller than bufio.MaxScanTokenSize
- // we can return this line as a long line instead of returning an error.
- // The reason for this change is that this case might happen with autogenerated files
- // The go-bindata tool for instance might generate a file with a very long line.
- // In this case, as it's an auto generated file, the warning returned by lll will
- // be ignored.
- // But if we return a linter error here, and this error happens for an autogenerated
- // file the error will be discarded (fine), but all the subsequent errors for lll will
- // be discarded for other files, and we'll miss legit error.
- res = append(res, result.Issue{
- Pos: token.Position{
- Filename: filename,
- Line: lineNumber,
- Column: 1,
- },
- Text: fmt.Sprintf("line is more than %d characters", bufio.MaxScanTokenSize),
- FromLinter: linterName,
+ pass.Report(analysis.Diagnostic{
+ Pos: ft.LineStart(goanalysis.AdjustPos(lineNumber, nonAdjPosition.Line, position.Line)),
+ Message: fmt.Sprintf("line is more than %d characters", bufio.MaxScanTokenSize),
})
} else {
- return nil, fmt.Errorf("can't scan file %s: %w", filename, err)
+ return fmt.Errorf("can't scan file %s: %w", position.Filename, err)
}
}
- return res, nil
+ return nil
}
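The rewritten lll run reports through pass.Report with positions rebuilt from the line number, but the underlying check is unchanged: expand tabs, count runes, compare against the limit. That core, stripped of the analysis plumbing and of the //go: directive and import-block special cases:

package main

import (
	"bufio"
	"fmt"
	"strings"
	"unicode/utf8"
)

// longLines returns the 1-based numbers of lines whose rune count, after
// expanding tabs to tabWidth spaces, exceeds maxLen.
func longLines(src string, maxLen, tabWidth int) []int {
	spaces := strings.Repeat(" ", tabWidth)

	var long []int
	n := 0

	scanner := bufio.NewScanner(strings.NewReader(src))
	for scanner.Scan() {
		n++
		line := strings.ReplaceAll(scanner.Text(), "\t", spaces)
		if utf8.RuneCountInString(line) > maxLen {
			long = append(long, n)
		}
	}

	return long
}

func main() {
	src := "short\n" + strings.Repeat("x", 130) + "\n"
	fmt.Println(longLines(src, 120, 4)) // [2]
}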
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/loggercheck/loggercheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/loggercheck/loggercheck.go
index 077e8a512f..84c8d73635 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/loggercheck/loggercheck.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/loggercheck/loggercheck.go
@@ -22,6 +22,9 @@ func New(settings *config.LoggerCheckSettings) *goanalysis.Linter {
if !settings.Logr {
disable = append(disable, "logr")
}
+ if !settings.Slog {
+ disable = append(disable, "slog")
+ }
if !settings.Zap {
disable = append(disable, "zap")
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx/maintidx.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx/maintidx.go
index 08f12369e6..799c51c874 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx/maintidx.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx/maintidx.go
@@ -8,16 +8,16 @@ import (
"github.com/golangci/golangci-lint/pkg/goanalysis"
)
-func New(cfg *config.MaintIdxSettings) *goanalysis.Linter {
+func New(settings *config.MaintIdxSettings) *goanalysis.Linter {
analyzer := maintidx.Analyzer
- cfgMap := map[string]map[string]any{
+ cfg := map[string]map[string]any{
analyzer.Name: {"under": 20},
}
- if cfg != nil {
- cfgMap[analyzer.Name] = map[string]any{
- "under": cfg.Under,
+ if settings != nil {
+ cfg[analyzer.Name] = map[string]any{
+ "under": settings.Under,
}
}
@@ -25,6 +25,6 @@ func New(cfg *config.MaintIdxSettings) *goanalysis.Linter {
analyzer.Name,
analyzer.Doc,
[]*analysis.Analyzer{analyzer},
- cfgMap,
+ cfg,
).WithLoadMode(goanalysis.LoadModeSyntax)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero/makezero.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero/makezero.go
index ae4bf21842..b5ab4515e5 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero/makezero.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero/makezero.go
@@ -2,40 +2,26 @@ package makezero
import (
"fmt"
- "sync"
"github.com/ashanbrown/makezero/makezero"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
)
const linterName = "makezero"
func New(settings *config.MakezeroSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
analyzer := &analysis.Analyzer{
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (any, error) {
- issues, err := runMakeZero(pass, settings)
+ err := runMakeZero(pass, settings)
if err != nil {
return nil, err
}
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
return nil, nil
},
}
@@ -45,30 +31,25 @@ func New(settings *config.MakezeroSettings) *goanalysis.Linter {
"Finds slice declarations with non-zero initial length",
[]*analysis.Analyzer{analyzer},
nil,
- ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeTypesInfo)
+ ).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
-func runMakeZero(pass *analysis.Pass, settings *config.MakezeroSettings) ([]goanalysis.Issue, error) {
+func runMakeZero(pass *analysis.Pass, settings *config.MakezeroSettings) error {
zero := makezero.NewLinter(settings.Always)
- var issues []goanalysis.Issue
-
for _, file := range pass.Files {
hints, err := zero.Run(pass.Fset, pass.TypesInfo, file)
if err != nil {
- return nil, fmt.Errorf("makezero linter failed on file %q: %w", file.Name.String(), err)
+ return fmt.Errorf("makezero linter failed on file %q: %w", file.Name.String(), err)
}
for _, hint := range hints {
- issues = append(issues, goanalysis.NewIssue(&result.Issue{
- Pos: hint.Position(),
- Text: hint.Details(),
- FromLinter: linterName,
- }, pass))
+ pass.Report(analysis.Diagnostic{
+ Pos: hint.Pos(),
+ Message: hint.Details(),
+ })
}
}
- return issues, nil
+ return nil
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/mirror/mirror.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/mirror/mirror.go
index 34b880b529..e15dfa3a5a 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/mirror/mirror.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/mirror/mirror.go
@@ -1,70 +1,30 @@
package mirror
import (
- "sync"
-
"github.com/butuzov/mirror"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/goanalysis"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
)
func New() *goanalysis.Linter {
- var (
- mu sync.Mutex
- issues []goanalysis.Issue
- )
-
a := mirror.NewAnalyzer()
- a.Run = func(pass *analysis.Pass) (any, error) {
- // mirror only lints test files if the `--with-tests` flag is passed,
- // so we pass the `with-tests` flag as true to the analyzer before running it.
- // This can be turned off by using the regular golangci-lint flags such as `--tests` or `--skip-files`
- // or can be disabled per linter via exclude rules.
- // (see https://github.com/golangci/golangci-lint/issues/2527#issuecomment-1023707262)
- violations := mirror.Run(pass, true)
-
- if len(violations) == 0 {
- return nil, nil
- }
-
- for index := range violations {
- i := violations[index].Issue(pass.Fset)
- issue := result.Issue{
- FromLinter: a.Name,
- Text: i.Message,
- Pos: i.Start,
- }
-
- if i.InlineFix != "" {
- issue.Replacement = &result.Replacement{
- Inline: &result.InlineFix{
- StartCol: i.Start.Column - 1,
- Length: len(i.Original),
- NewString: i.InlineFix,
- },
- }
- }
-
- mu.Lock()
- issues = append(issues, goanalysis.NewIssue(&issue, pass))
- mu.Unlock()
- }
-
- return nil, nil
+ // mirror only lints test files if the `--with-tests` flag is passed,
+ // so we pass the `with-tests` flag as true to the analyzer before running it.
+ // This can be turned off by using the regular golangci-lint flags such as `--tests` or `--skip-files`
+ // or can be disabled per linter via exclude rules.
+ // (see https://github.com/golangci/golangci-lint/issues/2527#issuecomment-1023707262)
+ linterCfg := map[string]map[string]any{
+ a.Name: {
+ "with-tests": true,
+ },
}
- analyzer := goanalysis.NewLinter(
+ return goanalysis.NewLinter(
a.Name,
a.Doc,
[]*analysis.Analyzer{a},
- nil,
- ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return issues
- }).WithLoadMode(goanalysis.LoadModeTypesInfo)
-
- return analyzer
+ linterCfg,
+ ).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
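The mirror wrapper now passes with-tests through the per-analyzer config map instead of overriding Run; how that map reaches the analyzer is not part of this patch, but it presumably ends up applied to the analyzer's flag.FlagSet. A generic sketch of that assumed flag application:

package main

import (
	"flag"
	"fmt"
)

// applySettings pushes a settings map onto a FlagSet, roughly how an entry
// like {"with-tests": true} might be consumed downstream (assumed behavior).
func applySettings(fs *flag.FlagSet, settings map[string]any) error {
	for name, value := range settings {
		if fs.Lookup(name) == nil {
			continue // option unknown to this analyzer
		}
		if err := fs.Set(name, fmt.Sprintf("%v", value)); err != nil {
			return fmt.Errorf("setting %q: %w", name, err)
		}
	}

	return nil
}

func main() {
	fs := flag.NewFlagSet("mirror", flag.ContinueOnError)
	withTests := fs.Bool("with-tests", false, "also lint test files")

	if err := applySettings(fs, map[string]any{"with-tests": true}); err != nil {
		panic(err)
	}

	fmt.Println(*withTests) // true
}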
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell/misspell.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell/misspell.go
index 44409cec9d..9d19780aca 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell/misspell.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell/misspell.go
@@ -2,9 +2,9 @@ package misspell
import (
"fmt"
+ "go/ast"
"go/token"
"strings"
- "sync"
"unicode"
"github.com/golangci/misspell"
@@ -13,71 +13,37 @@ import (
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
"github.com/golangci/golangci-lint/pkg/golinters/internal"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
)
const linterName = "misspell"
func New(settings *config.MisspellSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
- analyzer := &analysis.Analyzer{
- Name: linterName,
- Doc: goanalysis.TheOnlyanalyzerDoc,
- Run: goanalysis.DummyRun,
+ replacer, err := createMisspellReplacer(settings)
+ if err != nil {
+ internal.LinterLogger.Fatalf("%s: %v", linterName, err)
}
- return goanalysis.NewLinter(
- linterName,
- "Finds commonly misspelled English words",
- []*analysis.Analyzer{analyzer},
- nil,
- ).WithContextSetter(func(lintCtx *linter.Context) {
- replacer, ruleErr := createMisspellReplacer(settings)
-
- analyzer.Run = func(pass *analysis.Pass) (any, error) {
- if ruleErr != nil {
- return nil, ruleErr
- }
-
- issues, err := runMisspell(lintCtx, pass, replacer, settings.Mode)
- if err != nil {
- return nil, err
- }
-
- if len(issues) == 0 {
- return nil, nil
+ a := &analysis.Analyzer{
+ Name: linterName,
+ Doc: "Finds commonly misspelled English words",
+ Run: func(pass *analysis.Pass) (any, error) {
+ for _, file := range pass.Files {
+ err := runMisspellOnFile(pass, file, replacer, settings.Mode)
+ if err != nil {
+ return nil, err
+ }
}
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
return nil, nil
- }
- }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeSyntax)
-}
-
-func runMisspell(lintCtx *linter.Context, pass *analysis.Pass, replacer *misspell.Replacer, mode string) ([]goanalysis.Issue, error) {
- fileNames := internal.GetFileNames(pass)
-
- var issues []goanalysis.Issue
- for _, filename := range fileNames {
- lintIssues, err := runMisspellOnFile(lintCtx, filename, replacer, mode)
- if err != nil {
- return nil, err
- }
-
- for i := range lintIssues {
- issues = append(issues, goanalysis.NewIssue(&lintIssues[i], pass))
- }
+ },
}
- return issues, nil
+ return goanalysis.NewLinter(
+ a.Name,
+ a.Doc,
+ []*analysis.Analyzer{a},
+ nil,
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
func createMisspellReplacer(settings *config.MisspellSettings) (*misspell.Replacer, error) {
@@ -112,10 +78,17 @@ func createMisspellReplacer(settings *config.MisspellSettings) (*misspell.Replac
return replacer, nil
}
-func runMisspellOnFile(lintCtx *linter.Context, filename string, replacer *misspell.Replacer, mode string) ([]result.Issue, error) {
- fileContent, err := lintCtx.FileCache.GetFileBytes(filename)
+func runMisspellOnFile(pass *analysis.Pass, file *ast.File, replacer *misspell.Replacer, mode string) error {
+ position, isGoFile := goanalysis.GetGoFilePosition(pass, file)
+ if !isGoFile {
+ return nil
+ }
+
+ // Uses the non-adjusted file to work with cgo:
+ // if we read the real file, the positions are wrong in some cases.
+ fileContent, err := pass.ReadFile(pass.Fset.PositionFor(file.Pos(), false).Filename)
if err != nil {
- return nil, fmt.Errorf("can't get file %s contents: %w", filename, err)
+ return fmt.Errorf("can't get file %s contents: %w", position.Filename, err)
}
// `r.ReplaceGo` doesn't find issues inside strings: it searches only inside comments.
@@ -129,36 +102,31 @@ func runMisspellOnFile(lintCtx *linter.Context, filename string, replacer *missp
replace = replacer.Replace
}
- _, diffs := replace(string(fileContent))
+ f := pass.Fset.File(file.Pos())
- var res []result.Issue
+ _, diffs := replace(string(fileContent))
for _, diff := range diffs {
text := fmt.Sprintf("`%s` is a misspelling of `%s`", diff.Original, diff.Corrected)
- pos := token.Position{
- Filename: filename,
- Line: diff.Line,
- Column: diff.Column + 1,
- }
-
- replacement := &result.Replacement{
- Inline: &result.InlineFix{
- StartCol: diff.Column,
- Length: len(diff.Original),
- NewString: diff.Corrected,
- },
- }
-
- res = append(res, result.Issue{
- Pos: pos,
- Text: text,
- FromLinter: linterName,
- Replacement: replacement,
+ start := f.LineStart(diff.Line) + token.Pos(diff.Column)
+ end := f.LineStart(diff.Line) + token.Pos(diff.Column+len(diff.Original))
+
+ pass.Report(analysis.Diagnostic{
+ Pos: start,
+ End: end,
+ Message: text,
+ SuggestedFixes: []analysis.SuggestedFix{{
+ TextEdits: []analysis.TextEdit{{
+ Pos: start,
+ End: end,
+ NewText: []byte(diff.Corrected),
+ }},
+ }},
})
}
- return res, nil
+ return nil
}
func appendExtraWords(replacer *misspell.Replacer, extraWords []config.MisspellExtraWords) error {
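The misspell rewrite converts each replacer diff into an analysis.Diagnostic whose Pos/End come from the token.File line start plus the byte column, with the correction attached as a SuggestedFix. A minimal helper in the same shape (misspellDiag is a local name, not part of the patch):

package main

import (
	"fmt"
	"go/token"

	"golang.org/x/tools/go/analysis"
)

// misspellDiag builds a diagnostic with a suggested fix for a misspelling at
// the given 1-based line and 0-based byte column of file.
func misspellDiag(file *token.File, line, column int, original, corrected string) analysis.Diagnostic {
	start := file.LineStart(line) + token.Pos(column)
	end := start + token.Pos(len(original))

	return analysis.Diagnostic{
		Pos:     start,
		End:     end,
		Message: fmt.Sprintf("`%s` is a misspelling of `%s`", original, corrected),
		SuggestedFixes: []analysis.SuggestedFix{{
			TextEdits: []analysis.TextEdit{{
				Pos:     start,
				End:     end,
				NewText: []byte(corrected),
			}},
		}},
	}
}

func main() {
	fset := token.NewFileSet()
	src := "// The langauge server\npackage main\n"

	f := fset.AddFile("doc.go", -1, len(src))
	f.SetLinesForContent([]byte(src))

	d := misspellDiag(f, 1, 7, "langauge", "language")
	fmt.Println(fset.Position(d.Pos), d.Message) // doc.go:1:8 `langauge` is a misspelling of `language`
}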
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/mnd/mnd.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/mnd/mnd.go
index 9aa8692ff3..fe64653b91 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/mnd/mnd.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/mnd/mnd.go
@@ -12,27 +12,6 @@ func New(settings *config.MndSettings) *goanalysis.Linter {
return newMND(mnd.Analyzer, settings, nil)
}
-func NewGoMND(settings *config.GoMndSettings) *goanalysis.Linter {
- // shallow copy because mnd.Analyzer is a global variable.
- a := new(analysis.Analyzer)
- *a = *mnd.Analyzer
-
- // Used to force the analyzer name to use the same name as the linter.
- // This is required to avoid displaying the analyzer name inside the issue text.
- a.Name = "gomnd"
-
- var linterCfg map[string]map[string]any
-
- if settings != nil && len(settings.Settings) > 0 {
- // Convert deprecated setting.
- linterCfg = map[string]map[string]any{
- a.Name: settings.Settings["mnd"],
- }
- }
-
- return newMND(a, &settings.MndSettings, linterCfg)
-}
-
func newMND(a *analysis.Analyzer, settings *config.MndSettings, linterCfg map[string]map[string]any) *goanalysis.Linter {
if len(linterCfg) == 0 && settings != nil {
cfg := make(map[string]any)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag/musttag.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag/musttag.go
index 30047abfc2..a4e9ceff28 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag/musttag.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag/musttag.go
@@ -8,11 +8,11 @@ import (
"github.com/golangci/golangci-lint/pkg/goanalysis"
)
-func New(setting *config.MustTagSettings) *goanalysis.Linter {
+func New(settings *config.MustTagSettings) *goanalysis.Linter {
var funcs []musttag.Func
- if setting != nil {
- for _, fn := range setting.Functions {
+ if settings != nil {
+ for _, fn := range settings.Functions {
funcs = append(funcs, musttag.Func{
Name: fn.Name,
Tag: fn.Tag,
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret/nakedret.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret/nakedret.go
index 4dd3fd4c3f..e69fa5e9f5 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret/nakedret.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret/nakedret.go
@@ -9,12 +9,12 @@ import (
)
func New(settings *config.NakedretSettings) *goanalysis.Linter {
- var maxLines int
+ var maxLines uint
if settings != nil {
maxLines = settings.MaxFuncLines
}
- a := nakedret.NakedReturnAnalyzer(uint(maxLines))
+ a := nakedret.NakedReturnAnalyzer(maxLines, false)
return goanalysis.NewLinter(
a.Name,
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif/nestif.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif/nestif.go
index 43be973b0a..b72538fd16 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif/nestif.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif/nestif.go
@@ -1,37 +1,21 @@
package nestif
import (
- "sort"
- "sync"
-
"github.com/nakabonne/nestif"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
)
const linterName = "nestif"
func New(settings *config.NestifSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
analyzer := &analysis.Analyzer{
- Name: goanalysis.TheOnlyAnalyzerName,
+ Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (any, error) {
- issues := runNestIf(pass, settings)
-
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
+ runNestIf(pass, settings)
return nil, nil
},
@@ -42,37 +26,34 @@ func New(settings *config.NestifSettings) *goanalysis.Linter {
"Reports deeply nested if statements",
[]*analysis.Analyzer{analyzer},
nil,
- ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeSyntax)
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
-func runNestIf(pass *analysis.Pass, settings *config.NestifSettings) []goanalysis.Issue {
+func runNestIf(pass *analysis.Pass, settings *config.NestifSettings) {
checker := &nestif.Checker{
MinComplexity: settings.MinComplexity,
}
- var lintIssues []nestif.Issue
- for _, f := range pass.Files {
- lintIssues = append(lintIssues, checker.Check(f, pass.Fset)...)
- }
+ for _, file := range pass.Files {
+ position, isGoFile := goanalysis.GetGoFilePosition(pass, file)
+ if !isGoFile {
+ continue
+ }
- if len(lintIssues) == 0 {
- return nil
- }
+ issues := checker.Check(file, pass.Fset)
+ if len(issues) == 0 {
+ continue
+ }
- sort.SliceStable(lintIssues, func(i, j int) bool {
- return lintIssues[i].Complexity > lintIssues[j].Complexity
- })
+ nonAdjPosition := pass.Fset.PositionFor(file.Pos(), false)
- issues := make([]goanalysis.Issue, 0, len(lintIssues))
- for _, i := range lintIssues {
- issues = append(issues, goanalysis.NewIssue(&result.Issue{
- Pos: i.Pos,
- Text: i.Message,
- FromLinter: linterName,
- }, pass))
- }
+ f := pass.Fset.File(file.Pos())
- return issues
+ for _, issue := range issues {
+ pass.Report(analysis.Diagnostic{
+ Pos: f.LineStart(goanalysis.AdjustPos(issue.Pos.Line, nonAdjPosition.Line, position.Line)),
+ Message: issue.Message,
+ })
+ }
+ }
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnesserr/nilnesserr.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnesserr/nilnesserr.go
new file mode 100644
index 0000000000..8349377b7b
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnesserr/nilnesserr.go
@@ -0,0 +1,23 @@
+package nilnesserr
+
+import (
+ "github.com/alingse/nilnesserr"
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/golangci/golangci-lint/pkg/goanalysis"
+ "github.com/golangci/golangci-lint/pkg/golinters/internal"
+)
+
+func New() *goanalysis.Linter {
+ a, err := nilnesserr.NewAnalyzer(nilnesserr.LinterSetting{})
+ if err != nil {
+ internal.LinterLogger.Fatalf("nilnesserr: create analyzer: %v", err)
+ }
+
+ return goanalysis.NewLinter(
+ a.Name,
+ a.Doc,
+ []*analysis.Analyzer{a},
+ nil,
+ ).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil/nilnil.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil/nilnil.go
index c9237035d3..ed25dec71f 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil/nilnil.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil/nilnil.go
@@ -1,8 +1,6 @@
package nilnil
import (
- "strings"
-
"github.com/Antonboom/nilnil/pkg/analyzer"
"golang.org/x/tools/go/analysis"
@@ -10,13 +8,16 @@ import (
"github.com/golangci/golangci-lint/pkg/goanalysis"
)
-func New(cfg *config.NilNilSettings) *goanalysis.Linter {
+func New(settings *config.NilNilSettings) *goanalysis.Linter {
a := analyzer.New()
- cfgMap := make(map[string]map[string]any)
- if cfg != nil && len(cfg.CheckedTypes) != 0 {
- cfgMap[a.Name] = map[string]any{
- "checked-types": strings.Join(cfg.CheckedTypes, ","),
+ cfg := make(map[string]map[string]any)
+ if settings != nil {
+ cfg[a.Name] = map[string]any{
+ "detect-opposite": settings.DetectOpposite,
+ }
+ if len(settings.CheckedTypes) != 0 {
+ cfg[a.Name]["checked-types"] = settings.CheckedTypes
}
}
@@ -24,7 +25,7 @@ func New(cfg *config.NilNilSettings) *goanalysis.Linter {
a.Name,
a.Doc,
[]*analysis.Analyzer{a},
- cfgMap,
+ cfg,
).
WithLoadMode(goanalysis.LoadModeTypesInfo)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/internal/issues.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/internal/issues.go
new file mode 100644
index 0000000000..5e9ba4117c
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/internal/issues.go
@@ -0,0 +1,41 @@
+package internal
+
+import (
+ "fmt"
+ "strings"
+ "unicode"
+)
+
+func formatExtraLeadingSpace(fullDirective string) string {
+ return fmt.Sprintf("directive `%s` should not have more than one leading space", fullDirective)
+}
+
+func formatNotMachine(fullDirective string) string {
+ expected := fullDirective[:2] + strings.TrimLeftFunc(fullDirective[2:], unicode.IsSpace)
+ return fmt.Sprintf("directive `%s` should be written without leading space as `%s`",
+ fullDirective, expected)
+}
+
+func formatNotSpecific(fullDirective, directiveWithOptionalLeadingSpace string) string {
+ return fmt.Sprintf("directive `%s` should mention specific linter such as `%s:my-linter`",
+ fullDirective, directiveWithOptionalLeadingSpace)
+}
+
+func formatParseError(fullDirective, directiveWithOptionalLeadingSpace string) string {
+ return fmt.Sprintf("directive `%s` should match `%s[:] [// ]`",
+ fullDirective,
+ directiveWithOptionalLeadingSpace)
+}
+
+func formatNoExplanation(fullDirective, fullDirectiveWithoutExplanation string) string {
+ return fmt.Sprintf("directive `%s` should provide explanation such as `%s // this is why`",
+ fullDirective, fullDirectiveWithoutExplanation)
+}
+
+func formatUnusedCandidate(fullDirective, expectedLinter string) string {
+ details := fmt.Sprintf("directive `%s` is unused", fullDirective)
+ if expectedLinter != "" {
+ details += fmt.Sprintf(" for linter %q", expectedLinter)
+ }
+ return details
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/internal/nolintlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/internal/nolintlint.go
index 5fed41cfdf..21cd20124f 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/internal/nolintlint.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/internal/nolintlint.go
@@ -2,123 +2,17 @@
package internal
import (
- "fmt"
- "go/ast"
"go/token"
"regexp"
"strings"
- "unicode"
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/golangci/golangci-lint/pkg/goanalysis"
"github.com/golangci/golangci-lint/pkg/result"
)
-type BaseIssue struct {
- fullDirective string
- directiveWithOptionalLeadingSpace string
- position token.Position
- replacement *result.Replacement
-}
-
-//nolint:gocritic // TODO(ldez) must be change in the future.
-func (b BaseIssue) Position() token.Position {
- return b.position
-}
-
-//nolint:gocritic // TODO(ldez) must be change in the future.
-func (b BaseIssue) Replacement() *result.Replacement {
- return b.replacement
-}
-
-type ExtraLeadingSpace struct {
- BaseIssue
-}
-
-//nolint:gocritic // TODO(ldez) must be change in the future.
-func (i ExtraLeadingSpace) Details() string {
- return fmt.Sprintf("directive `%s` should not have more than one leading space", i.fullDirective)
-}
-
-func (i ExtraLeadingSpace) String() string { return toString(i) }
-
-type NotMachine struct {
- BaseIssue
-}
-
-//nolint:gocritic // TODO(ldez) must be change in the future.
-func (i NotMachine) Details() string {
- expected := i.fullDirective[:2] + strings.TrimLeftFunc(i.fullDirective[2:], unicode.IsSpace)
- return fmt.Sprintf("directive `%s` should be written without leading space as `%s`",
- i.fullDirective, expected)
-}
-
-func (i NotMachine) String() string { return toString(i) }
-
-type NotSpecific struct {
- BaseIssue
-}
-
-//nolint:gocritic // TODO(ldez) must be change in the future.
-func (i NotSpecific) Details() string {
- return fmt.Sprintf("directive `%s` should mention specific linter such as `%s:my-linter`",
- i.fullDirective, i.directiveWithOptionalLeadingSpace)
-}
-
-func (i NotSpecific) String() string { return toString(i) }
-
-type ParseError struct {
- BaseIssue
-}
-
-//nolint:gocritic // TODO(ldez) must be change in the future.
-func (i ParseError) Details() string {
- return fmt.Sprintf("directive `%s` should match `%s[:] [// ]`",
- i.fullDirective,
- i.directiveWithOptionalLeadingSpace)
-}
-
-func (i ParseError) String() string { return toString(i) }
-
-type NoExplanation struct {
- BaseIssue
- fullDirectiveWithoutExplanation string
-}
-
-//nolint:gocritic // TODO(ldez) must be change in the future.
-func (i NoExplanation) Details() string {
- return fmt.Sprintf("directive `%s` should provide explanation such as `%s // this is why`",
- i.fullDirective, i.fullDirectiveWithoutExplanation)
-}
-
-func (i NoExplanation) String() string { return toString(i) }
-
-type UnusedCandidate struct {
- BaseIssue
- ExpectedLinter string
-}
-
-//nolint:gocritic // TODO(ldez) must be change in the future.
-func (i UnusedCandidate) Details() string {
- details := fmt.Sprintf("directive `%s` is unused", i.fullDirective)
- if i.ExpectedLinter != "" {
- details += fmt.Sprintf(" for linter %q", i.ExpectedLinter)
- }
- return details
-}
-
-func (i UnusedCandidate) String() string { return toString(i) }
-
-func toString(issue Issue) string {
- return fmt.Sprintf("%s at %s", issue.Details(), issue.Position())
-}
-
-type Issue interface {
- Details() string
- Position() token.Position
- String() string
- Replacement() *result.Replacement
-}
-
-type Needs uint
+const LinterName = "nolintlint"
const (
NeedsMachineOnly Needs = 1 << iota
@@ -128,6 +22,10 @@ const (
NeedsAll = NeedsMachineOnly | NeedsSpecific | NeedsExplanation
)
+type Needs uint
+
+const commentMark = "//"
+
var commentPattern = regexp.MustCompile(`^//\s*(nolint)(:\s*[\w-]+\s*(?:,\s*[\w-]+\s*)*)?\b`)
// matches a complete nolint directive
@@ -157,15 +55,10 @@ var (
)
//nolint:funlen,gocyclo // the function is going to be refactored in the future
-func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) {
- var issues []Issue
-
- for _, node := range nodes {
- file, ok := node.(*ast.File)
- if !ok {
- continue
- }
+func (l Linter) Run(pass *analysis.Pass) ([]goanalysis.Issue, error) {
+ var issues []goanalysis.Issue
+ for _, file := range pass.Files {
for _, c := range file.Comments {
for _, comment := range c.List {
if !commentPattern.MatchString(comment.Text) {
@@ -180,47 +73,58 @@ func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) {
leadingSpace = leadingSpaceMatches[1]
}
- directiveWithOptionalLeadingSpace := "//"
+ directiveWithOptionalLeadingSpace := commentMark
if leadingSpace != "" {
directiveWithOptionalLeadingSpace += " "
}
- split := strings.Split(strings.SplitN(comment.Text, ":", 2)[0], "//")
+ split := strings.Split(strings.SplitN(comment.Text, ":", 2)[0], commentMark)
directiveWithOptionalLeadingSpace += strings.TrimSpace(split[1])
- pos := fset.Position(comment.Pos())
- end := fset.Position(comment.End())
-
- base := BaseIssue{
- fullDirective: comment.Text,
- directiveWithOptionalLeadingSpace: directiveWithOptionalLeadingSpace,
- position: pos,
- }
+ pos := pass.Fset.Position(comment.Pos())
+ end := pass.Fset.Position(comment.End())
// check for, report and eliminate leading spaces, so we can check for other issues
if leadingSpace != "" {
- removeWhitespace := &result.Replacement{
- Inline: &result.InlineFix{
- StartCol: pos.Column + 1,
- Length: len(leadingSpace),
- NewString: "",
- },
- }
+ removeWhitespace := []analysis.SuggestedFix{{
+ TextEdits: []analysis.TextEdit{{
+ Pos: token.Pos(pos.Offset),
+ End: token.Pos(pos.Offset + len(commentMark) + len(leadingSpace)),
+ NewText: []byte(commentMark),
+ }},
+ }}
+
if (l.needs & NeedsMachineOnly) != 0 {
- issue := NotMachine{BaseIssue: base}
- issue.BaseIssue.replacement = removeWhitespace
- issues = append(issues, issue)
+ issue := &result.Issue{
+ FromLinter: LinterName,
+ Text: formatNotMachine(comment.Text),
+ Pos: pos,
+ SuggestedFixes: removeWhitespace,
+ }
+
+ issues = append(issues, goanalysis.NewIssue(issue, pass))
} else if len(leadingSpace) > 1 {
- issue := ExtraLeadingSpace{BaseIssue: base}
- issue.BaseIssue.replacement = removeWhitespace
- issue.BaseIssue.replacement.Inline.NewString = " " // assume a single space was intended
- issues = append(issues, issue)
+ issue := &result.Issue{
+ FromLinter: LinterName,
+ Text: formatExtraLeadingSpace(comment.Text),
+ Pos: pos,
+ SuggestedFixes: removeWhitespace,
+ }
+
+ issues = append(issues, goanalysis.NewIssue(issue, pass))
}
}
fullMatches := fullDirectivePattern.FindStringSubmatch(comment.Text)
if len(fullMatches) == 0 {
- issues = append(issues, ParseError{BaseIssue: base})
+ issue := &result.Issue{
+ FromLinter: LinterName,
+ Text: formatParseError(comment.Text, directiveWithOptionalLeadingSpace),
+ Pos: pos,
+ }
+
+ issues = append(issues, goanalysis.NewIssue(issue, pass))
+
continue
}
@@ -230,7 +134,7 @@ func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) {
if lintersText != "" && !strings.HasPrefix(lintersText, "all") {
lls := strings.Split(lintersText, ",")
linters = make([]string, 0, len(lls))
- rangeStart := (pos.Column - 1) + len("//") + len(leadingSpace) + len("nolint:")
+ rangeStart := (pos.Column - 1) + len(commentMark) + len(leadingSpace) + len("nolint:")
for i, ll := range lls {
rangeEnd := rangeStart + len(ll)
if i < len(lls)-1 {
@@ -246,39 +150,59 @@ func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) {
if (l.needs & NeedsSpecific) != 0 {
if len(linters) == 0 {
- issues = append(issues, NotSpecific{BaseIssue: base})
+ issue := &result.Issue{
+ FromLinter: LinterName,
+ Text: formatNotSpecific(comment.Text, directiveWithOptionalLeadingSpace),
+ Pos: pos,
+ }
+
+ issues = append(issues, goanalysis.NewIssue(issue, pass))
}
}
// when detecting unused directives, we send all the directives through and filter them out in the nolint processor
if (l.needs & NeedsUnused) != 0 {
- removeNolintCompletely := &result.Replacement{
- Inline: &result.InlineFix{
- StartCol: pos.Column - 1,
- Length: end.Column - pos.Column,
- NewString: "",
- },
- }
+ removeNolintCompletely := []analysis.SuggestedFix{{
+ TextEdits: []analysis.TextEdit{{
+ Pos: token.Pos(pos.Offset),
+ End: token.Pos(end.Offset),
+ NewText: nil,
+ }},
+ }}
if len(linters) == 0 {
- issue := UnusedCandidate{BaseIssue: base}
- issue.replacement = removeNolintCompletely
- issues = append(issues, issue)
+ issue := &result.Issue{
+ FromLinter: LinterName,
+ Text: formatUnusedCandidate(comment.Text, ""),
+ Pos: pos,
+ ExpectNoLint: true,
+ SuggestedFixes: removeNolintCompletely,
+ }
+
+ issues = append(issues, goanalysis.NewIssue(issue, pass))
} else {
for _, linter := range linters {
- issue := UnusedCandidate{BaseIssue: base, ExpectedLinter: linter}
- // only offer replacement if there is a single linter
+ issue := &result.Issue{
+ FromLinter: LinterName,
+ Text: formatUnusedCandidate(comment.Text, linter),
+ Pos: pos,
+ ExpectNoLint: true,
+ ExpectedNoLintLinter: linter,
+ }
+
+ // only offer SuggestedFix if there is a single linter
// because of issues around commas and the possibility of all
// linters being removed
if len(linters) == 1 {
- issue.replacement = removeNolintCompletely
+ issue.SuggestedFixes = removeNolintCompletely
}
- issues = append(issues, issue)
+
+ issues = append(issues, goanalysis.NewIssue(issue, pass))
}
}
}
- if (l.needs&NeedsExplanation) != 0 && (explanation == "" || strings.TrimSpace(explanation) == "//") {
+ if (l.needs&NeedsExplanation) != 0 && (explanation == "" || strings.TrimSpace(explanation) == commentMark) {
needsExplanation := len(linters) == 0 // if no linters are mentioned, we must have explanation
// otherwise, check if we are excluding all the mentioned linters
for _, ll := range linters {
@@ -290,10 +214,14 @@ func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) {
if needsExplanation {
fullDirectiveWithoutExplanation := trailingBlankExplanation.ReplaceAllString(comment.Text, "")
- issues = append(issues, NoExplanation{
- BaseIssue: base,
- fullDirectiveWithoutExplanation: fullDirectiveWithoutExplanation,
- })
+
+ issue := &result.Issue{
+ FromLinter: LinterName,
+ Text: formatNoExplanation(comment.Text, fullDirectiveWithoutExplanation),
+ Pos: pos,
+ }
+
+ issues = append(issues, goanalysis.NewIssue(issue, pass))
}
}
}
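
The hunk above switches nolintlint's internal linter from golangci-lint's `result.Replacement` to native `analysis.SuggestedFix` values. A minimal standalone sketch of that pattern, assuming (as the code above does) that byte offsets are used directly as `token.Pos` values; the function name is illustrative:

```go
package main

import (
	"fmt"
	"go/token"

	"golang.org/x/tools/go/analysis"
)

const commentMark = "//"

// removeLeadingSpaceFix builds the kind of fix used above for "//  nolint:foo"
// directives: replace the comment marker plus the stray spaces with a bare "//".
func removeLeadingSpaceFix(pos token.Position, leadingSpace string) analysis.SuggestedFix {
	return analysis.SuggestedFix{
		TextEdits: []analysis.TextEdit{{
			Pos:     token.Pos(pos.Offset),
			End:     token.Pos(pos.Offset + len(commentMark) + len(leadingSpace)),
			NewText: []byte(commentMark),
		}},
	}
}

func main() {
	fix := removeLeadingSpaceFix(token.Position{Offset: 120}, "  ")
	fmt.Printf("%+v\n", fix)
}
```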
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go
index 9f04454a5a..e1c878628d 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go
@@ -2,31 +2,47 @@ package nolintlint
import (
"fmt"
- "go/ast"
"sync"
"golang.org/x/tools/go/analysis"
+ "github.com/golangci/golangci-lint/pkg/golinters/internal"
+
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
- "github.com/golangci/golangci-lint/pkg/golinters/nolintlint/internal"
+ nolintlint "github.com/golangci/golangci-lint/pkg/golinters/nolintlint/internal"
"github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
)
-const LinterName = "nolintlint"
+const LinterName = nolintlint.LinterName
func New(settings *config.NoLintLintSettings) *goanalysis.Linter {
var mu sync.Mutex
var resIssues []goanalysis.Issue
+ var needs nolintlint.Needs
+ if settings.RequireExplanation {
+ needs |= nolintlint.NeedsExplanation
+ }
+ if settings.RequireSpecific {
+ needs |= nolintlint.NeedsSpecific
+ }
+ if !settings.AllowUnused {
+ needs |= nolintlint.NeedsUnused
+ }
+
+ lnt, err := nolintlint.NewLinter(needs, settings.AllowNoExplanation)
+ if err != nil {
+ internal.LinterLogger.Fatalf("%s: create analyzer: %v", nolintlint.LinterName, err)
+ }
+
analyzer := &analysis.Analyzer{
- Name: LinterName,
+ Name: nolintlint.LinterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (any, error) {
- issues, err := runNoLintLint(pass, settings)
+ issues, err := lnt.Run(pass)
if err != nil {
- return nil, err
+ return nil, fmt.Errorf("linter failed to run: %w", err)
}
if len(issues) == 0 {
@@ -42,7 +58,7 @@ func New(settings *config.NoLintLintSettings) *goanalysis.Linter {
}
return goanalysis.NewLinter(
- LinterName,
+ nolintlint.LinterName,
"Reports ill-formed or insufficient nolint directives",
[]*analysis.Analyzer{analyzer},
nil,
@@ -50,55 +66,3 @@ func New(settings *config.NoLintLintSettings) *goanalysis.Linter {
return resIssues
}).WithLoadMode(goanalysis.LoadModeSyntax)
}
-
-func runNoLintLint(pass *analysis.Pass, settings *config.NoLintLintSettings) ([]goanalysis.Issue, error) {
- var needs internal.Needs
- if settings.RequireExplanation {
- needs |= internal.NeedsExplanation
- }
- if settings.RequireSpecific {
- needs |= internal.NeedsSpecific
- }
- if !settings.AllowUnused {
- needs |= internal.NeedsUnused
- }
-
- lnt, err := internal.NewLinter(needs, settings.AllowNoExplanation)
- if err != nil {
- return nil, err
- }
-
- nodes := make([]ast.Node, 0, len(pass.Files))
- for _, n := range pass.Files {
- nodes = append(nodes, n)
- }
-
- lintIssues, err := lnt.Run(pass.Fset, nodes...)
- if err != nil {
- return nil, fmt.Errorf("linter failed to run: %w", err)
- }
-
- var issues []goanalysis.Issue
-
- for _, i := range lintIssues {
- expectNoLint := false
- var expectedNolintLinter string
- if ii, ok := i.(internal.UnusedCandidate); ok {
- expectedNolintLinter = ii.ExpectedLinter
- expectNoLint = true
- }
-
- issue := &result.Issue{
- FromLinter: LinterName,
- Text: i.Details(),
- Pos: i.Position(),
- ExpectNoLint: expectNoLint,
- ExpectedNoLintLinter: expectedNolintLinter,
- Replacement: i.Replacement(),
- }
-
- issues = append(issues, goanalysis.NewIssue(issue, pass))
- }
-
- return issues, nil
-}
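
The `Needs` value passed to `nolintlint.NewLinter` above is a plain bitmask built from the settings. A self-contained sketch of that wiring (constant names mirror the internal package; the boolean settings are illustrative):

```go
package main

import "fmt"

type Needs uint

const (
	NeedsMachineOnly Needs = 1 << iota
	NeedsSpecific
	NeedsExplanation
	NeedsUnused
	NeedsAll = NeedsMachineOnly | NeedsSpecific | NeedsExplanation
)

func main() {
	var needs Needs

	requireExplanation := true
	requireSpecific := false
	allowUnused := false

	if requireExplanation {
		needs |= NeedsExplanation
	}
	if requireSpecific {
		needs |= NeedsSpecific
	}
	if !allowUnused {
		needs |= NeedsUnused
	}

	// Checks later in the linter test individual bits, e.g. (needs & NeedsExplanation) != 0.
	fmt.Println(needs&NeedsExplanation != 0, needs&NeedsSpecific != 0) // true false
}
```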
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/perfsprint/perfsprint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/perfsprint/perfsprint.go
index a4ead1914d..c34f7e4c62 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/perfsprint/perfsprint.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/perfsprint/perfsprint.go
@@ -16,11 +16,19 @@ func New(settings *config.PerfSprintSettings) *goanalysis.Linter {
}
if settings != nil {
+ cfg[a.Name]["integer-format"] = settings.IntegerFormat
cfg[a.Name]["int-conversion"] = settings.IntConversion
+
+ cfg[a.Name]["error-format"] = settings.ErrorFormat
cfg[a.Name]["err-error"] = settings.ErrError
cfg[a.Name]["errorf"] = settings.ErrorF
+
+ cfg[a.Name]["string-format"] = settings.StringFormat
cfg[a.Name]["sprintf1"] = settings.SprintF1
cfg[a.Name]["strconcat"] = settings.StrConcat
+
+ cfg[a.Name]["bool-format"] = settings.BoolFormat
+ cfg[a.Name]["hex-format"] = settings.HexFormat
}
return goanalysis.NewLinter(
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc/prealloc.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc/prealloc.go
index ce7ff9d59c..17e86c98ee 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc/prealloc.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc/prealloc.go
@@ -2,7 +2,6 @@ package prealloc
import (
"fmt"
- "sync"
"github.com/alexkohler/prealloc/pkg"
"golang.org/x/tools/go/analysis"
@@ -10,29 +9,16 @@ import (
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
"github.com/golangci/golangci-lint/pkg/golinters/internal"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
)
const linterName = "prealloc"
func New(settings *config.PreallocSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
analyzer := &analysis.Analyzer{
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (any, error) {
- issues := runPreAlloc(pass, settings)
-
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
+ runPreAlloc(pass, settings)
return nil, nil
},
@@ -43,23 +29,16 @@ func New(settings *config.PreallocSettings) *goanalysis.Linter {
"Finds slice declarations that could potentially be pre-allocated",
[]*analysis.Analyzer{analyzer},
nil,
- ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeSyntax)
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
-func runPreAlloc(pass *analysis.Pass, settings *config.PreallocSettings) []goanalysis.Issue {
- var issues []goanalysis.Issue
-
+func runPreAlloc(pass *analysis.Pass, settings *config.PreallocSettings) {
hints := pkg.Check(pass.Files, settings.Simple, settings.RangeLoops, settings.ForLoops)
for _, hint := range hints {
- issues = append(issues, goanalysis.NewIssue(&result.Issue{
- Pos: pass.Fset.Position(hint.Pos),
- Text: fmt.Sprintf("Consider pre-allocating %s", internal.FormatCode(hint.DeclaredSliceName, nil)),
- FromLinter: linterName,
- }, pass))
+ pass.Report(analysis.Diagnostic{
+ Pos: hint.Pos,
+ Message: fmt.Sprintf("Consider pre-allocating %s", internal.FormatCode(hint.DeclaredSliceName, nil)),
+ })
}
-
- return issues
}
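
The prealloc wrapper above drops the mutex-guarded issue collection in favour of reporting diagnostics directly on the pass. A minimal standalone analyzer showing that reporting style (the analyzer itself is illustrative, not part of the diff):

```go
package main

import (
	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/singlechecker"
)

// demo reports one diagnostic per file via pass.Report, the same mechanism the
// rewritten runPreAlloc uses instead of building []goanalysis.Issue by hand.
var demo = &analysis.Analyzer{
	Name: "demo",
	Doc:  "reports every file it sees",
	Run: func(pass *analysis.Pass) (any, error) {
		for _, f := range pass.Files {
			pass.Report(analysis.Diagnostic{
				Pos:     f.Package, // position of the package clause
				Message: "processed file",
			})
		}
		return nil, nil
	},
}

func main() { singlechecker.Main(demo) }
```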
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/protogetter/protogetter.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/protogetter/protogetter.go
index 302ce67b88..423e4ca9ef 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/protogetter/protogetter.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/protogetter/protogetter.go
@@ -1,21 +1,14 @@
package protogetter
import (
- "sync"
-
"github.com/ghostiam/protogetter"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
)
func New(settings *config.ProtoGetterSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
var cfg protogetter.Config
if settings != nil {
cfg = protogetter.Config{
@@ -25,50 +18,13 @@ func New(settings *config.ProtoGetterSettings) *goanalysis.Linter {
ReplaceFirstArgInAppend: settings.ReplaceFirstArgInAppend,
}
}
- cfg.Mode = protogetter.GolangciLintMode
a := protogetter.NewAnalyzer(&cfg)
- a.Run = func(pass *analysis.Pass) (any, error) {
- pgIssues, err := protogetter.Run(pass, &cfg)
- if err != nil {
- return nil, err
- }
-
- issues := make([]goanalysis.Issue, len(pgIssues))
- for i, issue := range pgIssues {
- report := &result.Issue{
- FromLinter: a.Name,
- Pos: issue.Pos,
- Text: issue.Message,
- Replacement: &result.Replacement{
- Inline: &result.InlineFix{
- StartCol: issue.InlineFix.StartCol,
- Length: issue.InlineFix.Length,
- NewString: issue.InlineFix.NewString,
- },
- },
- }
-
- issues[i] = goanalysis.NewIssue(report, pass)
- }
-
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
- return nil, nil
- }
return goanalysis.NewLinter(
a.Name,
a.Doc,
[]*analysis.Analyzer{a},
nil,
- ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeTypesInfo)
+ ).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/recvcheck/recvcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/recvcheck/recvcheck.go
new file mode 100644
index 0000000000..3af4885b40
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/recvcheck/recvcheck.go
@@ -0,0 +1,27 @@
+package recvcheck
+
+import (
+ "github.com/raeperd/recvcheck"
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/golangci/golangci-lint/pkg/config"
+ "github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(settings *config.RecvcheckSettings) *goanalysis.Linter {
+ var cfg recvcheck.Settings
+
+ if settings != nil {
+ cfg.DisableBuiltin = settings.DisableBuiltin
+ cfg.Exclusions = settings.Exclusions
+ }
+
+ a := recvcheck.NewAnalyzer(cfg)
+
+ return goanalysis.NewLinter(
+ a.Name,
+ a.Doc,
+ []*analysis.Analyzer{a},
+ nil,
+ ).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive/revive.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive/revive.go
index da44d92414..460281287f 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive/revive.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive/revive.go
@@ -2,14 +2,18 @@ package revive
import (
"bytes"
+ "cmp"
"encoding/json"
"fmt"
"go/token"
"os"
"reflect"
+ "slices"
+ "strings"
"sync"
"github.com/BurntSushi/toml"
+ hcversion "github.com/hashicorp/go-version"
reviveConfig "github.com/mgechev/revive/config"
"github.com/mgechev/revive/lint"
"github.com/mgechev/revive/rule"
@@ -25,7 +29,10 @@ import (
const linterName = "revive"
-var debugf = logutils.Debug(logutils.DebugKeyRevive)
+var (
+ debugf = logutils.Debug(logutils.DebugKeyRevive)
+ isDebug = logutils.HaveDebugTag(logutils.DebugKeyRevive)
+)
// jsonObject defines a JSON object of a failure
type jsonObject struct {
@@ -49,8 +56,14 @@ func New(settings *config.ReviveSettings) *goanalysis.Linter {
[]*analysis.Analyzer{analyzer},
nil,
).WithContextSetter(func(lintCtx *linter.Context) {
+ w, err := newWrapper(settings)
+ if err != nil {
+ lintCtx.Log.Errorf("setup revive: %v", err)
+ return
+ }
+
analyzer.Run = func(pass *analysis.Pass) (any, error) {
- issues, err := runRevive(lintCtx, pass, settings)
+ issues, err := w.run(lintCtx, pass)
if err != nil {
return nil, err
}
@@ -70,27 +83,48 @@ func New(settings *config.ReviveSettings) *goanalysis.Linter {
}).WithLoadMode(goanalysis.LoadModeSyntax)
}
-func runRevive(lintCtx *linter.Context, pass *analysis.Pass, settings *config.ReviveSettings) ([]goanalysis.Issue, error) {
- packages := [][]string{internal.GetFileNames(pass)}
+type wrapper struct {
+ revive lint.Linter
+ formatter lint.Formatter
+ lintingRules []lint.Rule
+ conf *lint.Config
+}
- conf, err := getReviveConfig(settings)
+func newWrapper(settings *config.ReviveSettings) (*wrapper, error) {
+ conf, err := getConfig(settings)
if err != nil {
return nil, err
}
- formatter, err := reviveConfig.GetFormatter("json")
+ displayRules(conf)
+
+ conf.GoVersion, err = hcversion.NewVersion(settings.Go)
if err != nil {
return nil, err
}
- revive := lint.New(os.ReadFile, settings.MaxOpenFiles)
+ formatter, err := reviveConfig.GetFormatter("json")
+ if err != nil {
+ return nil, err
+ }
lintingRules, err := reviveConfig.GetLintingRules(conf, []lint.Rule{})
if err != nil {
return nil, err
}
- failures, err := revive.Lint(packages, lintingRules, *conf)
+ return &wrapper{
+ revive: lint.New(os.ReadFile, settings.MaxOpenFiles),
+ formatter: formatter,
+ lintingRules: lintingRules,
+ conf: conf,
+ }, nil
+}
+
+func (w *wrapper) run(lintCtx *linter.Context, pass *analysis.Pass) ([]goanalysis.Issue, error) {
+ packages := [][]string{internal.GetGoFileNames(pass)}
+
+ failures, err := w.revive.Lint(packages, w.lintingRules, *w.conf)
if err != nil {
return nil, err
}
@@ -100,7 +134,7 @@ func runRevive(lintCtx *linter.Context, pass *analysis.Pass, settings *config.Re
var output string
go func() {
- output, err = formatter.Format(formatChan, *conf)
+ output, err = w.formatter.Format(formatChan, *w.conf)
if err != nil {
lintCtx.Log.Errorf("Format error: %v", err)
}
@@ -108,7 +142,7 @@ func runRevive(lintCtx *linter.Context, pass *analysis.Pass, settings *config.Re
}()
for f := range failures {
- if f.Confidence < conf.Confidence {
+ if f.Confidence < w.conf.Confidence {
continue
}
@@ -126,19 +160,19 @@ func runRevive(lintCtx *linter.Context, pass *analysis.Pass, settings *config.Re
var issues []goanalysis.Issue
for i := range results {
- issues = append(issues, reviveToIssue(pass, &results[i]))
+ issues = append(issues, toIssue(pass, &results[i]))
}
return issues, nil
}
-func reviveToIssue(pass *analysis.Pass, object *jsonObject) goanalysis.Issue {
+func toIssue(pass *analysis.Pass, object *jsonObject) goanalysis.Issue {
lineRangeTo := object.Position.End.Line
if object.RuleName == (&rule.ExportedRule{}).Name() {
lineRangeTo = object.Position.Start.Line
}
- return goanalysis.NewIssue(&result.Issue{
+ issue := &result.Issue{
Severity: string(object.Severity),
Text: fmt.Sprintf("%s: %s", object.RuleName, object.Failure.Failure),
Pos: token.Position{
@@ -152,18 +186,38 @@ func reviveToIssue(pass *analysis.Pass, object *jsonObject) goanalysis.Issue {
To: lineRangeTo,
},
FromLinter: linterName,
- }, pass)
+ }
+
+ if object.ReplacementLine != "" {
+ f := pass.Fset.File(token.Pos(object.Position.Start.Offset))
+
+ // Skip cgo files because the positions are wrong.
+ if object.GetFilename() == f.Name() {
+ issue.SuggestedFixes = []analysis.SuggestedFix{{
+ TextEdits: []analysis.TextEdit{{
+ Pos: f.LineStart(object.Position.Start.Line),
+ End: goanalysis.EndOfLinePos(f, object.Position.End.Line),
+ NewText: []byte(object.ReplacementLine),
+ }},
+ }}
+ }
+ }
+
+ return goanalysis.NewIssue(issue, pass)
}
// This function mimics the GetConfig function of revive.
// This allows to get default values and right types.
// https://github.com/golangci/golangci-lint/issues/1745
-// https://github.com/mgechev/revive/blob/v1.3.7/config/config.go#L217
-// https://github.com/mgechev/revive/blob/v1.3.7/config/config.go#L169-L174
-func getReviveConfig(cfg *config.ReviveSettings) (*lint.Config, error) {
+// https://github.com/mgechev/revive/blob/v1.6.0/config/config.go#L230
+// https://github.com/mgechev/revive/blob/v1.6.0/config/config.go#L182-L188
+func getConfig(cfg *config.ReviveSettings) (*lint.Config, error) {
conf := defaultConfig()
- if !reflect.DeepEqual(cfg, &config.ReviveSettings{}) {
+ // Since the Go version is dynamic, this value must be neutralized in order to compare with a "zero value" of the configuration structure.
+ zero := &config.ReviveSettings{Go: cfg.Go}
+
+ if !reflect.DeepEqual(cfg, zero) {
rawRoot := createConfigMap(cfg)
buf := bytes.NewBuffer(nil)
@@ -189,8 +243,6 @@ func getReviveConfig(cfg *config.ReviveSettings) (*lint.Config, error) {
conf.Rules[k] = r
}
- debugf("revive configuration: %#v", conf)
-
return conf, nil
}
@@ -255,7 +307,7 @@ func safeTomlSlice(r []any) []any {
}
// This element is not exported by revive, so we need copy the code.
-// Extracted from https://github.com/mgechev/revive/blob/v1.3.7/config/config.go#L15
+// Extracted from https://github.com/mgechev/revive/blob/v1.6.0/config/config.go#L16
var defaultRules = []lint.Rule{
&rule.VarDeclarationsRule{},
&rule.PackageCommentsRule{},
@@ -338,21 +390,22 @@ var allRules = append([]lint.Rule{
&rule.EnforceRepeatedArgTypeStyleRule{},
&rule.EnforceSliceStyleRule{},
&rule.MaxControlNestingRule{},
+ &rule.CommentsDensityRule{},
+ &rule.FileLengthLimitRule{},
+ &rule.FilenameFormatRule{},
+ &rule.RedundantBuildTagRule{},
+ &rule.UseErrorsNewRule{},
}, defaultRules...)
const defaultConfidence = 0.8
// This element is not exported by revive, so we need copy the code.
-// Extracted from https://github.com/mgechev/revive/blob/v1.1.4/config/config.go#L145
+// Extracted from https://github.com/mgechev/revive/blob/v1.5.0/config/config.go#L183
func normalizeConfig(cfg *lint.Config) {
// NOTE(ldez): this custom section for golangci-lint should be kept.
// ---
- if cfg.Confidence == 0 {
- cfg.Confidence = defaultConfidence
- }
- if cfg.Severity == "" {
- cfg.Severity = lint.SeverityWarning
- }
+ cfg.Confidence = cmp.Or(cfg.Confidence, defaultConfidence)
+ cfg.Severity = cmp.Or(cfg.Severity, lint.SeverityWarning)
// ---
if len(cfg.Rules) == 0 {
@@ -389,7 +442,7 @@ func normalizeConfig(cfg *lint.Config) {
}
// This element is not exported by revive, so we need copy the code.
-// Extracted from https://github.com/mgechev/revive/blob/v1.1.4/config/config.go#L214
+// Extracted from https://github.com/mgechev/revive/blob/v1.5.0/config/config.go#L252
func defaultConfig() *lint.Config {
defaultConfig := lint.Config{
Confidence: defaultConfidence,
@@ -401,3 +454,36 @@ func defaultConfig() *lint.Config {
}
return &defaultConfig
}
+
+func displayRules(conf *lint.Config) {
+ if !isDebug {
+ return
+ }
+
+ var enabledRules []string
+ for k, r := range conf.Rules {
+ if !r.Disabled {
+ enabledRules = append(enabledRules, k)
+ }
+ }
+
+ slices.Sort(enabledRules)
+
+ debugf("All available rules (%d): %s.", len(allRules), strings.Join(extractRulesName(allRules), ", "))
+ debugf("Default rules (%d): %s.", len(allRules), strings.Join(extractRulesName(allRules), ", "))
+ debugf("Enabled by config rules (%d): %s.", len(enabledRules), strings.Join(enabledRules, ", "))
+
+ debugf("revive configuration: %#v", conf)
+}
+
+func extractRulesName(rules []lint.Rule) []string {
+ var names []string
+
+ for _, r := range rules {
+ names = append(names, r.Name())
+ }
+
+ slices.Sort(names)
+
+ return names
+}
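
`normalizeConfig` above now defaults zero values with `cmp.Or` (Go 1.22+), which returns its first non-zero argument; a small demonstration:

```go
package main

import (
	"cmp"
	"fmt"
)

func main() {
	var confidence float64 // zero value: not set by the user
	confidence = cmp.Or(confidence, 0.8)
	fmt.Println(confidence) // 0.8

	severity := "error" // already set, kept as-is
	severity = cmp.Or(severity, "warning")
	fmt.Println(severity) // error
}
```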
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/spancheck/spancheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/spancheck/spancheck.go
index a800a17058..efdc1d0bf6 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/spancheck/spancheck.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/spancheck/spancheck.go
@@ -12,16 +12,16 @@ func New(settings *config.SpancheckSettings) *goanalysis.Linter {
cfg := spancheck.NewDefaultConfig()
if settings != nil {
- if settings.Checks != nil {
+ if len(settings.Checks) > 0 {
cfg.EnabledChecks = settings.Checks
}
- if settings.IgnoreCheckSignatures != nil {
+ if len(settings.IgnoreCheckSignatures) > 0 {
cfg.IgnoreChecksSignaturesSlice = settings.IgnoreCheckSignatures
}
- if settings.ExtraStartSpanSignatures != nil {
- cfg.StartSpanMatchersSlice = settings.ExtraStartSpanSignatures
+ if len(settings.ExtraStartSpanSignatures) > 0 {
+ cfg.StartSpanMatchersSlice = append(cfg.StartSpanMatchersSlice, settings.ExtraStartSpanSignatures...)
}
}
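
The spancheck settings checks above move from `!= nil` to `len(...) > 0`, presumably so an explicitly configured empty list no longer replaces the defaults; the distinction in Go:

```go
package main

import "fmt"

func main() {
	var unset []string  // nil: the option was not configured
	empty := []string{} // non-nil but empty, e.g. an explicit empty list in the config

	fmt.Println(unset != nil, len(unset) > 0) // false false
	fmt.Println(empty != nil, len(empty) > 0) // true  false
}
```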
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck/staticcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck/staticcheck.go
index 0c0534539e..79394bdb7f 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck/staticcheck.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck/staticcheck.go
@@ -10,7 +10,7 @@ import (
func New(settings *config.StaticCheckSettings) *goanalysis.Linter {
cfg := internal.StaticCheckConfig(settings)
- analyzers := internal.SetupStaticCheckAnalyzers(staticcheck.Analyzers, internal.GetGoVersion(settings), cfg.Checks)
+ analyzers := internal.SetupStaticCheckAnalyzers(staticcheck.Analyzers, cfg.Checks)
return goanalysis.NewLinter(
"staticcheck",
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck/stylecheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck/stylecheck.go
index b8fc8fe547..60859f28af 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck/stylecheck.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck/stylecheck.go
@@ -20,7 +20,7 @@ func New(settings *config.StaticCheckSettings) *goanalysis.Linter {
return cfg, nil
}
- analyzers := internal.SetupStaticCheckAnalyzers(stylecheck.Analyzers, internal.GetGoVersion(settings), cfg.Checks)
+ analyzers := internal.SetupStaticCheckAnalyzers(stylecheck.Analyzers, cfg.Checks)
return goanalysis.NewLinter(
"stylecheck",
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagalign/tagalign.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagalign/tagalign.go
index f438c51b5c..7c8a0c8b02 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagalign/tagalign.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagalign/tagalign.go
@@ -1,22 +1,15 @@
package tagalign
import (
- "sync"
-
"github.com/4meepo/tagalign"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
)
func New(settings *config.TagAlignSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
- options := []tagalign.Option{tagalign.WithMode(tagalign.GolangciLintMode)}
+ var options []tagalign.Option
if settings != nil {
options = append(options, tagalign.WithAlign(settings.Align))
@@ -32,44 +25,11 @@ func New(settings *config.TagAlignSettings) *goanalysis.Linter {
}
analyzer := tagalign.NewAnalyzer(options...)
- analyzer.Run = func(pass *analysis.Pass) (any, error) {
- taIssues := tagalign.Run(pass, options...)
-
- issues := make([]goanalysis.Issue, len(taIssues))
- for i, issue := range taIssues {
- report := &result.Issue{
- FromLinter: analyzer.Name,
- Pos: issue.Pos,
- Text: issue.Message,
- Replacement: &result.Replacement{
- Inline: &result.InlineFix{
- StartCol: issue.InlineFix.StartCol,
- Length: issue.InlineFix.Length,
- NewString: issue.InlineFix.NewString,
- },
- },
- }
-
- issues[i] = goanalysis.NewIssue(report, pass)
- }
-
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
- return nil, nil
- }
return goanalysis.NewLinter(
analyzer.Name,
analyzer.Doc,
[]*analysis.Analyzer{analyzer},
nil,
- ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeSyntax)
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle/tagliatelle.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle/tagliatelle.go
index d1674c3e9e..08215c3a53 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle/tagliatelle.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle/tagliatelle.go
@@ -10,10 +10,12 @@ import (
func New(settings *config.TagliatelleSettings) *goanalysis.Linter {
cfg := tagliatelle.Config{
- Rules: map[string]string{
- "json": "camel",
- "yaml": "camel",
- "header": "header",
+ Base: tagliatelle.Base{
+ Rules: map[string]string{
+ "json": "camel",
+ "yaml": "camel",
+ "header": "header",
+ },
},
}
@@ -21,7 +23,23 @@ func New(settings *config.TagliatelleSettings) *goanalysis.Linter {
for k, v := range settings.Case.Rules {
cfg.Rules[k] = v
}
+
+ cfg.ExtendedRules = toExtendedRules(settings.Case.ExtendedRules)
cfg.UseFieldName = settings.Case.UseFieldName
+ cfg.IgnoredFields = settings.Case.IgnoredFields
+
+ for _, override := range settings.Case.Overrides {
+ cfg.Overrides = append(cfg.Overrides, tagliatelle.Overrides{
+ Base: tagliatelle.Base{
+ Rules: override.Rules,
+ ExtendedRules: toExtendedRules(override.ExtendedRules),
+ UseFieldName: override.UseFieldName,
+ IgnoredFields: override.IgnoredFields,
+ Ignore: override.Ignore,
+ },
+ Package: override.Package,
+ })
+ }
}
a := tagliatelle.New(cfg)
@@ -31,5 +49,19 @@ func New(settings *config.TagliatelleSettings) *goanalysis.Linter {
a.Doc,
[]*analysis.Analyzer{a},
nil,
- ).WithLoadMode(goanalysis.LoadModeSyntax)
+ ).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
+
+func toExtendedRules(src map[string]config.TagliatelleExtendedRule) map[string]tagliatelle.ExtendedRule {
+ result := make(map[string]tagliatelle.ExtendedRule, len(src))
+
+ for k, v := range src {
+ result[k] = tagliatelle.ExtendedRule{
+ Case: v.Case,
+ ExtraInitialisms: v.ExtraInitialisms,
+ InitialismOverrides: v.InitialismOverrides,
+ }
+ }
+
+ return result
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv/tenv.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv/tenv.go
index b80a783b65..2fc247fab8 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv/tenv.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv/tenv.go
@@ -25,5 +25,5 @@ func New(settings *config.TenvSettings) *goanalysis.Linter {
a.Doc,
[]*analysis.Analyzer{a},
cfg,
- ).WithLoadMode(goanalysis.LoadModeSyntax)
+ ).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/testifylint/testifylint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/testifylint/testifylint.go
index a5f95a1e83..b3f2f0bd46 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/testifylint/testifylint.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/testifylint/testifylint.go
@@ -18,6 +18,7 @@ func New(settings *config.TestifylintSettings) *goanalysis.Linter {
"disable-all": settings.DisableAll,
"bool-compare.ignore-custom-types": settings.BoolCompare.IgnoreCustomTypes,
+ "formatter.require-f-funcs": settings.Formatter.RequireFFuncs,
"go-require.ignore-http-handlers": settings.GoRequire.IgnoreHTTPHandlers,
}
if len(settings.EnabledCheckers) > 0 {
@@ -27,6 +28,9 @@ func New(settings *config.TestifylintSettings) *goanalysis.Linter {
cfg[a.Name]["disable"] = settings.DisabledCheckers
}
+ if b := settings.Formatter.CheckFormatString; b != nil {
+ cfg[a.Name]["formatter.check-format-string"] = *b
+ }
if p := settings.ExpectedActual.ExpVarPattern; p != "" {
cfg[a.Name]["expected-actual.pattern"] = p
}
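
`CheckFormatString` above is a `*bool`, so "unset" can be told apart from an explicit `false`, and the option is only forwarded when the pointer is non-nil. A small sketch of that tri-state pattern (the setting key is reused for illustration):

```go
package main

import "fmt"

func main() {
	cfg := map[string]any{}

	var checkFormatString *bool // nil: keep the analyzer's default
	if b := checkFormatString; b != nil {
		cfg["formatter.check-format-string"] = *b
	}

	disabled := false
	checkFormatString = &disabled // explicitly disabled by the user
	if b := checkFormatString; b != nil {
		cfg["formatter.check-format-string"] = *b
	}

	fmt.Println(cfg) // map[formatter.check-format-string:false]
}
```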
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage/testpackage.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage/testpackage.go
index 632152712b..f617da5536 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage/testpackage.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage/testpackage.go
@@ -10,19 +10,19 @@ import (
"github.com/golangci/golangci-lint/pkg/goanalysis"
)
-func New(cfg *config.TestpackageSettings) *goanalysis.Linter {
+func New(settings *config.TestpackageSettings) *goanalysis.Linter {
a := testpackage.NewAnalyzer()
- var settings map[string]map[string]any
- if cfg != nil {
- settings = map[string]map[string]any{
+ var cfg map[string]map[string]any
+ if settings != nil {
+ cfg = map[string]map[string]any{
a.Name: {
- testpackage.SkipRegexpFlagName: cfg.SkipRegexp,
- testpackage.AllowPackagesFlagName: strings.Join(cfg.AllowPackages, ","),
+ testpackage.SkipRegexpFlagName: settings.SkipRegexp,
+ testpackage.AllowPackagesFlagName: strings.Join(settings.AllowPackages, ","),
},
}
}
- return goanalysis.NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, settings).
+ return goanalysis.NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, cfg).
WithLoadMode(goanalysis.LoadModeSyntax)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper/thelper.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper/thelper.go
index cc6ea755c9..04503b9ce0 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper/thelper.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper/thelper.go
@@ -1,10 +1,11 @@
package thelper
import (
+ "maps"
+ "slices"
"strings"
"github.com/kulti/thelper/pkg/analyzer"
- "golang.org/x/exp/maps"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
@@ -12,7 +13,7 @@ import (
"github.com/golangci/golangci-lint/pkg/golinters/internal"
)
-func New(cfg *config.ThelperSettings) *goanalysis.Linter {
+func New(settings *config.ThelperSettings) *goanalysis.Linter {
a := analyzer.NewAnalyzer()
opts := map[string]struct{}{
@@ -33,20 +34,20 @@ func New(cfg *config.ThelperSettings) *goanalysis.Linter {
"tb_first": {},
}
- if cfg != nil {
- applyTHelperOptions(cfg.Test, "t_", opts)
- applyTHelperOptions(cfg.Fuzz, "f_", opts)
- applyTHelperOptions(cfg.Benchmark, "b_", opts)
- applyTHelperOptions(cfg.TB, "tb_", opts)
+ if settings != nil {
+ applyTHelperOptions(settings.Test, "t_", opts)
+ applyTHelperOptions(settings.Fuzz, "f_", opts)
+ applyTHelperOptions(settings.Benchmark, "b_", opts)
+ applyTHelperOptions(settings.TB, "tb_", opts)
}
if len(opts) == 0 {
internal.LinterLogger.Fatalf("thelper: at least one option must be enabled")
}
- args := maps.Keys(opts)
+ args := slices.Collect(maps.Keys(opts))
- cfgMap := map[string]map[string]any{
+ cfg := map[string]map[string]any{
a.Name: {
"checks": strings.Join(args, ","),
},
@@ -56,7 +57,7 @@ func New(cfg *config.ThelperSettings) *goanalysis.Linter {
a.Name,
a.Doc,
[]*analysis.Analyzer{a},
- cfgMap,
+ cfg,
).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
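
The thelper change above replaces `golang.org/x/exp/maps.Keys`, which returned a slice, with the standard library's iterator-based `maps.Keys` (Go 1.23+) collected via `slices.Collect`; for example:

```go
package main

import (
	"fmt"
	"maps"
	"slices"
)

func main() {
	opts := map[string]struct{}{"t_name": {}, "t_begin": {}, "tb_name": {}}

	args := slices.Collect(maps.Keys(opts)) // []string in unspecified order
	slices.Sort(args)                       // sort when deterministic output matters

	fmt.Println(args)
}
```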
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam/unparam.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam/unparam.go
index 0fe1847366..04c9a223e5 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam/unparam.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam/unparam.go
@@ -1,8 +1,6 @@
package unparam
import (
- "sync"
-
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/buildssa"
"golang.org/x/tools/go/packages"
@@ -11,33 +9,21 @@ import (
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
"github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
)
const linterName = "unparam"
func New(settings *config.UnparamSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
analyzer := &analysis.Analyzer{
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Requires: []*analysis.Analyzer{buildssa.Analyzer},
Run: func(pass *analysis.Pass) (any, error) {
- issues, err := runUnparam(pass, settings)
+ err := runUnparam(pass, settings)
if err != nil {
return nil, err
}
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
return nil, nil
},
}
@@ -51,12 +37,10 @@ func New(settings *config.UnparamSettings) *goanalysis.Linter {
if settings.Algo != "cha" {
lintCtx.Log.Warnf("`linters-settings.unparam.algo` isn't supported by the newest `unparam`")
}
- }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
}).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
-func runUnparam(pass *analysis.Pass, settings *config.UnparamSettings) ([]goanalysis.Issue, error) {
+func runUnparam(pass *analysis.Pass, settings *config.UnparamSettings) error {
ssa := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
ssaPkg := ssa.Pkg
@@ -74,17 +58,15 @@ func runUnparam(pass *analysis.Pass, settings *config.UnparamSettings) ([]goanal
unparamIssues, err := c.Check()
if err != nil {
- return nil, err
+ return err
}
- var issues []goanalysis.Issue
for _, i := range unparamIssues {
- issues = append(issues, goanalysis.NewIssue(&result.Issue{
- Pos: pass.Fset.Position(i.Pos()),
- Text: i.Message(),
- FromLinter: linterName,
- }, pass))
+ pass.Report(analysis.Diagnostic{
+ Pos: i.Pos(),
+ Message: i.Message(),
+ })
}
- return issues, nil
+ return nil
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused/unused.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused/unused.go
index 55712f0840..7b2b478fc9 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused/unused.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused/unused.go
@@ -12,14 +12,13 @@ import (
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
- "github.com/golangci/golangci-lint/pkg/golinters/internal"
"github.com/golangci/golangci-lint/pkg/lint/linter"
"github.com/golangci/golangci-lint/pkg/result"
)
const linterName = "unused"
-func New(settings *config.UnusedSettings, scSettings *config.StaticCheckSettings) *goanalysis.Linter {
+func New(settings *config.UnusedSettings) *goanalysis.Linter {
var mu sync.Mutex
var resIssues []goanalysis.Issue
@@ -41,8 +40,6 @@ func New(settings *config.UnusedSettings, scSettings *config.StaticCheckSettings
},
}
- internal.SetAnalyzerGoVersion(analyzer, internal.GetGoVersion(scSettings))
-
return goanalysis.NewLinter(
linterName,
"Checks Go code for unused constants, variables, functions and types",
@@ -90,11 +87,13 @@ func getUnusedResults(pass *analysis.Pass, settings *config.UnusedSettings) unus
opts := unused.Options{
FieldWritesAreUses: settings.FieldWritesAreUses,
PostStatementsAreReads: settings.PostStatementsAreReads,
- ExportedIsUsed: settings.ExportedIsUsed,
- ExportedFieldsAreUsed: settings.ExportedFieldsAreUsed,
- ParametersAreUsed: settings.ParametersAreUsed,
- LocalVariablesAreUsed: settings.LocalVariablesAreUsed,
- GeneratedIsUsed: settings.GeneratedIsUsed,
+ // Related to https://github.com/golangci/golangci-lint/issues/4218
+ // https://github.com/dominikh/go-tools/issues/1474#issuecomment-1850760813
+ ExportedIsUsed: true,
+ ExportedFieldsAreUsed: settings.ExportedFieldsAreUsed,
+ ParametersAreUsed: settings.ParametersAreUsed,
+ LocalVariablesAreUsed: settings.LocalVariablesAreUsed,
+ GeneratedIsUsed: settings.GeneratedIsUsed,
}
// ref: https://github.com/dominikh/go-tools/blob/4ec1f474ca6c0feb8e10a8fcca4ab95f5b5b9881/internal/cmd/unused/unused.go#L68
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars/usestdlibvars.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars/usestdlibvars.go
index 050e47f24c..00f7d9742a 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars/usestdlibvars.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars/usestdlibvars.go
@@ -8,24 +8,24 @@ import (
"github.com/golangci/golangci-lint/pkg/goanalysis"
)
-func New(cfg *config.UseStdlibVarsSettings) *goanalysis.Linter {
+func New(settings *config.UseStdlibVarsSettings) *goanalysis.Linter {
a := analyzer.New()
- cfgMap := make(map[string]map[string]any)
- if cfg != nil {
- cfgMap[a.Name] = map[string]any{
- analyzer.ConstantKindFlag: cfg.ConstantKind,
- analyzer.CryptoHashFlag: cfg.CryptoHash,
- analyzer.HTTPMethodFlag: cfg.HTTPMethod,
- analyzer.HTTPStatusCodeFlag: cfg.HTTPStatusCode,
- analyzer.OSDevNullFlag: cfg.OSDevNull,
- analyzer.RPCDefaultPathFlag: cfg.DefaultRPCPath,
- analyzer.SQLIsolationLevelFlag: cfg.SQLIsolationLevel,
- analyzer.SyslogPriorityFlag: cfg.SyslogPriority,
- analyzer.TimeLayoutFlag: cfg.TimeLayout,
- analyzer.TimeMonthFlag: cfg.TimeMonth,
- analyzer.TimeWeekdayFlag: cfg.TimeWeekday,
- analyzer.TLSSignatureSchemeFlag: cfg.TLSSignatureScheme,
+ cfg := make(map[string]map[string]any)
+ if settings != nil {
+ cfg[a.Name] = map[string]any{
+ analyzer.ConstantKindFlag: settings.ConstantKind,
+ analyzer.CryptoHashFlag: settings.CryptoHash,
+ analyzer.HTTPMethodFlag: settings.HTTPMethod,
+ analyzer.HTTPStatusCodeFlag: settings.HTTPStatusCode,
+ analyzer.OSDevNullFlag: settings.OSDevNull != nil && *settings.OSDevNull,
+ analyzer.RPCDefaultPathFlag: settings.DefaultRPCPath,
+ analyzer.SQLIsolationLevelFlag: settings.SQLIsolationLevel,
+ analyzer.SyslogPriorityFlag: settings.SyslogPriority != nil && *settings.SyslogPriority,
+ analyzer.TimeLayoutFlag: settings.TimeLayout,
+ analyzer.TimeMonthFlag: settings.TimeMonth,
+ analyzer.TimeWeekdayFlag: settings.TimeWeekday,
+ analyzer.TLSSignatureSchemeFlag: settings.TLSSignatureScheme,
}
}
@@ -33,6 +33,6 @@ func New(cfg *config.UseStdlibVarsSettings) *goanalysis.Linter {
a.Name,
a.Doc,
[]*analysis.Analyzer{a},
- cfgMap,
+ cfg,
).WithLoadMode(goanalysis.LoadModeSyntax)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/usetesting/usetesting.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/usetesting/usetesting.go
new file mode 100644
index 0000000000..a21742fbd6
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/usetesting/usetesting.go
@@ -0,0 +1,33 @@
+package usetesting
+
+import (
+ "github.com/ldez/usetesting"
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/golangci/golangci-lint/pkg/config"
+ "github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(settings *config.UseTestingSettings) *goanalysis.Linter {
+ a := usetesting.NewAnalyzer()
+
+ cfg := make(map[string]map[string]any)
+ if settings != nil {
+ cfg[a.Name] = map[string]any{
+ "contextbackground": settings.ContextBackground,
+ "contexttodo": settings.ContextTodo,
+ "oschdir": settings.OSChdir,
+ "osmkdirtemp": settings.OSMkdirTemp,
+ "ossetenv": settings.OSSetenv,
+ "ostempdir": settings.OSTempDir,
+ "oscreatetemp": settings.OSCreateTemp,
+ }
+ }
+
+ return goanalysis.NewLinter(
+ a.Name,
+ a.Doc,
+ []*analysis.Analyzer{a},
+ cfg,
+ ).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace/whitespace.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace/whitespace.go
index 721bfada1c..d45969efce 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace/whitespace.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace/whitespace.go
@@ -1,28 +1,17 @@
package whitespace
import (
- "fmt"
- "sync"
-
"github.com/ultraware/whitespace"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
- "github.com/golangci/golangci-lint/pkg/lint/linter"
- "github.com/golangci/golangci-lint/pkg/result"
)
-const linterName = "whitespace"
-
func New(settings *config.WhitespaceSettings) *goanalysis.Linter {
- var mu sync.Mutex
- var resIssues []goanalysis.Issue
-
var wsSettings whitespace.Settings
if settings != nil {
wsSettings = whitespace.Settings{
- Mode: whitespace.RunningModeGolangCI,
MultiIf: settings.MultiIf,
MultiFunc: settings.MultiFunc,
}
@@ -35,68 +24,5 @@ func New(settings *config.WhitespaceSettings) *goanalysis.Linter {
a.Doc,
[]*analysis.Analyzer{a},
nil,
- ).WithContextSetter(func(_ *linter.Context) {
- a.Run = func(pass *analysis.Pass) (any, error) {
- issues, err := runWhitespace(pass, wsSettings)
- if err != nil {
- return nil, err
- }
-
- if len(issues) == 0 {
- return nil, nil
- }
-
- mu.Lock()
- resIssues = append(resIssues, issues...)
- mu.Unlock()
-
- return nil, nil
- }
- }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
- return resIssues
- }).WithLoadMode(goanalysis.LoadModeSyntax)
-}
-
-func runWhitespace(pass *analysis.Pass, wsSettings whitespace.Settings) ([]goanalysis.Issue, error) {
- lintIssues := whitespace.Run(pass, &wsSettings)
-
- issues := make([]goanalysis.Issue, len(lintIssues))
- for i, issue := range lintIssues {
- report := &result.Issue{
- FromLinter: linterName,
- Pos: pass.Fset.PositionFor(issue.Diagnostic, false),
- Text: issue.Message,
- }
-
- switch issue.MessageType {
- case whitespace.MessageTypeRemove:
- if len(issue.LineNumbers) == 0 {
- continue
- }
-
- report.LineRange = &result.Range{
- From: issue.LineNumbers[0],
- To: issue.LineNumbers[len(issue.LineNumbers)-1],
- }
-
- report.Replacement = &result.Replacement{NeedOnlyDelete: true}
-
- case whitespace.MessageTypeAdd:
- report.Pos = pass.Fset.PositionFor(issue.FixStart, false)
- report.Replacement = &result.Replacement{
- Inline: &result.InlineFix{
- StartCol: 0,
- Length: 1,
- NewString: "\n\t",
- },
- }
-
- default:
- return nil, fmt.Errorf("unknown message type: %v", issue.MessageType)
- }
-
- issues[i] = goanalysis.NewIssue(report, pass)
- }
-
- return issues, nil
+ ).WithLoadMode(goanalysis.LoadModeSyntax)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck/wrapcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck/wrapcheck.go
index 96ec2eeae0..b2f5ec7420 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck/wrapcheck.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck/wrapcheck.go
@@ -11,6 +11,8 @@ import (
func New(settings *config.WrapcheckSettings) *goanalysis.Linter {
cfg := wrapcheck.NewDefaultConfig()
if settings != nil {
+ cfg.ExtraIgnoreSigs = settings.ExtraIgnoreSigs
+
if len(settings.IgnoreSigs) != 0 {
cfg.IgnoreSigs = settings.IgnoreSigs
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl/wsl.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl/wsl.go
index 5a72035b50..c728340ece 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl/wsl.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl/wsl.go
@@ -25,6 +25,7 @@ func New(settings *config.WSLSettings) *goanalysis.Linter {
ForceCuddleErrCheckAndAssign: settings.ForceCuddleErrCheckAndAssign,
ErrorVariableNames: settings.ErrorVariableNames,
ForceExclusiveShortDeclarations: settings.ForceExclusiveShortDeclarations,
+ IncludeGenerated: true, // force to true because golangci-lint already has a way to filter generated files.
}
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go b/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go
index 7b748d8e90..beb71f722e 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go
@@ -2,53 +2,39 @@ package goutil
import (
"context"
- "encoding/json"
"fmt"
"os"
- "os/exec"
- "strings"
"time"
+ "github.com/ldez/grignotin/goenv"
+
"github.com/golangci/golangci-lint/pkg/logutils"
)
type EnvKey string
-const (
- EnvGoCache EnvKey = "GOCACHE"
- EnvGoRoot EnvKey = "GOROOT"
-)
-
type Env struct {
- vars map[string]string
- log logutils.Log
- debugf logutils.DebugFunc
+ vars map[string]string
+ log logutils.Log
}
func NewEnv(log logutils.Log) *Env {
return &Env{
- vars: map[string]string{},
- log: log,
- debugf: logutils.Debug(logutils.DebugKeyEnv),
+ vars: map[string]string{},
+ log: log,
}
}
func (e Env) Discover(ctx context.Context) error {
startedAt := time.Now()
- //nolint:gosec // Everything is static here.
- cmd := exec.CommandContext(ctx, "go", "env", "-json", string(EnvGoCache), string(EnvGoRoot))
-
- out, err := cmd.Output()
+ var err error
+ e.vars, err = goenv.Get(ctx, goenv.GOCACHE, goenv.GOROOT)
if err != nil {
- return fmt.Errorf("failed to run '%s': %w", strings.Join(cmd.Args, " "), err)
- }
-
- if err = json.Unmarshal(out, &e.vars); err != nil {
- return fmt.Errorf("failed to parse '%s' json: %w", strings.Join(cmd.Args, " "), err)
+ return fmt.Errorf("%w", err)
}
- e.debugf("Read go env for %s: %#v", time.Since(startedAt), e.vars)
+ e.log.Infof("Read go env for %s: %#v", time.Since(startedAt), e.vars)
return nil
}
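
`goenv.Get` from `github.com/ldez/grignotin/goenv` replaces the hand-rolled `go env -json` call removed above. A rough equivalent of the removed code, kept only to show what the library call now encapsulates:

```go
package main

import (
	"context"
	"encoding/json"
	"fmt"
	"os/exec"
)

// readGoEnv mirrors the removed Discover body: run `go env -json KEY...`
// and unmarshal the JSON object into a string map.
func readGoEnv(ctx context.Context, keys ...string) (map[string]string, error) {
	args := append([]string{"env", "-json"}, keys...)

	out, err := exec.CommandContext(ctx, "go", args...).Output()
	if err != nil {
		return nil, fmt.Errorf("run go env: %w", err)
	}

	vars := map[string]string{}
	if err := json.Unmarshal(out, &vars); err != nil {
		return nil, fmt.Errorf("parse go env output: %w", err)
	}

	return vars, nil
}

func main() {
	vars, err := readGoEnv(context.Background(), "GOCACHE", "GOROOT")
	fmt.Println(vars, err)
}
```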
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goutil/version.go b/vendor/github.com/golangci/golangci-lint/pkg/goutil/version.go
new file mode 100644
index 0000000000..4f42ebd1bf
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goutil/version.go
@@ -0,0 +1,75 @@
+package goutil
+
+import (
+ "fmt"
+ "go/version"
+ "regexp"
+ "runtime"
+ "strings"
+
+ hcversion "github.com/hashicorp/go-version"
+)
+
+func CheckGoVersion(goVersion string) error {
+ rv, err := CleanRuntimeVersion()
+ if err != nil {
+ return fmt.Errorf("clean runtime version: %w", err)
+ }
+
+ langVersion := version.Lang(rv)
+
+ runtimeVersion, err := hcversion.NewVersion(strings.TrimPrefix(langVersion, "go"))
+ if err != nil {
+ return err
+ }
+
+ targetedVersion, err := hcversion.NewVersion(TrimGoVersion(goVersion))
+ if err != nil {
+ return err
+ }
+
+ if runtimeVersion.LessThan(targetedVersion) {
+ return fmt.Errorf("the Go language version (%s) used to build golangci-lint is lower than the targeted Go version (%s)",
+ langVersion, goVersion)
+ }
+
+ return nil
+}
+
+// TrimGoVersion trims the Go version to keep only M.m.
+// Since Go 1.21 the version inside the go.mod can be a patched version (ex: 1.21.0).
+// The version can also include information which we want to remove (ex: 1.21alpha1)
+// https://go.dev/doc/toolchain#versions
+// This is a problem with staticcheck and gocritic.
+func TrimGoVersion(v string) string {
+ if v == "" {
+ return ""
+ }
+
+ exp := regexp.MustCompile(`(\d\.\d+)(?:\.\d+|[a-z]+\d)`)
+
+ if exp.MatchString(v) {
+ return exp.FindStringSubmatch(v)[1]
+ }
+
+ return v
+}
+
+func CleanRuntimeVersion() (string, error) {
+ return cleanRuntimeVersion(runtime.Version())
+}
+
+func cleanRuntimeVersion(rv string) (string, error) {
+ parts := strings.Fields(rv)
+
+ for _, part := range parts {
+ // Allows handling:
+ // - GOEXPERIMENT -> "go1.23.0 X:boringcrypto"
+ // - devel -> "devel go1.24-e705a2d Wed Aug 7 01:16:42 2024 +0000 linux/amd64"
+ if strings.HasPrefix(part, "go1.") {
+ return part, nil
+ }
+ }
+
+ return "", fmt.Errorf("invalid Go runtime version: %s", rv)
+}
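
For reference, the behaviour of `TrimGoVersion` above on the version strings mentioned in its comment (the function body is copied; only the `main` driver is added for illustration):

```go
package main

import (
	"fmt"
	"regexp"
)

func trimGoVersion(v string) string {
	if v == "" {
		return ""
	}

	exp := regexp.MustCompile(`(\d\.\d+)(?:\.\d+|[a-z]+\d)`)

	if exp.MatchString(v) {
		return exp.FindStringSubmatch(v)[1]
	}

	return v
}

func main() {
	fmt.Println(trimGoVersion("1.21.0"))     // 1.21
	fmt.Println(trimGoVersion("1.21alpha1")) // 1.21
	fmt.Println(trimGoVersion("1.22"))       // 1.22
}
```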
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/context.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/context.go
index 160620338f..2ac5a2d2c4 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/context.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/context.go
@@ -4,10 +4,9 @@ import (
"context"
"fmt"
- "github.com/golangci/golangci-lint/internal/pkgcache"
+ "github.com/golangci/golangci-lint/internal/cache"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/exitcodes"
- "github.com/golangci/golangci-lint/pkg/fsutils"
"github.com/golangci/golangci-lint/pkg/goanalysis/load"
"github.com/golangci/golangci-lint/pkg/lint/linter"
"github.com/golangci/golangci-lint/pkg/logutils"
@@ -18,19 +17,17 @@ type ContextBuilder struct {
pkgLoader *PackageLoader
- fileCache *fsutils.FileCache
- pkgCache *pkgcache.Cache
+ pkgCache *cache.Cache
loadGuard *load.Guard
}
func NewContextBuilder(cfg *config.Config, pkgLoader *PackageLoader,
- fileCache *fsutils.FileCache, pkgCache *pkgcache.Cache, loadGuard *load.Guard,
+ pkgCache *cache.Cache, loadGuard *load.Guard,
) *ContextBuilder {
return &ContextBuilder{
cfg: cfg,
pkgLoader: pkgLoader,
- fileCache: fileCache,
pkgCache: pkgCache,
loadGuard: loadGuard,
}
@@ -55,7 +52,6 @@ func (cl *ContextBuilder) Build(ctx context.Context, log logutils.Log, linters [
Cfg: cl.cfg,
Log: log,
- FileCache: cl.fileCache,
PkgCache: cl.pkgCache,
LoadGuard: cl.loadGuard,
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go
index 57c51fa75e..20bed6a711 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go
@@ -81,7 +81,7 @@ func (lc *Config) IsSlowLinter() bool {
}
func (lc *Config) WithLoadFiles() *Config {
- lc.LoadMode |= packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles
+ lc.LoadMode |= packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles | packages.NeedModule
return lc
}
@@ -164,12 +164,16 @@ func (lc *Config) WithNoopFallback(cfg *config.Config, cond func(cfg *config.Con
}
func IsGoLowerThanGo122() func(cfg *config.Config) error {
+ return isGoLowerThanGo("1.22")
+}
+
+func isGoLowerThanGo(v string) func(cfg *config.Config) error {
return func(cfg *config.Config) error {
- if cfg == nil || config.IsGoGreaterThanOrEqual(cfg.Run.Go, "1.22") {
+ if cfg == nil || config.IsGoGreaterThanOrEqual(cfg.Run.Go, v) {
return nil
}
- return fmt.Errorf("this linter is disabled because the Go version (%s) of your project is lower than Go 1.22", cfg.Run.Go)
+ return fmt.Errorf("this linter is disabled because the Go version (%s) of your project is lower than Go %s", cfg.Run.Go, v)
}
}
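
`isGoLowerThanGo` above generalizes the Go 1.22 gate into a closure parameterized by version. A self-contained sketch of the shape, with `runConfig` and a naive string comparison standing in for `config.Config` and `config.IsGoGreaterThanOrEqual`:

```go
package main

import "fmt"

type runConfig struct{ Go string }

// isGoLowerThanGo returns a check that errors when the project's Go version is
// below the given threshold, mirroring the noop-fallback gate above.
func isGoLowerThanGo(v string) func(cfg *runConfig) error {
	return func(cfg *runConfig) error {
		if cfg == nil || cfg.Go >= v { // naive compare; the real check parses versions
			return nil
		}

		return fmt.Errorf("this linter is disabled because the Go version (%s) of your project is lower than Go %s", cfg.Go, v)
	}
}

func main() {
	check := isGoLowerThanGo("1.22")
	fmt.Println(check(&runConfig{Go: "1.21"})) // error
	fmt.Println(check(&runConfig{Go: "1.23"})) // <nil>
}
```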
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go
index 5c03630b26..6986b62314 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go
@@ -5,9 +5,8 @@ import (
"golang.org/x/tools/go/packages"
- "github.com/golangci/golangci-lint/internal/pkgcache"
+ "github.com/golangci/golangci-lint/internal/cache"
"github.com/golangci/golangci-lint/pkg/config"
- "github.com/golangci/golangci-lint/pkg/fsutils"
"github.com/golangci/golangci-lint/pkg/goanalysis/load"
"github.com/golangci/golangci-lint/pkg/logutils"
)
@@ -20,11 +19,10 @@ type Context struct {
// version for each of packages
OriginalPackages []*packages.Package
- Cfg *config.Config
- FileCache *fsutils.FileCache
- Log logutils.Log
+ Cfg *config.Config
+ Log logutils.Log
- PkgCache *pkgcache.Cache
+ PkgCache *cache.Cache
LoadGuard *load.Guard
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_linter.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_linter.go
index a66f2eea09..ddeb99e144 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_linter.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_linter.go
@@ -23,10 +23,9 @@ import (
"github.com/golangci/golangci-lint/pkg/golinters/errchkjson"
"github.com/golangci/golangci-lint/pkg/golinters/errname"
"github.com/golangci/golangci-lint/pkg/golinters/errorlint"
- "github.com/golangci/golangci-lint/pkg/golinters/execinquery"
"github.com/golangci/golangci-lint/pkg/golinters/exhaustive"
"github.com/golangci/golangci-lint/pkg/golinters/exhaustruct"
- "github.com/golangci/golangci-lint/pkg/golinters/exportloopref"
+ "github.com/golangci/golangci-lint/pkg/golinters/exptostd"
"github.com/golangci/golangci-lint/pkg/golinters/fatcontext"
"github.com/golangci/golangci-lint/pkg/golinters/forbidigo"
"github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert"
@@ -55,6 +54,7 @@ import (
"github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan"
"github.com/golangci/golangci-lint/pkg/golinters/govet"
"github.com/golangci/golangci-lint/pkg/golinters/grouper"
+ "github.com/golangci/golangci-lint/pkg/golinters/iface"
"github.com/golangci/golangci-lint/pkg/golinters/importas"
"github.com/golangci/golangci-lint/pkg/golinters/inamedparam"
"github.com/golangci/golangci-lint/pkg/golinters/ineffassign"
@@ -72,6 +72,7 @@ import (
"github.com/golangci/golangci-lint/pkg/golinters/nakedret"
"github.com/golangci/golangci-lint/pkg/golinters/nestif"
"github.com/golangci/golangci-lint/pkg/golinters/nilerr"
+ "github.com/golangci/golangci-lint/pkg/golinters/nilnesserr"
"github.com/golangci/golangci-lint/pkg/golinters/nilnil"
"github.com/golangci/golangci-lint/pkg/golinters/nlreturn"
"github.com/golangci/golangci-lint/pkg/golinters/noctx"
@@ -85,6 +86,7 @@ import (
"github.com/golangci/golangci-lint/pkg/golinters/promlinter"
"github.com/golangci/golangci-lint/pkg/golinters/protogetter"
"github.com/golangci/golangci-lint/pkg/golinters/reassign"
+ "github.com/golangci/golangci-lint/pkg/golinters/recvcheck"
"github.com/golangci/golangci-lint/pkg/golinters/revive"
"github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck"
"github.com/golangci/golangci-lint/pkg/golinters/sloglint"
@@ -104,6 +106,7 @@ import (
"github.com/golangci/golangci-lint/pkg/golinters/unparam"
"github.com/golangci/golangci-lint/pkg/golinters/unused"
"github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars"
+ "github.com/golangci/golangci-lint/pkg/golinters/usetesting"
"github.com/golangci/golangci-lint/pkg/golinters/varnamelen"
"github.com/golangci/golangci-lint/pkg/golinters/wastedassign"
"github.com/golangci/golangci-lint/pkg/golinters/whitespace"
@@ -134,7 +137,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
// When a new linter is added the version in `WithSince(...)` must be the next minor version of golangci-lint.
return []*linter.Config{
linter.NewConfig(asasalint.New(&cfg.LintersSettings.Asasalint)).
- WithSince("1.47.0").
+ WithSince("v1.47.0").
WithPresets(linter.PresetBugs).
WithLoadForGoAnalysis().
WithURL("https://github.com/alingse/asasalint"),
@@ -145,7 +148,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithURL("https://github.com/tdakkota/asciicheck"),
linter.NewConfig(bidichk.New(&cfg.LintersSettings.BiDiChk)).
- WithSince("1.43.0").
+ WithSince("v1.43.0").
WithPresets(linter.PresetBugs).
WithURL("https://github.com/breml/bidichk"),
@@ -159,10 +162,11 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithSince("v1.58.0").
WithPresets(linter.PresetStyle).
WithLoadForGoAnalysis().
- WithURL("https://github.com/lasiar/canonicalHeader"),
+ WithAutoFix().
+ WithURL("https://github.com/lasiar/canonicalheader"),
linter.NewConfig(containedctx.New()).
- WithSince("1.44.0").
+ WithSince("v1.44.0").
WithLoadForGoAnalysis().
WithPresets(linter.PresetStyle).
WithURL("https://github.com/sivchari/containedctx"),
@@ -176,28 +180,27 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
linter.NewConfig(copyloopvar.New(&cfg.LintersSettings.CopyLoopVar)).
WithSince("v1.57.0").
WithPresets(linter.PresetStyle).
+ WithAutoFix().
WithURL("https://github.com/karamaru-alpha/copyloopvar").
WithNoopFallback(cfg, linter.IsGoLowerThanGo122()),
linter.NewConfig(cyclop.New(&cfg.LintersSettings.Cyclop)).
WithSince("v1.37.0").
- WithLoadForGoAnalysis().
WithPresets(linter.PresetComplexity).
WithURL("https://github.com/bkielbasa/cyclop"),
linter.NewConfig(decorder.New(&cfg.LintersSettings.Decorder)).
WithSince("v1.44.0").
- WithPresets(linter.PresetFormatting, linter.PresetStyle).
+ WithPresets(linter.PresetStyle).
WithURL("https://gitlab.com/bosi/decorder"),
linter.NewConfig(linter.NewNoopDeprecated("deadcode", cfg, linter.DeprecationError)).
WithSince("v1.0.0").
- WithLoadForGoAnalysis().
WithPresets(linter.PresetUnused).
- WithURL("https://github.com/remyoudompheng/go-misc/tree/master/deadcode").
+ WithURL("https://github.com/remyoudompheng/go-misc/tree/HEAD/deadcode").
DeprecatedError("The owner seems to have abandoned the linter.", "v1.49.0", "unused"),
- linter.NewConfig(depguard.New(&cfg.LintersSettings.Depguard)).
+ linter.NewConfig(depguard.New(&cfg.LintersSettings.Depguard, cfg.GetBasePath())).
WithSince("v1.4.0").
WithPresets(linter.PresetStyle, linter.PresetImport, linter.PresetModule).
WithURL("https://github.com/OpenPeeDeeP/depguard"),
@@ -213,8 +216,9 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithURL("https://github.com/mibk/dupl"),
linter.NewConfig(dupword.New(&cfg.LintersSettings.DupWord)).
- WithSince("1.50.0").
+ WithSince("v1.50.0").
WithPresets(linter.PresetComment).
+ WithAutoFix().
WithURL("https://github.com/Abirdcfly/dupword"),
linter.NewConfig(durationcheck.New()).
@@ -231,7 +235,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithURL("https://github.com/kisielk/errcheck"),
linter.NewConfig(errchkjson.New(&cfg.LintersSettings.ErrChkJSON)).
- WithSince("1.44.0").
+ WithSince("v1.44.0").
WithPresets(linter.PresetBugs).
WithLoadForGoAnalysis().
WithURL("https://github.com/breml/errchkjson"),
@@ -246,14 +250,14 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithSince("v1.32.0").
WithPresets(linter.PresetBugs, linter.PresetError).
WithLoadForGoAnalysis().
+ WithAutoFix().
WithURL("https://github.com/polyfloyd/go-errorlint"),
- linter.NewConfig(execinquery.New()).
+ linter.NewConfig(linter.NewNoopDeprecated("execinquery", cfg, linter.DeprecationError)).
WithSince("v1.46.0").
WithPresets(linter.PresetSQL).
- WithLoadForGoAnalysis().
WithURL("https://github.com/1uf3/execinquery").
- DeprecatedWarning("The repository of the linter has been archived by the owner.", "v1.58.0", ""),
+ DeprecatedError("The repository of the linter has been archived by the owner.", "v1.58.0", ""),
linter.NewConfig(exhaustive.New(&cfg.LintersSettings.Exhaustive)).
WithSince(" v1.28.0").
@@ -264,7 +268,6 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
linter.NewConfig(linter.NewNoopDeprecated("exhaustivestruct", cfg, linter.DeprecationError)).
WithSince("v1.32.0").
WithPresets(linter.PresetStyle, linter.PresetTest).
- WithLoadForGoAnalysis().
WithURL("https://github.com/mbilski/exhaustivestruct").
DeprecatedError("The repository of the linter has been deprecated by the owner.", "v1.46.0", "exhaustruct"),
@@ -274,11 +277,19 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithLoadForGoAnalysis().
WithURL("https://github.com/GaijinEntertainment/go-exhaustruct"),
- linter.NewConfig(exportloopref.New()).
+ linter.NewConfig(linter.NewNoopDeprecated("exportloopref", cfg, linter.DeprecationError)).
WithSince("v1.28.0").
WithPresets(linter.PresetBugs).
WithLoadForGoAnalysis().
- WithURL("https://github.com/kyoh86/exportloopref"),
+ WithURL("https://github.com/kyoh86/exportloopref").
+ DeprecatedError("Since Go1.22 (loopvar) this linter is no longer relevant.", "v1.60.2", "copyloopvar"),
+
+ linter.NewConfig(exptostd.New()).
+ WithSince("v1.63.0").
+ WithPresets(linter.PresetStyle).
+ WithLoadForGoAnalysis().
+ WithAutoFix().
+ WithURL("https://github.com/ldez/exptostd"),
linter.NewConfig(forbidigo.New(&cfg.LintersSettings.Forbidigo)).
WithSince("v1.34.0").
@@ -293,12 +304,14 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
linter.NewConfig(forcetypeassert.New()).
WithSince("v1.38.0").
WithPresets(linter.PresetStyle).
+ WithLoadForGoAnalysis().
WithURL("https://github.com/gostaticanalysis/forcetypeassert"),
- linter.NewConfig(fatcontext.New()).
- WithSince("1.58.0").
+ linter.NewConfig(fatcontext.New(&cfg.LintersSettings.Fatcontext)).
+ WithSince("v1.58.0").
WithPresets(linter.PresetPerformance).
WithLoadForGoAnalysis().
+ WithAutoFix().
WithURL("https://github.com/Crocmagnon/fatcontext"),
linter.NewConfig(funlen.New(&cfg.LintersSettings.Funlen)).
@@ -316,6 +329,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithSince("v1.51.0").
WithLoadForGoAnalysis().
WithPresets(linter.PresetStyle).
+ WithAutoFix().
WithURL("https://github.com/nunnatsa/ginkgolinter"),
linter.NewConfig(gocheckcompilerdirectives.New()).
@@ -333,7 +347,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithSince("v1.12.0").
WithPresets(linter.PresetStyle),
- linter.NewConfig(gochecksumtype.New()).
+ linter.NewConfig(gochecksumtype.New(&cfg.LintersSettings.GoChecksumType)).
WithSince("v1.55.0").
WithPresets(linter.PresetBugs).
WithLoadForGoAnalysis().
@@ -377,6 +391,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithPresets(linter.PresetStyle, linter.PresetError).
WithLoadForGoAnalysis().
WithAlternativeNames("goerr113").
+ WithAutoFix().
WithURL("https://github.com/Djarvur/go-err113"),
linter.NewConfig(gofmt.New(&cfg.LintersSettings.Gofmt)).
@@ -391,7 +406,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithAutoFix().
WithURL("https://github.com/mvdan/gofumpt"),
- linter.NewConfig(goheader.New(&cfg.LintersSettings.Goheader)).
+ linter.NewConfig(goheader.New(&cfg.LintersSettings.Goheader, cfg.GetBasePath())).
WithSince("v1.28.0").
WithPresets(linter.PresetStyle).
WithAutoFix().
@@ -405,7 +420,6 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
linter.NewConfig(linter.NewNoopDeprecated("golint", cfg, linter.DeprecationError)).
WithSince("v1.0.0").
- WithLoadForGoAnalysis().
WithPresets(linter.PresetStyle).
WithURL("https://github.com/golang/lint").
DeprecatedError("The repository of the linter has been archived by the owner.", "v1.41.0", "revive"),
@@ -415,11 +429,11 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithPresets(linter.PresetStyle).
WithURL("https://github.com/tommy-muehle/go-mnd"),
- linter.NewConfig(mnd.NewGoMND(&cfg.LintersSettings.Gomnd)).
+ linter.NewConfig(linter.NewNoopDeprecated("gomnd", cfg, linter.DeprecationError)).
WithSince("v1.22.0").
WithPresets(linter.PresetStyle).
WithURL("https://github.com/tommy-muehle/go-mnd").
- DeprecatedWarning("The linter has been renamed.", "v1.58.0", "mnd"),
+ DeprecatedError("The linter has been renamed.", "v1.58.0", "mnd"),
linter.NewConfig(gomoddirectives.New(&cfg.LintersSettings.GoModDirectives)).
WithSince("v1.39.0").
@@ -434,7 +448,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
linter.NewConfig(goprintffuncname.New()).
WithSince("v1.23.0").
WithPresets(linter.PresetStyle).
- WithURL("https://github.com/jirfag/go-printf-func-name"),
+ WithURL("https://github.com/golangci/go-printf-func-name"),
linter.NewConfig(gosec.New(&cfg.LintersSettings.Gosec)).
WithSince("v1.0.0").
@@ -449,7 +463,8 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithLoadForGoAnalysis().
WithPresets(linter.PresetStyle).
WithAlternativeNames(megacheckName).
- WithURL("https://github.com/dominikh/go-tools/tree/master/simple"),
+ WithAutoFix().
+ WithURL("https://github.com/dominikh/go-tools/tree/HEAD/simple"),
linter.NewConfig(gosmopolitan.New(&cfg.LintersSettings.Gosmopolitan)).
WithSince("v1.53.0").
@@ -462,6 +477,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithSince("v1.0.0").
WithLoadForGoAnalysis().
WithPresets(linter.PresetBugs, linter.PresetMetaLinter).
+ WithAutoFix().
WithAlternativeNames("vet", "vetshadow").
WithURL("https://pkg.go.dev/cmd/vet"),
@@ -476,10 +492,18 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithURL("https://github.com/esimonov/ifshort").
DeprecatedError("The repository of the linter has been deprecated by the owner.", "v1.48.0", ""),
+ linter.NewConfig(iface.New(&cfg.LintersSettings.Iface)).
+ WithSince("v1.62.0").
+ WithLoadForGoAnalysis().
+ WithPresets(linter.PresetStyle).
+ WithAutoFix().
+ WithURL("https://github.com/uudashr/iface"),
+
linter.NewConfig(importas.New(&cfg.LintersSettings.ImportAs)).
WithSince("v1.38.0").
WithPresets(linter.PresetStyle).
WithLoadForGoAnalysis().
+ WithAutoFix().
WithURL("https://github.com/julz/importas"),
linter.NewConfig(inamedparam.New(&cfg.LintersSettings.Inamedparam)).
@@ -500,14 +524,15 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
linter.NewConfig(linter.NewNoopDeprecated("interfacer", cfg, linter.DeprecationError)).
WithSince("v1.0.0").
- WithLoadForGoAnalysis().
WithPresets(linter.PresetStyle).
WithURL("https://github.com/mvdan/interfacer").
DeprecatedError("The repository of the linter has been archived by the owner.", "v1.38.0", ""),
linter.NewConfig(intrange.New()).
WithSince("v1.57.0").
+ WithLoadForGoAnalysis().
WithPresets(linter.PresetStyle).
+ WithAutoFix().
WithURL("https://github.com/ckaznocha/intrange").
WithNoopFallback(cfg, linter.IsGoLowerThanGo122()),
@@ -541,7 +566,6 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
linter.NewConfig(linter.NewNoopDeprecated("maligned", cfg, linter.DeprecationError)).
WithSince("v1.0.0").
- WithLoadForGoAnalysis().
WithPresets(linter.PresetPerformance).
WithURL("https://github.com/mdempsky/maligned").
DeprecatedError("The repository of the linter has been archived by the owner.", "v1.38.0", "govet 'fieldalignment'"),
@@ -568,6 +592,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
linter.NewConfig(nakedret.New(&cfg.LintersSettings.Nakedret)).
WithSince("v1.19.0").
WithPresets(linter.PresetStyle).
+ WithAutoFix().
WithURL("https://github.com/alexkohler/nakedret"),
linter.NewConfig(nestif.New(&cfg.LintersSettings.Nestif)).
@@ -581,6 +606,12 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithPresets(linter.PresetBugs).
WithURL("https://github.com/gostaticanalysis/nilerr"),
+ linter.NewConfig(nilnesserr.New()).
+ WithSince("v1.63.0").
+ WithLoadForGoAnalysis().
+ WithPresets(linter.PresetBugs).
+ WithURL("https://github.com/alingse/nilnesserr"),
+
linter.NewConfig(nilnil.New(&cfg.LintersSettings.NilNil)).
WithSince("v1.43.0").
WithPresets(linter.PresetStyle).
@@ -590,6 +621,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
linter.NewConfig(nlreturn.New(&cfg.LintersSettings.Nlreturn)).
WithSince("v1.30.0").
WithPresets(linter.PresetStyle).
+ WithAutoFix().
WithURL("https://github.com/ssgreg/nlreturn"),
linter.NewConfig(noctx.New()).
@@ -625,6 +657,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithSince("v1.55.0").
WithLoadForGoAnalysis().
WithPresets(linter.PresetPerformance).
+ WithAutoFix().
WithURL("https://github.com/catenacyber/perfsprint"),
linter.NewConfig(prealloc.New(&cfg.LintersSettings.Prealloc)).
@@ -650,15 +683,22 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithURL("https://github.com/ghostiam/protogetter"),
linter.NewConfig(reassign.New(&cfg.LintersSettings.Reassign)).
- WithSince("1.49.0").
+ WithSince("v1.49.0").
WithPresets(linter.PresetBugs).
WithLoadForGoAnalysis().
WithURL("https://github.com/curioswitch/go-reassign"),
+ linter.NewConfig(recvcheck.New(&cfg.LintersSettings.Recvcheck)).
+ WithSince("v1.62.0").
+ WithPresets(linter.PresetBugs).
+ WithLoadForGoAnalysis().
+ WithURL("https://github.com/raeperd/recvcheck"),
+
linter.NewConfig(revive.New(&cfg.LintersSettings.Revive)).
WithSince("v1.37.0").
WithPresets(linter.PresetStyle, linter.PresetMetaLinter).
ConsiderSlow().
+ WithAutoFix().
WithURL("https://github.com/mgechev/revive"),
linter.NewConfig(rowserrcheck.New(&cfg.LintersSettings.RowsErrCheck)).
@@ -670,7 +710,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
linter.NewConfig(sloglint.New(&cfg.LintersSettings.SlogLint)).
WithSince("v1.55.0").
WithLoadForGoAnalysis().
- WithPresets(linter.PresetStyle, linter.PresetFormatting).
+ WithPresets(linter.PresetStyle).
WithURL("https://github.com/go-simpler/sloglint"),
linter.NewConfig(linter.NewNoopDeprecated("scopelint", cfg, linter.DeprecationError)).
@@ -697,11 +737,11 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithLoadForGoAnalysis().
WithPresets(linter.PresetBugs, linter.PresetMetaLinter).
WithAlternativeNames(megacheckName).
- WithURL("https://staticcheck.io/"),
+ WithAutoFix().
+ WithURL("https://staticcheck.dev/"),
linter.NewConfig(linter.NewNoopDeprecated("structcheck", cfg, linter.DeprecationError)).
WithSince("v1.0.0").
- WithLoadForGoAnalysis().
WithPresets(linter.PresetUnused).
WithURL("https://github.com/opennota/check").
DeprecatedError("The owner seems to have abandoned the linter.", "v1.49.0", "unused"),
@@ -710,24 +750,27 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithSince("v1.20.0").
WithLoadForGoAnalysis().
WithPresets(linter.PresetStyle).
- WithURL("https://github.com/dominikh/go-tools/tree/master/stylecheck"),
+ WithAutoFix().
+ WithURL("https://github.com/dominikh/go-tools/tree/HEAD/stylecheck"),
linter.NewConfig(tagalign.New(&cfg.LintersSettings.TagAlign)).
WithSince("v1.53.0").
- WithPresets(linter.PresetStyle, linter.PresetFormatting).
+ WithPresets(linter.PresetStyle).
WithAutoFix().
WithURL("https://github.com/4meepo/tagalign"),
linter.NewConfig(tagliatelle.New(&cfg.LintersSettings.Tagliatelle)).
WithSince("v1.40.0").
WithPresets(linter.PresetStyle).
+ WithLoadForGoAnalysis().
WithURL("https://github.com/ldez/tagliatelle"),
linter.NewConfig(tenv.New(&cfg.LintersSettings.Tenv)).
WithSince("v1.43.0").
- WithPresets(linter.PresetStyle).
+ WithPresets(linter.PresetTest).
WithLoadForGoAnalysis().
- WithURL("https://github.com/sivchari/tenv"),
+ WithURL("https://github.com/sivchari/tenv").
+ DeprecatedWarning("Duplicate feature in another linter.", "v1.64.0", "usetesting"),
linter.NewConfig(testableexamples.New()).
WithSince("v1.50.0").
@@ -738,6 +781,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithSince("v1.55.0").
WithPresets(linter.PresetTest, linter.PresetBugs).
WithLoadForGoAnalysis().
+ WithAutoFix().
WithURL("https://github.com/Antonboom/testifylint"),
linter.NewConfig(testpackage.New(&cfg.LintersSettings.Testpackage)).
@@ -747,7 +791,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
linter.NewConfig(thelper.New(&cfg.LintersSettings.Thelper)).
WithSince("v1.34.0").
- WithPresets(linter.PresetStyle).
+ WithPresets(linter.PresetTest).
WithLoadForGoAnalysis().
WithURL("https://github.com/kulti/thelper"),
@@ -774,7 +818,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithLoadForGoAnalysis().
WithURL("https://github.com/mvdan/unparam"),
- linter.NewConfig(unused.New(&cfg.LintersSettings.Unused, &cfg.LintersSettings.Staticcheck)).
+ linter.NewConfig(unused.New(&cfg.LintersSettings.Unused)).
WithEnabledByDefault().
WithSince("v1.20.0").
WithLoadForGoAnalysis().
@@ -782,16 +826,23 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithAlternativeNames(megacheckName).
ConsiderSlow().
WithChangeTypes().
- WithURL("https://github.com/dominikh/go-tools/tree/master/unused"),
+ WithURL("https://github.com/dominikh/go-tools/tree/HEAD/unused"),
linter.NewConfig(usestdlibvars.New(&cfg.LintersSettings.UseStdlibVars)).
WithSince("v1.48.0").
WithPresets(linter.PresetStyle).
+ WithAutoFix().
WithURL("https://github.com/sashamelentyev/usestdlibvars"),
+ linter.NewConfig(usetesting.New(&cfg.LintersSettings.UseTesting)).
+ WithSince("v1.63.0").
+ WithPresets(linter.PresetTest).
+ WithLoadForGoAnalysis().
+ WithAutoFix().
+ WithURL("https://github.com/ldez/usetesting"),
+
linter.NewConfig(linter.NewNoopDeprecated("varcheck", cfg, linter.DeprecationError)).
WithSince("v1.0.0").
- WithLoadForGoAnalysis().
WithPresets(linter.PresetUnused).
WithURL("https://github.com/opennota/check").
DeprecatedError("The owner seems to have abandoned the linter.", "v1.49.0", "unused"),
@@ -823,6 +874,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
linter.NewConfig(wsl.New(&cfg.LintersSettings.WSL)).
WithSince("v1.20.0").
WithPresets(linter.PresetStyle).
+ WithAutoFix().
WithURL("https://github.com/bombsimon/wsl"),
linter.NewConfig(zerologlint.New()).
@@ -836,6 +888,6 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithSince("v1.26.0").
WithPresets(linter.PresetStyle).
WithAutoFix().
- WithURL("https://github.com/golangci/golangci-lint/blob/master/pkg/golinters/nolintlint/README.md"),
+ WithURL("https://github.com/golangci/golangci-lint/tree/HEAD/pkg/golinters/nolintlint/internal"),
}, nil
}
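The registration hunks above all follow the same fluent-builder style: each linter is declared once with linter.NewConfig(...) and then annotated with chained With... calls (first supported version, presets, auto-fix capability, deprecation status). The sketch below is only a hedged illustration of that pattern using hypothetical types and fields; it is not golangci-lint's actual linter.Config API.

package main

import "fmt"

// Config is a hypothetical, trimmed-down stand-in for golangci-lint's
// linter.Config. Each With... method mutates the receiver and returns it,
// which is what makes the long registration chains in builder_linter.go read
// as a single declaration per linter.
type Config struct {
	name    string
	since   string
	presets []string
	autoFix bool
}

func NewConfig(name string) *Config { return &Config{name: name} }

func (c *Config) WithSince(v string) *Config      { c.since = v; return c }
func (c *Config) WithPresets(p ...string) *Config { c.presets = p; return c }
func (c *Config) WithAutoFix() *Config            { c.autoFix = true; return c }

func main() {
	// Mirrors the style of the new registrations above (usetesting, iface, ...).
	lc := NewConfig("usetesting").
		WithSince("v1.63.0").
		WithPresets("test").
		WithAutoFix()

	fmt.Printf("%s since %s, presets=%v, autofix=%v\n", lc.name, lc.since, lc.presets, lc.autoFix)
}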
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_plugin_go.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_plugin_go.go
index c6dbaf7930..e9f6931f33 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_plugin_go.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_plugin_go.go
@@ -1,6 +1,7 @@
package lintersdb
import (
+ "context"
"errors"
"fmt"
"path/filepath"
@@ -9,6 +10,7 @@ import (
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
+ "github.com/golangci/golangci-lint/pkg/fsutils"
"github.com/golangci/golangci-lint/pkg/goanalysis"
"github.com/golangci/golangci-lint/pkg/lint/linter"
"github.com/golangci/golangci-lint/pkg/logutils"
@@ -43,8 +45,6 @@ func (b *PluginGoBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
continue
}
- settings := settings
-
lc, err := b.loadConfig(cfg, name, &settings)
if err != nil {
return nil, fmt.Errorf("unable to load custom analyzer %q: %s, %w", name, settings.Path, err)
@@ -83,8 +83,21 @@ func (b *PluginGoBuilder) loadConfig(cfg *config.Config, name string, settings *
// or the linter does not implement the AnalyzerPlugin interface.
func (b *PluginGoBuilder) getAnalyzerPlugin(cfg *config.Config, path string, settings any) ([]*analysis.Analyzer, error) {
if !filepath.IsAbs(path) {
+ // Hack for compatibility:
+ // the previous default (v1) was `cfg` but `fsutils.GetBasePath` defaults on `wd`.
+ // TODO(ldez): should be removed in v2.
+ relativePathMode := cfg.Run.RelativePathMode
+ if relativePathMode == "" {
+ relativePathMode = fsutils.RelativePathModeCfg
+ }
+
+ basePath, err := fsutils.GetBasePath(context.Background(), relativePathMode, cfg.GetConfigDir())
+ if err != nil {
+ return nil, fmt.Errorf("get base path: %w", err)
+ }
+
// resolve non-absolute paths relative to config file's directory
- path = filepath.Join(cfg.GetConfigDir(), path)
+ path = filepath.Join(basePath, path)
}
plug, err := plugin.Open(path)
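Two changes stand out in this file: relative plugin paths are now resolved against a configurable base path, and the `settings := settings` copy in the range loop is gone. The copy is redundant because, as of Go 1.22, every loop iteration gets its own instance of the range variable, so taking its address or capturing it in a closure no longer aliases one shared variable. A minimal, self-contained illustration of that language change (unrelated to golangci-lint's own types):

package main

import "fmt"

func main() {
	var printers []func()
	for _, s := range []string{"a", "b", "c"} {
		// Before Go 1.22 this closure would have required `s := s`;
		// with per-iteration loop variables it captures a distinct s each time.
		printers = append(printers, func() { fmt.Print(s, " ") })
	}
	for _, p := range printers {
		p() // prints "a b c " on Go 1.22+, "c c c " on older toolchains
	}
	fmt.Println()
}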
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go
index 0a487be92e..4fe57a3b48 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go
@@ -2,11 +2,11 @@ package lintersdb
import (
"fmt"
+ "maps"
"os"
"slices"
"sort"
-
- "golang.org/x/exp/maps"
+ "strings"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
@@ -109,24 +109,25 @@ func (m *Manager) GetOptimizedLinters() ([]*linter.Config, error) {
m.combineGoAnalysisLinters(resultLintersSet)
- resultLinters := maps.Values(resultLintersSet)
-
// Make order of execution of linters (go/analysis metalinter and unused) stable.
- sort.Slice(resultLinters, func(i, j int) bool {
- a, b := resultLinters[i], resultLinters[j]
-
+ resultLinters := slices.SortedFunc(maps.Values(resultLintersSet), func(a *linter.Config, b *linter.Config) int {
if b.Name() == linter.LastLinter {
- return true
+ return -1
}
if a.Name() == linter.LastLinter {
- return false
+ return 1
}
if a.DoesChangeTypes != b.DoesChangeTypes {
- return b.DoesChangeTypes // move type-changing linters to the end to optimize speed
+ // move type-changing linters to the end to optimize speed
+ if b.DoesChangeTypes {
+ return -1
+ }
+ return 1
}
- return a.Name() < b.Name()
+
+ return strings.Compare(a.Name(), b.Name())
})
return resultLinters, nil
@@ -162,7 +163,6 @@ func (m *Manager) build(enabledByDefaultLinters []*linter.Config) map[string]*li
// --presets can only add linters to default set
for _, p := range m.cfg.Linters.Presets {
for _, lc := range m.GetAllLinterConfigsForPreset(p) {
- lc := lc
resultLintersSet[lc.Name()] = lc
}
}
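The sorting rewrite above replaces golang.org/x/exp/maps plus sort.Slice with the standard library: maps.Values (Go 1.23) yields an iterator over the map's values, and slices.SortedFunc consumes that iterator and returns a slice ordered by a three-way comparator, here strings.Compare with special cases for the last linter and type-changing linters. A small standalone example of the same idiom with an illustrative struct:

package main

import (
	"fmt"
	"maps"
	"slices"
	"strings"
)

type cfg struct{ name string }

func main() {
	set := map[string]*cfg{"govet": {"govet"}, "errcheck": {"errcheck"}, "gofmt": {"gofmt"}}

	// Same idiom as GetOptimizedLinters: collect the map values through the
	// Go 1.23 iterator and sort them with a three-way comparator.
	sorted := slices.SortedFunc(maps.Values(set), func(a, b *cfg) int {
		return strings.Compare(a.name, b.name)
	})
	for _, c := range sorted {
		fmt.Print(c.name, " ") // errcheck gofmt govet
	}
	fmt.Println()
}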
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/package.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/package.go
index c314166cae..736498b0b4 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/package.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/package.go
@@ -11,6 +11,7 @@ import (
"strings"
"time"
+ "github.com/ldez/grignotin/goenv"
"golang.org/x/tools/go/packages"
"github.com/golangci/golangci-lint/pkg/config"
@@ -204,12 +205,13 @@ func (l *PackageLoader) debugPrintLoadedPackages(pkgs []*packages.Package) {
func (l *PackageLoader) prepareBuildContext() {
// Set GOROOT to have working cross-compilation: cross-compiled binaries
// have invalid GOROOT. XXX: can't use runtime.GOROOT().
- goroot := l.goenv.Get(goutil.EnvGoRoot)
+ goroot := l.goenv.Get(goenv.GOROOT)
if goroot == "" {
return
}
- os.Setenv(string(goutil.EnvGoRoot), goroot)
+ _ = os.Setenv(goenv.GOROOT, goroot)
+
build.Default.GOROOT = goroot
build.Default.BuildTags = l.cfg.Run.BuildTags
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go
index f583121ed8..4e6d4692b5 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go
@@ -10,6 +10,7 @@ import (
"github.com/golangci/golangci-lint/internal/errorutil"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/fsutils"
+ "github.com/golangci/golangci-lint/pkg/goformatters"
"github.com/golangci/golangci-lint/pkg/goutil"
"github.com/golangci/golangci-lint/pkg/lint/linter"
"github.com/golangci/golangci-lint/pkg/lint/lintersdb"
@@ -40,6 +41,16 @@ func NewRunner(log logutils.Log, cfg *config.Config, args []string, goenv *gouti
// or process other paths (skip files).
files := fsutils.NewFiles(lineCache, cfg.Output.PathPrefix)
+ pathRelativity, err := processors.NewPathRelativity(log, cfg.GetBasePath())
+ if err != nil {
+ return nil, fmt.Errorf("error creating path relativity processor: %w", err)
+ }
+
+ exclusionPaths, err := processors.NewExclusionPaths(log, &cfg.Linters.LinterExclusions)
+ if err != nil {
+ return nil, err
+ }
+
skipFilesProcessor, err := processors.NewSkipFiles(cfg.Issues.ExcludeFiles, cfg.Output.PathPrefix)
if err != nil {
return nil, err
@@ -60,45 +71,59 @@ func NewRunner(log logutils.Log, cfg *config.Config, args []string, goenv *gouti
return nil, fmt.Errorf("failed to get enabled linters: %w", err)
}
+ metaFormatter, err := goformatters.NewMetaFormatter(log, cfg, enabledLinters)
+ if err != nil {
+ return nil, fmt.Errorf("failed to create meta-formatter: %w", err)
+ }
+
return &Runner{
Processors: []processors.Processor{
+ // Must be the first processor.
+ processors.NewPathAbsoluter(log),
+
processors.NewCgo(goenv),
- // Must go after Cgo.
+ // Must be after Cgo.
processors.NewFilenameUnadjuster(lintCtx.Packages, log.Child(logutils.DebugKeyFilenameUnadjuster)),
- // Must go after FilenameUnadjuster.
+ // Must be after FilenameUnadjuster.
processors.NewInvalidIssue(log.Child(logutils.DebugKeyInvalidIssue)),
- // Must be before diff, nolint and exclude autogenerated processor at least.
- processors.NewPathPrettifier(),
+ // Must be after PathAbsoluter, Cgo, FilenameUnadjuster InvalidIssue.
+ pathRelativity,
+
+ // Must be after PathRelativity.
+ exclusionPaths,
skipFilesProcessor,
- skipDirsProcessor, // must be after path prettifier
+ skipDirsProcessor,
- processors.NewAutogeneratedExclude(cfg.Issues.ExcludeGenerated),
+ processors.NewGeneratedFileFilter(cfg.Linters.LinterExclusions.Generated),
// Must be before exclude because users see already marked output and configure excluding by it.
processors.NewIdentifierMarker(),
- processors.NewExclude(&cfg.Issues),
- processors.NewExcludeRules(log.Child(logutils.DebugKeyExcludeRules), files, &cfg.Issues),
- processors.NewNolint(log.Child(logutils.DebugKeyNolint), dbManager, enabledLinters),
+ processors.NewExclusionRules(log.Child(logutils.DebugKeyExclusionRules), files,
+ &cfg.Linters.LinterExclusions, &cfg.Issues),
+
+ processors.NewNolintFilter(log.Child(logutils.DebugKeyNolintFilter), dbManager, enabledLinters),
- processors.NewUniqByLine(cfg),
processors.NewDiff(&cfg.Issues),
+
+ // The fixer still needs to see paths for the issues that are relative to the current directory.
+ processors.NewFixer(cfg, log, fileCache, metaFormatter),
+
+ // Must be after the Fixer.
+ processors.NewUniqByLine(cfg.Issues.UniqByLine),
processors.NewMaxPerFileFromLinter(cfg),
processors.NewMaxSameIssues(cfg.Issues.MaxSameIssues, log.Child(logutils.DebugKeyMaxSameIssues), cfg),
processors.NewMaxFromLinter(cfg.Issues.MaxIssuesPerLinter, log.Child(logutils.DebugKeyMaxFromLinter), cfg),
+
+ // Now we can modify the issues for output.
processors.NewSourceCode(lineCache, log.Child(logutils.DebugKeySourceCode)),
processors.NewPathShortener(),
processors.NewSeverity(log.Child(logutils.DebugKeySeverityRules), files, &cfg.Severity),
-
- // The fixer still needs to see paths for the issues that are relative to the current directory.
- processors.NewFixer(cfg, log, fileCache),
-
- // Now we can modify the issues for output.
- processors.NewPathPrefixer(cfg.Output.PathPrefix),
- processors.NewSortResults(cfg),
+ processors.NewPathPrettifier(log, cfg.Output.PathPrefix),
+ processors.NewSortResults(&cfg.Output),
},
lintCtx: lintCtx,
Log: log,
@@ -115,18 +140,17 @@ func (r *Runner) Run(ctx context.Context, linters []*linter.Config) ([]result.Is
)
for _, lc := range linters {
- lc := lc
- sw.TrackStage(lc.Name(), func() {
- linterIssues, err := r.runLinterSafe(ctx, r.lintCtx, lc)
- if err != nil {
- lintErrors = errors.Join(lintErrors, fmt.Errorf("can't run linter %s", lc.Linter.Name()), err)
- r.Log.Warnf("Can't run linter %s: %v", lc.Linter.Name(), err)
-
- return
- }
-
- issues = append(issues, linterIssues...)
+ linterIssues, err := timeutils.TrackStage(sw, lc.Name(), func() ([]result.Issue, error) {
+ return r.runLinterSafe(ctx, r.lintCtx, lc)
})
+ if err != nil {
+ lintErrors = errors.Join(lintErrors, fmt.Errorf("can't run linter %s", lc.Linter.Name()), err)
+ r.Log.Warnf("Can't run linter %s: %v", lc.Linter.Name(), err)
+
+ continue
+ }
+
+ issues = append(issues, linterIssues...)
}
return r.processLintResults(issues), lintErrors
@@ -189,10 +213,7 @@ func (r *Runner) processLintResults(inIssues []result.Issue) []result.Issue {
// finalize processors: logging, clearing, no heavy work here
for _, p := range r.Processors {
- p := p
- sw.TrackStage(p.Name(), func() {
- p.Finish()
- })
+ sw.TrackStage(p.Name(), p.Finish)
}
if issuesBefore != issuesAfter {
@@ -208,21 +229,18 @@ func (r *Runner) printPerProcessorStat(stat map[string]processorStat) {
parts := make([]string, 0, len(stat))
for name, ps := range stat {
if ps.inCount != 0 {
- parts = append(parts, fmt.Sprintf("%s: %d/%d", name, ps.outCount, ps.inCount))
+ parts = append(parts, fmt.Sprintf("%s: %d/%d", name, ps.inCount, ps.outCount))
}
}
if len(parts) != 0 {
- r.Log.Infof("Processors filtering stat (out/in): %s", strings.Join(parts, ", "))
+ r.Log.Infof("Processors filtering stat (in/out): %s", strings.Join(parts, ", "))
}
}
func (r *Runner) processIssues(issues []result.Issue, sw *timeutils.Stopwatch, statPerProcessor map[string]processorStat) []result.Issue {
for _, p := range r.Processors {
- var newIssues []result.Issue
- var err error
- p := p
- sw.TrackStage(p.Name(), func() {
- newIssues, err = p.Process(issues)
+ newIssues, err := timeutils.TrackStage(sw, p.Name(), func() ([]result.Issue, error) {
+ return p.Process(issues)
})
if err != nil {
@@ -235,6 +253,7 @@ func (r *Runner) processIssues(issues []result.Issue, sw *timeutils.Stopwatch, s
issues = newIssues
}
+ // This is required by JSON serialization
if issues == nil {
issues = []result.Issue{}
}
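The runner now times each linter and processor through timeutils.TrackStage, whose call sites above take a func() (T, error) and return the value and error directly, instead of assigning to variables captured by a closure passed to sw.TrackStage. The helper's definition is not part of this hunk; the sketch below is an assumption about its general shape, inferred purely from those call sites, not the vendored implementation.

package main

import (
	"fmt"
	"time"
)

// stopwatch is a stand-in for timeutils.Stopwatch; only the tracking part matters here.
type stopwatch struct{ stages map[string]time.Duration }

// trackStage is a hedged guess at the shape of timeutils.TrackStage as used in
// runner.go: time a stage, record the duration under its name, and pass the
// callback's value and error straight through.
func trackStage[T any](sw *stopwatch, name string, f func() (T, error)) (T, error) {
	start := time.Now()
	v, err := f()
	sw.stages[name] += time.Since(start)
	return v, err
}

func main() {
	sw := &stopwatch{stages: map[string]time.Duration{}}
	issues, err := trackStage(sw, "demo-linter", func() ([]string, error) {
		return []string{"unused variable x"}, nil
	})
	fmt.Println(issues, err, len(sw.stages))
}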
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go
index e4bb98109d..0454d79271 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go
@@ -16,36 +16,53 @@ const EnvTestRun = "GL_TEST_RUN"
const envDebug = "GL_DEBUG"
const (
- DebugKeyAutogenExclude = "autogen_exclude" // Debugs a filter excluding autogenerated source code.
- DebugKeyBinSalt = "bin_salt"
- DebugKeyConfigReader = "config_reader"
- DebugKeyEmpty = ""
- DebugKeyEnabledLinters = "enabled_linters"
- DebugKeyEnv = "env" // Debugs `go env` command.
- DebugKeyExcludeRules = "exclude_rules"
- DebugKeyExec = "exec"
- DebugKeyFilenameUnadjuster = "filename_unadjuster"
- DebugKeyInvalidIssue = "invalid_issue"
- DebugKeyForbidigo = "forbidigo"
- DebugKeyGoEnv = "goenv"
- DebugKeyLinter = "linter"
- DebugKeyLintersContext = "linters_context"
- DebugKeyLintersDB = "lintersdb"
- DebugKeyLintersOutput = "linters_output"
- DebugKeyLoader = "loader" // Debugs packages loading (including `go/packages` internal debugging).
- DebugKeyMaxFromLinter = "max_from_linter"
- DebugKeyMaxSameIssues = "max_same_issues"
- DebugKeyPkgCache = "pkgcache"
- DebugKeyRunner = "runner"
- DebugKeySeverityRules = "severity_rules"
- DebugKeySkipDirs = "skip_dirs"
- DebugKeySourceCode = "source_code"
- DebugKeyStopwatch = "stopwatch"
+ DebugKeyBinSalt = "bin_salt"
+ DebugKeyConfigReader = "config_reader"
+ DebugKeyEmpty = ""
+ DebugKeyEnabledLinters = "enabled_linters"
+ DebugKeyExec = "exec"
+ DebugKeyFormatter = "formatter"
+ DebugKeyGoEnv = "goenv"
+ DebugKeyLinter = "linter"
+ DebugKeyLintersContext = "linters_context"
+ DebugKeyLintersDB = "lintersdb"
+ DebugKeyLintersOutput = "linters_output"
+ DebugKeyLoader = "loader" // Debugs packages loading (including `go/packages` internal debugging).
+ DebugKeyPkgCache = "pkgcache"
+ DebugKeyRunner = "runner"
+ DebugKeyStopwatch = "stopwatch"
+ DebugKeyTest = "test"
+)
+
+// Printers.
+const (
+ DebugKeyCheckstylePrinter = "checkstyle_printer"
+ DebugKeyCodeClimatePrinter = "codeclimate_printer"
+ DebugKeySarifPrinter = "sarif_printer"
DebugKeyTabPrinter = "tab_printer"
- DebugKeyTest = "test"
+ DebugKeyTeamCityPrinter = "teamcity_printer"
DebugKeyTextPrinter = "text_printer"
)
+// Processors.
+const (
+ DebugKeyExclusionPaths = "exclusion_paths"
+ DebugKeyExclusionRules = "exclusion_rules"
+ DebugKeyFilenameUnadjuster = "filename_unadjuster"
+ DebugKeyGeneratedFileFilter = "generated_file_filter" // Debugs a filter excluding autogenerated source code.
+ DebugKeyInvalidIssue = "invalid_issue"
+ DebugKeyMaxFromLinter = "max_from_linter"
+ DebugKeyMaxSameIssues = "max_same_issues"
+ DebugKeyNolintFilter = "nolint_filter" // Debugs a filter excluding issues by `//nolint` comments.
+ DebugKeyPathAbsoluter = "path_absoluter"
+ DebugKeyPathPrettifier = "path_prettifier"
+ DebugKeyPathRelativity = "path_relativity"
+ DebugKeySeverityRules = "severity_rules"
+ DebugKeySkipDirs = "skip_dirs"
+ DebugKeySourceCode = "source_code"
+)
+
+// Analysis.
const (
DebugKeyGoAnalysis = "goanalysis"
@@ -59,11 +76,11 @@ const (
DebugKeyGoAnalysisFactsInherit = DebugKeyGoAnalysisFacts + "/inherit"
)
+// Linters.
const (
- DebugKeyGoCritic = "gocritic" // Debugs `go-critic` linter.
+ DebugKeyForbidigo = "forbidigo" // Debugs `forbidigo` linter.
+ DebugKeyGoCritic = "gocritic" // Debugs `gocritic` linter.
DebugKeyGovet = "govet" // Debugs `govet` linter.
- DebugKeyMegacheck = "megacheck" // Debugs `staticcheck` related linters.
- DebugKeyNolint = "nolint" // Debugs a filter excluding issues by `//nolint` comments.
DebugKeyRevive = "revive" // Debugs `revive` linter.
)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go
index e32eef7f51..c31641d22d 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go
@@ -4,44 +4,40 @@ import (
"encoding/xml"
"fmt"
"io"
- "sort"
+ "maps"
+ "slices"
+ "strings"
"github.com/go-xmlfmt/xmlfmt"
- "golang.org/x/exp/maps"
+ "github.com/golangci/golangci-lint/pkg/logutils"
"github.com/golangci/golangci-lint/pkg/result"
)
const defaultCheckstyleSeverity = "error"
-type checkstyleOutput struct {
- XMLName xml.Name `xml:"checkstyle"`
- Version string `xml:"version,attr"`
- Files []*checkstyleFile `xml:"file"`
-}
-
-type checkstyleFile struct {
- Name string `xml:"name,attr"`
- Errors []*checkstyleError `xml:"error"`
-}
-
-type checkstyleError struct {
- Column int `xml:"column,attr"`
- Line int `xml:"line,attr"`
- Message string `xml:"message,attr"`
- Severity string `xml:"severity,attr"`
- Source string `xml:"source,attr"`
-}
-
+// Checkstyle prints issues in the Checkstyle format.
+// https://checkstyle.org/config.html
type Checkstyle struct {
- w io.Writer
+ log logutils.Log
+ w io.Writer
+ sanitizer severitySanitizer
}
-func NewCheckstyle(w io.Writer) *Checkstyle {
- return &Checkstyle{w: w}
+func NewCheckstyle(log logutils.Log, w io.Writer) *Checkstyle {
+ return &Checkstyle{
+ log: log.Child(logutils.DebugKeyCheckstylePrinter),
+ w: w,
+ sanitizer: severitySanitizer{
+ // https://checkstyle.org/config.html#Severity
+ // https://checkstyle.org/property_types.html#SeverityLevel
+ allowedSeverities: []string{"ignore", "info", "warning", defaultCheckstyleSeverity},
+ defaultSeverity: defaultCheckstyleSeverity,
+ },
+ }
}
-func (p Checkstyle) Print(issues []result.Issue) error {
+func (p *Checkstyle) Print(issues []result.Issue) error {
out := checkstyleOutput{
Version: "5.0",
}
@@ -59,26 +55,24 @@ func (p Checkstyle) Print(issues []result.Issue) error {
files[issue.FilePath()] = file
}
- severity := defaultCheckstyleSeverity
- if issue.Severity != "" {
- severity = issue.Severity
- }
-
newError := &checkstyleError{
Column: issue.Column(),
Line: issue.Line(),
Message: issue.Text,
Source: issue.FromLinter,
- Severity: severity,
+ Severity: p.sanitizer.Sanitize(issue.Severity),
}
file.Errors = append(file.Errors, newError)
}
- out.Files = maps.Values(files)
+ err := p.sanitizer.Err()
+ if err != nil {
+ p.log.Infof("%v", err)
+ }
- sort.Slice(out.Files, func(i, j int) bool {
- return out.Files[i].Name < out.Files[j].Name
+ out.Files = slices.SortedFunc(maps.Values(files), func(a *checkstyleFile, b *checkstyleFile) int {
+ return strings.Compare(a.Name, b.Name)
})
data, err := xml.Marshal(&out)
@@ -93,3 +87,22 @@ func (p Checkstyle) Print(issues []result.Issue) error {
return nil
}
+
+type checkstyleOutput struct {
+ XMLName xml.Name `xml:"checkstyle"`
+ Version string `xml:"version,attr"`
+ Files []*checkstyleFile `xml:"file"`
+}
+
+type checkstyleFile struct {
+ Name string `xml:"name,attr"`
+ Errors []*checkstyleError `xml:"error"`
+}
+
+type checkstyleError struct {
+ Column int `xml:"column,attr"`
+ Line int `xml:"line,attr"`
+ Message string `xml:"message,attr"`
+ Severity string `xml:"severity,attr"`
+ Source string `xml:"source,attr"`
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go
index 50d6dcff3b..983dcde363 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go
@@ -4,56 +4,72 @@ import (
"encoding/json"
"io"
+ "github.com/golangci/golangci-lint/pkg/logutils"
"github.com/golangci/golangci-lint/pkg/result"
)
const defaultCodeClimateSeverity = "critical"
-// CodeClimateIssue is a subset of the Code Climate spec.
-// https://github.com/codeclimate/platform/blob/master/spec/analyzers/SPEC.md#data-types
-// It is just enough to support GitLab CI Code Quality.
-// https://docs.gitlab.com/ee/user/project/merge_requests/code_quality.html
-type CodeClimateIssue struct {
- Description string `json:"description"`
- Severity string `json:"severity,omitempty"`
- Fingerprint string `json:"fingerprint"`
- Location struct {
- Path string `json:"path"`
- Lines struct {
- Begin int `json:"begin"`
- } `json:"lines"`
- } `json:"location"`
-}
-
+// CodeClimate prints issues in the Code Climate format.
+// https://github.com/codeclimate/platform/blob/HEAD/spec/analyzers/SPEC.md
type CodeClimate struct {
- w io.Writer
+ log logutils.Log
+ w io.Writer
+ sanitizer severitySanitizer
}
-func NewCodeClimate(w io.Writer) *CodeClimate {
- return &CodeClimate{w: w}
+func NewCodeClimate(log logutils.Log, w io.Writer) *CodeClimate {
+ return &CodeClimate{
+ log: log.Child(logutils.DebugKeyCodeClimatePrinter),
+ w: w,
+ sanitizer: severitySanitizer{
+ // https://github.com/codeclimate/platform/blob/HEAD/spec/analyzers/SPEC.md#data-types
+ allowedSeverities: []string{"info", "minor", "major", defaultCodeClimateSeverity, "blocker"},
+ defaultSeverity: defaultCodeClimateSeverity,
+ },
+ }
}
-func (p CodeClimate) Print(issues []result.Issue) error {
- codeClimateIssues := make([]CodeClimateIssue, 0, len(issues))
+func (p *CodeClimate) Print(issues []result.Issue) error {
+ ccIssues := make([]codeClimateIssue, 0, len(issues))
+
for i := range issues {
- issue := &issues[i]
- codeClimateIssue := CodeClimateIssue{}
- codeClimateIssue.Description = issue.Description()
- codeClimateIssue.Location.Path = issue.Pos.Filename
- codeClimateIssue.Location.Lines.Begin = issue.Pos.Line
- codeClimateIssue.Fingerprint = issue.Fingerprint()
- codeClimateIssue.Severity = defaultCodeClimateSeverity
-
- if issue.Severity != "" {
- codeClimateIssue.Severity = issue.Severity
+ issue := issues[i]
+
+ ccIssue := codeClimateIssue{
+ Description: issue.Description(),
+ CheckName: issue.FromLinter,
+ Severity: p.sanitizer.Sanitize(issue.Severity),
+ Fingerprint: issue.Fingerprint(),
}
- codeClimateIssues = append(codeClimateIssues, codeClimateIssue)
+ ccIssue.Location.Path = issue.Pos.Filename
+ ccIssue.Location.Lines.Begin = issue.Pos.Line
+
+ ccIssues = append(ccIssues, ccIssue)
}
- err := json.NewEncoder(p.w).Encode(codeClimateIssues)
+ err := p.sanitizer.Err()
if err != nil {
- return err
+ p.log.Infof("%v", err)
}
- return nil
+
+ return json.NewEncoder(p.w).Encode(ccIssues)
+}
+
+// codeClimateIssue is a subset of the Code Climate spec.
+// https://github.com/codeclimate/platform/blob/HEAD/spec/analyzers/SPEC.md#data-types
+// It is just enough to support GitLab CI Code Quality.
+// https://docs.gitlab.com/ee/ci/testing/code_quality.html#code-quality-report-format
+type codeClimateIssue struct {
+ Description string `json:"description"`
+ CheckName string `json:"check_name"`
+ Severity string `json:"severity,omitempty"`
+ Fingerprint string `json:"fingerprint"`
+ Location struct {
+ Path string `json:"path"`
+ Lines struct {
+ Begin int `json:"begin"`
+ } `json:"lines"`
+ } `json:"location"`
}
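Besides routing severities through the sanitizer, the Code Climate printer now fills in check_name, and the nested Location/Lines struct produces the JSON shape GitLab's Code Quality report consumes. A standalone sketch of that serialization (the struct and values here are illustrative, not the printer's unexported types):

package main

import (
	"encoding/json"
	"fmt"
)

type ccIssue struct {
	Description string `json:"description"`
	CheckName   string `json:"check_name"`
	Severity    string `json:"severity,omitempty"`
	Fingerprint string `json:"fingerprint"`
	Location    struct {
		Path  string `json:"path"`
		Lines struct {
			Begin int `json:"begin"`
		} `json:"lines"`
	} `json:"location"`
}

func main() {
	issue := ccIssue{
		Description: "errcheck: unchecked error",
		CheckName:   "errcheck",
		Severity:    "major",
		Fingerprint: "abc123",
	}
	issue.Location.Path = "pkg/foo.go"
	issue.Location.Lines.Begin = 10

	out, _ := json.Marshal(issue)
	fmt.Println(string(out))
	// {"description":"errcheck: unchecked error","check_name":"errcheck","severity":"major",
	//  "fingerprint":"abc123","location":{"path":"pkg/foo.go","lines":{"begin":10}}}
}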
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go
index 7dd1e5c623..6fc6bc62a2 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go
@@ -122,6 +122,8 @@ type htmlIssue struct {
Code string
}
+// HTML prints issues in an HTML page.
+// It uses the Cloudflare CDN (cdnjs) and React.
type HTML struct {
w io.Writer
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go
index 28509cac45..8fc94649f7 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go
@@ -8,12 +8,13 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
+// JSON prints issues in a JSON representation.
type JSON struct {
- rd *report.Data // TODO(ldez) should be drop in v2. Only use by JSON reporter.
+ rd *report.Data
w io.Writer
}
-func NewJSON(rd *report.Data, w io.Writer) *JSON {
+func NewJSON(w io.Writer, rd *report.Data) *JSON {
return &JSON{
rd: rd,
w: w,
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go
index 3e3f82f580..587cef4e2a 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go
@@ -4,49 +4,30 @@ import (
"encoding/xml"
"fmt"
"io"
- "sort"
+ "maps"
+ "slices"
"strings"
- "golang.org/x/exp/maps"
-
"github.com/golangci/golangci-lint/pkg/result"
)
-type testSuitesXML struct {
- XMLName xml.Name `xml:"testsuites"`
- TestSuites []testSuiteXML
-}
-
-type testSuiteXML struct {
- XMLName xml.Name `xml:"testsuite"`
- Suite string `xml:"name,attr"`
- Tests int `xml:"tests,attr"`
- Errors int `xml:"errors,attr"`
- Failures int `xml:"failures,attr"`
- TestCases []testCaseXML `xml:"testcase"`
-}
-
-type testCaseXML struct {
- Name string `xml:"name,attr"`
- ClassName string `xml:"classname,attr"`
- Failure failureXML `xml:"failure"`
-}
-
-type failureXML struct {
- Message string `xml:"message,attr"`
- Type string `xml:"type,attr"`
- Content string `xml:",cdata"`
+// JUnitXML prints issues in the JUnit XML format.
+// There is no official specification for the JUnit XML file format,
+// and various tools generate and support different flavors of this format.
+// https://github.com/testmoapp/junitxml
+type JUnitXML struct {
+ extended bool
+ w io.Writer
}
-type JunitXML struct {
- w io.Writer
-}
-
-func NewJunitXML(w io.Writer) *JunitXML {
- return &JunitXML{w: w}
+func NewJUnitXML(w io.Writer, extended bool) *JUnitXML {
+ return &JUnitXML{
+ extended: extended,
+ w: w,
+ }
}
-func (p JunitXML) Print(issues []result.Issue) error {
+func (p JUnitXML) Print(issues []result.Issue) error {
suites := make(map[string]testSuiteXML) // use a map to group by file
for ind := range issues {
@@ -68,15 +49,19 @@ func (p JunitXML) Print(issues []result.Issue) error {
},
}
+ if p.extended {
+ tc.File = i.Pos.Filename
+ tc.Line = i.Pos.Line
+ }
+
testSuite.TestCases = append(testSuite.TestCases, tc)
suites[suiteName] = testSuite
}
var res testSuitesXML
- res.TestSuites = maps.Values(suites)
- sort.Slice(res.TestSuites, func(i, j int) bool {
- return res.TestSuites[i].Suite < res.TestSuites[j].Suite
+ res.TestSuites = slices.SortedFunc(maps.Values(suites), func(a testSuiteXML, b testSuiteXML) int {
+ return strings.Compare(a.Suite, b.Suite)
})
enc := xml.NewEncoder(p.w)
@@ -86,3 +71,31 @@ func (p JunitXML) Print(issues []result.Issue) error {
}
return nil
}
+
+type testSuitesXML struct {
+ XMLName xml.Name `xml:"testsuites"`
+ TestSuites []testSuiteXML
+}
+
+type testSuiteXML struct {
+ XMLName xml.Name `xml:"testsuite"`
+ Suite string `xml:"name,attr"`
+ Tests int `xml:"tests,attr"`
+ Errors int `xml:"errors,attr"`
+ Failures int `xml:"failures,attr"`
+ TestCases []testCaseXML `xml:"testcase"`
+}
+
+type testCaseXML struct {
+ Name string `xml:"name,attr"`
+ ClassName string `xml:"classname,attr"`
+ Failure failureXML `xml:"failure"`
+ File string `xml:"file,attr,omitempty"`
+ Line int `xml:"line,attr,omitempty"`
+}
+
+type failureXML struct {
+ Message string `xml:"message,attr"`
+ Type string `xml:"type,attr"`
+ Content string `xml:",cdata"`
+}
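The renamed JUnitXML printer gains an extended mode that records file and line on each test case; because both attributes carry omitempty, the classic output is unchanged when the flag is off. A tiny standalone demonstration of how encoding/xml treats such optional attributes (the struct is illustrative, not the printer's own type):

package main

import (
	"encoding/xml"
	"fmt"
)

type testCase struct {
	XMLName xml.Name `xml:"testcase"`
	Name    string   `xml:"name,attr"`
	File    string   `xml:"file,attr,omitempty"`
	Line    int      `xml:"line,attr,omitempty"`
}

func main() {
	classic, _ := xml.Marshal(testCase{Name: "errcheck"})
	extended, _ := xml.Marshal(testCase{Name: "errcheck", File: "main.go", Line: 42})
	fmt.Println(string(classic))  // <testcase name="errcheck"></testcase>
	fmt.Println(string(extended)) // <testcase name="errcheck" file="main.go" line="42"></testcase>
}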
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go
index 53db01220e..613e4abec8 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go
@@ -6,6 +6,8 @@ import (
"io"
"os"
"path/filepath"
+ "slices"
+ "strings"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/logutils"
@@ -23,6 +25,7 @@ type issuePrinter interface {
type Printer struct {
cfg *config.Output
reportData *report.Data
+ basePath string
log logutils.Log
@@ -31,7 +34,7 @@ type Printer struct {
}
// NewPrinter creates a new Printer.
-func NewPrinter(log logutils.Log, cfg *config.Output, reportData *report.Data) (*Printer, error) {
+func NewPrinter(log logutils.Log, cfg *config.Output, reportData *report.Data, basePath string) (*Printer, error) {
if log == nil {
return nil, errors.New("missing log argument in constructor")
}
@@ -45,6 +48,7 @@ func NewPrinter(log logutils.Log, cfg *config.Output, reportData *report.Data) (
return &Printer{
cfg: cfg,
reportData: reportData,
+ basePath: basePath,
log: log,
stdOut: logutils.StdOut,
stdErr: logutils.StdErr,
@@ -96,6 +100,10 @@ func (c *Printer) createWriter(path string) (io.Writer, bool, error) {
return c.stdErr, false, nil
}
+ if !filepath.IsAbs(path) {
+ path = filepath.Join(c.basePath, path)
+ }
+
err := os.MkdirAll(filepath.Dir(path), os.ModePerm)
if err != nil {
return nil, false, err
@@ -114,32 +122,63 @@ func (c *Printer) createPrinter(format string, w io.Writer) (issuePrinter, error
switch format {
case config.OutFormatJSON:
- p = NewJSON(c.reportData, w)
- case config.OutFormatColoredLineNumber, config.OutFormatLineNumber:
- p = NewText(c.cfg.PrintIssuedLine,
- format == config.OutFormatColoredLineNumber, c.cfg.PrintLinterName,
- c.log.Child(logutils.DebugKeyTextPrinter), w)
+ p = NewJSON(w, c.reportData)
+ case config.OutFormatLineNumber, config.OutFormatColoredLineNumber:
+ p = NewText(c.log, w, c.cfg.PrintLinterName, c.cfg.PrintIssuedLine, format == config.OutFormatColoredLineNumber)
case config.OutFormatTab, config.OutFormatColoredTab:
- p = NewTab(c.cfg.PrintLinterName,
- format == config.OutFormatColoredTab,
- c.log.Child(logutils.DebugKeyTabPrinter), w)
+ p = NewTab(c.log, w, c.cfg.PrintLinterName, format == config.OutFormatColoredTab)
case config.OutFormatCheckstyle:
- p = NewCheckstyle(w)
+ p = NewCheckstyle(c.log, w)
case config.OutFormatCodeClimate:
- p = NewCodeClimate(w)
+ p = NewCodeClimate(c.log, w)
case config.OutFormatHTML:
p = NewHTML(w)
- case config.OutFormatJunitXML:
- p = NewJunitXML(w)
+ case config.OutFormatJUnitXML, config.OutFormatJUnitXMLExtended:
+ p = NewJUnitXML(w, format == config.OutFormatJUnitXMLExtended)
case config.OutFormatGithubActions:
p = NewGitHubAction(w)
case config.OutFormatTeamCity:
- p = NewTeamCity(w)
+ p = NewTeamCity(c.log, w)
case config.OutFormatSarif:
- p = NewSarif(w)
+ p = NewSarif(c.log, w)
default:
return nil, fmt.Errorf("unknown output format %q", format)
}
return p, nil
}
+
+type severitySanitizer struct {
+ allowedSeverities []string
+ defaultSeverity string
+
+ unsupportedSeverities map[string]struct{}
+}
+
+func (s *severitySanitizer) Sanitize(severity string) string {
+ if slices.Contains(s.allowedSeverities, severity) {
+ return severity
+ }
+
+ if s.unsupportedSeverities == nil {
+ s.unsupportedSeverities = make(map[string]struct{})
+ }
+
+ s.unsupportedSeverities[severity] = struct{}{}
+
+ return s.defaultSeverity
+}
+
+func (s *severitySanitizer) Err() error {
+ if len(s.unsupportedSeverities) == 0 {
+ return nil
+ }
+
+ var names []string
+ for k := range s.unsupportedSeverities {
+ names = append(names, "'"+k+"'")
+ }
+
+ return fmt.Errorf("severities (%v) are not inside supported values (%v), fallback to '%s'",
+ strings.Join(names, ", "), strings.Join(s.allowedSeverities, ", "), s.defaultSeverity)
+}
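The new severitySanitizer centralizes what the printers previously did ad hoc: severities outside a format's allowed set are mapped to that format's default, and the unexpected values are remembered so they can be reported once through Err(). A condensed standalone sketch of that flow (the type below restates the hunk above in miniature; the allowed list is just an example):

package main

import (
	"fmt"
	"slices"
	"strings"
)

// sanitizer condenses the severitySanitizer added in printer.go for illustration.
type sanitizer struct {
	allowed     []string
	fallback    string
	unsupported map[string]struct{}
}

func (s *sanitizer) sanitize(sev string) string {
	if slices.Contains(s.allowed, sev) {
		return sev
	}
	if s.unsupported == nil {
		s.unsupported = map[string]struct{}{}
	}
	s.unsupported[sev] = struct{}{}
	return s.fallback
}

func (s *sanitizer) err() error {
	if len(s.unsupported) == 0 {
		return nil
	}
	var names []string
	for k := range s.unsupported {
		names = append(names, "'"+k+"'")
	}
	return fmt.Errorf("unsupported severities: %s (fell back to '%s')", strings.Join(names, ", "), s.fallback)
}

func main() {
	s := &sanitizer{allowed: []string{"info", "warning", "error"}, fallback: "error"}
	for _, sev := range []string{"warning", "blocker", ""} {
		fmt.Print(s.sanitize(sev), " ") // warning error error
	}
	fmt.Println()
	fmt.Println(s.err()) // reported once, like p.log.Infof("%v", err) in the printers
}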
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/sarif.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/sarif.go
index 9ccf33ce19..c06c116244 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/sarif.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/sarif.go
@@ -4,6 +4,7 @@ import (
"encoding/json"
"io"
+ "github.com/golangci/golangci-lint/pkg/logutils"
"github.com/golangci/golangci-lint/pkg/result"
)
@@ -12,6 +13,73 @@ const (
sarifSchemaURI = "https://schemastore.azurewebsites.net/schemas/json/sarif-2.1.0-rtm.6.json"
)
+const defaultSarifSeverity = "error"
+
+// Sarif prints issues in the SARIF format.
+// https://sarifweb.azurewebsites.net/
+// https://docs.oasis-open.org/sarif/sarif/v2.1.0/
+type Sarif struct {
+ log logutils.Log
+ w io.Writer
+ sanitizer severitySanitizer
+}
+
+func NewSarif(log logutils.Log, w io.Writer) *Sarif {
+ return &Sarif{
+ log: log.Child(logutils.DebugKeySarifPrinter),
+ w: w,
+ sanitizer: severitySanitizer{
+ // https://docs.oasis-open.org/sarif/sarif/v2.1.0/errata01/os/sarif-v2.1.0-errata01-os-complete.html#_Toc141790898
+ allowedSeverities: []string{"none", "note", "warning", defaultSarifSeverity},
+ defaultSeverity: defaultSarifSeverity,
+ },
+ }
+}
+
+func (p *Sarif) Print(issues []result.Issue) error {
+ run := sarifRun{}
+ run.Tool.Driver.Name = "golangci-lint"
+ run.Results = make([]sarifResult, 0)
+
+ for i := range issues {
+ issue := issues[i]
+
+ sr := sarifResult{
+ RuleID: issue.FromLinter,
+ Level: p.sanitizer.Sanitize(issue.Severity),
+ Message: sarifMessage{Text: issue.Text},
+ Locations: []sarifLocation{
+ {
+ PhysicalLocation: sarifPhysicalLocation{
+ ArtifactLocation: sarifArtifactLocation{URI: issue.FilePath()},
+ Region: sarifRegion{
+ StartLine: issue.Line(),
+ // If startColumn is absent, it SHALL default to 1.
+ // https://docs.oasis-open.org/sarif/sarif/v2.1.0/errata01/os/sarif-v2.1.0-errata01-os-complete.html#_Toc141790941
+ StartColumn: max(1, issue.Column()),
+ },
+ },
+ },
+ },
+ }
+
+ run.Results = append(run.Results, sr)
+ }
+
+ err := p.sanitizer.Err()
+ if err != nil {
+ p.log.Infof("%v", err)
+ }
+
+ output := SarifOutput{
+ Version: sarifVersion,
+ Schema: sarifSchemaURI,
+ Runs: []sarifRun{run},
+ }
+
+ return json.NewEncoder(p.w).Encode(output)
+}
+
type SarifOutput struct {
Version string `json:"version"`
Schema string `json:"$schema"`
@@ -58,55 +126,3 @@ type sarifRegion struct {
StartLine int `json:"startLine"`
StartColumn int `json:"startColumn"`
}
-
-type Sarif struct {
- w io.Writer
-}
-
-func NewSarif(w io.Writer) *Sarif {
- return &Sarif{w: w}
-}
-
-func (p Sarif) Print(issues []result.Issue) error {
- run := sarifRun{}
- run.Tool.Driver.Name = "golangci-lint"
- run.Results = make([]sarifResult, 0)
-
- for i := range issues {
- issue := issues[i]
-
- severity := issue.Severity
- if severity == "" {
- severity = "error"
- }
-
- sr := sarifResult{
- RuleID: issue.FromLinter,
- Level: severity,
- Message: sarifMessage{Text: issue.Text},
- Locations: []sarifLocation{
- {
- PhysicalLocation: sarifPhysicalLocation{
- ArtifactLocation: sarifArtifactLocation{URI: issue.FilePath()},
- Region: sarifRegion{
- StartLine: issue.Line(),
- // If startColumn is absent, it SHALL default to 1.
- // https://docs.oasis-open.org/sarif/sarif/v2.1.0/errata01/os/sarif-v2.1.0-errata01-os-complete.html#_Toc141790941
- StartColumn: max(1, issue.Column()),
- },
- },
- },
- },
- }
-
- run.Results = append(run.Results, sr)
- }
-
- output := SarifOutput{
- Version: sarifVersion,
- Schema: sarifSchemaURI,
- Runs: []sarifRun{run},
- }
-
- return json.NewEncoder(p.w).Encode(output)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go
index c6d390d188..ac04ab0fbe 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go
@@ -11,6 +11,7 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
+// Tab prints issues using tabulation as a field separator.
type Tab struct {
printLinterName bool
useColors bool
@@ -19,11 +20,11 @@ type Tab struct {
w io.Writer
}
-func NewTab(printLinterName, useColors bool, log logutils.Log, w io.Writer) *Tab {
+func NewTab(log logutils.Log, w io.Writer, printLinterName, useColors bool) *Tab {
return &Tab{
printLinterName: printLinterName,
useColors: useColors,
- log: log,
+ log: log.Child(logutils.DebugKeyTabPrinter),
w: w,
}
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/teamcity.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/teamcity.go
index 1d1c9f7d32..9ff5fe5bc9 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/teamcity.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/teamcity.go
@@ -4,8 +4,8 @@ import (
"fmt"
"io"
"strings"
- "unicode/utf8"
+ "github.com/golangci/golangci-lint/pkg/logutils"
"github.com/golangci/golangci-lint/pkg/result"
)
@@ -15,16 +15,22 @@ const (
largeLimit = 4000
)
-// TeamCity printer for TeamCity format.
+const defaultTeamCitySeverity = "ERROR"
+
+// TeamCity prints issues in the TeamCity format.
+// https://www.jetbrains.com/help/teamcity/service-messages.html
type TeamCity struct {
- w io.Writer
- escaper *strings.Replacer
+ log logutils.Log
+ w io.Writer
+ escaper *strings.Replacer
+ sanitizer severitySanitizer
}
// NewTeamCity output format outputs issues according to TeamCity service message format.
-func NewTeamCity(w io.Writer) *TeamCity {
+func NewTeamCity(log logutils.Log, w io.Writer) *TeamCity {
return &TeamCity{
- w: w,
+ log: log.Child(logutils.DebugKeyTeamCityPrinter),
+ w: w,
// https://www.jetbrains.com/help/teamcity/service-messages.html#Escaped+Values
escaper: strings.NewReplacer(
"'", "|'",
@@ -34,6 +40,11 @@ func NewTeamCity(w io.Writer) *TeamCity {
"[", "|[",
"]", "|]",
),
+ sanitizer: severitySanitizer{
+ // https://www.jetbrains.com/help/teamcity/service-messages.html#Inspection+Instance
+ allowedSeverities: []string{"INFO", defaultTeamCitySeverity, "WARNING", "WEAK WARNING"},
+ defaultSeverity: defaultTeamCitySeverity,
+ },
}
}
@@ -65,7 +76,7 @@ func (p *TeamCity) Print(issues []result.Issue) error {
message: issue.Text,
file: issue.FilePath(),
line: issue.Line(),
- severity: issue.Severity,
+ severity: p.sanitizer.Sanitize(strings.ToUpper(issue.Severity)),
}
_, err := instance.Print(p.w, p.escaper)
@@ -74,6 +85,11 @@ func (p *TeamCity) Print(issues []result.Issue) error {
}
}
+ err := p.sanitizer.Err()
+ if err != nil {
+ p.log.Infof("%v", err)
+ }
+
return nil
}
@@ -108,15 +124,13 @@ func (i InspectionInstance) Print(w io.Writer, replacer *strings.Replacer) (int,
cutVal(i.typeID, smallLimit),
cutVal(replacer.Replace(i.message), largeLimit),
cutVal(i.file, largeLimit),
- i.line, strings.ToUpper(i.severity))
+ i.line, i.severity)
}
func cutVal(s string, limit int) string {
- var size, count int
- for i := 0; i < limit && count < len(s); i++ {
- _, size = utf8.DecodeRuneInString(s[count:])
- count += size
+ runes := []rune(s)
+ if len(runes) > limit {
+ return string(runes[:limit])
}
-
- return s[:count]
+ return s
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go
index 56cced7696..9e60408f04 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go
@@ -11,21 +11,22 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
+// Text prints issues with a human friendly representation.
type Text struct {
- printIssuedLine bool
printLinterName bool
+ printIssuedLine bool
useColors bool
log logutils.Log
w io.Writer
}
-func NewText(printIssuedLine, useColors, printLinterName bool, log logutils.Log, w io.Writer) *Text {
+func NewText(log logutils.Log, w io.Writer, printLinterName, printIssuedLine, useColors bool) *Text {
return &Text{
- printIssuedLine: printIssuedLine,
printLinterName: printLinterName,
+ printIssuedLine: printIssuedLine,
useColors: useColors,
- log: log,
+ log: log.Child(logutils.DebugKeyTextPrinter),
w: w,
}
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go b/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go
index 32246a6df4..86a4ef3b73 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go
@@ -5,6 +5,7 @@ import (
"fmt"
"go/token"
+ "golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages"
)
@@ -12,18 +13,6 @@ type Range struct {
From, To int
}
-type Replacement struct {
- NeedOnlyDelete bool // need to delete all lines of the issue without replacement with new lines
- NewLines []string // if NeedDelete is false it's the replacement lines
- Inline *InlineFix
-}
-
-type InlineFix struct {
- StartCol int // zero-based
- Length int // length of chunk to be replaced
- NewString string
-}
-
type Issue struct {
FromLinter string
Text string
@@ -33,22 +22,28 @@ type Issue struct {
// Source lines of a code with the issue to show
SourceLines []string
- // If we know how to fix the issue we can provide replacement lines
- Replacement *Replacement
-
// Pkg is needed for proper caching of linting results
Pkg *packages.Package `json:"-"`
- LineRange *Range `json:",omitempty"`
-
Pos token.Position
+ LineRange *Range `json:",omitempty"`
+
// HunkPos is used only when golangci-lint is run over a diff
HunkPos int `json:",omitempty"`
+ // If we know how to fix the issue we can provide replacement lines
+ SuggestedFixes []analysis.SuggestedFix `json:",omitempty"`
+
// If we are expecting a nolint (because this is from nolintlint), record the expected linter
ExpectNoLint bool
ExpectedNoLintLinter string
+
+	// Used only by the Diff processor.
+ WorkingDirectoryRelativePath string `json:"-"`
+
+	// Used only by processors that need relative path evaluation.
+ RelativePath string `json:"-"`
}
func (i *Issue) FilePath() string {
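With `Replacement`/`InlineFix` gone, fixes now ride on the standard `analysis.SuggestedFix` type from x/tools. A hedged sketch of the shape a fix takes; the message and byte offsets are invented for illustration:

```go
package main

import (
	"fmt"
	"go/token"

	"golang.org/x/tools/go/analysis"
)

func main() {
	// A suggested fix is a message plus byte-offset text edits, which is the
	// shape the Fixer processor later converts into diff edits.
	fix := analysis.SuggestedFix{
		Message: "replace deprecated call", // hypothetical message
		TextEdits: []analysis.TextEdit{
			{
				Pos:     token.Pos(120), // hypothetical offsets within the file
				End:     token.Pos(135),
				NewText: []byte("newapi.Do()"),
			},
		},
	}

	for _, edit := range fix.TextEdits {
		fmt.Printf("replace [%d,%d) with %q\n", edit.Pos, edit.End, edit.NewText)
	}
}
```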
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go
index d7a4f0ec4b..72dc202847 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go
@@ -3,6 +3,7 @@ package processors
import (
"regexp"
+ "github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/fsutils"
"github.com/golangci/golangci-lint/pkg/logutils"
"github.com/golangci/golangci-lint/pkg/result"
@@ -18,6 +19,32 @@ type baseRule struct {
linters []string
}
+// The use of `regexp.MustCompile()` is safe here because the regular
+// expressions have already been validated in [config.BaseRule.Validate].
+func newBaseRule(rule *config.BaseRule, prefix string) baseRule {
+ base := baseRule{
+ linters: rule.Linters,
+ }
+
+ if rule.Text != "" {
+ base.text = regexp.MustCompile(prefix + rule.Text)
+ }
+
+ if rule.Source != "" {
+ base.source = regexp.MustCompile(prefix + rule.Source)
+ }
+
+ if rule.Path != "" {
+ base.path = regexp.MustCompile(fsutils.NormalizePathInRegex(rule.Path))
+ }
+
+ if rule.PathExcept != "" {
+ base.pathExcept = regexp.MustCompile(fsutils.NormalizePathInRegex(rule.PathExcept))
+ }
+
+ return base
+}
+
func (r *baseRule) isEmpty() bool {
return r.text == nil && r.source == nil && r.path == nil && r.pathExcept == nil && len(r.linters) == 0
}
@@ -29,10 +56,10 @@ func (r *baseRule) match(issue *result.Issue, files *fsutils.Files, log logutils
if r.text != nil && !r.text.MatchString(issue.Text) {
return false
}
- if r.path != nil && !r.path.MatchString(files.WithPathPrefix(issue.FilePath())) {
+ if r.path != nil && !r.path.MatchString(files.WithPathPrefix(issue.RelativePath)) {
return false
}
- if r.pathExcept != nil && r.pathExcept.MatchString(issue.FilePath()) {
+ if r.pathExcept != nil && r.pathExcept.MatchString(issue.RelativePath) {
return false
}
if len(r.linters) != 0 && !r.matchLinter(issue) {
@@ -58,11 +85,25 @@ func (r *baseRule) matchLinter(issue *result.Issue) bool {
}
func (r *baseRule) matchSource(issue *result.Issue, lineCache *fsutils.LineCache, log logutils.Log) bool {
- sourceLine, errSourceLine := lineCache.GetLine(issue.FilePath(), issue.Line())
+ sourceLine, errSourceLine := lineCache.GetLine(issue.RelativePath, issue.Line())
if errSourceLine != nil {
- log.Warnf("Failed to get line %s:%d from line cache: %s", issue.FilePath(), issue.Line(), errSourceLine)
+ log.Warnf("Failed to get line %s:%d from line cache: %s", issue.RelativePath, issue.Line(), errSourceLine)
return false // can't properly match
}
return r.source.MatchString(sourceLine)
}
+
+func parseRules[T, V any](rules []T, prefix string, newFn func(*T, string) V) []V {
+ if len(rules) == 0 {
+ return nil
+ }
+
+ parsedRules := make([]V, 0, len(rules))
+
+ for _, r := range rules {
+ parsedRules = append(parsedRules, newFn(&r, prefix))
+ }
+
+ return parsedRules
+}
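`parseRules` is a small generic adapter shared by the rule-based processors below. The same pattern in a self-contained form, with toy rule types standing in for the config and processor types (names here are illustrative, not the vendored ones):

```go
package main

import "fmt"

// rawRule stands in for a config rule; parsedRule for the processor-side type.
type rawRule struct{ Text string }
type parsedRule struct{ Pattern string }

// parseRules converts a slice of raw rules with a constructor, as in base_rule.go.
func parseRules[T, V any](rules []T, prefix string, newFn func(*T, string) V) []V {
	if len(rules) == 0 {
		return nil
	}
	parsed := make([]V, 0, len(rules))
	for _, r := range rules {
		parsed = append(parsed, newFn(&r, prefix))
	}
	return parsed
}

func main() {
	raw := []rawRule{{Text: "should have comment"}, {Text: "is unused$"}}

	rules := parseRules(raw, "(?i)", func(r *rawRule, prefix string) parsedRule {
		return parsedRule{Pattern: prefix + r.Text}
	})

	fmt.Println(rules) // [{(?i)should have comment} {(?i)is unused$}]
}
```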
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go
index 0e659f0f3e..b09b8b7283 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go
@@ -1,52 +1,45 @@
package processors
import (
- "fmt"
"path/filepath"
"strings"
+ "github.com/ldez/grignotin/goenv"
+
"github.com/golangci/golangci-lint/pkg/goutil"
"github.com/golangci/golangci-lint/pkg/result"
)
var _ Processor = (*Cgo)(nil)
+// Cgo filters cgo artifacts.
+//
+// Some linters (e.g. gosec) return incorrect file paths for cgo files.
+//
+// Requires absolute file paths.
type Cgo struct {
goCacheDir string
}
-func NewCgo(goenv *goutil.Env) *Cgo {
+func NewCgo(env *goutil.Env) *Cgo {
return &Cgo{
- goCacheDir: goenv.Get(goutil.EnvGoCache),
+ goCacheDir: env.Get(goenv.GOCACHE),
}
}
-func (Cgo) Name() string {
+func (*Cgo) Name() string {
return "cgo"
}
-func (p Cgo) Process(issues []result.Issue) ([]result.Issue, error) {
+func (p *Cgo) Process(issues []result.Issue) ([]result.Issue, error) {
return filterIssuesErr(issues, p.shouldPassIssue)
}
-func (Cgo) Finish() {}
-
-func (p Cgo) shouldPassIssue(issue *result.Issue) (bool, error) {
- // some linters (e.g. gosec, deadcode) return incorrect filepaths for cgo issues,
- // also cgo files have strange issues looking like false positives.
-
- // cache dir contains all preprocessed files including cgo files
-
- issueFilePath := issue.FilePath()
- if !filepath.IsAbs(issue.FilePath()) {
- absPath, err := filepath.Abs(issue.FilePath())
- if err != nil {
- return false, fmt.Errorf("failed to build abs path for %q: %w", issue.FilePath(), err)
- }
- issueFilePath = absPath
- }
+func (*Cgo) Finish() {}
- if p.goCacheDir != "" && strings.HasPrefix(issueFilePath, p.goCacheDir) {
+func (p *Cgo) shouldPassIssue(issue *result.Issue) (bool, error) {
+ // [p.goCacheDir] contains all preprocessed files including cgo files.
+ if p.goCacheDir != "" && strings.HasPrefix(issue.FilePath(), p.goCacheDir) {
return false, nil
}
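The simplified check relies on reports carrying absolute paths and on knowing GOCACHE. Outside the linter you could approximate the same filter as below; reading GOCACHE via `go env` is my assumption for a standalone sketch, the vendored code goes through its goutil/goenv wrappers instead:

```go
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// isCgoArtifact reports whether a path points into the Go build cache,
// which is where preprocessed cgo files live.
func isCgoArtifact(goCacheDir, path string) bool {
	return goCacheDir != "" && strings.HasPrefix(path, goCacheDir)
}

func main() {
	out, err := exec.Command("go", "env", "GOCACHE").Output()
	if err != nil {
		fmt.Println("cannot read GOCACHE:", err)
		return
	}
	goCache := strings.TrimSpace(string(out))

	fmt.Println(isCgoArtifact(goCache, goCache+"/ab/abcdef.go")) // true
	fmt.Println(isCgoArtifact(goCache, "/home/user/project/main.go"))
}
```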
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/diff.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/diff.go
index c602cdc65a..a7e268b4b7 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/diff.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/diff.go
@@ -2,6 +2,7 @@ package processors
import (
"bytes"
+ "context"
"fmt"
"io"
"os"
@@ -17,9 +18,16 @@ const envGolangciDiffProcessorPatch = "GOLANGCI_DIFF_PROCESSOR_PATCH"
var _ Processor = (*Diff)(nil)
+// Diff filters issues based on options `new`, `new-from-rev`, etc.
+//
+// Uses `git`.
+// The paths inside the patch are relative to the path where git is run (the same location where golangci-lint is run).
+//
+// Warning: it doesn't use the `path-prefix` option.
type Diff struct {
onlyNew bool
fromRev string
+ fromMergeBase string
patchFilePath string
wholeFiles bool
patch string
@@ -29,38 +37,45 @@ func NewDiff(cfg *config.Issues) *Diff {
return &Diff{
onlyNew: cfg.Diff,
fromRev: cfg.DiffFromRevision,
+ fromMergeBase: cfg.DiffFromMergeBase,
patchFilePath: cfg.DiffPatchFilePath,
wholeFiles: cfg.WholeFiles,
patch: os.Getenv(envGolangciDiffProcessorPatch),
}
}
-func (Diff) Name() string {
+func (*Diff) Name() string {
return "diff"
}
-func (p Diff) Process(issues []result.Issue) ([]result.Issue, error) {
- if !p.onlyNew && p.fromRev == "" && p.patchFilePath == "" && p.patch == "" { // no need to work
+func (p *Diff) Process(issues []result.Issue) ([]result.Issue, error) {
+ if !p.onlyNew && p.fromRev == "" && p.fromMergeBase == "" && p.patchFilePath == "" && p.patch == "" {
return issues, nil
}
var patchReader io.Reader
- if p.patchFilePath != "" {
+ switch {
+ case p.patchFilePath != "":
patch, err := os.ReadFile(p.patchFilePath)
if err != nil {
return nil, fmt.Errorf("can't read from patch file %s: %w", p.patchFilePath, err)
}
+
patchReader = bytes.NewReader(patch)
- } else if p.patch != "" {
+
+ case p.patch != "":
patchReader = strings.NewReader(p.patch)
}
- c := revgrep.Checker{
+ checker := revgrep.Checker{
Patch: patchReader,
RevisionFrom: p.fromRev,
+ MergeBase: p.fromMergeBase,
WholeFiles: p.wholeFiles,
}
- if err := c.Prepare(); err != nil {
+
+ err := checker.Prepare(context.Background())
+ if err != nil {
return nil, fmt.Errorf("can't prepare diff by revgrep: %w", err)
}
@@ -70,15 +85,16 @@ func (p Diff) Process(issues []result.Issue) ([]result.Issue, error) {
return issue
}
- hunkPos, isNew := c.IsNewIssue(issue)
+ hunkPos, isNew := checker.IsNew(issue.WorkingDirectoryRelativePath, issue.Line())
if !isNew {
return nil
}
newIssue := *issue
newIssue.HunkPos = hunkPos
+
return &newIssue
}), nil
}
-func (Diff) Finish() {}
+func (*Diff) Finish() {}
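The rewritten processor passes an explicit context to revgrep and asks, per path and line, whether a report falls inside the diff. A hedged sketch that uses only the calls visible in this hunk (`Checker`, `Prepare`, `IsNew`); any default behaviour beyond that is revgrep's own and not documented here:

```go
package main

import (
	"context"
	"fmt"

	"github.com/golangci/revgrep"
)

func main() {
	// A zero-value Checker (no Patch reader, no revision) lets revgrep run git
	// itself; the processor instead fills Patch/RevisionFrom/MergeBase from the
	// configuration, as shown above.
	checker := revgrep.Checker{}

	if err := checker.Prepare(context.Background()); err != nil {
		fmt.Println("prepare failed:", err)
		return
	}

	// Ask whether line 42 of a (hypothetical) file is part of the change set;
	// hunkPos is what the processor stores in Issue.HunkPos.
	hunkPos, isNew := checker.IsNew("internal/example.go", 42)
	fmt.Println(hunkPos, isNew)
}
```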
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude.go
deleted file mode 100644
index 5431204502..0000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude.go
+++ /dev/null
@@ -1,55 +0,0 @@
-package processors
-
-import (
- "fmt"
- "regexp"
- "strings"
-
- "github.com/golangci/golangci-lint/pkg/config"
- "github.com/golangci/golangci-lint/pkg/result"
-)
-
-var _ Processor = (*Exclude)(nil)
-
-type Exclude struct {
- name string
-
- pattern *regexp.Regexp
-}
-
-func NewExclude(cfg *config.Issues) *Exclude {
- p := &Exclude{name: "exclude"}
-
- var pattern string
- if len(cfg.ExcludePatterns) != 0 {
- pattern = fmt.Sprintf("(%s)", strings.Join(cfg.ExcludePatterns, "|"))
- }
-
- prefix := caseInsensitivePrefix
- if cfg.ExcludeCaseSensitive {
- p.name = "exclude-case-sensitive"
- prefix = ""
- }
-
- if pattern != "" {
- p.pattern = regexp.MustCompile(prefix + pattern)
- }
-
- return p
-}
-
-func (p Exclude) Name() string {
- return p.name
-}
-
-func (p Exclude) Process(issues []result.Issue) ([]result.Issue, error) {
- if p.pattern == nil {
- return issues, nil
- }
-
- return filterIssues(issues, func(issue *result.Issue) bool {
- return !p.pattern.MatchString(issue.Text)
- }), nil
-}
-
-func (Exclude) Finish() {}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go
deleted file mode 100644
index b468c51013..0000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go
+++ /dev/null
@@ -1,106 +0,0 @@
-package processors
-
-import (
- "regexp"
-
- "github.com/golangci/golangci-lint/pkg/config"
- "github.com/golangci/golangci-lint/pkg/fsutils"
- "github.com/golangci/golangci-lint/pkg/logutils"
- "github.com/golangci/golangci-lint/pkg/result"
-)
-
-var _ Processor = (*ExcludeRules)(nil)
-
-type excludeRule struct {
- baseRule
-}
-
-type ExcludeRules struct {
- name string
-
- log logutils.Log
- files *fsutils.Files
-
- rules []excludeRule
-}
-
-func NewExcludeRules(log logutils.Log, files *fsutils.Files, cfg *config.Issues) *ExcludeRules {
- p := &ExcludeRules{
- name: "exclude-rules",
- files: files,
- log: log,
- }
-
- prefix := caseInsensitivePrefix
- if cfg.ExcludeCaseSensitive {
- prefix = ""
- p.name = "exclude-rules-case-sensitive"
- }
-
- excludeRules := cfg.ExcludeRules
-
- if cfg.UseDefaultExcludes {
- for _, r := range config.GetExcludePatterns(cfg.IncludeDefaultExcludes) {
- excludeRules = append(excludeRules, config.ExcludeRule{
- BaseRule: config.BaseRule{
- Text: r.Pattern,
- Linters: []string{r.Linter},
- },
- })
- }
- }
-
- p.rules = createRules(excludeRules, prefix)
-
- return p
-}
-
-func (p ExcludeRules) Name() string { return p.name }
-
-func (p ExcludeRules) Process(issues []result.Issue) ([]result.Issue, error) {
- if len(p.rules) == 0 {
- return issues, nil
- }
-
- return filterIssues(issues, func(issue *result.Issue) bool {
- for _, rule := range p.rules {
- rule := rule
- if rule.match(issue, p.files, p.log) {
- return false
- }
- }
-
- return true
- }), nil
-}
-
-func (ExcludeRules) Finish() {}
-
-func createRules(rules []config.ExcludeRule, prefix string) []excludeRule {
- parsedRules := make([]excludeRule, 0, len(rules))
-
- for _, rule := range rules {
- parsedRule := excludeRule{}
- parsedRule.linters = rule.Linters
-
- if rule.Text != "" {
- parsedRule.text = regexp.MustCompile(prefix + rule.Text)
- }
-
- if rule.Source != "" {
- parsedRule.source = regexp.MustCompile(prefix + rule.Source)
- }
-
- if rule.Path != "" {
- parsedRule.path = regexp.MustCompile(fsutils.NormalizePathInRegex(rule.Path))
- }
-
- if rule.PathExcept != "" {
- parsedRule.pathExcept = regexp.MustCompile(fsutils.NormalizePathInRegex(rule.PathExcept))
- }
-
- parsedRules = append(parsedRules, parsedRule)
- }
-
- return parsedRules
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclusion_generated_file_filter.go
similarity index 72%
rename from vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go
rename to vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclusion_generated_file_filter.go
index 5cc5e530ce..ce4e2e2146 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclusion_generated_file_filter.go
@@ -18,19 +18,32 @@ const (
AutogeneratedModeDisable = "disable"
)
+// The values must be in lowercase.
const (
genCodeGenerated = "code generated"
genDoNotEdit = "do not edit"
- genAutoFile = "autogenerated file" // easyjson
+
+ // Related to easyjson.
+ genAutoFile = "autogenerated file"
+
+ //nolint:lll // Long URL
+ // Related to Swagger Codegen.
+ // https://github.com/swagger-api/swagger-codegen/blob/61cfeac3b9d855b4eb8bffa0d118bece117bcb7d/modules/swagger-codegen/src/main/resources/go/partial_header.mustache#L16
+ // https://github.com/swagger-api/swagger-codegen/issues/12358
+ genSwaggerCodegen = "* generated by: swagger codegen "
)
-var _ Processor = (*AutogeneratedExclude)(nil)
+var _ Processor = (*GeneratedFileFilter)(nil)
type fileSummary struct {
generated bool
}
-type AutogeneratedExclude struct {
+// GeneratedFileFilter filters generated files.
+// - mode "lax": see `isGeneratedFileLax` documentation.
+// - mode "strict": see `isGeneratedFileStrict` documentation.
+// - mode "disable": skips this processor.
+type GeneratedFileFilter struct {
debugf logutils.DebugFunc
mode string
@@ -39,20 +52,20 @@ type AutogeneratedExclude struct {
fileSummaryCache map[string]*fileSummary
}
-func NewAutogeneratedExclude(mode string) *AutogeneratedExclude {
- return &AutogeneratedExclude{
- debugf: logutils.Debug(logutils.DebugKeyAutogenExclude),
+func NewGeneratedFileFilter(mode string) *GeneratedFileFilter {
+ return &GeneratedFileFilter{
+ debugf: logutils.Debug(logutils.DebugKeyGeneratedFileFilter),
mode: mode,
strictPattern: regexp.MustCompile(`^// Code generated .* DO NOT EDIT\.$`),
fileSummaryCache: map[string]*fileSummary{},
}
}
-func (*AutogeneratedExclude) Name() string {
- return "autogenerated_exclude"
+func (*GeneratedFileFilter) Name() string {
+ return "generated_file_filter"
}
-func (p *AutogeneratedExclude) Process(issues []result.Issue) ([]result.Issue, error) {
+func (p *GeneratedFileFilter) Process(issues []result.Issue) ([]result.Issue, error) {
if p.mode == AutogeneratedModeDisable {
return issues, nil
}
@@ -60,14 +73,9 @@ func (p *AutogeneratedExclude) Process(issues []result.Issue) ([]result.Issue, e
return filterIssuesErr(issues, p.shouldPassIssue)
}
-func (*AutogeneratedExclude) Finish() {}
-
-func (p *AutogeneratedExclude) shouldPassIssue(issue *result.Issue) (bool, error) {
- if issue.FromLinter == typeCheckName {
- // don't hide typechecking errors in generated files: users expect to see why the project isn't compiling
- return true, nil
- }
+func (*GeneratedFileFilter) Finish() {}
+func (p *GeneratedFileFilter) shouldPassIssue(issue *result.Issue) (bool, error) {
if filepath.Base(issue.FilePath()) == "go.mod" {
return true, nil
}
@@ -105,8 +113,8 @@ func (p *AutogeneratedExclude) shouldPassIssue(issue *result.Issue) (bool, error
// isGeneratedFileLax reports whether the source file is generated code.
// The function uses a bit laxer rules than isGeneratedFileStrict to match more generated code.
// See https://github.com/golangci/golangci-lint/issues/48 and https://github.com/golangci/golangci-lint/issues/72.
-func (p *AutogeneratedExclude) isGeneratedFileLax(doc string) bool {
- markers := []string{genCodeGenerated, genDoNotEdit, genAutoFile}
+func (p *GeneratedFileFilter) isGeneratedFileLax(doc string) bool {
+ markers := []string{genCodeGenerated, genDoNotEdit, genAutoFile, genSwaggerCodegen}
doc = strings.ToLower(doc)
@@ -129,7 +137,7 @@ func (p *AutogeneratedExclude) isGeneratedFileLax(doc string) bool {
//
// This line must appear before the first non-comment, non-blank text in the file.
// Based on https://go.dev/s/generatedcode.
-func (p *AutogeneratedExclude) isGeneratedFileStrict(filePath string) (bool, error) {
+func (p *GeneratedFileFilter) isGeneratedFileStrict(filePath string) (bool, error) {
file, err := parser.ParseFile(token.NewFileSet(), filePath, nil, parser.PackageClauseOnly|parser.ParseComments)
if err != nil {
return false, fmt.Errorf("failed to parse file: %w", err)
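Strict mode, as shown above, parses only the package clause plus comments and matches each comment line against the `^// Code generated .* DO NOT EDIT\.$` pattern. A standalone approximation follows; it checks all leading comments rather than enforcing the exact "before the first non-comment text" rule, and the file name and source are made up:

```go
package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"regexp"
	"strings"
)

var strictPattern = regexp.MustCompile(`^// Code generated .* DO NOT EDIT\.$`)

// isGeneratedStrict reports whether src carries the canonical
// "Code generated ... DO NOT EDIT." marker near the package clause,
// using the same parser flags as the vendored processor.
func isGeneratedStrict(filename, src string) (bool, error) {
	file, err := parser.ParseFile(token.NewFileSet(), filename, src,
		parser.PackageClauseOnly|parser.ParseComments)
	if err != nil {
		return false, err
	}

	for _, group := range file.Comments {
		for _, comment := range group.List {
			for _, line := range strings.Split(comment.Text, "\n") {
				if strictPattern.MatchString(line) {
					return true, nil
				}
			}
		}
	}
	return false, nil
}

func main() {
	src := "// Code generated by mockgen. DO NOT EDIT.\npackage demo\n"
	ok, err := isGeneratedStrict("demo.go", src)
	fmt.Println(ok, err) // true <nil>
}
```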
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclusion_paths.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclusion_paths.go
new file mode 100644
index 0000000000..bd43048702
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclusion_paths.go
@@ -0,0 +1,118 @@
+package processors
+
+import (
+ "fmt"
+ "regexp"
+
+ "github.com/golangci/golangci-lint/pkg/config"
+ "github.com/golangci/golangci-lint/pkg/fsutils"
+ "github.com/golangci/golangci-lint/pkg/logutils"
+ "github.com/golangci/golangci-lint/pkg/result"
+)
+
+var _ Processor = (*ExclusionPaths)(nil)
+
+type ExclusionPaths struct {
+ pathPatterns []*regexp.Regexp
+ pathExceptPatterns []*regexp.Regexp
+
+ warnUnused bool
+ excludedPathCounter map[*regexp.Regexp]int
+ excludedPathExceptCounter map[*regexp.Regexp]int
+
+ log logutils.Log
+}
+
+func NewExclusionPaths(log logutils.Log, cfg *config.LinterExclusions) (*ExclusionPaths, error) {
+ excludedPathCounter := make(map[*regexp.Regexp]int)
+
+ var pathPatterns []*regexp.Regexp
+ for _, p := range cfg.Paths {
+ p = fsutils.NormalizePathInRegex(p)
+
+ patternRe, err := regexp.Compile(p)
+ if err != nil {
+ return nil, fmt.Errorf("can't compile regexp %q: %w", p, err)
+ }
+
+ pathPatterns = append(pathPatterns, patternRe)
+ excludedPathCounter[patternRe] = 0
+ }
+
+ excludedPathExceptCounter := make(map[*regexp.Regexp]int)
+
+ var pathExceptPatterns []*regexp.Regexp
+ for _, p := range cfg.PathsExcept {
+ p = fsutils.NormalizePathInRegex(p)
+
+ patternRe, err := regexp.Compile(p)
+ if err != nil {
+ return nil, fmt.Errorf("can't compile regexp %q: %w", p, err)
+ }
+
+ pathExceptPatterns = append(pathExceptPatterns, patternRe)
+ excludedPathExceptCounter[patternRe] = 0
+ }
+
+ return &ExclusionPaths{
+ pathPatterns: pathPatterns,
+ pathExceptPatterns: pathExceptPatterns,
+ warnUnused: cfg.WarnUnused,
+ excludedPathCounter: excludedPathCounter,
+ excludedPathExceptCounter: excludedPathExceptCounter,
+ log: log.Child(logutils.DebugKeyExclusionPaths),
+ }, nil
+}
+
+func (*ExclusionPaths) Name() string {
+ return "exclusion_paths"
+}
+
+func (p *ExclusionPaths) Process(issues []result.Issue) ([]result.Issue, error) {
+ if len(p.pathPatterns) == 0 && len(p.pathExceptPatterns) == 0 {
+ return issues, nil
+ }
+
+ return filterIssues(issues, p.shouldPassIssue), nil
+}
+
+func (p *ExclusionPaths) Finish() {
+ for pattern, count := range p.excludedPathCounter {
+ if p.warnUnused && count == 0 {
+ p.log.Warnf("The pattern %q match %d issues", pattern, count)
+ } else {
+ p.log.Infof("Skipped %d issues by pattern %q", count, pattern)
+ }
+ }
+
+ for pattern, count := range p.excludedPathExceptCounter {
+ if p.warnUnused && count == 0 {
+ p.log.Warnf("The pattern %q match %d issues", pattern, count)
+ }
+ }
+}
+
+func (p *ExclusionPaths) shouldPassIssue(issue *result.Issue) bool {
+ for _, pattern := range p.pathPatterns {
+ if pattern.MatchString(issue.RelativePath) {
+ p.excludedPathCounter[pattern]++
+ return false
+ }
+ }
+
+ if len(p.pathExceptPatterns) == 0 {
+ return true
+ }
+
+ matched := false
+ for _, pattern := range p.pathExceptPatterns {
+ if !pattern.MatchString(issue.RelativePath) {
+ continue
+ }
+
+ p.excludedPathExceptCounter[pattern]++
+ matched = true
+ }
+
+ return !matched
+}
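A condensed, standalone sketch of what this new processor does: compile each path pattern once, match it against the issue's relative path, and count matches so unused patterns can be reported in `Finish`. The patterns and file paths below are invented for illustration:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Compile exclusion patterns up front, as NewExclusionPaths does, and keep a
	// per-pattern counter so unused patterns can be warned about at the end.
	patterns := []string{`_test\.go$`, `^third_party/`}

	compiled := make([]*regexp.Regexp, 0, len(patterns))
	counter := make(map[*regexp.Regexp]int)
	for _, p := range patterns {
		re := regexp.MustCompile(p) // the vendored code returns the error instead of panicking
		compiled = append(compiled, re)
		counter[re] = 0
	}

	files := []string{"pkg/foo.go", "pkg/foo_test.go", "third_party/lib.go"}
	for _, f := range files {
		excluded := false
		for _, re := range compiled {
			if re.MatchString(f) {
				counter[re]++
				excluded = true
				break
			}
		}
		fmt.Printf("%-22s excluded=%v\n", f, excluded)
	}

	for re, n := range counter {
		fmt.Printf("pattern %q matched %d issue path(s)\n", re, n)
	}
}
```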
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclusion_presets.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclusion_presets.go
new file mode 100644
index 0000000000..17299b90c5
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclusion_presets.go
@@ -0,0 +1,138 @@
+package processors
+
+import "github.com/golangci/golangci-lint/pkg/config"
+
+var linterExclusionPresets = map[string][]config.ExcludeRule{
+ config.ExclusionPresetComments: {
+ {
+ // Annoying issue about not having a comment. The rare codebase has such comments.
+ // CheckPackageComment, CheckExportedFunctionDocs, CheckExportedTypeDocs, CheckExportedVarDocs
+ BaseRule: config.BaseRule{
+ Text: "(ST1000|ST1020|ST1021|ST1022)",
+ Linters: []string{"stylecheck"},
+ InternalReference: "EXC0011",
+ },
+ },
+ {
+ // Annoying issue about not having a comment. The rare codebase has such comments.
+ // rule: exported
+ BaseRule: config.BaseRule{
+ Text: `exported (.+) should have comment( \(or a comment on this block\))? or be unexported`,
+ Linters: []string{"revive"},
+ InternalReference: "EXC0012",
+ },
+ },
+ {
+ // Annoying issue about not having a comment. The rare codebase has such comments.
+ // rule: package-comments
+ BaseRule: config.BaseRule{
+ Text: `package comment should be of the form "(.+)..."`,
+ Linters: []string{"revive"},
+ InternalReference: "EXC0013",
+ },
+ },
+ {
+ // Annoying issue about not having a comment. The rare codebase has such comments.
+ // rule: exported
+ BaseRule: config.BaseRule{
+ Text: `comment on exported (.+) should be of the form "(.+)..."`,
+ Linters: []string{"revive"},
+ InternalReference: "EXC0014",
+ },
+ },
+ {
+ // Annoying issue about not having a comment. The rare codebase has such comments.
+ // rule: package-comments
+ BaseRule: config.BaseRule{
+ Text: `should have a package comment`,
+ Linters: []string{"revive"},
+ InternalReference: "EXC0015",
+ },
+ },
+ },
+ config.ExclusionPresetStdErrorHandling: {
+ {
+ // Almost all programs ignore errors on these functions and in most cases it's ok.
+ BaseRule: config.BaseRule{
+ Text: "Error return value of .((os\\.)?std(out|err)\\..*|.*Close" +
+ "|.*Flush|os\\.Remove(All)?|.*print(f|ln)?|os\\.(Un)?Setenv). is not checked",
+ Linters: []string{"errcheck"},
+ InternalReference: "EXC0001",
+ },
+ },
+ },
+ config.ExclusionPresetCommonFalsePositives: {
+ {
+ // Too many false-positives on 'unsafe' usage.
+ BaseRule: config.BaseRule{
+ Text: "G103: Use of unsafe calls should be audited",
+ Linters: []string{"gosec"},
+ InternalReference: "EXC0006",
+ },
+ },
+ {
+ // Too many false-positives for parametrized shell calls.
+ BaseRule: config.BaseRule{
+ Text: "G204: Subprocess launched with variable",
+ Linters: []string{"gosec"},
+ InternalReference: "EXC0007",
+ },
+ },
+ {
+ // False positive is triggered by 'src, err := ioutil.ReadFile(filename)'.
+ BaseRule: config.BaseRule{
+ Text: "G304: Potential file inclusion via variable",
+ Linters: []string{"gosec"},
+ InternalReference: "EXC0010",
+ },
+ },
+ },
+ config.ExclusionPresetLegacy: {
+ {
+ // Common false positives.
+ BaseRule: config.BaseRule{
+ Text: "(possible misuse of unsafe.Pointer|should have signature)",
+ Linters: []string{"govet"},
+ InternalReference: "EXC0004",
+ },
+ },
+ {
+ // Developers tend to write in C-style with an explicit 'break' in a 'switch', so it's ok to ignore.
+ // CheckScopedBreak
+ BaseRule: config.BaseRule{
+ Text: "SA4011",
+ Linters: []string{"staticcheck"},
+ InternalReference: "EXC0005",
+ },
+ },
+ {
+ // Duplicated errcheck checks.
+ // Errors unhandled.
+ BaseRule: config.BaseRule{
+ Text: "G104",
+ Linters: []string{"gosec"},
+ InternalReference: "EXC0008",
+ },
+ },
+ {
+ // Too many issues in popular repos.
+ BaseRule: config.BaseRule{
+ Text: "(G301|G302|G307): Expect (directory permissions to be 0750|file permissions to be 0600) or less",
+ Linters: []string{"gosec"},
+ InternalReference: "EXC0009",
+ },
+ },
+ },
+}
+
+func getLinterExclusionPresets(names []string) []config.ExcludeRule {
+ var rules []config.ExcludeRule
+
+ for _, name := range names {
+ if p, ok := linterExclusionPresets[name]; ok {
+ rules = append(rules, p...)
+ }
+ }
+
+ return rules
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclusion_rules.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclusion_rules.go
new file mode 100644
index 0000000000..7730a53dd0
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclusion_rules.go
@@ -0,0 +1,164 @@
+package processors
+
+import (
+ "fmt"
+ "slices"
+ "strings"
+
+ "github.com/golangci/golangci-lint/pkg/config"
+ "github.com/golangci/golangci-lint/pkg/fsutils"
+ "github.com/golangci/golangci-lint/pkg/logutils"
+ "github.com/golangci/golangci-lint/pkg/result"
+)
+
+var _ Processor = (*ExclusionRules)(nil)
+
+type ExclusionRules struct {
+ log logutils.Log
+ files *fsutils.Files
+
+ warnUnused bool
+ skippedCounter map[string]int
+
+ rules []excludeRule
+}
+
+func NewExclusionRules(log logutils.Log, files *fsutils.Files,
+ cfg *config.LinterExclusions, oldCfg *config.Issues) *ExclusionRules {
+ p := &ExclusionRules{
+ log: log,
+ files: files,
+ warnUnused: cfg.WarnUnused,
+ skippedCounter: map[string]int{},
+ }
+
+ // TODO(ldez) remove prefix in v2: the matching must be case sensitive, users can add `(?i)` inside the patterns if needed.
+ prefix := caseInsensitivePrefix
+ if oldCfg.ExcludeCaseSensitive {
+ prefix = ""
+ }
+
+ excludeRules := slices.Concat(slices.Clone(cfg.Rules),
+ filterInclude(getLinterExclusionPresets(cfg.Presets), oldCfg.IncludeDefaultExcludes))
+
+ p.rules = parseRules(excludeRules, prefix, newExcludeRule)
+
+ // TODO(ldez): should be removed in v2.
+ for _, pattern := range oldCfg.ExcludePatterns {
+ if pattern == "" {
+ continue
+ }
+
+ r := &config.ExcludeRule{
+ BaseRule: config.BaseRule{
+ Path: `.+\.go`,
+ Text: pattern,
+ },
+ }
+
+ rule := newExcludeRule(r, prefix)
+
+ p.rules = append(p.rules, rule)
+ }
+
+ for _, rule := range p.rules {
+ if rule.internalReference == "" {
+ p.skippedCounter[rule.String()] = 0
+ }
+ }
+
+ return p
+}
+
+func (*ExclusionRules) Name() string {
+ return "exclusion_rules"
+}
+
+func (p *ExclusionRules) Process(issues []result.Issue) ([]result.Issue, error) {
+ if len(p.rules) == 0 {
+ return issues, nil
+ }
+
+ return filterIssues(issues, func(issue *result.Issue) bool {
+ for _, rule := range p.rules {
+ if !rule.match(issue, p.files, p.log) {
+ continue
+ }
+
+ // Ignore default rules.
+ if rule.internalReference == "" {
+ p.skippedCounter[rule.String()]++
+ }
+
+ return false
+ }
+
+ return true
+ }), nil
+}
+
+func (p *ExclusionRules) Finish() {
+ for rule, count := range p.skippedCounter {
+ if p.warnUnused && count == 0 {
+ p.log.Warnf("Skipped %d issues by rules: [%s]", count, rule)
+ } else {
+ p.log.Infof("Skipped %d issues by rules: [%s]", count, rule)
+ }
+ }
+}
+
+type excludeRule struct {
+ baseRule
+
+ // For compatibility with exclude-use-default/include.
+ internalReference string `mapstructure:"-"`
+}
+
+func newExcludeRule(rule *config.ExcludeRule, prefix string) excludeRule {
+ return excludeRule{
+ baseRule: newBaseRule(&rule.BaseRule, prefix),
+ internalReference: rule.InternalReference,
+ }
+}
+
+func (e excludeRule) String() string {
+ var msg []string
+
+ if e.text != nil && e.text.String() != "" {
+ msg = append(msg, fmt.Sprintf("Text: %q", e.text))
+ }
+
+ if e.source != nil && e.source.String() != "" {
+ msg = append(msg, fmt.Sprintf("Source: %q", e.source))
+ }
+
+ if e.path != nil && e.path.String() != "" {
+ msg = append(msg, fmt.Sprintf("Path: %q", e.path))
+ }
+
+ if e.pathExcept != nil && e.pathExcept.String() != "" {
+ msg = append(msg, fmt.Sprintf("Path Except: %q", e.pathExcept))
+ }
+
+ if len(e.linters) > 0 {
+ msg = append(msg, fmt.Sprintf("Linters: %q", strings.Join(e.linters, ", ")))
+ }
+
+ return strings.Join(msg, ", ")
+}
+
+// TODO(ldez): must be removed in v2, only for compatibility with exclude-use-default/include.
+func filterInclude(rules []config.ExcludeRule, refs []string) []config.ExcludeRule {
+ if len(refs) == 0 {
+ return rules
+ }
+
+ var filteredRules []config.ExcludeRule
+ for _, rule := range rules {
+ if !slices.Contains(refs, rule.InternalReference) {
+ filteredRules = append(filteredRules, rule)
+ }
+ }
+
+ return filteredRules
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go
index 6a1387c872..5f39e064b9 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go
@@ -3,7 +3,6 @@ package processors
import (
"go/parser"
"go/token"
- "path/filepath"
"strings"
"sync"
"time"
@@ -23,9 +22,13 @@ type adjustMap struct {
m map[string]posMapper
}
-// FilenameUnadjuster is needed because a lot of linters use fset.Position(f.Pos())
-// to get filename. And they return adjusted filename (e.g. *.qtpl) for an issue. We need
-// restore real .go filename to properly output it, parse it, etc.
+// FilenameUnadjuster fixes filename based on adjusted and unadjusted position (related to line directives and cgo).
+//
+// A lot of linters use `fset.Position(f.Pos())` to get filename,
+// and they return the adjusted filename (e.g. `*.qtpl`) for an issue.
+// We need to restore the real `.go` filename to properly output it, parse it, etc.
+//
+// Requires absolute file paths.
type FilenameUnadjuster struct {
m map[string]posMapper // map from adjusted filename to position mapper: adjusted -> unadjusted position
log logutils.Log
@@ -36,8 +39,10 @@ func NewFilenameUnadjuster(pkgs []*packages.Package, log logutils.Log) *Filename
m := adjustMap{m: map[string]posMapper{}}
startedAt := time.Now()
+
var wg sync.WaitGroup
wg.Add(len(pkgs))
+
for _, pkg := range pkgs {
go func(pkg *packages.Package) {
// It's important to call func here to run GC
@@ -45,7 +50,9 @@ func NewFilenameUnadjuster(pkgs []*packages.Package, log logutils.Log) *Filename
wg.Done()
}(pkg)
}
+
wg.Wait()
+
log.Infof("Pre-built %d adjustments in %s", len(m.m), time.Since(startedAt))
return &FilenameUnadjuster{
@@ -61,17 +68,7 @@ func (*FilenameUnadjuster) Name() string {
func (p *FilenameUnadjuster) Process(issues []result.Issue) ([]result.Issue, error) {
return transformIssues(issues, func(issue *result.Issue) *result.Issue {
- issueFilePath := issue.FilePath()
- if !filepath.IsAbs(issue.FilePath()) {
- absPath, err := filepath.Abs(issue.FilePath())
- if err != nil {
- p.log.Warnf("failed to build abs path for %q: %s", issue.FilePath(), err)
- return issue
- }
- issueFilePath = absPath
- }
-
- mapper := p.m[issueFilePath]
+ mapper := p.m[issue.FilePath()]
if mapper == nil {
return issue
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go
index 4915dc479a..610f249ef5 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go
@@ -1,16 +1,26 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+//
+// This file is inspired by go/analysis/internal/checker/checker.go
+
package processors
import (
- "bytes"
+ "errors"
"fmt"
+ "maps"
"os"
- "path/filepath"
- "sort"
- "strings"
+ "slices"
- "github.com/golangci/golangci-lint/internal/robustio"
+ "github.com/golangci/golangci-lint/internal/x/tools/diff"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/fsutils"
+ "github.com/golangci/golangci-lint/pkg/goformatters"
+ "github.com/golangci/golangci-lint/pkg/goformatters/gci"
+ "github.com/golangci/golangci-lint/pkg/goformatters/gofmt"
+ "github.com/golangci/golangci-lint/pkg/goformatters/gofumpt"
+ "github.com/golangci/golangci-lint/pkg/goformatters/goimports"
"github.com/golangci/golangci-lint/pkg/logutils"
"github.com/golangci/golangci-lint/pkg/result"
"github.com/golangci/golangci-lint/pkg/timeutils"
@@ -18,19 +28,25 @@ import (
var _ Processor = (*Fixer)(nil)
+const filePerm = 0644
+
+// Fixer fixes reports if possible.
+// The reports that are not fixed are passed to the next processor.
type Fixer struct {
cfg *config.Config
log logutils.Log
fileCache *fsutils.FileCache
sw *timeutils.Stopwatch
+ formatter *goformatters.MetaFormatter
}
-func NewFixer(cfg *config.Config, log logutils.Log, fileCache *fsutils.FileCache) *Fixer {
+func NewFixer(cfg *config.Config, log logutils.Log, fileCache *fsutils.FileCache, formatter *goformatters.MetaFormatter) *Fixer {
return &Fixer{
cfg: cfg,
log: log,
fileCache: fileCache,
sw: timeutils.NewStopwatch("fixer", log),
+ formatter: formatter,
}
}
@@ -43,219 +59,246 @@ func (p Fixer) Process(issues []result.Issue) ([]result.Issue, error) {
return issues, nil
}
- outIssues := make([]result.Issue, 0, len(issues))
- issuesToFixPerFile := map[string][]result.Issue{}
- for i := range issues {
- issue := &issues[i]
- if issue.Replacement == nil {
- outIssues = append(outIssues, *issue)
- continue
- }
+ p.log.Infof("Applying suggested fixes")
- issuesToFixPerFile[issue.FilePath()] = append(issuesToFixPerFile[issue.FilePath()], *issue)
- }
-
- for file, issuesToFix := range issuesToFixPerFile {
- var err error
- p.sw.TrackStage("all", func() {
- err = p.fixIssuesInFile(file, issuesToFix)
- })
- if err != nil {
- p.log.Errorf("Failed to fix issues in file %s: %s", file, err)
-
- // show issues only if can't fix them
- outIssues = append(outIssues, issuesToFix...)
- }
+ notFixableIssues, err := timeutils.TrackStage(p.sw, "all", func() ([]result.Issue, error) {
+ return p.process(issues)
+ })
+ if err != nil {
+ p.log.Warnf("Failed to fix issues: %v", err)
}
p.printStat()
- return outIssues, nil
+ return notFixableIssues, nil
}
-func (Fixer) Finish() {}
+//nolint:funlen,gocyclo // This function should not be split.
+func (p Fixer) process(issues []result.Issue) ([]result.Issue, error) {
+ // filenames / linters / edits
+ editsByLinter := make(map[string]map[string][]diff.Edit)
-func (p Fixer) fixIssuesInFile(filePath string, issues []result.Issue) error {
- // TODO: don't read the whole file into memory: read line by line;
- // can't just use bufio.scanner: it has a line length limit
- origFileData, err := p.fileCache.GetFileBytes(filePath)
- if err != nil {
- return fmt.Errorf("failed to get file bytes for %s: %w", filePath, err)
- }
+ formatters := []string{gofumpt.Name, goimports.Name, gofmt.Name, gci.Name}
- origFileLines := bytes.Split(origFileData, []byte("\n"))
+ var notFixableIssues []result.Issue
- tmpFileName := filepath.Join(filepath.Dir(filePath), fmt.Sprintf(".%s.golangci_fix", filepath.Base(filePath)))
+ toBeFormattedFiles := make(map[string]struct{})
- tmpOutFile, err := os.Create(tmpFileName)
- if err != nil {
- return fmt.Errorf("failed to make file %s: %w", tmpFileName, err)
- }
-
- // merge multiple issues per line into one issue
- issuesPerLine := map[int][]result.Issue{}
for i := range issues {
- issue := &issues[i]
- issuesPerLine[issue.Line()] = append(issuesPerLine[issue.Line()], *issue)
- }
+ issue := issues[i]
- issues = issues[:0] // reuse the same memory
- for line, lineIssues := range issuesPerLine {
- if mergedIssue := p.mergeLineIssues(line, lineIssues, origFileLines); mergedIssue != nil {
- issues = append(issues, *mergedIssue)
+ if slices.Contains(formatters, issue.FromLinter) {
+ toBeFormattedFiles[issue.FilePath()] = struct{}{}
+ continue
}
- }
- issues = p.findNotIntersectingIssues(issues)
+ if issue.SuggestedFixes == nil || skipNoTextEdit(&issue) {
+ notFixableIssues = append(notFixableIssues, issue)
+ continue
+ }
- if err = p.writeFixedFile(origFileLines, issues, tmpOutFile); err != nil {
- tmpOutFile.Close()
- _ = robustio.RemoveAll(tmpOutFile.Name())
- return err
+ for _, sf := range issue.SuggestedFixes {
+ for _, edit := range sf.TextEdits {
+ start, end := edit.Pos, edit.End
+ if start > end {
+ return nil, fmt.Errorf("%q suggests invalid fix: pos (%v) > end (%v)",
+ issue.FromLinter, edit.Pos, edit.End)
+ }
+
+ edit := diff.Edit{
+ Start: int(start),
+ End: int(end),
+ New: string(edit.NewText),
+ }
+
+ if _, ok := editsByLinter[issue.FilePath()]; !ok {
+ editsByLinter[issue.FilePath()] = make(map[string][]diff.Edit)
+ }
+
+ editsByLinter[issue.FilePath()][issue.FromLinter] = append(editsByLinter[issue.FilePath()][issue.FromLinter], edit)
+ }
+ }
}
- tmpOutFile.Close()
+ // Validate and group the edits to each actual file.
+ editsByPath := make(map[string][]diff.Edit)
+ for path, linterToEdits := range editsByLinter {
+ excludedLinters := make(map[string]struct{})
- if err = robustio.Rename(tmpOutFile.Name(), filePath); err != nil {
- _ = robustio.RemoveAll(tmpOutFile.Name())
- return fmt.Errorf("failed to rename %s -> %s: %w", tmpOutFile.Name(), filePath, err)
- }
+ linters := slices.Collect(maps.Keys(linterToEdits))
- return nil
-}
+ // Does any linter create conflicting edits?
+ for _, linter := range linters {
+ edits := linterToEdits[linter]
+ if _, invalid := validateEdits(edits); invalid > 0 {
+ name, x, y := linter, edits[invalid-1], edits[invalid]
+ excludedLinters[name] = struct{}{}
-func (p Fixer) mergeLineIssues(lineNum int, lineIssues []result.Issue, origFileLines [][]byte) *result.Issue {
- origLine := origFileLines[lineNum-1] // lineNum is 1-based
+ err := diff3Conflict(path, name, name, []diff.Edit{x}, []diff.Edit{y})
+ // TODO(ldez) TUI?
+ p.log.Warnf("Changes related to %q are skipped for the file %q: %v",
+ name, path, err)
+ }
+ }
- if len(lineIssues) == 1 && lineIssues[0].Replacement.Inline == nil {
- return &lineIssues[0]
- }
+ // Does any pair of different linters create edits that conflict?
+ for j := range linters {
+ for k := range linters[:j] {
+ x, y := linters[j], linters[k]
+ if x > y {
+ x, y = y, x
+ }
- // check issues first
- for ind := range lineIssues {
- li := &lineIssues[ind]
+ _, foundX := excludedLinters[x]
+ _, foundY := excludedLinters[y]
+ if foundX || foundY {
+ continue
+ }
- if li.LineRange != nil {
- p.log.Infof("Line %d has multiple issues but at least one of them is ranged: %#v", lineNum, lineIssues)
- return &lineIssues[0]
- }
+ xedits, yedits := linterToEdits[x], linterToEdits[y]
- inline := li.Replacement.Inline
+ combined := slices.Concat(xedits, yedits)
- if inline == nil || len(li.Replacement.NewLines) != 0 || li.Replacement.NeedOnlyDelete {
- p.log.Infof("Line %d has multiple issues but at least one of them isn't inline: %#v", lineNum, lineIssues)
- return li
+ if _, invalid := validateEdits(combined); invalid > 0 {
+ excludedLinters[x] = struct{}{}
+ p.log.Warnf("Changes related to %q are skipped for the file %q due to conflicts with %q.", x, path, y)
+ }
+ }
}
- if inline.StartCol < 0 || inline.Length <= 0 || inline.StartCol+inline.Length > len(origLine) {
- p.log.Warnf("Line %d (%q) has invalid inline fix: %#v, %#v", lineNum, origLine, li, inline)
- return nil
+ var edits []diff.Edit
+ for linter := range linterToEdits {
+ if _, found := excludedLinters[linter]; !found {
+ edits = append(edits, linterToEdits[linter]...)
+ }
}
- }
- return p.applyInlineFixes(lineIssues, origLine, lineNum)
-}
+ editsByPath[path], _ = validateEdits(edits) // remove duplicates. already validated.
+ }
-func (p Fixer) applyInlineFixes(lineIssues []result.Issue, origLine []byte, lineNum int) *result.Issue {
- sort.Slice(lineIssues, func(i, j int) bool {
- return lineIssues[i].Replacement.Inline.StartCol < lineIssues[j].Replacement.Inline.StartCol
- })
+ var editError error
- var newLineBuf bytes.Buffer
- newLineBuf.Grow(len(origLine))
+ var formattedFiles []string
- //nolint:misspell // misspelling is intentional
- // example: origLine="it's becouse of them", StartCol=5, Length=7, NewString="because"
+ // Now we've got a set of valid edits for each file. Apply them.
+ for path, edits := range editsByPath {
+ contents, err := p.fileCache.GetFileBytes(path)
+ if err != nil {
+ editError = errors.Join(editError, fmt.Errorf("%s: %w", path, err))
+ continue
+ }
- curOrigLinePos := 0
- for i := range lineIssues {
- fix := lineIssues[i].Replacement.Inline
- if fix.StartCol < curOrigLinePos {
- p.log.Warnf("Line %d has multiple intersecting issues: %#v", lineNum, lineIssues)
- return nil
+ out, err := diff.ApplyBytes(contents, edits)
+ if err != nil {
+ editError = errors.Join(editError, fmt.Errorf("%s: %w", path, err))
+ continue
}
- if curOrigLinePos != fix.StartCol {
- newLineBuf.Write(origLine[curOrigLinePos:fix.StartCol])
+ // Try to format the file.
+ out = p.formatter.Format(path, out)
+
+ if err := os.WriteFile(path, out, filePerm); err != nil {
+ editError = errors.Join(editError, fmt.Errorf("%s: %w", path, err))
+ continue
}
- newLineBuf.WriteString(fix.NewString)
- curOrigLinePos = fix.StartCol + fix.Length
- }
- if curOrigLinePos != len(origLine) {
- newLineBuf.Write(origLine[curOrigLinePos:])
+
+ formattedFiles = append(formattedFiles, path)
}
- mergedIssue := lineIssues[0] // use text from the first issue (it's not really used)
- mergedIssue.Replacement = &result.Replacement{
- NewLines: []string{newLineBuf.String()},
+ for path := range toBeFormattedFiles {
+ // Skips files already formatted by the previous fix step.
+ if !slices.Contains(formattedFiles, path) {
+ content, err := p.fileCache.GetFileBytes(path)
+ if err != nil {
+ p.log.Warnf("Error reading file %s: %v", path, err)
+ continue
+ }
+
+ out := p.formatter.Format(path, content)
+
+ if err := os.WriteFile(path, out, filePerm); err != nil {
+ editError = errors.Join(editError, fmt.Errorf("%s: %w", path, err))
+ continue
+ }
+ }
}
- return &mergedIssue
+
+ return notFixableIssues, editError
}
-func (p Fixer) findNotIntersectingIssues(issues []result.Issue) []result.Issue {
- sort.SliceStable(issues, func(i, j int) bool {
- a, b := issues[i], issues[j]
- return a.Line() < b.Line()
- })
+func (Fixer) Finish() {}
- var ret []result.Issue
- var currentEnd int
- for i := range issues {
- issue := &issues[i]
- rng := issue.GetLineRange()
- if rng.From <= currentEnd {
- p.log.Infof("Skip issue %#v: intersects with end %d", issue, currentEnd)
- continue // skip intersecting issue
+func (p Fixer) printStat() {
+ p.sw.PrintStages()
+}
+
+func skipNoTextEdit(issue *result.Issue) bool {
+ var onlyMessage int
+ for _, sf := range issue.SuggestedFixes {
+ if len(sf.TextEdits) == 0 {
+ onlyMessage++
}
- p.log.Infof("Fix issue %#v with range %v", issue, issue.GetLineRange())
- ret = append(ret, *issue)
- currentEnd = rng.To
}
- return ret
+ return len(issue.SuggestedFixes) == onlyMessage
}
-func (p Fixer) writeFixedFile(origFileLines [][]byte, issues []result.Issue, tmpOutFile *os.File) error {
- // issues aren't intersecting
+// validateEdits returns a list of edits that is sorted and
+// contains no duplicate edits. Returns the index of some
+// overlapping adjacent edits if there is one and <0 if the
+// edits are valid.
+//
+//nolint:gocritic // Copy of go/analysis/internal/checker/checker.go
+func validateEdits(edits []diff.Edit) ([]diff.Edit, int) {
+ if len(edits) == 0 {
+ return nil, -1
+ }
- nextIssueIndex := 0
- for i := 0; i < len(origFileLines); i++ {
- var outLine string
- var nextIssue *result.Issue
- if nextIssueIndex != len(issues) {
- nextIssue = &issues[nextIssueIndex]
- }
+ equivalent := func(x, y diff.Edit) bool {
+ return x.Start == y.Start && x.End == y.End && x.New == y.New
+ }
- origFileLineNumber := i + 1
- if nextIssue == nil || origFileLineNumber != nextIssue.GetLineRange().From {
- outLine = string(origFileLines[i])
- } else {
- nextIssueIndex++
- rng := nextIssue.GetLineRange()
- if rng.From > rng.To {
- // Maybe better decision is to skip such issues, re-evaluate if regressed.
- p.log.Warnf("[fixer]: issue line range is probably invalid, fix can be incorrect (from=%d, to=%d, linter=%s)",
- rng.From, rng.To, nextIssue.FromLinter,
- )
- }
- i += rng.To - rng.From
- if nextIssue.Replacement.NeedOnlyDelete {
- continue
+ diff.SortEdits(edits)
+
+ unique := []diff.Edit{edits[0]}
+
+ invalid := -1
+
+ for i := 1; i < len(edits); i++ {
+ prev, cur := edits[i-1], edits[i]
+ // We skip over equivalent edits without considering them
+ // an error. This handles identical edits coming from the
+ // multiple ways of loading a package into a
+ // *go/packages.Packages for testing, e.g. packages "p" and "p [p.test]".
+ if !equivalent(prev, cur) {
+ unique = append(unique, cur)
+ if prev.End > cur.Start {
+ invalid = i
}
- outLine = strings.Join(nextIssue.Replacement.NewLines, "\n")
}
+ }
+ return unique, invalid
+}
- if i < len(origFileLines)-1 {
- outLine += "\n"
- }
- if _, err := tmpOutFile.WriteString(outLine); err != nil {
- return fmt.Errorf("failed to write output line: %w", err)
- }
+// diff3Conflict returns an error describing two conflicting sets of
+// edits on a file at path.
+// Copy of go/analysis/internal/checker/checker.go
+func diff3Conflict(path, xlabel, ylabel string, xedits, yedits []diff.Edit) error {
+ contents, err := os.ReadFile(path)
+ if err != nil {
+ return err
}
+ oldlabel, old := "base", string(contents)
- return nil
-}
+ xdiff, err := diff.ToUnified(oldlabel, xlabel, old, xedits, diff.DefaultContextLines)
+ if err != nil {
+ return err
+ }
+ ydiff, err := diff.ToUnified(oldlabel, ylabel, old, yedits, diff.DefaultContextLines)
+ if err != nil {
+ return err
+ }
-func (p Fixer) printStat() {
- p.sw.PrintStages()
+ return fmt.Errorf("conflicting edits from %s and %s on %s\nfirst edits:\n%s\nsecond edits:\n%s",
+ xlabel, ylabel, path, xdiff, ydiff)
}
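The heavy lifting in the rewritten fixer is grouping edits per file and per linter, then rejecting overlapping ones before applying them. The `diff` package used above is an internal vendored copy of x/tools' diff, so this sketch uses a local `Edit` type to show only the sort, deduplicate, and overlap check; it is not the vendored implementation:

```go
package main

import (
	"fmt"
	"sort"
)

// Edit mirrors the shape the fixer works with: a half-open byte range
// [Start, End) replaced by New.
type Edit struct {
	Start, End int
	New        string
}

// validate sorts edits, drops exact duplicates, and returns the index of the
// first edit that overlaps its predecessor, or -1 if the set is applicable.
func validate(edits []Edit) ([]Edit, int) {
	if len(edits) == 0 {
		return nil, -1
	}
	sort.Slice(edits, func(i, j int) bool {
		if edits[i].Start != edits[j].Start {
			return edits[i].Start < edits[j].Start
		}
		return edits[i].End < edits[j].End
	})

	unique := []Edit{edits[0]}
	invalid := -1
	for i := 1; i < len(edits); i++ {
		prev, cur := edits[i-1], edits[i]
		if prev == cur {
			continue // identical edits from different linters are harmless
		}
		unique = append(unique, cur)
		if prev.End > cur.Start {
			invalid = i
		}
	}
	return unique, invalid
}

func main() {
	edits := []Edit{
		{Start: 10, End: 20, New: "a"},
		{Start: 15, End: 25, New: "b"}, // overlaps the first edit
	}
	_, invalid := validate(edits)
	fmt.Println("conflicting edit index:", invalid) // 1
}
```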
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/identifier_marker.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/identifier_marker.go
index 876fd3bd3e..9f332705e1 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/identifier_marker.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/identifier_marker.go
@@ -9,146 +9,137 @@ import (
var _ Processor = (*IdentifierMarker)(nil)
type replacePattern struct {
- re string
+ exp *regexp.Regexp
repl string
}
-type replaceRegexp struct {
- re *regexp.Regexp
- repl string
-}
-
-var replacePatterns = []replacePattern{
- // unparam
- {`^(\S+) - (\S+) is unused$`, "`${1}` - `${2}` is unused"},
- {`^(\S+) - (\S+) always receives (\S+) \((.*)\)$`, "`${1}` - `${2}` always receives `${3}` (`${4}`)"},
- {`^(\S+) - (\S+) always receives (.*)$`, "`${1}` - `${2}` always receives `${3}`"},
- {`^(\S+) - result (\S+) is always (\S+)`, "`${1}` - result `${2}` is always `${3}`"},
-
- // interfacer
- {`^(\S+) can be (\S+)$`, "`${1}` can be `${2}`"},
-
- // govet
- {`^printf: (\S+) arg list ends with redundant newline$`, "printf: `${1}` arg list ends with redundant newline"},
- {`^composites: (\S+) composite literal uses unkeyed fields$`, "composites: `${1}` composite literal uses unkeyed fields"},
-
- // gosec
- {
- `^(\S+): Blacklisted import (\S+): weak cryptographic primitive$`,
- "${1}: Blacklisted import `${2}`: weak cryptographic primitive",
- },
- {`^TLS InsecureSkipVerify set true.$`, "TLS `InsecureSkipVerify` set true."},
-
- // gosimple
- {`should replace loop with (.*)$`, "should replace loop with `${1}`"},
- {
- `should use a simple channel send/receive instead of select with a single case`,
- "should use a simple channel send/receive instead of `select` with a single case",
- },
- {
- `should omit comparison to bool constant, can be simplified to (.+)$`,
- "should omit comparison to bool constant, can be simplified to `${1}`",
- },
- {`should write (.+) instead of (.+)$`, "should write `${1}` instead of `${2}`"},
- {`redundant return statement$`, "redundant `return` statement"},
- {
- `should replace this if statement with an unconditional strings.TrimPrefix`,
- "should replace this `if` statement with an unconditional `strings.TrimPrefix`",
- },
-
- // staticcheck
- {`this value of (\S+) is never used$`, "this value of `${1}` is never used"},
- {
- `should use time.Since instead of time.Now\(\).Sub$`,
- "should use `time.Since` instead of `time.Now().Sub`",
- },
- {
- `should check returned error before deferring response.Close\(\)$`,
- "should check returned error before deferring `response.Close()`",
- },
- {`no value of type uint is less than 0$`, "no value of type `uint` is less than `0`"},
-
- // unused
- {`(func|const|field|type|var) (\S+) is unused$`, "${1} `${2}` is unused"},
-
- // typecheck
- {`^unknown field (\S+) in struct literal$`, "unknown field `${1}` in struct literal"},
- {
- `^invalid operation: (\S+) \(variable of type (\S+)\) has no field or method (\S+)$`,
- "invalid operation: `${1}` (variable of type `${2}`) has no field or method `${3}`",
- },
- {`^undeclared name: (\S+)$`, "undeclared name: `${1}`"},
- {
- `^cannot use addr \(variable of type (\S+)\) as (\S+) value in argument to (\S+)$`,
- "cannot use addr (variable of type `${1}`) as `${2}` value in argument to `${3}`",
- },
- {`^other declaration of (\S+)$`, "other declaration of `${1}`"},
- {`^(\S+) redeclared in this block$`, "`${1}` redeclared in this block"},
-
- // golint
- {
- `^exported (type|method|function|var|const) (\S+) should have comment or be unexported$`,
- "exported ${1} `${2}` should have comment or be unexported",
- },
- {
- `^comment on exported (type|method|function|var|const) (\S+) should be of the form "(\S+) ..."$`,
- "comment on exported ${1} `${2}` should be of the form `${3} ...`",
- },
- {`^should replace (.+) with (.+)$`, "should replace `${1}` with `${2}`"},
- {
- `^if block ends with a return statement, so drop this else and outdent its block$`,
- "`if` block ends with a `return` statement, so drop this `else` and outdent its block",
- },
- {
- `^(struct field|var|range var|const|type|(?:func|method|interface method) (?:parameter|result)) (\S+) should be (\S+)$`,
- "${1} `${2}` should be `${3}`",
- },
- {
- `^don't use underscores in Go names; var (\S+) should be (\S+)$`,
- "don't use underscores in Go names; var `${1}` should be `${2}`",
- },
-}
-
+// IdentifierMarker modifies report text.
+// It must be before [Exclude] and [ExcludeRules]:
+// users configure exclusions based on the modified text.
type IdentifierMarker struct {
- replaceRegexps []replaceRegexp
+ patterns map[string][]replacePattern
}
func NewIdentifierMarker() *IdentifierMarker {
- var replaceRegexps []replaceRegexp
- for _, p := range replacePatterns {
- r := replaceRegexp{
- re: regexp.MustCompile(p.re),
- repl: p.repl,
- }
- replaceRegexps = append(replaceRegexps, r)
- }
-
return &IdentifierMarker{
- replaceRegexps: replaceRegexps,
+ patterns: map[string][]replacePattern{
+ "unparam": {
+ {
+ exp: regexp.MustCompile(`^(\S+) - (\S+) is unused$`),
+ repl: "`${1}` - `${2}` is unused",
+ },
+ {
+ exp: regexp.MustCompile(`^(\S+) - (\S+) always receives (\S+) \((.*)\)$`),
+ repl: "`${1}` - `${2}` always receives `${3}` (`${4}`)",
+ },
+ {
+ exp: regexp.MustCompile(`^(\S+) - (\S+) always receives (.*)$`),
+ repl: "`${1}` - `${2}` always receives `${3}`",
+ },
+ {
+ exp: regexp.MustCompile(`^(\S+) - result (\S+) is always (\S+)`),
+ repl: "`${1}` - result `${2}` is always `${3}`",
+ },
+ },
+ "govet": {
+ {
+ // printf
+ exp: regexp.MustCompile(`^printf: (\S+) arg list ends with redundant newline$`),
+ repl: "printf: `${1}` arg list ends with redundant newline",
+ },
+ },
+ "gosec": {
+ {
+ exp: regexp.MustCompile(`^TLS InsecureSkipVerify set true.$`),
+ repl: "TLS `InsecureSkipVerify` set true.",
+ },
+ },
+ "gosimple": {
+ {
+ // s1011
+ exp: regexp.MustCompile(`should replace loop with (.*)$`),
+ repl: "should replace loop with `${1}`",
+ },
+ {
+ // s1000
+ exp: regexp.MustCompile(`should use a simple channel send/receive instead of select with a single case`),
+ repl: "should use a simple channel send/receive instead of `select` with a single case",
+ },
+ {
+ // s1002
+ exp: regexp.MustCompile(`should omit comparison to bool constant, can be simplified to (.+)$`),
+ repl: "should omit comparison to bool constant, can be simplified to `${1}`",
+ },
+ {
+ // s1023
+ exp: regexp.MustCompile(`redundant return statement$`),
+ repl: "redundant `return` statement",
+ },
+ {
+ // s1017
+ exp: regexp.MustCompile(`should replace this if statement with an unconditional strings.TrimPrefix`),
+ repl: "should replace this `if` statement with an unconditional `strings.TrimPrefix`",
+ },
+ },
+ "staticcheck": {
+ {
+ // sa4006
+ exp: regexp.MustCompile(`this value of (\S+) is never used$`),
+ repl: "this value of `${1}` is never used",
+ },
+ {
+ // s1012
+ exp: regexp.MustCompile(`should use time.Since instead of time.Now\(\).Sub$`),
+ repl: "should use `time.Since` instead of `time.Now().Sub`",
+ },
+ {
+ // sa5001
+ exp: regexp.MustCompile(`should check returned error before deferring response.Close\(\)$`),
+ repl: "should check returned error before deferring `response.Close()`",
+ },
+ {
+ // sa4003
+ exp: regexp.MustCompile(`no value of type uint is less than 0$`),
+ repl: "no value of type `uint` is less than `0`",
+ },
+ },
+ "unused": {
+ {
+ exp: regexp.MustCompile(`(func|const|field|type|var) (\S+) is unused$`),
+ repl: "${1} `${2}` is unused",
+ },
+ },
+ },
}
}
-func (IdentifierMarker) Name() string {
+func (*IdentifierMarker) Name() string {
return "identifier_marker"
}
-func (p IdentifierMarker) Process(issues []result.Issue) ([]result.Issue, error) {
+func (p *IdentifierMarker) Process(issues []result.Issue) ([]result.Issue, error) {
return transformIssues(issues, func(issue *result.Issue) *result.Issue {
+ re, ok := p.patterns[issue.FromLinter]
+ if !ok {
+ return issue
+ }
+
newIssue := *issue
- newIssue.Text = p.markIdentifiers(newIssue.Text)
+ newIssue.Text = markIdentifiers(re, newIssue.Text)
+
return &newIssue
}), nil
}
-func (IdentifierMarker) Finish() {}
+func (*IdentifierMarker) Finish() {}
-func (p IdentifierMarker) markIdentifiers(s string) string {
- for _, rr := range p.replaceRegexps {
- rs := rr.re.ReplaceAllString(s, rr.repl)
- if rs != s {
+func markIdentifiers(re []replacePattern, text string) string {
+ for _, rr := range re {
+ rs := rr.exp.ReplaceAllString(text, rr.repl)
+ if rs != text {
return rs
}
}
- return s
+ return text
}
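
For context, a minimal standalone sketch of the first-match-wins replacement used by the rewritten IdentifierMarker above. The replacePattern shape mirrors the vendored code, but the report text is made up for illustration:

package main

import (
	"fmt"
	"regexp"
)

// replacePattern mirrors the shape used by IdentifierMarker: a compiled
// expression plus a replacement template whose capture groups are back-quoted.
type replacePattern struct {
	exp  *regexp.Regexp
	repl string
}

// markIdentifiers applies the first matching pattern and stops, as in the
// vendored processor: at most one rewrite per report text.
func markIdentifiers(patterns []replacePattern, text string) string {
	for _, p := range patterns {
		if out := p.exp.ReplaceAllString(text, p.repl); out != text {
			return out
		}
	}
	return text
}

func main() {
	unparam := []replacePattern{
		{exp: regexp.MustCompile(`^(\S+) - (\S+) is unused$`), repl: "`${1}` - `${2}` is unused"},
	}
	// Hypothetical report text, only for illustration.
	fmt.Println(markIdentifiers(unparam, "foo - bar is unused"))
	// Output: `foo` - `bar` is unused
}
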
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/invalid_issue.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/invalid_issue.go
index c1389e9707..042675b59e 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/invalid_issue.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/invalid_issue.go
@@ -9,6 +9,9 @@ import (
var _ Processor = (*InvalidIssue)(nil)
+// InvalidIssue filters out invalid reports:
+// - reports from non-Go files (except `go.mod`)
+// - reports without a file path
type InvalidIssue struct {
log logutils.Log
}
@@ -22,7 +25,7 @@ func (InvalidIssue) Name() string {
}
func (p InvalidIssue) Process(issues []result.Issue) ([]result.Issue, error) {
- tcIssues := filterIssues(issues, func(issue *result.Issue) bool {
+ tcIssues := filterIssuesUnsafe(issues, func(issue *result.Issue) bool {
return issue.FromLinter == typeCheckName
})
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/issues.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/issues.go
index a65b0c2b0c..ab443b87d7 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/issues.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/issues.go
@@ -7,6 +7,23 @@ import (
)
func filterIssues(issues []result.Issue, filter func(issue *result.Issue) bool) []result.Issue {
+ retIssues := make([]result.Issue, 0, len(issues))
+ for i := range issues {
+ if issues[i].FromLinter == typeCheckName {
+ // don't hide typechecking errors in generated files: users expect to see why the project isn't compiling
+ retIssues = append(retIssues, issues[i])
+ continue
+ }
+
+ if filter(&issues[i]) {
+ retIssues = append(retIssues, issues[i])
+ }
+ }
+
+ return retIssues
+}
+
+func filterIssuesUnsafe(issues []result.Issue, filter func(issue *result.Issue) bool) []result.Issue {
retIssues := make([]result.Issue, 0, len(issues))
for i := range issues {
if filter(&issues[i]) {
@@ -20,6 +37,12 @@ func filterIssues(issues []result.Issue, filter func(issue *result.Issue) bool)
func filterIssuesErr(issues []result.Issue, filter func(issue *result.Issue) (bool, error)) ([]result.Issue, error) {
retIssues := make([]result.Issue, 0, len(issues))
for i := range issues {
+ if issues[i].FromLinter == typeCheckName {
+ // don't hide typechecking errors in generated files: users expect to see why the project isn't compiling
+ retIssues = append(retIssues, issues[i])
+ continue
+ }
+
ok, err := filter(&issues[i])
if err != nil {
return nil, fmt.Errorf("can't filter issue %#v: %w", issues[i], err)
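
A minimal sketch of the split introduced above: filterIssues now keeps typecheck reports unconditionally, while filterIssuesUnsafe applies the predicate to everything. The issue type here is a reduced stand-in for result.Issue:

package main

import "fmt"

// issue is a stand-in for result.Issue, reduced to the fields the filters look at.
type issue struct {
	FromLinter string
	Text       string
}

const typeCheckName = "typecheck"

// filterIssues keeps typecheck reports unconditionally, then applies the predicate.
func filterIssues(issues []issue, keep func(*issue) bool) []issue {
	out := make([]issue, 0, len(issues))
	for i := range issues {
		if issues[i].FromLinter == typeCheckName || keep(&issues[i]) {
			out = append(out, issues[i])
		}
	}
	return out
}

// filterIssuesUnsafe applies the predicate to every report, typecheck included.
func filterIssuesUnsafe(issues []issue, keep func(*issue) bool) []issue {
	out := make([]issue, 0, len(issues))
	for i := range issues {
		if keep(&issues[i]) {
			out = append(out, issues[i])
		}
	}
	return out
}

func main() {
	reports := []issue{
		{FromLinter: "typecheck", Text: "undefined: foo"},
		{FromLinter: "unused", Text: "func `bar` is unused"},
	}
	dropAll := func(*issue) bool { return false }

	fmt.Println(len(filterIssues(reports, dropAll)))       // 1: the typecheck report survives
	fmt.Println(len(filterIssuesUnsafe(reports, dropAll))) // 0: nothing survives
}
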
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go
index e6200eec4c..ced200af72 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go
@@ -8,6 +8,7 @@ import (
var _ Processor = (*MaxFromLinter)(nil)
+// MaxFromLinter limits the number of reports from the same linter.
type MaxFromLinter struct {
linterCounter map[string]int
limit int
@@ -33,12 +34,7 @@ func (p *MaxFromLinter) Process(issues []result.Issue) ([]result.Issue, error) {
return issues, nil
}
- return filterIssues(issues, func(issue *result.Issue) bool {
- if issue.Replacement != nil && p.cfg.Issues.NeedFix {
- // we need to fix all issues at once => we need to return all of them
- return true
- }
-
+ return filterIssuesUnsafe(issues, func(issue *result.Issue) bool {
p.linterCounter[issue.FromLinter]++ // always inc for stat
return p.linterCounter[issue.FromLinter] <= p.limit
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go
index da9fe4b7df..7c59b5dd60 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go
@@ -7,6 +7,7 @@ import (
var _ Processor = (*MaxPerFileFromLinter)(nil)
+// MaxPerFileFromLinter limits the number of reports by file and by linter.
type MaxPerFileFromLinter struct {
fileLinterCounter fileLinterCounter
maxPerFileFromLinterConfig map[string]int
@@ -20,6 +21,7 @@ func NewMaxPerFileFromLinter(cfg *config.Config) *MaxPerFileFromLinter {
// otherwise we need to fix all issues in the file at once
maxPerFileFromLinterConfig["gofmt"] = 1
maxPerFileFromLinterConfig["goimports"] = 1
+ maxPerFileFromLinterConfig["gci"] = 1
}
return &MaxPerFileFromLinter{
@@ -33,7 +35,7 @@ func (*MaxPerFileFromLinter) Name() string {
}
func (p *MaxPerFileFromLinter) Process(issues []result.Issue) ([]result.Issue, error) {
- return filterIssues(issues, func(issue *result.Issue) bool {
+ return filterIssuesUnsafe(issues, func(issue *result.Issue) bool {
limit := p.maxPerFileFromLinterConfig[issue.FromLinter]
if limit == 0 {
return true
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go
index 8948fa79db..349f6a9afa 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go
@@ -10,6 +10,7 @@ import (
var _ Processor = (*MaxSameIssues)(nil)
+// MaxSameIssues limits the number of reports with the same text.
type MaxSameIssues struct {
textCounter map[string]int
limit int
@@ -35,13 +36,9 @@ func (p *MaxSameIssues) Process(issues []result.Issue) ([]result.Issue, error) {
return issues, nil
}
- return filterIssues(issues, func(issue *result.Issue) bool {
- if issue.Replacement != nil && p.cfg.Issues.NeedFix {
- // we need to fix all issues at once => we need to return all of them
- return true
- }
-
+ return filterIssuesUnsafe(issues, func(issue *result.Issue) bool {
p.textCounter[issue.Text]++ // always inc for stat
+
return p.textCounter[issue.Text] <= p.limit
}), nil
}
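
MaxSameIssues and MaxFromLinter above share the same counting pattern: increment a counter for every report (so the statistics stay complete) and keep only the first limit occurrences per key. A small self-contained sketch of that pattern, using a plain string key instead of the real issue fields:

package main

import "fmt"

// limitByKey keeps at most limit items per key while still counting every
// occurrence, mirroring how the processors increment their counters
// "always ... for stat" before comparing against the limit.
func limitByKey(keys []string, limit int) []string {
	counter := map[string]int{}
	var kept []string
	for _, k := range keys {
		counter[k]++ // always incremented, so Finish() can report totals
		if counter[k] <= limit {
			kept = append(kept, k)
		}
	}
	return kept
}

func main() {
	texts := []string{"dup", "dup", "dup", "other"}
	fmt.Println(limitByKey(texts, 2)) // [dup dup other]
}
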
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint_filter.go
similarity index 86%
rename from vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go
rename to vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint_filter.go
index 7794bd3ecb..99cd799954 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint_filter.go
@@ -4,12 +4,12 @@ import (
"go/ast"
"go/parser"
"go/token"
+ "maps"
"regexp"
+ "slices"
"sort"
"strings"
- "golang.org/x/exp/maps"
-
"github.com/golangci/golangci-lint/pkg/golinters/nolintlint"
"github.com/golangci/golangci-lint/pkg/lint/linter"
"github.com/golangci/golangci-lint/pkg/lint/lintersdb"
@@ -17,9 +17,9 @@ import (
"github.com/golangci/golangci-lint/pkg/result"
)
-var _ Processor = (*Nolint)(nil)
+var _ Processor = (*NolintFilter)(nil)
-var nolintDebugf = logutils.Debug(logutils.DebugKeyNolint)
+var nolintDebugf = logutils.Debug(logutils.DebugKeyNolintFilter)
type ignoredRange struct {
linters []string
@@ -64,7 +64,8 @@ type fileData struct {
ignoredRanges []ignoredRange
}
-type Nolint struct {
+// NolintFilter filters and sorts reports related to `nolint` directives.
+type NolintFilter struct {
fileCache map[string]*fileData
dbManager *lintersdb.Manager
enabledLinters map[string]*linter.Config
@@ -75,8 +76,8 @@ type Nolint struct {
pattern *regexp.Regexp
}
-func NewNolint(log logutils.Log, dbManager *lintersdb.Manager, enabledLinters map[string]*linter.Config) *Nolint {
- return &Nolint{
+func NewNolintFilter(log logutils.Log, dbManager *lintersdb.Manager, enabledLinters map[string]*linter.Config) *NolintFilter {
+ return &NolintFilter{
fileCache: map[string]*fileData{},
dbManager: dbManager,
enabledLinters: enabledLinters,
@@ -86,28 +87,27 @@ func NewNolint(log logutils.Log, dbManager *lintersdb.Manager, enabledLinters ma
}
}
-func (*Nolint) Name() string {
- return "nolint"
+func (*NolintFilter) Name() string {
+ return "nolint_filter"
}
-func (p *Nolint) Process(issues []result.Issue) ([]result.Issue, error) {
+func (p *NolintFilter) Process(issues []result.Issue) ([]result.Issue, error) {
// put nolintlint issues last because we process other issues first to determine which nolint directives are unused
sort.Stable(sortWithNolintlintLast(issues))
return filterIssuesErr(issues, p.shouldPassIssue)
}
-func (p *Nolint) Finish() {
+func (p *NolintFilter) Finish() {
if len(p.unknownLintersSet) == 0 {
return
}
- unknownLinters := maps.Keys(p.unknownLintersSet)
- sort.Strings(unknownLinters)
+ unknownLinters := slices.Sorted(maps.Keys(p.unknownLintersSet))
p.log.Warnf("Found unknown linters in //nolint directives: %s", strings.Join(unknownLinters, ", "))
}
-func (p *Nolint) shouldPassIssue(issue *result.Issue) (bool, error) {
+func (p *NolintFilter) shouldPassIssue(issue *result.Issue) (bool, error) {
nolintDebugf("got issue: %v", *issue)
// don't expect disabled linters to cover their nolint statements
@@ -142,7 +142,7 @@ func (p *Nolint) shouldPassIssue(issue *result.Issue) (bool, error) {
return true, nil
}
-func (p *Nolint) getOrCreateFileData(issue *result.Issue) *fileData {
+func (p *NolintFilter) getOrCreateFileData(issue *result.Issue) *fileData {
fd := p.fileCache[issue.FilePath()]
if fd != nil {
return fd
@@ -169,7 +169,7 @@ func (p *Nolint) getOrCreateFileData(issue *result.Issue) *fileData {
return fd
}
-func (p *Nolint) buildIgnoredRangesForFile(f *ast.File, fset *token.FileSet, filePath string) []ignoredRange {
+func (p *NolintFilter) buildIgnoredRangesForFile(f *ast.File, fset *token.FileSet, filePath string) []ignoredRange {
inlineRanges := p.extractFileCommentsInlineRanges(fset, f.Comments...)
nolintDebugf("file %s: inline nolint ranges are %+v", filePath, inlineRanges)
@@ -191,7 +191,7 @@ func (p *Nolint) buildIgnoredRangesForFile(f *ast.File, fset *token.FileSet, fil
return allRanges
}
-func (p *Nolint) extractFileCommentsInlineRanges(fset *token.FileSet, comments ...*ast.CommentGroup) []ignoredRange {
+func (p *NolintFilter) extractFileCommentsInlineRanges(fset *token.FileSet, comments ...*ast.CommentGroup) []ignoredRange {
var ret []ignoredRange
for _, g := range comments {
for _, c := range g.List {
@@ -205,7 +205,7 @@ func (p *Nolint) extractFileCommentsInlineRanges(fset *token.FileSet, comments .
return ret
}
-func (p *Nolint) extractInlineRangeFromComment(text string, g ast.Node, fset *token.FileSet) *ignoredRange {
+func (p *NolintFilter) extractInlineRangeFromComment(text string, g ast.Node, fset *token.FileSet) *ignoredRange {
text = strings.TrimLeft(text, "/ ")
if !p.pattern.MatchString(text) {
return nil
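
The Finish change above swaps golang.org/x/exp/maps plus sort.Strings for the standard-library maps and slices iterator helpers (Go 1.23+). A tiny sketch of the equivalent pattern with a made-up set of linter names:

package main

import (
	"fmt"
	"maps"
	"slices"
)

func main() {
	// Same pattern as NolintFilter.Finish: collect unknown linter names from a
	// set and report them in a stable order.
	unknown := map[string]bool{"foolint": true, "barlint": true}

	// slices.Sorted(maps.Keys(...)) replaces maps.Keys + sort.Strings.
	names := slices.Sorted(maps.Keys(unknown))

	fmt.Println(names) // [barlint foolint]
}
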
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_absoluter.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_absoluter.go
new file mode 100644
index 0000000000..a649716d5d
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_absoluter.go
@@ -0,0 +1,44 @@
+package processors
+
+import (
+ "path/filepath"
+
+ "github.com/golangci/golangci-lint/pkg/logutils"
+ "github.com/golangci/golangci-lint/pkg/result"
+)
+
+var _ Processor = (*PathAbsoluter)(nil)
+
+// PathAbsoluter ensures that reported file paths are absolute.
+type PathAbsoluter struct {
+ log logutils.Log
+}
+
+func NewPathAbsoluter(log logutils.Log) *PathAbsoluter {
+ return &PathAbsoluter{log: log.Child(logutils.DebugKeyPathAbsoluter)}
+}
+
+func (*PathAbsoluter) Name() string {
+ return "path_absoluter"
+}
+
+func (p *PathAbsoluter) Process(issues []result.Issue) ([]result.Issue, error) {
+ return transformIssues(issues, func(issue *result.Issue) *result.Issue {
+ if filepath.IsAbs(issue.FilePath()) {
+ return issue
+ }
+
+ absPath, err := filepath.Abs(issue.FilePath())
+ if err != nil {
+ p.log.Warnf("failed to get absolute path for %q: %v", issue.FilePath(), err)
+ return nil
+ }
+
+ newIssue := issue
+ newIssue.Pos.Filename = absPath
+
+ return newIssue
+ }), nil
+}
+
+func (*PathAbsoluter) Finish() {}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go
deleted file mode 100644
index 8036e3fd6d..0000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go
+++ /dev/null
@@ -1,36 +0,0 @@
-package processors
-
-import (
- "github.com/golangci/golangci-lint/pkg/fsutils"
- "github.com/golangci/golangci-lint/pkg/result"
-)
-
-var _ Processor = (*PathPrefixer)(nil)
-
-// PathPrefixer adds a customizable prefix to every output path
-type PathPrefixer struct {
- prefix string
-}
-
-// NewPathPrefixer returns a new path prefixer for the provided string
-func NewPathPrefixer(prefix string) *PathPrefixer {
- return &PathPrefixer{prefix: prefix}
-}
-
-// Name returns the name of this processor
-func (*PathPrefixer) Name() string {
- return "path_prefixer"
-}
-
-// Process adds the prefix to each path
-func (p *PathPrefixer) Process(issues []result.Issue) ([]result.Issue, error) {
- if p.prefix != "" {
- for i := range issues {
- issues[i].Pos.Filename = fsutils.WithPathPrefix(p.prefix, issues[i].Pos.Filename)
- }
- }
- return issues, nil
-}
-
-// Finish is implemented to satisfy the Processor interface
-func (*PathPrefixer) Finish() {}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prettifier.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prettifier.go
index c5c27357c6..6a04b1c359 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prettifier.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prettifier.go
@@ -1,40 +1,39 @@
package processors
import (
- "path/filepath"
-
"github.com/golangci/golangci-lint/pkg/fsutils"
+ "github.com/golangci/golangci-lint/pkg/logutils"
"github.com/golangci/golangci-lint/pkg/result"
)
var _ Processor = (*PathPrettifier)(nil)
+// PathPrettifier modifies report file paths to be relative to the base path.
+// It also handles the `output.path-prefix` option.
type PathPrettifier struct {
+ prefix string
+ log logutils.Log
}
-func NewPathPrettifier() *PathPrettifier {
- return &PathPrettifier{}
+func NewPathPrettifier(log logutils.Log, prefix string) *PathPrettifier {
+ return &PathPrettifier{
+ prefix: prefix,
+ log: log.Child(logutils.DebugKeyPathPrettifier),
+ }
}
-func (PathPrettifier) Name() string {
+func (*PathPrettifier) Name() string {
return "path_prettifier"
}
-func (PathPrettifier) Process(issues []result.Issue) ([]result.Issue, error) {
+func (p *PathPrettifier) Process(issues []result.Issue) ([]result.Issue, error) {
return transformIssues(issues, func(issue *result.Issue) *result.Issue {
- if !filepath.IsAbs(issue.FilePath()) {
- return issue
- }
+ newIssue := issue
- rel, err := fsutils.ShortestRelPath(issue.FilePath(), "")
- if err != nil {
- return issue
- }
+ newIssue.Pos.Filename = fsutils.WithPathPrefix(p.prefix, issue.RelativePath)
- newIssue := issue
- newIssue.Pos.Filename = rel
return newIssue
}), nil
}
-func (PathPrettifier) Finish() {}
+func (*PathPrettifier) Finish() {}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_relativity.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_relativity.go
new file mode 100644
index 0000000000..bdaf1b98d6
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_relativity.go
@@ -0,0 +1,60 @@
+package processors
+
+import (
+ "fmt"
+ "path/filepath"
+
+ "github.com/golangci/golangci-lint/pkg/fsutils"
+ "github.com/golangci/golangci-lint/pkg/logutils"
+ "github.com/golangci/golangci-lint/pkg/result"
+)
+
+var _ Processor = (*PathRelativity)(nil)
+
+// PathRelativity computes [result.Issue.RelativePath] and [result.Issue.WorkingDirectoryRelativePath],
+// based on the base path.
+type PathRelativity struct {
+ log logutils.Log
+ basePath string
+ workingDirectory string
+}
+
+func NewPathRelativity(log logutils.Log, basePath string) (*PathRelativity, error) {
+ wd, err := fsutils.Getwd()
+ if err != nil {
+ return nil, fmt.Errorf("error getting working directory: %w", err)
+ }
+
+ return &PathRelativity{
+ log: log.Child(logutils.DebugKeyPathRelativity),
+ basePath: basePath,
+ workingDirectory: wd,
+ }, nil
+}
+
+func (*PathRelativity) Name() string {
+ return "path_relativity"
+}
+
+func (p *PathRelativity) Process(issues []result.Issue) ([]result.Issue, error) {
+ return transformIssues(issues, func(issue *result.Issue) *result.Issue {
+ newIssue := *issue
+
+ var err error
+ newIssue.RelativePath, err = filepath.Rel(p.basePath, issue.FilePath())
+ if err != nil {
+ p.log.Warnf("Getting relative path (basepath): %v", err)
+ return nil
+ }
+
+ newIssue.WorkingDirectoryRelativePath, err = filepath.Rel(p.workingDirectory, issue.FilePath())
+ if err != nil {
+ p.log.Warnf("Getting relative path (wd): %v", err)
+ return nil
+ }
+
+ return &newIssue
+ }), nil
+}
+
+func (*PathRelativity) Finish() {}
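
The new processor derives both relative paths with the standard library's filepath.Rel. A minimal sketch with hypothetical paths (error handling reduced to a panic for brevity):

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// Hypothetical values; in the processor, basePath comes from configuration
	// and the working directory from fsutils.Getwd().
	basePath := "/home/user/project"
	wd := "/home/user/project/internal"
	file := "/home/user/project/internal/app/main.go"

	relToBase, err := filepath.Rel(basePath, file)
	if err != nil {
		panic(err)
	}

	relToWD, err := filepath.Rel(wd, file)
	if err != nil {
		panic(err)
	}

	fmt.Println(relToBase) // internal/app/main.go
	fmt.Println(relToWD)   // app/main.go
}
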
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_shortener.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_shortener.go
index b161e86c2f..0c0288269e 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_shortener.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_shortener.go
@@ -10,6 +10,8 @@ import (
var _ Processor = (*PathShortener)(nil)
+// PathShortener modifies the text of reports to shorten the file paths inside it.
+// It uses the rooted path name corresponding to the current working directory (`wd`).
type PathShortener struct {
wd string
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity.go
index 93a26586d6..2dacf66387 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity.go
@@ -1,7 +1,7 @@
package processors
import (
- "regexp"
+ "cmp"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/fsutils"
@@ -13,11 +13,10 @@ const severityFromLinter = "@linter"
var _ Processor = (*Severity)(nil)
-type severityRule struct {
- baseRule
- severity string
-}
-
+// Severity modifies report severity.
+// It uses the same `baseRule` structure as the [ExcludeRules] processor.
+//
+// Warning: it doesn't use the `path-prefix` option.
type Severity struct {
name string
@@ -43,7 +42,7 @@ func NewSeverity(log logutils.Log, files *fsutils.Files, cfg *config.Severity) *
p.name = "severity-rules-case-sensitive"
}
- p.rules = createSeverityRules(cfg.Rules, prefix)
+ p.rules = parseRules(cfg.Rules, prefix, newSeverityRule)
return p
}
@@ -67,10 +66,7 @@ func (p *Severity) transform(issue *result.Issue) *result.Issue {
return issue
}
- issue.Severity = rule.severity
- if issue.Severity == "" {
- issue.Severity = p.defaultSeverity
- }
+ issue.Severity = cmp.Or(rule.severity, p.defaultSeverity)
return issue
}
@@ -83,34 +79,14 @@ func (p *Severity) transform(issue *result.Issue) *result.Issue {
return issue
}
-func createSeverityRules(rules []config.SeverityRule, prefix string) []severityRule {
- parsedRules := make([]severityRule, 0, len(rules))
-
- for _, rule := range rules {
- parsedRule := severityRule{}
- parsedRule.linters = rule.Linters
- parsedRule.severity = rule.Severity
-
- if rule.Text != "" {
- parsedRule.text = regexp.MustCompile(prefix + rule.Text)
- }
-
- if rule.Source != "" {
- parsedRule.source = regexp.MustCompile(prefix + rule.Source)
- }
-
- if rule.Path != "" {
- path := fsutils.NormalizePathInRegex(rule.Path)
- parsedRule.path = regexp.MustCompile(path)
- }
-
- if rule.PathExcept != "" {
- pathExcept := fsutils.NormalizePathInRegex(rule.PathExcept)
- parsedRule.pathExcept = regexp.MustCompile(pathExcept)
- }
+type severityRule struct {
+ baseRule
+ severity string
+}
- parsedRules = append(parsedRules, parsedRule)
+func newSeverityRule(rule *config.SeverityRule, prefix string) severityRule {
+ return severityRule{
+ baseRule: newBaseRule(&rule.BaseRule, prefix),
+ severity: rule.Severity,
}
-
- return parsedRules
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go
index 39dbfd1d38..8900c96a92 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go
@@ -26,23 +26,30 @@ type skipStat struct {
count int
}
+// SkipDirs filters reports based on directory names.
+// It uses the shortest relative paths and the `path-prefix` option.
+// TODO(ldez): should be removed in v2.
type SkipDirs struct {
- patterns []*regexp.Regexp
- log logutils.Log
+ patterns []*regexp.Regexp
+ pathPrefix string
+
+ log logutils.Log
+
skippedDirs map[string]*skipStat
absArgsDirs []string
skippedDirsCache map[string]bool
- pathPrefix string
}
func NewSkipDirs(log logutils.Log, patterns, args []string, pathPrefix string) (*SkipDirs, error) {
var patternsRe []*regexp.Regexp
for _, p := range patterns {
p = fsutils.NormalizePathInRegex(p)
+
patternRe, err := regexp.Compile(p)
if err != nil {
return nil, fmt.Errorf("can't compile regexp %q: %w", p, err)
}
+
patternsRe = append(patternsRe, patternRe)
}
@@ -53,11 +60,11 @@ func NewSkipDirs(log logutils.Log, patterns, args []string, pathPrefix string) (
return &SkipDirs{
patterns: patternsRe,
+ pathPrefix: pathPrefix,
log: log,
skippedDirs: map[string]*skipStat{},
absArgsDirs: absArgsDirs,
skippedDirsCache: map[string]bool{},
- pathPrefix: pathPrefix,
}, nil
}
@@ -80,30 +87,27 @@ func (p *SkipDirs) Finish() {
}
func (p *SkipDirs) shouldPassIssue(issue *result.Issue) bool {
- if filepath.IsAbs(issue.FilePath()) {
- if isGoFile(issue.FilePath()) {
- p.log.Warnf("Got abs path %s in skip dirs processor, it should be relative", issue.FilePath())
- }
- return true
- }
-
- issueRelDir := filepath.Dir(issue.FilePath())
+ issueRelDir := filepath.Dir(issue.RelativePath)
if toPass, ok := p.skippedDirsCache[issueRelDir]; ok {
if !toPass {
p.skippedDirs[issueRelDir].count++
}
+
return toPass
}
issueAbsDir, err := filepath.Abs(issueRelDir)
if err != nil {
p.log.Warnf("Can't abs-ify path %q: %s", issueRelDir, err)
+
return true
}
toPass := p.shouldPassIssueDirs(issueRelDir, issueAbsDir)
+
p.skippedDirsCache[issueRelDir] = toPass
+
return toPass
}
@@ -123,15 +127,19 @@ func (p *SkipDirs) shouldPassIssueDirs(issueRelDir, issueAbsDir string) bool {
// disadvantages (https://github.com/golangci/golangci-lint/pull/313).
path := fsutils.WithPathPrefix(p.pathPrefix, issueRelDir)
+
for _, pattern := range p.patterns {
if pattern.MatchString(path) {
ps := pattern.String()
+
if p.skippedDirs[issueRelDir] == nil {
p.skippedDirs[issueRelDir] = &skipStat{
pattern: ps,
}
}
+
p.skippedDirs[issueRelDir].count++
+
return false
}
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go
index 3b17a9f327..5907cf6777 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go
@@ -10,6 +10,10 @@ import (
var _ Processor = (*SkipFiles)(nil)
+// SkipFiles filters reports based on filename.
+//
+// It uses the shortest relative paths and the `path-prefix` option.
+// TODO(ldez): should be removed in v2.
type SkipFiles struct {
patterns []*regexp.Regexp
pathPrefix string
@@ -34,26 +38,28 @@ func NewSkipFiles(patterns []string, pathPrefix string) (*SkipFiles, error) {
}, nil
}
-func (SkipFiles) Name() string {
+func (*SkipFiles) Name() string {
return "skip_files"
}
-func (p SkipFiles) Process(issues []result.Issue) ([]result.Issue, error) {
+func (p *SkipFiles) Process(issues []result.Issue) ([]result.Issue, error) {
if len(p.patterns) == 0 {
return issues, nil
}
- return filterIssues(issues, func(issue *result.Issue) bool {
- path := fsutils.WithPathPrefix(p.pathPrefix, issue.FilePath())
+ return filterIssues(issues, p.shouldPassIssue), nil
+}
+
+func (*SkipFiles) Finish() {}
+
+func (p *SkipFiles) shouldPassIssue(issue *result.Issue) bool {
+ path := fsutils.WithPathPrefix(p.pathPrefix, issue.RelativePath)
- for _, pattern := range p.patterns {
- if pattern.MatchString(path) {
- return false
- }
+ for _, pattern := range p.patterns {
+ if pattern.MatchString(path) {
+ return false
}
+ }
- return true
- }), nil
+ return true
}
-
-func (SkipFiles) Finish() {}
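
SkipFiles (and SkipDirs above) boil down to matching a prefixed relative path against compiled regexps. A small sketch of that check with made-up patterns; the prefix join here is a simplification, not the actual fsutils.WithPathPrefix implementation:

package main

import (
	"fmt"
	"path/filepath"
	"regexp"
)

// shouldSkip reports whether any compiled pattern matches the (optionally
// prefixed) relative path; it is the inverse of SkipFiles.shouldPassIssue.
func shouldSkip(patterns []*regexp.Regexp, prefix, relPath string) bool {
	path := relPath
	if prefix != "" {
		path = filepath.Join(prefix, relPath) // simplified stand-in for fsutils.WithPathPrefix
	}

	for _, p := range patterns {
		if p.MatchString(path) {
			return true
		}
	}
	return false
}

func main() {
	patterns := []*regexp.Regexp{
		regexp.MustCompile(`_test\.go$`),
		regexp.MustCompile(`^vendor/`),
	}

	fmt.Println(shouldSkip(patterns, "", "pkg/foo_test.go")) // true
	fmt.Println(shouldSkip(patterns, "", "pkg/foo.go"))      // false
}
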
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go
index 77f58c03e5..96f5574f6f 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go
@@ -1,47 +1,53 @@
package processors
import (
- "errors"
+ "cmp"
"fmt"
"slices"
- "sort"
"strings"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/result"
)
-// Base propose of this functionality to sort results (issues)
-// produced by various linters by analyzing code. We're achieving this
-// by sorting results.Issues using processor step, and chain based
-// rules that can compare different properties of the Issues struct.
-
const (
orderNameFile = "file"
orderNameLinter = "linter"
orderNameSeverity = "severity"
)
+const (
+ less = iota - 1
+ equal
+ greater
+)
+
var _ Processor = (*SortResults)(nil)
+type issueComparator func(a, b *result.Issue) int
+
+// SortResults sorts reports based on criteria:
+// - file names, line numbers, positions
+// - linter names
+// - severity names
type SortResults struct {
- cmps map[string]*comparator
+ cmps map[string][]issueComparator
cfg *config.Output
}
-func NewSortResults(cfg *config.Config) *SortResults {
+func NewSortResults(cfg *config.Output) *SortResults {
return &SortResults{
- cmps: map[string]*comparator{
+ cmps: map[string][]issueComparator{
// For sorting we are comparing (in next order):
// file names, line numbers, position, and finally - giving up.
- orderNameFile: byFileName().SetNext(byLine().SetNext(byColumn())),
+ orderNameFile: {byFileName, byLine, byColumn},
// For sorting we are comparing: linter name
- orderNameLinter: byLinter(),
+ orderNameLinter: {byLinter},
// For sorting we are comparing: severity
- orderNameSeverity: bySeverity(),
+ orderNameSeverity: {bySeverity},
},
- cfg: &cfg.Output,
+ cfg: cfg,
}
}
@@ -57,23 +63,21 @@ func (p SortResults) Process(issues []result.Issue) ([]result.Issue, error) {
p.cfg.SortOrder = []string{orderNameFile}
}
- var cmps []*comparator
+ var cmps []issueComparator
+
for _, name := range p.cfg.SortOrder {
c, ok := p.cmps[name]
if !ok {
return nil, fmt.Errorf("unsupported sort-order name %q", name)
}
- cmps = append(cmps, c)
+ cmps = append(cmps, c...)
}
- cmp, err := mergeComparators(cmps)
- if err != nil {
- return nil, err
- }
+ comp := mergeComparators(cmps...)
- sort.Slice(issues, func(i, j int) bool {
- return cmp.Compare(&issues[i], &issues[j]) == less
+ slices.SortFunc(issues, func(a, b result.Issue) int {
+ return comp(&a, &b)
})
return issues, nil
@@ -81,147 +85,32 @@ func (p SortResults) Process(issues []result.Issue) ([]result.Issue, error) {
func (SortResults) Finish() {}
-type compareResult int
-
-const (
- less compareResult = iota - 1
- equal
- greater
- none
-)
-
-func (c compareResult) isNeutral() bool {
- // return true if compare result is incomparable or equal.
- return c == none || c == equal
-}
-
-func (c compareResult) String() string {
- switch c {
- case less:
- return "less"
- case equal:
- return "equal"
- case greater:
- return "greater"
- default:
- return "none"
- }
-}
-
-// comparator describes how to implement compare for two "issues".
-type comparator struct {
- name string
- compare func(a, b *result.Issue) compareResult
- next *comparator
-}
-
-func (cmp *comparator) Next() *comparator { return cmp.next }
-
-func (cmp *comparator) SetNext(c *comparator) *comparator {
- cmp.next = c
- return cmp
-}
-
-func (cmp *comparator) String() string {
- s := cmp.name
- if cmp.Next() != nil {
- s += " > " + cmp.Next().String()
- }
-
- return s
-}
-
-func (cmp *comparator) Compare(a, b *result.Issue) compareResult {
- res := cmp.compare(a, b)
- if !res.isNeutral() {
- return res
- }
-
- if next := cmp.Next(); next != nil {
- return next.Compare(a, b)
- }
-
- return res
-}
-
-func byFileName() *comparator {
- return &comparator{
- name: "byFileName",
- compare: func(a, b *result.Issue) compareResult {
- return compareResult(strings.Compare(a.FilePath(), b.FilePath()))
- },
- }
-}
-
-func byLine() *comparator {
- return &comparator{
- name: "byLine",
- compare: func(a, b *result.Issue) compareResult {
- return numericCompare(a.Line(), b.Line())
- },
- }
-}
-
-func byColumn() *comparator {
- return &comparator{
- name: "byColumn",
- compare: func(a, b *result.Issue) compareResult {
- return numericCompare(a.Column(), b.Column())
- },
- }
+func byFileName(a, b *result.Issue) int {
+ return strings.Compare(a.FilePath(), b.FilePath())
}
-func byLinter() *comparator {
- return &comparator{
- name: "byLinter",
- compare: func(a, b *result.Issue) compareResult {
- return compareResult(strings.Compare(a.FromLinter, b.FromLinter))
- },
- }
+func byLine(a, b *result.Issue) int {
+ return numericCompare(a.Line(), b.Line())
}
-func bySeverity() *comparator {
- return &comparator{
- name: "bySeverity",
- compare: func(a, b *result.Issue) compareResult {
- return severityCompare(a.Severity, b.Severity)
- },
- }
+func byColumn(a, b *result.Issue) int {
+ return numericCompare(a.Column(), b.Column())
}
-func mergeComparators(cmps []*comparator) (*comparator, error) {
- if len(cmps) == 0 {
- return nil, errors.New("no comparator")
- }
-
- for i := 0; i < len(cmps)-1; i++ {
- findComparatorTip(cmps[i]).SetNext(cmps[i+1])
- }
-
- return cmps[0], nil
+func byLinter(a, b *result.Issue) int {
+ return strings.Compare(a.FromLinter, b.FromLinter)
}
-func findComparatorTip(cmp *comparator) *comparator {
- if cmp.Next() != nil {
- return findComparatorTip(cmp.Next())
- }
-
- return cmp
+func bySeverity(a, b *result.Issue) int {
+ return severityCompare(a.Severity, b.Severity)
}
-func severityCompare(a, b string) compareResult {
+func severityCompare(a, b string) int {
// The position inside the slice define the importance (lower to higher).
classic := []string{"low", "medium", "high", "warning", "error"}
if slices.Contains(classic, a) && slices.Contains(classic, b) {
- switch {
- case slices.Index(classic, a) > slices.Index(classic, b):
- return greater
- case slices.Index(classic, a) < slices.Index(classic, b):
- return less
- default:
- return equal
- }
+ return cmp.Compare(slices.Index(classic, a), slices.Index(classic, b))
}
if slices.Contains(classic, a) {
@@ -232,28 +121,27 @@ func severityCompare(a, b string) compareResult {
return less
}
- return compareResult(strings.Compare(a, b))
+ return strings.Compare(a, b)
}
-func numericCompare(a, b int) compareResult {
- var (
- isValuesInvalid = a < 0 || b < 0
- isZeroValuesBoth = a == 0 && b == 0
- isEqual = a == b
- isZeroValueInA = b > 0 && a == 0
- isZeroValueInB = a > 0 && b == 0
- )
-
- switch {
- case isZeroValuesBoth || isEqual:
+func numericCompare(a, b int) int {
+ // Negative values and zeros are skipped (treated as equal) because they are either invalid or "neutral" (the default int value).
+ if a <= 0 || b <= 0 {
return equal
- case isValuesInvalid || isZeroValueInA || isZeroValueInB:
- return none
- case a > b:
- return greater
- case a < b:
- return less
}
- return equal
+ return cmp.Compare(a, b)
+}
+
+func mergeComparators(comps ...issueComparator) issueComparator {
+ return func(a, b *result.Issue) int {
+ for _, comp := range comps {
+ i := comp(a, b)
+ if i != equal {
+ return i
+ }
+ }
+
+ return equal
+ }
}
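
The refactor above replaces the linked-list comparator chain with plain func(a, b) int comparators that are merged in order and fed to slices.SortFunc. A self-contained sketch of the same composition over a reduced report type:

package main

import (
	"cmp"
	"fmt"
	"slices"
	"strings"
)

// report is a stand-in for result.Issue with just the sort keys.
type report struct {
	File   string
	Line   int
	Linter string
}

type comparator func(a, b *report) int

func byFile(a, b *report) int   { return strings.Compare(a.File, b.File) }
func byLine(a, b *report) int   { return cmp.Compare(a.Line, b.Line) }
func byLinter(a, b *report) int { return strings.Compare(a.Linter, b.Linter) }

// merge chains comparators: the first non-equal result wins, as in mergeComparators.
func merge(comps ...comparator) comparator {
	return func(a, b *report) int {
		for _, c := range comps {
			if r := c(a, b); r != 0 {
				return r
			}
		}
		return 0
	}
}

func main() {
	reports := []report{
		{File: "b.go", Line: 10, Linter: "unused"},
		{File: "a.go", Line: 20, Linter: "govet"},
		{File: "a.go", Line: 20, Linter: "errcheck"},
	}

	comp := merge(byFile, byLine, byLinter)
	slices.SortFunc(reports, func(a, b report) int { return comp(&a, &b) })

	fmt.Println(reports) // [{a.go 20 errcheck} {a.go 20 govet} {b.go 10 unused}]
}
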
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/source_code.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/source_code.go
index 4a89fc73ed..3f20b2f560 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/source_code.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/source_code.go
@@ -8,6 +8,13 @@ import (
var _ Processor = (*SourceCode)(nil)
+// SourceCode modifies displayed information based on [result.Issue.GetLineRange()].
+//
+// This is used:
+// - to display the "UnderLinePointer".
+// - in some rare cases to display multiple lines instead of one (ex: `dupl`)
+//
+// It requires the use of [fsutils.LineCache] ([fsutils.FileCache]) to get the file information before the fixes are applied.
type SourceCode struct {
lineCache *fsutils.LineCache
log logutils.Log
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go
index 8e384e390b..7298421227 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go
@@ -1,7 +1,6 @@
package processors
import (
- "github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/result"
)
@@ -9,15 +8,16 @@ const uniqByLineLimit = 1
var _ Processor = (*UniqByLine)(nil)
+// UniqByLine filters reports to keep only one report per line of code.
type UniqByLine struct {
fileLineCounter fileLineCounter
- cfg *config.Config
+ enabled bool
}
-func NewUniqByLine(cfg *config.Config) *UniqByLine {
+func NewUniqByLine(enable bool) *UniqByLine {
return &UniqByLine{
fileLineCounter: fileLineCounter{},
- cfg: cfg,
+ enabled: enable,
}
}
@@ -26,22 +26,16 @@ func (*UniqByLine) Name() string {
}
func (p *UniqByLine) Process(issues []result.Issue) ([]result.Issue, error) {
- if !p.cfg.Output.UniqByLine {
+ if !p.enabled {
return issues, nil
}
- return filterIssues(issues, p.shouldPassIssue), nil
+ return filterIssuesUnsafe(issues, p.shouldPassIssue), nil
}
func (*UniqByLine) Finish() {}
func (p *UniqByLine) shouldPassIssue(issue *result.Issue) bool {
- if issue.Replacement != nil && p.cfg.Issues.NeedFix {
- // if issue will be auto-fixed we shouldn't collapse issues:
- // e.g. one line can contain 2 misspellings, they will be in 2 issues and misspell should fix both of them.
- return true
- }
-
if p.fileLineCounter.GetCount(issue) == uniqByLineLimit {
return false
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go b/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go
index d944dea2ea..95b16de9fc 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go
@@ -114,3 +114,25 @@ func (s *Stopwatch) TrackStage(name string, f func()) {
s.stages[name] += time.Since(startedAt)
s.mu.Unlock()
}
+
+func (s *Stopwatch) TrackStageErr(name string, f func() error) error {
+ startedAt := time.Now()
+ err := f()
+
+ s.mu.Lock()
+ s.stages[name] += time.Since(startedAt)
+ s.mu.Unlock()
+
+ return err
+}
+
+func TrackStage[T any](s *Stopwatch, name string, f func() (T, error)) (T, error) {
+ var result T
+ var err error
+
+ s.TrackStage(name, func() {
+ result, err = f()
+ })
+
+ return result, err
+}
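
The new generic TrackStage above adapts a func() (T, error) to the existing func()-based tracker by capturing the results in a closure. A runnable sketch of the pattern against a simplified stopwatch (not the timeutils implementation):

package main

import (
	"fmt"
	"time"
)

// stopwatch is a simplified stand-in for timeutils.Stopwatch (no locking).
type stopwatch struct {
	stages map[string]time.Duration
}

func (s *stopwatch) TrackStage(name string, f func()) {
	start := time.Now()
	f()
	s.stages[name] += time.Since(start)
}

// trackStage mirrors the generic helper: it times a func() (T, error) by
// capturing its results inside the func() passed to TrackStage.
func trackStage[T any](s *stopwatch, name string, f func() (T, error)) (T, error) {
	var result T
	var err error

	s.TrackStage(name, func() {
		result, err = f()
	})

	return result, err
}

func main() {
	sw := &stopwatch{stages: map[string]time.Duration{}}

	n, err := trackStage(sw, "load", func() (int, error) {
		time.Sleep(10 * time.Millisecond)
		return 42, nil
	})

	fmt.Println(n, err, sw.stages["load"] > 0) // 42 <nil> true
}
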
diff --git a/vendor/github.com/golangci/modinfo/.gitignore b/vendor/github.com/golangci/modinfo/.gitignore
deleted file mode 100644
index 9f11b755a1..0000000000
--- a/vendor/github.com/golangci/modinfo/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-.idea/
diff --git a/vendor/github.com/golangci/modinfo/.golangci.yml b/vendor/github.com/golangci/modinfo/.golangci.yml
deleted file mode 100644
index 9698182f2a..0000000000
--- a/vendor/github.com/golangci/modinfo/.golangci.yml
+++ /dev/null
@@ -1,157 +0,0 @@
-run:
- timeout: 7m
-
-linters-settings:
- govet:
- enable:
- - shadow
- gocyclo:
- min-complexity: 12
- goconst:
- min-len: 3
- min-occurrences: 3
- funlen:
- lines: -1
- statements: 50
- misspell:
- locale: US
- depguard:
- rules:
- main:
- deny:
- - pkg: "github.com/instana/testify"
- desc: not allowed
- - pkg: "github.com/pkg/errors"
- desc: Should be replaced by standard lib errors package
- tagalign:
- align: false
- order:
- - xml
- - json
- - yaml
- - yml
- - toml
- - mapstructure
- - url
- godox:
- keywords:
- - FIXME
- gocritic:
- enabled-tags:
- - diagnostic
- - style
- - performance
- disabled-checks:
- - paramTypeCombine # already handle by gofumpt.extra-rules
- - whyNoLint # already handle by nonolint
- - unnamedResult
- - hugeParam
- - sloppyReassign
- - rangeValCopy
- - octalLiteral
- - ptrToRefParam
- - appendAssign
- - ruleguard
- - httpNoBody
- - exposedSyncMutex
- revive:
- rules:
- - name: struct-tag
- - name: blank-imports
- - name: context-as-argument
- - name: context-keys-type
- - name: dot-imports
- - name: error-return
- - name: error-strings
- - name: error-naming
- - name: exported
- disabled: true
- - name: if-return
- - name: increment-decrement
- - name: var-naming
- - name: var-declaration
- - name: package-comments
- disabled: true
- - name: range
- - name: receiver-naming
- - name: time-naming
- - name: unexported-return
- - name: indent-error-flow
- - name: errorf
- - name: empty-block
- - name: superfluous-else
- - name: unused-parameter
- disabled: true
- - name: unreachable-code
- - name: redefines-builtin-id
-
- tagliatelle:
- case:
- rules:
- json: pascal
- yaml: camel
- xml: camel
- header: header
- mapstructure: camel
- env: upperSnake
- envconfig: upperSnake
-
-linters:
- enable-all: true
- disable:
- - deadcode # deprecated
- - exhaustivestruct # deprecated
- - golint # deprecated
- - ifshort # deprecated
- - interfacer # deprecated
- - maligned # deprecated
- - nosnakecase # deprecated
- - scopelint # deprecated
- - structcheck # deprecated
- - varcheck # deprecated
- - cyclop # duplicate of gocyclo
- - sqlclosecheck # not relevant (SQL)
- - rowserrcheck # not relevant (SQL)
- - execinquery # not relevant (SQL)
- - lll
- - gosec
- - dupl # not relevant
- - prealloc # too many false-positive
- - bodyclose # too many false-positive
- - gomnd
- - testpackage # not relevant
- - tparallel # not relevant
- - paralleltest # not relevant
- - nestif # too many false-positive
- - wrapcheck
- - goerr113 # not relevant
- - nlreturn # not relevant
- - wsl # not relevant
- - exhaustive # not relevant
- - exhaustruct # not relevant
- - makezero # not relevant
- - forbidigo
- - varnamelen # not relevant
- - nilnil # not relevant
- - ireturn # not relevant
- - contextcheck # too many false-positive
- - tenv # we already have a test "framework" to handle env vars
- - noctx
- - errchkjson
- - nonamedreturns
- - gosmopolitan # not relevant
- - gochecknoglobals
-
-issues:
- exclude-use-default: false
- max-issues-per-linter: 0
- max-same-issues: 0
- exclude:
- - 'Error return value of .((os\.)?std(out|err)\..*|.*Close|.*Flush|os\.Remove(All)?|.*printf?|os\.(Un)?Setenv). is not checked'
- - 'ST1000: at least one file in a package should have a package comment'
- exclude-rules:
- - path: (.+)_test.go
- linters:
- - funlen
- - goconst
- - maintidx
diff --git a/vendor/github.com/golangci/modinfo/LICENSE b/vendor/github.com/golangci/modinfo/LICENSE
deleted file mode 100644
index f288702d2f..0000000000
--- a/vendor/github.com/golangci/modinfo/LICENSE
+++ /dev/null
@@ -1,674 +0,0 @@
- GNU GENERAL PUBLIC LICENSE
- Version 3, 29 June 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc.
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The GNU General Public License is a free, copyleft license for
-software and other kinds of works.
-
- The licenses for most software and other practical works are designed
-to take away your freedom to share and change the works. By contrast,
-the GNU General Public License is intended to guarantee your freedom to
-share and change all versions of a program--to make sure it remains free
-software for all its users. We, the Free Software Foundation, use the
-GNU General Public License for most of our software; it applies also to
-any other work released this way by its authors. You can apply it to
-your programs, too.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
-
- To protect your rights, we need to prevent others from denying you
-these rights or asking you to surrender the rights. Therefore, you have
-certain responsibilities if you distribute copies of the software, or if
-you modify it: responsibilities to respect the freedom of others.
-
- For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must pass on to the recipients the same
-freedoms that you received. You must make sure that they, too, receive
-or can get the source code. And you must show them these terms so they
-know their rights.
-
- Developers that use the GNU GPL protect your rights with two steps:
-(1) assert copyright on the software, and (2) offer you this License
-giving you legal permission to copy, distribute and/or modify it.
-
- For the developers' and authors' protection, the GPL clearly explains
-that there is no warranty for this free software. For both users' and
-authors' sake, the GPL requires that modified versions be marked as
-changed, so that their problems will not be attributed erroneously to
-authors of previous versions.
-
- Some devices are designed to deny users access to install or run
-modified versions of the software inside them, although the manufacturer
-can do so. This is fundamentally incompatible with the aim of
-protecting users' freedom to change the software. The systematic
-pattern of such abuse occurs in the area of products for individuals to
-use, which is precisely where it is most unacceptable. Therefore, we
-have designed this version of the GPL to prohibit the practice for those
-products. If such problems arise substantially in other domains, we
-stand ready to extend this provision to those domains in future versions
-of the GPL, as needed to protect the freedom of users.
-
- Finally, every program is threatened constantly by software patents.
-States should not allow patents to restrict development and use of
-software on general-purpose computers, but in those that do, we wish to
-avoid the special danger that patents applied to a free program could
-make it effectively proprietary. To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- TERMS AND CONDITIONS
-
- 0. Definitions.
-
- "This License" refers to version 3 of the GNU General Public License.
-
- "Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
- "The Program" refers to any copyrightable work licensed under this
-License. Each licensee is addressed as "you". "Licensees" and
-"recipients" may be individuals or organizations.
-
- To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy. The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
- A "covered work" means either the unmodified Program or a work based
-on the Program.
-
- To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy. Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
- To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies. Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
- An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License. If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
- 1. Source Code.
-
- The "source code" for a work means the preferred form of the work
-for making modifications to it. "Object code" means any non-source
-form of a work.
-
- A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
- The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form. A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
- The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities. However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work. For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
- The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
- The Corresponding Source for a work in source code form is that
-same work.
-
- 2. Basic Permissions.
-
- All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met. This License explicitly affirms your unlimited
-permission to run the unmodified Program. The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work. This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
- You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force. You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright. Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
- Conveying under any other circumstances is permitted solely under
-the conditions stated below. Sublicensing is not allowed; section 10
-makes it unnecessary.
-
- 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
- No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
- When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
- 4. Conveying Verbatim Copies.
-
- You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
- You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
- 5. Conveying Modified Source Versions.
-
- You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
- a) The work must carry prominent notices stating that you modified
- it, and giving a relevant date.
-
- b) The work must carry prominent notices stating that it is
- released under this License and any conditions added under section
- 7. This requirement modifies the requirement in section 4 to
- "keep intact all notices".
-
- c) You must license the entire work, as a whole, under this
- License to anyone who comes into possession of a copy. This
- License will therefore apply, along with any applicable section 7
- additional terms, to the whole of the work, and all its parts,
- regardless of how they are packaged. This License gives no
- permission to license the work in any other way, but it does not
- invalidate such permission if you have separately received it.
-
- d) If the work has interactive user interfaces, each must display
- Appropriate Legal Notices; however, if the Program has interactive
- interfaces that do not display Appropriate Legal Notices, your
- work need not make them do so.
-
- A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
- 6. Conveying Non-Source Forms.
-
- You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
- a) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by the
- Corresponding Source fixed on a durable physical medium
- customarily used for software interchange.
-
- b) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by a
- written offer, valid for at least three years and valid for as
- long as you offer spare parts or customer support for that product
- model, to give anyone who possesses the object code either (1) a
- copy of the Corresponding Source for all the software in the
- product that is covered by this License, on a durable physical
- medium customarily used for software interchange, for a price no
- more than your reasonable cost of physically performing this
- conveying of source, or (2) access to copy the
- Corresponding Source from a network server at no charge.
-
- c) Convey individual copies of the object code with a copy of the
- written offer to provide the Corresponding Source. This
- alternative is allowed only occasionally and noncommercially, and
- only if you received the object code with such an offer, in accord
- with subsection 6b.
-
- d) Convey the object code by offering access from a designated
- place (gratis or for a charge), and offer equivalent access to the
- Corresponding Source in the same way through the same place at no
- further charge. You need not require recipients to copy the
- Corresponding Source along with the object code. If the place to
- copy the object code is a network server, the Corresponding Source
- may be on a different server (operated by you or a third party)
- that supports equivalent copying facilities, provided you maintain
- clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is
- available for as long as needed to satisfy these requirements.
-
- e) Convey the object code using peer-to-peer transmission, provided
- you inform other peers where the object code and Corresponding
- Source of the work are being offered to the general public at no
- charge under subsection 6d.
-
- A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
- A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling. In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage. For a particular
-product received by a particular user, "normally used" refers to a
-typical or common use of that class of product, regardless of the status
-of the particular user or of the way in which the particular user
-actually uses, or expects or is expected to use, the product. A product
-is a consumer product regardless of whether the product has substantial
-commercial, industrial or non-consumer uses, unless such uses represent
-the only significant mode of use of the product.
-
- "Installation Information" for a User Product means any methods,
-procedures, authorization keys, or other information required to install
-and execute modified versions of a covered work in that User Product from
-a modified version of its Corresponding Source. The information must
-suffice to ensure that the continued functioning of the modified object
-code is in no case prevented or interfered with solely because
-modification has been made.
-
- If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as
-part of a transaction in which the right of possession and use of the
-User Product is transferred to the recipient in perpetuity or for a
-fixed term (regardless of how the transaction is characterized), the
-Corresponding Source conveyed under this section must be accompanied
-by the Installation Information. But this requirement does not apply
-if neither you nor any third party retains the ability to install
-modified object code on the User Product (for example, the work has
-been installed in ROM).
-
- The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates
-for a work that has been modified or installed by the recipient, or for
-the User Product in which it has been modified or installed. Access to a
-network may be denied when the modification itself materially and
-adversely affects the operation of the network or violates the rules and
-protocols for communication across the network.
-
- Corresponding Source conveyed, and Installation Information provided,
-in accord with this section must be in a format that is publicly
-documented (and with an implementation available to the public in
-source code form), and must require no special password or key for
-unpacking, reading or copying.
-
- 7. Additional Terms.
-
- "Additional permissions" are terms that supplement the terms of this
-License by making exceptions from one or more of its conditions.
-Additional permissions that are applicable to the entire Program shall
-be treated as though they were included in this License, to the extent
-that they are valid under applicable law. If additional permissions
-apply only to part of the Program, that part may be used separately
-under those permissions, but the entire Program remains governed by
-this License without regard to the additional permissions.
-
- When you convey a copy of a covered work, you may at your option
-remove any additional permissions from that copy, or from any part of
-it. (Additional permissions may be written to require their own
-removal in certain cases when you modify the work.) You may place
-additional permissions on material, added by you to a covered work,
-for which you have or can give appropriate copyright permission.
-
- Notwithstanding any other provision of this License, for material you
-add to a covered work, you may (if authorized by the copyright holders of
-that material) supplement the terms of this License with terms:
-
- a) Disclaiming warranty or limiting liability differently from the
- terms of sections 15 and 16 of this License; or
-
- b) Requiring preservation of specified reasonable legal notices or
- author attributions in that material or in the Appropriate Legal
- Notices displayed by works containing it; or
-
- c) Prohibiting misrepresentation of the origin of that material, or
- requiring that modified versions of such material be marked in
- reasonable ways as different from the original version; or
-
- d) Limiting the use for publicity purposes of names of licensors or
- authors of the material; or
-
- e) Declining to grant rights under trademark law for use of some
- trade names, trademarks, or service marks; or
-
- f) Requiring indemnification of licensors and authors of that
- material by anyone who conveys the material (or modified versions of
- it) with contractual assumptions of liability to the recipient, for
- any liability that these contractual assumptions directly impose on
- those licensors and authors.
-
- All other non-permissive additional terms are considered "further
-restrictions" within the meaning of section 10. If the Program as you
-received it, or any part of it, contains a notice stating that it is
-governed by this License along with a term that is a further
-restriction, you may remove that term. If a license document contains
-a further restriction but permits relicensing or conveying under this
-License, you may add to a covered work material governed by the terms
-of that license document, provided that the further restriction does
-not survive such relicensing or conveying.
-
- If you add terms to a covered work in accord with this section, you
-must place, in the relevant source files, a statement of the
-additional terms that apply to those files, or a notice indicating
-where to find the applicable terms.
-
- Additional terms, permissive or non-permissive, may be stated in the
-form of a separately written license, or stated as exceptions;
-the above requirements apply either way.
-
- 8. Termination.
-
- You may not propagate or modify a covered work except as expressly
-provided under this License. Any attempt otherwise to propagate or
-modify it is void, and will automatically terminate your rights under
-this License (including any patent licenses granted under the third
-paragraph of section 11).
-
- However, if you cease all violation of this License, then your
-license from a particular copyright holder is reinstated (a)
-provisionally, unless and until the copyright holder explicitly and
-finally terminates your license, and (b) permanently, if the copyright
-holder fails to notify you of the violation by some reasonable means
-prior to 60 days after the cessation.
-
- Moreover, your license from a particular copyright holder is
-reinstated permanently if the copyright holder notifies you of the
-violation by some reasonable means, this is the first time you have
-received notice of violation of this License (for any work) from that
-copyright holder, and you cure the violation prior to 30 days after
-your receipt of the notice.
-
- Termination of your rights under this section does not terminate the
-licenses of parties who have received copies or rights from you under
-this License. If your rights have been terminated and not permanently
-reinstated, you do not qualify to receive new licenses for the same
-material under section 10.
-
- 9. Acceptance Not Required for Having Copies.
-
- You are not required to accept this License in order to receive or
-run a copy of the Program. Ancillary propagation of a covered work
-occurring solely as a consequence of using peer-to-peer transmission
-to receive a copy likewise does not require acceptance. However,
-nothing other than this License grants you permission to propagate or
-modify any covered work. These actions infringe copyright if you do
-not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
- 10. Automatic Licensing of Downstream Recipients.
-
- Each time you convey a covered work, the recipient automatically
-receives a license from the original licensors, to run, modify and
-propagate that work, subject to this License. You are not responsible
-for enforcing compliance by third parties with this License.
-
- An "entity transaction" is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered
-work results from an entity transaction, each party to that
-transaction who receives a copy of the work also receives whatever
-licenses to the work the party's predecessor in interest had or could
-give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if
-the predecessor has it or can get it with reasonable efforts.
-
- You may not impose any further restrictions on the exercise of the
-rights granted or affirmed under this License. For example, you may
-not impose a license fee, royalty, or other charge for exercise of
-rights granted under this License, and you may not initiate litigation
-(including a cross-claim or counterclaim in a lawsuit) alleging that
-any patent claim is infringed by making, using, selling, offering for
-sale, or importing the Program or any portion of it.
-
- 11. Patents.
-
- A "contributor" is a copyright holder who authorizes use under this
-License of the Program or a work on which the Program is based. The
-work thus licensed is called the contributor's "contributor version".
-
- A contributor's "essential patent claims" are all patent claims
-owned or controlled by the contributor, whether already acquired or
-hereafter acquired, that would be infringed by some manner, permitted
-by this License, of making, using, or selling its contributor version,
-but do not include claims that would be infringed only as a
-consequence of further modification of the contributor version. For
-purposes of this definition, "control" includes the right to grant
-patent sublicenses in a manner consistent with the requirements of
-this License.
-
- Each contributor grants you a non-exclusive, worldwide, royalty-free
-patent license under the contributor's essential patent claims, to
-make, use, sell, offer for sale, import and otherwise run, modify and
-propagate the contents of its contributor version.
-
- In the following three paragraphs, a "patent license" is any express
-agreement or commitment, however denominated, not to enforce a patent
-(such as an express permission to practice a patent or covenant not to
-sue for patent infringement). To "grant" such a patent license to a
-party means to make such an agreement or commitment not to enforce a
-patent against the party.
-
- If you convey a covered work, knowingly relying on a patent license,
-and the Corresponding Source of the work is not available for anyone
-to copy, free of charge and under the terms of this License, through a
-publicly available network server or other readily accessible means,
-then you must either (1) cause the Corresponding Source to be so
-available, or (2) arrange to deprive yourself of the benefit of the
-patent license for this particular work, or (3) arrange, in a manner
-consistent with the requirements of this License, to extend the patent
-license to downstream recipients. "Knowingly relying" means you have
-actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work
-in a country, would infringe one or more identifiable patents in that
-country that you have reason to believe are valid.
-
- If, pursuant to or in connection with a single transaction or
-arrangement, you convey, or propagate by procuring conveyance of, a
-covered work, and grant a patent license to some of the parties
-receiving the covered work authorizing them to use, propagate, modify
-or convey a specific copy of the covered work, then the patent license
-you grant is automatically extended to all recipients of the covered
-work and works based on it.
-
- A patent license is "discriminatory" if it does not include within
-the scope of its coverage, prohibits the exercise of, or is
-conditioned on the non-exercise of one or more of the rights that are
-specifically granted under this License. You may not convey a covered
-work if you are a party to an arrangement with a third party that is
-in the business of distributing software, under which you make payment
-to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory
-patent license (a) in connection with copies of the covered work
-conveyed by you (or copies made from those copies), or (b) primarily
-for and in connection with specific products or compilations that
-contain the covered work, unless you entered into that arrangement,
-or that patent license was granted, prior to 28 March 2007.
-
- Nothing in this License shall be construed as excluding or limiting
-any implied license or other defenses to infringement that may
-otherwise be available to you under applicable patent law.
-
- 12. No Surrender of Others' Freedom.
-
- If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot convey a
-covered work so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you may
-not convey it at all. For example, if you agree to terms that obligate you
-to collect a royalty for further conveying from those to whom you convey
-the Program, the only way you could satisfy both those terms and this
-License would be to refrain entirely from conveying the Program.
-
- 13. Use with the GNU Affero General Public License.
-
- Notwithstanding any other provision of this License, you have
-permission to link or combine any covered work with a work licensed
-under version 3 of the GNU Affero General Public License into a single
-combined work, and to convey the resulting work. The terms of this
-License will continue to apply to the part which is the covered work,
-but the special requirements of the GNU Affero General Public License,
-section 13, concerning interaction through a network will apply to the
-combination as such.
-
- 14. Revised Versions of this License.
-
- The Free Software Foundation may publish revised and/or new versions of
-the GNU General Public License from time to time. Such new versions will
-be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Program specifies that a certain numbered version of the GNU General
-Public License "or any later version" applies to it, you have the
-option of following the terms and conditions either of that numbered
-version or of any later version published by the Free Software
-Foundation. If the Program does not specify a version number of the
-GNU General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
- If the Program specifies that a proxy can decide which future
-versions of the GNU General Public License can be used, that proxy's
-public statement of acceptance of a version permanently authorizes you
-to choose that version for the Program.
-
- Later license versions may give you additional or different
-permissions. However, no additional obligations are imposed on any
-author or copyright holder as a result of your choosing to follow a
-later version.
-
- 15. Disclaimer of Warranty.
-
- THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
-HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
- 16. Limitation of Liability.
-
- IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
- 17. Interpretation of Sections 15 and 16.
-
- If the disclaimer of warranty and limitation of liability provided
-above cannot be given local legal effect according to their terms,
-reviewing courts shall apply local law that most closely approximates
-an absolute waiver of all civil liability in connection with the
-Program, unless a warranty or assumption of liability accompanies a
-copy of the Program in return for a fee.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
- <one line to give the program's name and a brief idea of what it does.>
- Copyright (C) <year> <name of author>
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
- If the program does terminal interaction, make it output a short
-notice like this when it starts in an interactive mode:
-
- <program> Copyright (C) <year> <name of author>
- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, your program's commands
-might be different; for a GUI interface, you would use an "about box".
-
- You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU GPL, see
-<https://www.gnu.org/licenses/>.
-
- The GNU General Public License does not permit incorporating your program
-into proprietary programs. If your program is a subroutine library, you
-may consider it more useful to permit linking proprietary applications with
-the library. If this is what you want to do, use the GNU Lesser General
-Public License instead of this License. But first, please read
-<https://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/vendor/github.com/golangci/modinfo/Makefile b/vendor/github.com/golangci/modinfo/Makefile
deleted file mode 100644
index df91018f11..0000000000
--- a/vendor/github.com/golangci/modinfo/Makefile
+++ /dev/null
@@ -1,12 +0,0 @@
-.PHONY: clean check test
-
-default: clean check test
-
-clean:
- rm -rf dist/ cover.out
-
-test: clean
- go test -v -cover ./...
-
-check:
- golangci-lint run
diff --git a/vendor/github.com/golangci/modinfo/module.go b/vendor/github.com/golangci/modinfo/module.go
deleted file mode 100644
index ff0b21b9b8..0000000000
--- a/vendor/github.com/golangci/modinfo/module.go
+++ /dev/null
@@ -1,157 +0,0 @@
-package modinfo
-
-import (
- "bytes"
- "encoding/json"
- "errors"
- "fmt"
- "os"
- "os/exec"
- "path/filepath"
- "reflect"
- "sort"
- "strings"
- "sync"
-
- "golang.org/x/mod/modfile"
- "golang.org/x/tools/go/analysis"
-)
-
-type ModInfo struct {
- Path string `json:"Path"`
- Dir string `json:"Dir"`
- GoMod string `json:"GoMod"`
- GoVersion string `json:"GoVersion"`
- Main bool `json:"Main"`
-}
-
-var (
- once sync.Once
- information []ModInfo
- errInfo error
-)
-
-var Analyzer = &analysis.Analyzer{
- Name: "modinfo",
- Doc: "Module information",
- URL: "https://github.com/golangci/modinfo",
- Run: runOnce,
- ResultType: reflect.TypeOf([]ModInfo(nil)),
-}
-
-func runOnce(pass *analysis.Pass) (any, error) {
- _, ok := os.LookupEnv("MODINFO_DEBUG_DISABLE_ONCE")
- if ok {
- return GetModuleInfo(pass)
- }
-
- once.Do(func() {
- information, errInfo = GetModuleInfo(pass)
- })
-
- return information, errInfo
-}
-
-// GetModuleInfo gets modules information.
-// Always returns 1 element except for workspace (returns all the modules of the workspace).
-// Based on `go list -m -json` behavior.
-func GetModuleInfo(pass *analysis.Pass) ([]ModInfo, error) {
- // https://github.com/golang/go/issues/44753#issuecomment-790089020
- cmd := exec.Command("go", "list", "-m", "-json")
- for _, file := range pass.Files {
- name := pass.Fset.File(file.Pos()).Name()
- if filepath.Ext(name) != ".go" {
- continue
- }
-
- cmd.Dir = filepath.Dir(name)
- break
- }
-
- out, err := cmd.Output()
- if err != nil {
- return nil, fmt.Errorf("command go list: %w: %s", err, string(out))
- }
-
- var infos []ModInfo
-
- for dec := json.NewDecoder(bytes.NewBuffer(out)); dec.More(); {
- var v ModInfo
- if err := dec.Decode(&v); err != nil {
- return nil, fmt.Errorf("unmarshaling error: %w: %s", err, string(out))
- }
-
- if v.GoMod == "" {
- return nil, errors.New("working directory is not part of a module")
- }
-
- if !v.Main || v.Dir == "" {
- continue
- }
-
- infos = append(infos, v)
- }
-
- if len(infos) == 0 {
- return nil, errors.New("go.mod file not found")
- }
-
- sort.Slice(infos, func(i, j int) bool {
- return len(infos[i].Path) > len(infos[j].Path)
- })
-
- return infos, nil
-}
-
-// FindModuleFromPass finds the module related to the files of the pass.
-func FindModuleFromPass(pass *analysis.Pass) (ModInfo, error) {
- infos, ok := pass.ResultOf[Analyzer].([]ModInfo)
- if !ok {
- return ModInfo{}, errors.New("no modinfo analyzer result")
- }
-
- var name string
- for _, file := range pass.Files {
- f := pass.Fset.File(file.Pos()).Name()
- if filepath.Ext(f) != ".go" {
- continue
- }
-
- name = f
- break
- }
-
- // no Go file found in analysis pass
- if name == "" {
- name, _ = os.Getwd()
- }
-
- for _, info := range infos {
- if !strings.HasPrefix(name, info.Dir) {
- continue
- }
- return info, nil
- }
-
- return ModInfo{}, errors.New("module information not found")
-}
-
-// ReadModuleFileFromPass read the `go.mod` file from the pass result.
-func ReadModuleFileFromPass(pass *analysis.Pass) (*modfile.File, error) {
- info, err := FindModuleFromPass(pass)
- if err != nil {
- return nil, err
- }
-
- return ReadModuleFile(info)
-}
-
-// ReadModuleFile read the `go.mod` file.
-func ReadModuleFile(info ModInfo) (*modfile.File, error) {
- raw, err := os.ReadFile(info.GoMod)
- if err != nil {
- return nil, fmt.Errorf("reading go.mod file: %w", err)
- }
-
- return modfile.Parse("go.mod", raw, nil)
-}
diff --git a/vendor/github.com/golangci/modinfo/readme.md b/vendor/github.com/golangci/modinfo/readme.md
deleted file mode 100644
index 2175de8eb4..0000000000
--- a/vendor/github.com/golangci/modinfo/readme.md
+++ /dev/null
@@ -1,73 +0,0 @@
-# modinfo
-
-This module contains:
-- an analyzer that returns module information.
-- methods to find and read `go.mod` file
-
-## Examples
-
-```go
-package main
-
-import (
- "fmt"
-
- "github.com/golangci/modinfo"
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/analysis/passes/inspect"
-)
-
-var Analyzer = &analysis.Analyzer{
- Name: "example",
- Doc: "Example",
- Run: func(pass *analysis.Pass) (interface{}, error) {
- file, err := modinfo.ReadModuleFileFromPass(pass)
- if err != nil {
- return nil, err
- }
-
- fmt.Println("go.mod", file)
-
- // TODO
-
- return nil, nil
- },
- Requires: []*analysis.Analyzer{
- inspect.Analyzer,
- modinfo.Analyzer,
- },
-}
-```
-
-```go
-package main
-
-import (
- "fmt"
-
- "github.com/golangci/modinfo"
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/analysis/passes/inspect"
-)
-
-var Analyzer = &analysis.Analyzer{
- Name: "example",
- Doc: "Example",
- Run: func(pass *analysis.Pass) (interface{}, error) {
- info, err := modinfo.FindModuleFromPass(pass)
- if err != nil {
- return nil, err
- }
-
- fmt.Println("Module", info.Dir)
-
- // TODO
-
- return nil, nil
- },
- Requires: []*analysis.Analyzer{
- inspect.Analyzer,
- modinfo.Analyzer,
- },
-}
-```
diff --git a/vendor/github.com/golangci/revgrep/.golangci.yml b/vendor/github.com/golangci/revgrep/.golangci.yml
index 5239720ac6..f08807b12b 100644
--- a/vendor/github.com/golangci/revgrep/.golangci.yml
+++ b/vendor/github.com/golangci/revgrep/.golangci.yml
@@ -1,5 +1,28 @@
-run:
- timeout: 2m
+linters:
+ enable-all: true
+ disable:
+ - exportloopref # deprecated
+ - cyclop # duplicate of gocyclo
+ - sqlclosecheck # not relevant (SQL)
+ - rowserrcheck # not relevant (SQL)
+ - dupl
+ - lll
+ - nestif
+ - mnd
+ - err113
+ - nlreturn
+ - wsl
+ - exhaustive
+ - exhaustruct
+ - tparallel
+ - testpackage
+ - paralleltest
+ - forcetypeassert
+ - varnamelen
+ - prealloc # false-positives
+ - nonamedreturns
+ - nilerr
+ - depguard
linters-settings:
govet:
@@ -23,44 +46,9 @@ linters-settings:
godox:
keywords:
- FIXME
-
-linters:
- enable-all: true
- disable:
- - deadcode # deprecated
- - exhaustivestruct # deprecated
- - golint # deprecated
- - ifshort # deprecated
- - interfacer # deprecated
- - maligned # deprecated
- - nosnakecase # deprecated
- - scopelint # deprecated
- - structcheck # deprecated
- - varcheck # deprecated
- - cyclop # duplicate of gocyclo
- - sqlclosecheck # not relevant (SQL)
- - rowserrcheck # not relevant (SQL)
- - execinquery # not relevant (SQL)
- - dupl
- - lll
- - nestif
- - gomnd
- - goerr113
- - nlreturn
- - wsl
- - exhaustive
- - exhaustruct
- - tparallel
- - testpackage
- - paralleltest
- - ifshort
- - forcetypeassert
- - varnamelen
- - prealloc # false-positives
- - nosnakecase
- - nonamedreturns
- - nilerr
- - depguard
+ gosec:
+ excludes:
+ - G115 # integer overflow conversion
issues:
exclude-use-default: false
@@ -78,3 +66,6 @@ issues:
- path: cmd/revgrep/main.go
linters:
- forbidigo
+
+run:
+ timeout: 2m
diff --git a/vendor/github.com/golangci/revgrep/README.md b/vendor/github.com/golangci/revgrep/README.md
index 97f25ffb39..c776cb4519 100644
--- a/vendor/github.com/golangci/revgrep/README.md
+++ b/vendor/github.com/golangci/revgrep/README.md
@@ -1,14 +1,14 @@
-# Overview
+## Overview
`revgrep` is a CLI tool used to filter static analysis tools to only lines changed based on a commit reference.
-# Install
+## Install
```bash
-go get -u github.com/golangci/revgrep/...
+go install github.com/golangci/revgrep/cmd/revgrep@latest
```
-# Usage
+## Usage
In the scenario below, a change was made causing a warning in `go vet` on line 5, but `go vet` will show all warnings.
Using `revgrep`, you can show only warnings for lines of code that have been changed (in this case, hiding line 6).
@@ -42,7 +42,7 @@ from-rev filters issues to lines changed since (and including) this revision
Regexp to match path, line number, optional column number, and message
```
-# Other Examples
+## Other Examples
Issues between branches:
```bash
diff --git a/vendor/github.com/golangci/revgrep/issue.go b/vendor/github.com/golangci/revgrep/issue.go
new file mode 100644
index 0000000000..694d416390
--- /dev/null
+++ b/vendor/github.com/golangci/revgrep/issue.go
@@ -0,0 +1,37 @@
+package revgrep
+
+// Issue contains metadata about an issue found.
+type Issue struct {
+ // File is the name of the file as it appeared from the patch.
+ File string
+ // LineNo is the line number of the file.
+ LineNo int
+ // ColNo is the column number or 0 if none could be parsed.
+ ColNo int
+ // HunkPos is position from file's first @@, for new files this will be the line number.
+ // See also: https://developer.github.com/v3/pulls/comments/#create-a-comment
+ HunkPos int
+ // Issue text as it appeared from the tool.
+ Issue string
+ // Message is the issue without file name, line number and column number.
+ Message string
+}
+
+// InputIssue represents issue found by some linter.
+type InputIssue interface {
+ FilePath() string
+ Line() int
+}
+
+type simpleInputIssue struct {
+ filePath string
+ lineNumber int
+}
+
+func (i simpleInputIssue) FilePath() string {
+ return i.filePath
+}
+
+func (i simpleInputIssue) Line() int {
+ return i.lineNumber
+}
diff --git a/vendor/github.com/golangci/revgrep/patch.go b/vendor/github.com/golangci/revgrep/patch.go
new file mode 100644
index 0000000000..81a2acd7e5
--- /dev/null
+++ b/vendor/github.com/golangci/revgrep/patch.go
@@ -0,0 +1,195 @@
+package revgrep
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "os/exec"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+type patchOption struct {
+ revisionFrom string
+ revisionTo string
+ mergeBase string
+}
+
+// GitPatch returns a patch from a git repository.
+// If no git repository was found and no errors occurred, nil is returned,
+// else an error is returned revisionFrom and revisionTo defines the git diff parameters,
+// if left blank and there are unstaged changes or untracked files,
+// only those will be returned else only check changes since HEAD~.
+// If revisionFrom is set but revisionTo is not,
+// untracked files will be included, to exclude untracked files set revisionTo to HEAD~.
+// It's incorrect to specify revisionTo without a revisionFrom.
+func GitPatch(ctx context.Context, option patchOption) (io.Reader, []string, error) {
+ // check if git repo exists
+ if err := exec.CommandContext(ctx, "git", "status", "--porcelain").Run(); err != nil {
+ // don't return an error, we assume the error is not repo exists
+ return nil, nil, nil
+ }
+
+ // make a patch for untracked files
+ ls, err := exec.CommandContext(ctx, "git", "ls-files", "--others", "--exclude-standard").CombinedOutput()
+ if err != nil {
+ return nil, nil, fmt.Errorf("error executing git ls-files: %w", err)
+ }
+
+ var newFiles []string
+ for _, file := range bytes.Split(ls, []byte{'\n'}) {
+ if len(file) == 0 || bytes.HasSuffix(file, []byte{'/'}) {
+ // ls-files was sometimes showing directories when they were ignored
+ // I couldn't create a test case for this as I couldn't reproduce correctly for the moment,
+ // just exclude files with trailing /
+ continue
+ }
+
+ newFiles = append(newFiles, string(file))
+ }
+
+ if option.mergeBase != "" {
+ var base string
+ base, err = getMergeBase(ctx, option.mergeBase)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ if base != "" {
+ option.revisionFrom = base
+ }
+ }
+
+ if option.revisionFrom != "" {
+ args := []string{option.revisionFrom}
+
+ if option.revisionTo != "" {
+ args = append(args, option.revisionTo)
+ }
+
+ args = append(args, "--")
+
+ patch, errDiff := gitDiff(ctx, args...)
+ if errDiff != nil {
+ return nil, nil, errDiff
+ }
+
+ if option.revisionTo == "" {
+ return patch, newFiles, nil
+ }
+
+ return patch, nil, nil
+ }
+
+ // make a patch for unstaged changes
+ patch, err := gitDiff(ctx, "--")
+ if err != nil {
+ return nil, nil, err
+ }
+
+ unstaged := patch.Len() > 0
+
+ // If there's unstaged changes OR untracked changes (or both),
+ // then this is a suitable patch
+ if unstaged || newFiles != nil {
+ return patch, newFiles, nil
+ }
+
+ // check for changes in recent commit
+ patch, err = gitDiff(ctx, "HEAD~", "--")
+ if err != nil {
+ return nil, nil, err
+ }
+
+ return patch, nil, nil
+}
+
+func gitDiff(ctx context.Context, extraArgs ...string) (*bytes.Buffer, error) {
+ cmd := exec.CommandContext(ctx, "git", "diff", "--color=never", "--no-ext-diff")
+
+ if isSupportedByGit(ctx, 2, 41, 0) {
+ cmd.Args = append(cmd.Args, "--default-prefix")
+ }
+
+ cmd.Args = append(cmd.Args, "--relative")
+ cmd.Args = append(cmd.Args, extraArgs...)
+
+ patch := new(bytes.Buffer)
+ errBuff := new(bytes.Buffer)
+
+ cmd.Stdout = patch
+ cmd.Stderr = errBuff
+
+ if err := cmd.Run(); err != nil {
+ return nil, fmt.Errorf("error executing %q: %w: %w", strings.Join(cmd.Args, " "), err, readAsError(errBuff))
+ }
+
+ return patch, nil
+}
+
+func readAsError(buff io.Reader) error {
+ output, err := io.ReadAll(buff)
+ if err != nil {
+ return fmt.Errorf("read stderr: %w", err)
+ }
+
+ return errors.New(string(output))
+}
+
+func isSupportedByGit(ctx context.Context, major, minor, patch int) bool {
+ output, err := exec.CommandContext(ctx, "git", "version").CombinedOutput()
+ if err != nil {
+ return false
+ }
+
+ parts := bytes.Split(bytes.TrimSpace(output), []byte(" "))
+ if len(parts) < 3 {
+ return false
+ }
+
+ v := string(parts[2])
+ if v == "" {
+ return false
+ }
+
+ vp := regexp.MustCompile(`^(\d+)\.(\d+)(?:\.(\d+))?.*$`).FindStringSubmatch(v)
+ if len(vp) < 4 {
+ return false
+ }
+
+ currentMajor, err := strconv.Atoi(vp[1])
+ if err != nil {
+ return false
+ }
+
+ currentMinor, err := strconv.Atoi(vp[2])
+ if err != nil {
+ return false
+ }
+
+ currentPatch, err := strconv.Atoi(vp[3])
+ if err != nil {
+ return false
+ }
+
+ return currentMajor*1_000_000_000+currentMinor*1_000_000+currentPatch*1_000 >= major*1_000_000_000+minor*1_000_000+patch*1_000
+}
+
+func getMergeBase(ctx context.Context, base string) (string, error) {
+ cmd := exec.CommandContext(ctx, "git", "merge-base", base, "HEAD")
+
+ patch := new(bytes.Buffer)
+ errBuff := new(bytes.Buffer)
+
+ cmd.Stdout = patch
+ cmd.Stderr = errBuff
+
+ if err := cmd.Run(); err != nil {
+ return "", fmt.Errorf("error executing %q: %w: %w", strings.Join(cmd.Args, " "), err, readAsError(errBuff))
+ }
+
+ return strings.TrimSpace(patch.String()), nil
+}
diff --git a/vendor/github.com/golangci/revgrep/revgrep.go b/vendor/github.com/golangci/revgrep/revgrep.go
index 1ef81b203a..ca4ac791c8 100644
--- a/vendor/github.com/golangci/revgrep/revgrep.go
+++ b/vendor/github.com/golangci/revgrep/revgrep.go
@@ -3,12 +3,11 @@ package revgrep
import (
"bufio"
- "bytes"
+ "context"
"errors"
"fmt"
"io"
"os"
- "os/exec"
"path/filepath"
"regexp"
"strconv"
@@ -30,96 +29,71 @@ type Checker struct {
Debug io.Writer
// RevisionFrom check revision starting at, leave blank for auto-detection ignored if patch is set.
RevisionFrom string
- // WholeFiles indicates that the user wishes to see all issues that comes up anywhere in any file that has been changed in this revision or patch.
- WholeFiles bool
// RevisionTo checks revision finishing at, leave blank for auto-detection ignored if patch is set.
RevisionTo string
+ // MergeBase checks revision starting at the best common ancestor, leave blank for auto-detection ignored if patch is set.
+ MergeBase string
+ // WholeFiles indicates that the user wishes to see all issues that comes up anywhere in any file that has been changed in this revision or patch.
+ WholeFiles bool
// Regexp to match path, line number, optional column number, and message.
Regexp string
// AbsPath is used to make an absolute path of an issue's filename to be relative in order to match patch file.
// If not set, current working directory is used.
AbsPath string
- // Calculated changes for next calls to IsNewIssue
+ // Calculated changes for next calls to [Checker.IsNewIssue]/[Checker.IsNew].
changes map[string][]pos
}
-// Issue contains metadata about an issue found.
-type Issue struct {
- // File is the name of the file as it appeared from the patch.
- File string
- // LineNo is the line number of the file.
- LineNo int
- // ColNo is the column number or 0 if none could be parsed.
- ColNo int
- // HunkPos is position from file's first @@, for new files this will be the line number.
- // See also: https://developer.github.com/v3/pulls/comments/#create-a-comment
- HunkPos int
- // Issue text as it appeared from the tool.
- Issue string
- // Message is the issue without file name, line number and column number.
- Message string
-}
-
-// InputIssue represents issue found by some linter.
-type InputIssue interface {
- FilePath() string
- Line() int
-}
-
-type simpleInputIssue struct {
- filePath string
- lineNumber int
-}
-
-type pos struct {
- lineNo int // line number
- hunkPos int // position relative to first @@ in file
-}
-
-func (i simpleInputIssue) FilePath() string {
- return i.filePath
-}
-
-func (i simpleInputIssue) Line() int {
- return i.lineNumber
-}
-
// Prepare extracts a patch and changed lines.
-func (c *Checker) Prepare() error {
- returnErr := c.preparePatch()
+//
+// WARNING: it should only be used before an explicit call to [Checker.IsNewIssue]/[Checker.IsNew].
+//
+// WARNING: only [Checker.Patch], [Checker.RevisionFrom], [Checker.RevisionTo], [Checker.WholeFiles] options are used,
+// the other options ([Checker.Regexp], [Checker.AbsPath]) are only used by [Checker.Check].
+func (c *Checker) Prepare(ctx context.Context) error {
+ err := c.loadPatch(ctx)
+
c.changes = c.linesChanged()
- return returnErr
+
+ return err
}
-// IsNewIssue checks whether issue found by linter is new: it was found in changed lines.
-func (c *Checker) IsNewIssue(i InputIssue) (hunkPos int, isNew bool) {
- fchanges, ok := c.changes[filepath.ToSlash(i.FilePath())]
- if !ok { // file wasn't changed
+// IsNew checks whether issue found by linter is new: it was found in changed lines.
+//
+// WARNING: it requires to call [Checker.Prepare] before call this method to load the changes from patch.
+func (c *Checker) IsNew(filePath string, line int) (hunkPos int, isNew bool) {
+ changes, ok := c.changes[filepath.ToSlash(filePath)]
+ if !ok {
+ // file wasn't changed
return 0, false
}
if c.WholeFiles {
- return i.Line(), true
+ return line, true
}
var (
fpos pos
changed bool
)
+
// found file, see if lines matched
- for _, pos := range fchanges {
- if pos.lineNo == i.Line() {
+ for _, pos := range changes {
+ if pos.lineNo == line {
fpos = pos
changed = true
+
break
}
}
- if changed || fchanges == nil {
+ if changed || changes == nil {
// either file changed or it's a new file
hunkPos := fpos.lineNo
- if changed { // existing file changed
+
+ // existing file changed
+ if changed {
hunkPos = fpos.hunkPos
}
@@ -129,7 +103,14 @@ func (c *Checker) IsNewIssue(i InputIssue) (hunkPos int, isNew bool) {
return 0, false
}
-// Check scans reader and writes any lines to writer that have been added in Checker.Patch.
+// IsNewIssue checks whether issue found by linter is new: it was found in changed lines.
+//
+// WARNING: it requires to call [Checker.Prepare] before call this method to load the changes from patch.
+func (c *Checker) IsNewIssue(i InputIssue) (hunkPos int, isNew bool) {
+ return c.IsNew(i.FilePath(), i.Line())
+}
+
+// Check scans reader and writes any lines to writer that have been added in [Checker.Patch].
//
// Returns the issues written to writer when no error occurs.
//
@@ -137,9 +118,10 @@ func (c *Checker) IsNewIssue(i InputIssue) (hunkPos int, isNew bool) {
// all issues are written to writer and an error is returned.
//
// File paths in reader must be relative to current working directory or absolute.
-func (c *Checker) Check(reader io.Reader, writer io.Writer) (issues []Issue, err error) {
- returnErr := c.Prepare()
- writeAll := returnErr != nil
+func (c *Checker) Check(ctx context.Context, reader io.Reader, writer io.Writer) (issues []Issue, err error) {
+ errPrepare := c.Prepare(ctx)
+
+ writeAll := errPrepare != nil
// file.go:lineNo:colNo:message
// colNo is optional, strip spaces before message
@@ -159,7 +141,7 @@ func (c *Checker) Check(reader io.Reader, writer io.Writer) (issues []Issue, err
if absPath == "" {
absPath, err = os.Getwd()
if err != nil {
- returnErr = fmt.Errorf("could not get current working directory: %w", err)
+ errPrepare = fmt.Errorf("could not get current working directory: %w", err)
}
}
@@ -227,30 +209,41 @@ func (c *Checker) Check(reader io.Reader, writer io.Writer) (issues []Issue, err
}
if err := scanner.Err(); err != nil {
- returnErr = fmt.Errorf("error reading standard input: %w", err)
+ errPrepare = fmt.Errorf("error reading standard input: %w", err)
}
- return issues, returnErr
+ return issues, errPrepare
}
-func (c *Checker) debugf(format string, s ...interface{}) {
- if c.Debug != nil {
- _, _ = fmt.Fprint(c.Debug, "DEBUG: ")
- _, _ = fmt.Fprintf(c.Debug, format+"\n", s...)
+func (c *Checker) debugf(format string, s ...any) {
+ if c.Debug == nil {
+ return
}
+
+ _, _ = fmt.Fprint(c.Debug, "DEBUG: ")
+ _, _ = fmt.Fprintf(c.Debug, format+"\n", s...)
}
-func (c *Checker) preparePatch() error {
- // Check if patch is supplied, if not, retrieve from VCS
+// loadPatch checks if patch is supplied, if not, retrieve from VCS.
+func (c *Checker) loadPatch(ctx context.Context) error {
+ if c.Patch != nil {
+ return nil
+ }
+
+ option := patchOption{
+ revisionFrom: c.RevisionFrom,
+ revisionTo: c.RevisionTo,
+ mergeBase: c.MergeBase,
+ }
+
+ var err error
+ c.Patch, c.NewFiles, err = GitPatch(ctx, option)
+ if err != nil {
+ return fmt.Errorf("could not read git repo: %w", err)
+ }
+
if c.Patch == nil {
- var err error
- c.Patch, c.NewFiles, err = GitPatch(c.RevisionFrom, c.RevisionTo)
- if err != nil {
- return fmt.Errorf("could not read git repo: %w", err)
- }
- if c.Patch == nil {
- return errors.New("no version control repository found")
- }
+ return errors.New("no version control repository found")
}
return nil
@@ -287,15 +280,19 @@ func (c *Checker) linesChanged() map[string][]pos {
// it's likey part of a file and not relevant to the patch.
continue
}
+
if err != nil {
scanErr = err
break
}
+
line := strings.TrimRight(string(lineB), "\n")
c.debugf(line)
+
s.lineNo++
s.hunkPos++
+
switch {
case strings.HasPrefix(line, "+++ ") && len(line) > 4:
if s.changes != nil {
@@ -304,6 +301,7 @@ func (c *Checker) linesChanged() map[string][]pos {
}
// 6 removes "+++ b/"
s = state{file: line[6:], hunkPos: -1, changes: []pos{}}
+
case strings.HasPrefix(line, "@@ "):
// @@ -1 +2,4 @@
// chdr ^^^^^^^^^^^^^
@@ -311,14 +309,18 @@ func (c *Checker) linesChanged() map[string][]pos {
// cstart ^
chdr := strings.Split(line, " ")
ahdr := strings.Split(chdr[2], ",")
+
// [1:] to remove leading plus
cstart, err := strconv.ParseUint(ahdr[0][1:], 10, 64)
if err != nil {
panic(err)
}
+
s.lineNo = int(cstart) - 1 // -1 as cstart is the next line number
+
case strings.HasPrefix(line, "-"):
s.lineNo--
+
case strings.HasPrefix(line, "+"):
s.changes = append(s.changes, pos{lineNo: s.lineNo, hunkPos: s.hunkPos})
}
@@ -334,150 +336,9 @@ func (c *Checker) linesChanged() map[string][]pos {
return changes
}
-// GitPatch returns a patch from a git repository.
-// If no git repository was found and no errors occurred, nil is returned,
-// else an error is returned revisionFrom and revisionTo defines the git diff parameters,
-// if left blank and there are unstaged changes or untracked files,
-// only those will be returned else only check changes since HEAD~.
-// If revisionFrom is set but revisionTo is not,
-// untracked files will be included, to exclude untracked files set revisionTo to HEAD~.
-// It's incorrect to specify revisionTo without a revisionFrom.
-func GitPatch(revisionFrom, revisionTo string) (io.Reader, []string, error) {
- // check if git repo exists
- if err := exec.Command("git", "status", "--porcelain").Run(); err != nil {
- // don't return an error, we assume the error is not repo exists
- return nil, nil, nil
- }
-
- // make a patch for untracked files
- ls, err := exec.Command("git", "ls-files", "--others", "--exclude-standard").CombinedOutput()
- if err != nil {
- return nil, nil, fmt.Errorf("error executing git ls-files: %w", err)
- }
-
- var newFiles []string
- for _, file := range bytes.Split(ls, []byte{'\n'}) {
- if len(file) == 0 || bytes.HasSuffix(file, []byte{'/'}) {
- // ls-files was sometimes showing directories when they were ignored
- // I couldn't create a test case for this as I couldn't reproduce correctly for the moment,
- // just exclude files with trailing /
- continue
- }
-
- newFiles = append(newFiles, string(file))
- }
-
- if revisionFrom != "" {
- args := []string{revisionFrom}
-
- if revisionTo != "" {
- args = append(args, revisionTo)
- }
-
- args = append(args, "--")
-
- patch, errDiff := gitDiff(args...)
- if errDiff != nil {
- return nil, nil, errDiff
- }
-
- if revisionTo == "" {
- return patch, newFiles, nil
- }
-
- return patch, nil, nil
- }
-
- // make a patch for unstaged changes
- patch, err := gitDiff("--")
- if err != nil {
- return nil, nil, err
- }
-
- unstaged := patch.Len() > 0
-
- // If there's unstaged changes OR untracked changes (or both),
- // then this is a suitable patch
- if unstaged || newFiles != nil {
- return patch, newFiles, nil
- }
-
- // check for changes in recent commit
- patch, err = gitDiff("HEAD~", "--")
- if err != nil {
- return nil, nil, err
- }
-
- return patch, nil, nil
-}
-
-func gitDiff(extraArgs ...string) (*bytes.Buffer, error) {
- cmd := exec.Command("git", "diff", "--color=never", "--no-ext-diff")
-
- if isSupportedByGit(2, 41, 0) {
- cmd.Args = append(cmd.Args, "--default-prefix")
- }
-
- cmd.Args = append(cmd.Args, "--relative")
- cmd.Args = append(cmd.Args, extraArgs...)
-
- patch := new(bytes.Buffer)
- errBuff := new(bytes.Buffer)
-
- cmd.Stdout = patch
- cmd.Stderr = errBuff
-
- if err := cmd.Run(); err != nil {
- return nil, fmt.Errorf("error executing %q: %w: %w", strings.Join(cmd.Args, " "), err, readAsError(errBuff))
- }
-
- return patch, nil
-}
-
-func readAsError(buff io.Reader) error {
- output, err := io.ReadAll(buff)
- if err != nil {
- return fmt.Errorf("read stderr: %w", err)
- }
-
- return errors.New(string(output))
-}
-
-func isSupportedByGit(major, minor, patch int) bool {
- output, err := exec.Command("git", "version").CombinedOutput()
- if err != nil {
- return false
- }
-
- parts := bytes.Split(bytes.TrimSpace(output), []byte(" "))
- if len(parts) < 3 {
- return false
- }
-
- v := string(parts[2])
- if v == "" {
- return false
- }
-
- vp := regexp.MustCompile(`^(\d+)\.(\d+)(?:\.(\d+))?.*$`).FindStringSubmatch(v)
- if len(vp) < 4 {
- return false
- }
-
- currentMajor, err := strconv.Atoi(vp[1])
- if err != nil {
- return false
- }
-
- currentMinor, err := strconv.Atoi(vp[2])
- if err != nil {
- return false
- }
-
- currentPatch, err := strconv.Atoi(vp[3])
- if err != nil {
- return false
- }
-
- return currentMajor*1_000_000_000+currentMinor*1_000_000+currentPatch*1_000 >= major*1_000_000_000+minor*1_000_000+patch*1_000
+type pos struct {
+ // Line number.
+ lineNo int
+ // Position relative to first @@ in file.
+ hunkPos int
}
diff --git a/vendor/github.com/google/go-cmp/cmp/internal/function/func.go b/vendor/github.com/google/go-cmp/cmp/internal/function/func.go
index d127d43623..def01a6be3 100644
--- a/vendor/github.com/google/go-cmp/cmp/internal/function/func.go
+++ b/vendor/github.com/google/go-cmp/cmp/internal/function/func.go
@@ -19,6 +19,7 @@ const (
tbFunc // func(T) bool
ttbFunc // func(T, T) bool
+ ttiFunc // func(T, T) int
trbFunc // func(T, R) bool
tibFunc // func(T, I) bool
trFunc // func(T) R
@@ -28,11 +29,13 @@ const (
Transformer = trFunc // func(T) R
ValueFilter = ttbFunc // func(T, T) bool
Less = ttbFunc // func(T, T) bool
+ Compare = ttiFunc // func(T, T) int
ValuePredicate = tbFunc // func(T) bool
KeyValuePredicate = trbFunc // func(T, R) bool
)
var boolType = reflect.TypeOf(true)
+var intType = reflect.TypeOf(0)
// IsType reports whether the reflect.Type is of the specified function type.
func IsType(t reflect.Type, ft funcType) bool {
@@ -49,6 +52,10 @@ func IsType(t reflect.Type, ft funcType) bool {
if ni == 2 && no == 1 && t.In(0) == t.In(1) && t.Out(0) == boolType {
return true
}
+ case ttiFunc: // func(T, T) int
+ if ni == 2 && no == 1 && t.In(0) == t.In(1) && t.Out(0) == intType {
+ return true
+ }
case trbFunc: // func(T, R) bool
if ni == 2 && no == 1 && t.Out(0) == boolType {
return true
diff --git a/vendor/github.com/google/go-cmp/cmp/options.go b/vendor/github.com/google/go-cmp/cmp/options.go
index 754496f3b3..ba3fce81ff 100644
--- a/vendor/github.com/google/go-cmp/cmp/options.go
+++ b/vendor/github.com/google/go-cmp/cmp/options.go
@@ -232,7 +232,15 @@ func (validator) apply(s *state, vx, vy reflect.Value) {
if t := s.curPath.Index(-2).Type(); t.Name() != "" {
// Named type with unexported fields.
name = fmt.Sprintf("%q.%v", t.PkgPath(), t.Name()) // e.g., "path/to/package".MyType
- if _, ok := reflect.New(t).Interface().(error); ok {
+ isProtoMessage := func(t reflect.Type) bool {
+ m, ok := reflect.PointerTo(t).MethodByName("ProtoReflect")
+ return ok && m.Type.NumIn() == 1 && m.Type.NumOut() == 1 &&
+ m.Type.Out(0).PkgPath() == "google.golang.org/protobuf/reflect/protoreflect" &&
+ m.Type.Out(0).Name() == "Message"
+ }
+ if isProtoMessage(t) {
+ help = `consider using "google.golang.org/protobuf/testing/protocmp".Transform to compare proto.Message types`
+ } else if _, ok := reflect.New(t).Interface().(error); ok {
help = "consider using cmpopts.EquateErrors to compare error values"
} else if t.Comparable() {
help = "consider using cmpopts.EquateComparable to compare comparable Go types"
diff --git a/vendor/github.com/gostaticanalysis/comment/.tagpr b/vendor/github.com/gostaticanalysis/comment/.tagpr
new file mode 100644
index 0000000000..59bf985413
--- /dev/null
+++ b/vendor/github.com/gostaticanalysis/comment/.tagpr
@@ -0,0 +1,35 @@
+# config file for the tagpr in git config format
+# The tagpr generates the initial configuration, which you can rewrite to suit your environment.
+# CONFIGURATIONS:
+# tagpr.releaseBranch
+# Generally, it is "main." It is the branch for releases. The pcpr tracks this branch,
+# creates or updates a pull request as a release candidate, or tags when they are merged.
+#
+# tagpr.versionFile
+# Versioning file containing the semantic version needed to be updated at release.
+# It will be synchronized with the "git tag".
+# Often this is a meta-information file such as gemspec, setup.cfg, package.json, etc.
+# Sometimes the source code file, such as version.go or Bar.pm, is used.
+# If you do not want to use versioning files but only git tags, specify the "-" string here.
+# You can specify multiple version files by comma separated strings.
+#
+# tagpr.vPrefix
+# Flag whether or not v-prefix is added to semver when git tagging. (e.g. v1.2.3 if true)
+# This is only a tagging convention, not how it is described in the version file.
+#
+# tagpr.changelog (Optional)
+# Flag whether or not changelog is added or changed during the release.
+#
+# tagpr.command (Optional)
+# Command to change files just before release.
+#
+# tagpr.tmplate (Optional)
+# Pull request template in go template format
+#
+# tagpr.release (Optional)
+# GitHub Release creation behavior after tagging [true, draft, false]
+# If this value is not set, the release is to be created.
+[tagpr]
+ vPrefix = true
+ releaseBranch = main
+ versionFile = version.txt
diff --git a/vendor/github.com/gostaticanalysis/comment/CHANGELOG.md b/vendor/github.com/gostaticanalysis/comment/CHANGELOG.md
new file mode 100644
index 0000000000..941cc15ff1
--- /dev/null
+++ b/vendor/github.com/gostaticanalysis/comment/CHANGELOG.md
@@ -0,0 +1,34 @@
+# Changelog
+
+## [v1.5.0](https://github.com/gostaticanalysis/comment/compare/v1.4.2...v1.5.0) - 2024-11-15
+- Add tagpr and testvet by @tenntenn in https://github.com/gostaticanalysis/comment/pull/18
+- Add IgnorePosLine and deprecate IgnoreLine by @neglect-yp in https://github.com/gostaticanalysis/comment/pull/17
+- Fix errors for testvet by @tenntenn in https://github.com/gostaticanalysis/comment/pull/20
+- Add version.txt by @tenntenn in https://github.com/gostaticanalysis/comment/pull/21
+- Update go version and dependencies by @tenntenn in https://github.com/gostaticanalysis/comment/pull/19
+
+## [v1.4.2](https://github.com/gostaticanalysis/comment/compare/v1.4.1...v1.4.2) - 2021-03-03
+- passes/commentmap: use txtar for testdata by @zchee in https://github.com/gostaticanalysis/comment/pull/14
+- github/workflows: add test GHA by @zchee in https://github.com/gostaticanalysis/comment/pull/15
+- omment: fix hasIgnoreCheck to more pares lines by @zchee in https://github.com/gostaticanalysis/comment/pull/16
+
+## [v1.4.1](https://github.com/gostaticanalysis/comment/compare/v1.4.0...v1.4.1) - 2020-09-10
+- Fix comment directive parsing in Go 1.15+ by @nmiyake in https://github.com/gostaticanalysis/comment/pull/13
+- gofmt files by @nmiyake in https://github.com/gostaticanalysis/comment/pull/12
+- Fix logic error in hasIgnoreCheck by @nmiyake in https://github.com/gostaticanalysis/comment/pull/11
+
+## [v1.4.0](https://github.com/gostaticanalysis/comment/compare/v1.3.0...v1.4.0) - 2020-08-20
+- Add CommentsByPosLine by @tenntenn in https://github.com/gostaticanalysis/comment/pull/9
+
+## [v1.3.0](https://github.com/gostaticanalysis/comment/compare/v1.2.0...v1.3.0) - 2020-01-30
+- Fix link to ast package by @po3rin in https://github.com/gostaticanalysis/comment/pull/4
+- Add IgnoreLine by @tenntenn in https://github.com/gostaticanalysis/comment/pull/5
+
+## [v1.2.0](https://github.com/gostaticanalysis/comment/compare/v1.1.0...v1.2.0) - 2019-03-18
+- Add IgnorePos by @tenntenn in https://github.com/gostaticanalysis/comment/pull/3
+
+## [v1.1.0](https://github.com/gostaticanalysis/comment/compare/v1.0.0...v1.1.0) - 2019-03-08
+- Add ignore by @tenntenn in https://github.com/gostaticanalysis/comment/pull/1
+- Fix Ignore and add tests by @tenntenn in https://github.com/gostaticanalysis/comment/pull/2
+
+## [v1.0.0](https://github.com/gostaticanalysis/comment/commits/v1.0.0) - 2019-03-08
diff --git a/vendor/github.com/gostaticanalysis/comment/comment.go b/vendor/github.com/gostaticanalysis/comment/comment.go
index 79cb093829..2e418a4669 100644
--- a/vendor/github.com/gostaticanalysis/comment/comment.go
+++ b/vendor/github.com/gostaticanalysis/comment/comment.go
@@ -52,7 +52,8 @@ func (maps Maps) Annotated(n ast.Node, annotation string) bool {
// Ignore checks either specified AST node is ignored by the check.
// It follows staticcheck style as the below.
-// //lint:ignore Check1[,Check2,...,CheckN] reason
+//
+// //lint:ignore Check1[,Check2,...,CheckN] reason
func (maps Maps) Ignore(n ast.Node, check string) bool {
for _, cg := range maps.Comments(n) {
if hasIgnoreCheck(cg, check) {
@@ -64,7 +65,8 @@ func (maps Maps) Ignore(n ast.Node, check string) bool {
// IgnorePos checks either specified postion of AST node is ignored by the check.
// It follows staticcheck style as the below.
-// //lint:ignore Check1[,Check2,...,CheckN] reason
+//
+// //lint:ignore Check1[,Check2,...,CheckN] reason
func (maps Maps) IgnorePos(pos token.Pos, check string) bool {
for _, cg := range maps.CommentsByPos(pos) {
if hasIgnoreCheck(cg, check) {
@@ -109,9 +111,11 @@ func (maps Maps) CommentsByPosLine(fset *token.FileSet, pos token.Pos) []*ast.Co
return nil
}
+// Deprecated: This function does not work with multiple files.
// IgnoreLine checks either specified lineof AST node is ignored by the check.
// It follows staticcheck style as the below.
-// //lint:ignore Check1[,Check2,...,CheckN] reason
+//
+// //lint:ignore Check1[,Check2,...,CheckN] reason
func (maps Maps) IgnoreLine(fset *token.FileSet, line int, check string) bool {
for _, cg := range maps.CommentsByLine(fset, line) {
if hasIgnoreCheck(cg, check) {
@@ -121,6 +125,19 @@ func (maps Maps) IgnoreLine(fset *token.FileSet, line int, check string) bool {
return false
}
+// IgnorePosLine checks either specified lineof AST node is ignored by the check.
+// It follows staticcheck style as the below.
+//
+// //lint:ignore Check1[,Check2,...,CheckN] reason
+func (maps Maps) IgnorePosLine(fset *token.FileSet, pos token.Pos, check string) bool {
+ for _, cg := range maps.CommentsByPosLine(fset, pos) {
+ if hasIgnoreCheck(cg, check) {
+ return true
+ }
+ }
+ return false
+}
+
// hasIgnoreCheck returns true if the provided CommentGroup starts with a comment
// of the form "//lint:ignore Check1[,Check2,...,CheckN] reason" and one of the
// checks matches the provided check.
diff --git a/vendor/github.com/gostaticanalysis/comment/version.txt b/vendor/github.com/gostaticanalysis/comment/version.txt
new file mode 100644
index 0000000000..2e7bd91085
--- /dev/null
+++ b/vendor/github.com/gostaticanalysis/comment/version.txt
@@ -0,0 +1 @@
+v1.5.0
diff --git a/vendor/github.com/gostaticanalysis/forcetypeassert/.tagpr b/vendor/github.com/gostaticanalysis/forcetypeassert/.tagpr
new file mode 100644
index 0000000000..59bf985413
--- /dev/null
+++ b/vendor/github.com/gostaticanalysis/forcetypeassert/.tagpr
@@ -0,0 +1,35 @@
+# config file for the tagpr in git config format
+# The tagpr generates the initial configuration, which you can rewrite to suit your environment.
+# CONFIGURATIONS:
+# tagpr.releaseBranch
+# Generally, it is "main." It is the branch for releases. The pcpr tracks this branch,
+# creates or updates a pull request as a release candidate, or tags when they are merged.
+#
+# tagpr.versionFile
+# Versioning file containing the semantic version needed to be updated at release.
+# It will be synchronized with the "git tag".
+# Often this is a meta-information file such as gemspec, setup.cfg, package.json, etc.
+# Sometimes the source code file, such as version.go or Bar.pm, is used.
+# If you do not want to use versioning files but only git tags, specify the "-" string here.
+# You can specify multiple version files by comma separated strings.
+#
+# tagpr.vPrefix
+# Flag whether or not v-prefix is added to semver when git tagging. (e.g. v1.2.3 if true)
+# This is only a tagging convention, not how it is described in the version file.
+#
+# tagpr.changelog (Optional)
+# Flag whether or not changelog is added or changed during the release.
+#
+# tagpr.command (Optional)
+# Command to change files just before release.
+#
+# tagpr.template (Optional)
+# Pull request template in go template format
+#
+# tagpr.release (Optional)
+# GitHub Release creation behavior after tagging [true, draft, false]
+# If this value is not set, the release is to be created.
+[tagpr]
+ vPrefix = true
+ releaseBranch = main
+ versionFile = version.txt
diff --git a/vendor/github.com/gostaticanalysis/forcetypeassert/CHANGELOG.md b/vendor/github.com/gostaticanalysis/forcetypeassert/CHANGELOG.md
new file mode 100644
index 0000000000..7575fec629
--- /dev/null
+++ b/vendor/github.com/gostaticanalysis/forcetypeassert/CHANGELOG.md
@@ -0,0 +1,19 @@
+# Changelog
+
+## [v0.2.0](https://github.com/gostaticanalysis/forcetypeassert/compare/v0.1.0...v0.2.0) - 2025-02-13
+- Update x/tools to fix panic in tests by @alexandear in https://github.com/gostaticanalysis/forcetypeassert/pull/19
+- go.mod: bump golang.org/x/tools dependency by @egonelbre in https://github.com/gostaticanalysis/forcetypeassert/pull/20
+- Add tagpr and version up Go and dependencies by @tenntenn in https://github.com/gostaticanalysis/forcetypeassert/pull/21
+- Support any by @tenntenn in https://github.com/gostaticanalysis/forcetypeassert/pull/23
+- Fix for #18 by @tenntenn in https://github.com/gostaticanalysis/forcetypeassert/pull/24
+
+## [v0.1.0](https://github.com/gostaticanalysis/forcetypeassert/commits/v0.1.0) - 2021-09-08
+- update check pattern by @knsh14 in https://github.com/gostaticanalysis/forcetypeassert/pull/1
+- Fix typo by @tenntenn in https://github.com/gostaticanalysis/forcetypeassert/pull/3
+- Add reviewdog setting by @tenntenn in https://github.com/gostaticanalysis/forcetypeassert/pull/4
+- Add an explanation on how to fix the linter errors by @ozon2 in https://github.com/gostaticanalysis/forcetypeassert/pull/9
+- Delete reviewdog.yml by @tenntenn in https://github.com/gostaticanalysis/forcetypeassert/pull/11
+- Create testandvet.yml by @tenntenn in https://github.com/gostaticanalysis/forcetypeassert/pull/10
+- Fix bug for valuespec by @tenntenn in https://github.com/gostaticanalysis/forcetypeassert/pull/12
+- Fix bugs for expressions by @tenntenn in https://github.com/gostaticanalysis/forcetypeassert/pull/13
+- Add result by @tenntenn in https://github.com/gostaticanalysis/forcetypeassert/pull/14
diff --git a/vendor/github.com/gostaticanalysis/forcetypeassert/forcetypeassert.go b/vendor/github.com/gostaticanalysis/forcetypeassert/forcetypeassert.go
index bb48485d95..e1b21825b1 100644
--- a/vendor/github.com/gostaticanalysis/forcetypeassert/forcetypeassert.go
+++ b/vendor/github.com/gostaticanalysis/forcetypeassert/forcetypeassert.go
@@ -2,6 +2,7 @@ package forcetypeassert
import (
"go/ast"
+ "go/types"
"reflect"
"golang.org/x/tools/go/analysis"
@@ -42,7 +43,9 @@ func (p *Panicable) At(i int) ast.Node {
const Doc = "forcetypeassert is finds type assertions which did forcely"
-func run(pass *analysis.Pass) (interface{}, error) {
+var anyTyp = types.Universe.Lookup("any").Type()
+
+func run(pass *analysis.Pass) (any, error) {
inspect, _ := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
result := &Panicable{m: make(map[ast.Node]bool)}
@@ -62,7 +65,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
case *ast.ValueSpec:
return checkValueSpec(pass, result, n)
case *ast.TypeAssertExpr:
- if n.Type != nil {
+ if n.Type != nil && !isAny(pass, n.Type) {
result.m[n] = true
result.nodes = append(result.nodes, n)
pass.Reportf(n.Pos(), "type assertion must be checked")
@@ -76,6 +79,10 @@ func run(pass *analysis.Pass) (interface{}, error) {
return result, nil
}
+func isAny(pass *analysis.Pass, expr ast.Expr) bool {
+ return types.Identical(pass.TypesInfo.TypeOf(expr), anyTyp)
+}
+
func checkAssignStmt(pass *analysis.Pass, result *Panicable, n *ast.AssignStmt) bool {
tae := findTypeAssertion(n.Rhs)
if tae == nil {
@@ -83,11 +90,16 @@ func checkAssignStmt(pass *analysis.Pass, result *Panicable, n *ast.AssignStmt)
}
switch {
+
+ // if the right-hand side is a call expression, the assign statement cannot capture the boolean that reports whether the type assertion succeeded
+ case len(n.Rhs) == 1 && isCallExpr(n.Rhs[0]):
+ pass.Reportf(n.Pos(), "right hand must be only type assertion")
+ return false
// if right hand has 2 or more values, assign statement can't assert boolean value which describes type assertion is succeeded
case len(n.Rhs) > 1:
pass.Reportf(n.Pos(), "right hand must be only type assertion")
return false
- case len(n.Lhs) != 2 && tae.Type != nil:
+ case len(n.Lhs) != 2 && tae.Type != nil && !isAny(pass, tae.Type):
result.m[n] = true
result.nodes = append(result.nodes, n)
pass.Reportf(n.Pos(), "type assertion must be checked")
@@ -106,11 +118,15 @@ func checkValueSpec(pass *analysis.Pass, result *Panicable, n *ast.ValueSpec) bo
}
switch {
+ // if the right-hand side is a call expression, the declaration cannot capture the boolean that reports whether the type assertion succeeded
+ case len(n.Values) == 1 && isCallExpr(n.Values[0]):
+ pass.Reportf(n.Pos(), "right hand must be only type assertion")
+ return false
// if right hand has 2 or more values, assign statement can't assert boolean value which describes type assertion is succeeded
case len(n.Values) > 1:
pass.Reportf(n.Pos(), "right hand must be only type assertion")
return false
- case len(n.Names) != 2 && tae.Type != nil:
+ case len(n.Names) != 2 && tae.Type != nil && !isAny(pass, tae.Type):
result.m[n] = true
result.nodes = append(result.nodes, n)
pass.Reportf(n.Pos(), "type assertion must be checked")
@@ -141,3 +157,8 @@ func findTypeAssertion(exprs []ast.Expr) *ast.TypeAssertExpr {
}
return nil
}
+
+func isCallExpr(expr ast.Expr) bool {
+ _, isCallExpr := expr.(*ast.CallExpr)
+ return isCallExpr
+}
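The net effect of the forcetypeassert changes on user code, sketched below (illustrative only; the diagnostic strings are the ones passed to `pass.Reportf` above):

```go
package p

func examples(v interface{}) {
	s := v.(string) // reported ("type assertion must be checked"): may panic at runtime
	_ = s

	if t, ok := v.(string); ok { // not reported: the two-value form cannot panic
		_ = t
	}

	a := v.(any) // not reported after this update: an assertion to any always succeeds
	_ = a
}
```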
diff --git a/vendor/github.com/gostaticanalysis/forcetypeassert/version.txt b/vendor/github.com/gostaticanalysis/forcetypeassert/version.txt
new file mode 100644
index 0000000000..1474d00f01
--- /dev/null
+++ b/vendor/github.com/gostaticanalysis/forcetypeassert/version.txt
@@ -0,0 +1 @@
+v0.2.0
diff --git a/vendor/github.com/hashicorp/go-cty/cty/convert/conversion_collection.go b/vendor/github.com/hashicorp/go-cty/cty/convert/conversion_collection.go
index 575973d3c3..469b4f718f 100644
--- a/vendor/github.com/hashicorp/go-cty/cty/convert/conversion_collection.go
+++ b/vendor/github.com/hashicorp/go-cty/cty/convert/conversion_collection.go
@@ -156,34 +156,45 @@ func conversionCollectionToMap(ety cty.Type, conv conversion) conversion {
// given tuple type and return a set of the given element type.
//
// Will panic if the given tupleType isn't actually a tuple type.
-func conversionTupleToSet(tupleType cty.Type, listEty cty.Type, unsafe bool) conversion {
+func conversionTupleToSet(tupleType cty.Type, setEty cty.Type, unsafe bool) conversion {
tupleEtys := tupleType.TupleElementTypes()
if len(tupleEtys) == 0 {
// Empty tuple short-circuit
return func(val cty.Value, path cty.Path) (cty.Value, error) {
- return cty.SetValEmpty(listEty), nil
+ return cty.SetValEmpty(setEty), nil
}
}
- if listEty == cty.DynamicPseudoType {
+ if setEty == cty.DynamicPseudoType {
// This is a special case where the caller wants us to find
// a suitable single type that all elements can convert to, if
// possible.
- listEty, _ = unify(tupleEtys, unsafe)
- if listEty == cty.NilType {
+ setEty, _ = unify(tupleEtys, unsafe)
+ if setEty == cty.NilType {
return nil
}
+
+ // If the set element type after unification is still the dynamic
+ // type, the only way this can result in a valid set is if all values
+ // are of dynamic type
+ if setEty == cty.DynamicPseudoType {
+ for _, tupleEty := range tupleEtys {
+ if !tupleEty.Equals(cty.DynamicPseudoType) {
+ return nil
+ }
+ }
+ }
}
elemConvs := make([]conversion, len(tupleEtys))
for i, tupleEty := range tupleEtys {
- if tupleEty.Equals(listEty) {
+ if tupleEty.Equals(setEty) {
// no conversion required
continue
}
- elemConvs[i] = getConversion(tupleEty, listEty, unsafe)
+ elemConvs[i] = getConversion(tupleEty, setEty, unsafe)
if elemConvs[i] == nil {
// If any of our element conversions are impossible, then the our
// whole conversion is impossible.
@@ -244,6 +255,17 @@ func conversionTupleToList(tupleType cty.Type, listEty cty.Type, unsafe bool) co
if listEty == cty.NilType {
return nil
}
+
+ // If the list element type after unification is still the dynamic
+ // type, the only way this can result in a valid list is if all values
+ // are of dynamic type
+ if listEty == cty.DynamicPseudoType {
+ for _, tupleEty := range tupleEtys {
+ if !tupleEty.Equals(cty.DynamicPseudoType) {
+ return nil
+ }
+ }
+ }
}
elemConvs := make([]conversion, len(tupleEtys))
@@ -265,6 +287,7 @@ func conversionTupleToList(tupleType cty.Type, listEty cty.Type, unsafe bool) co
// element conversions in elemConvs
return func(val cty.Value, path cty.Path) (cty.Value, error) {
elems := make([]cty.Value, 0, len(elemConvs))
+ elemTys := make([]cty.Type, 0, len(elems))
elemPath := append(path.Copy(), nil)
i := int64(0)
it := val.ElementIterator()
@@ -284,10 +307,15 @@ func conversionTupleToList(tupleType cty.Type, listEty cty.Type, unsafe bool) co
}
}
elems = append(elems, val)
+ elemTys = append(elemTys, val.Type())
i++
}
+ elems, err := conversionUnifyListElements(elems, elemPath, unsafe)
+ if err != nil {
+ return cty.NilVal, err
+ }
return cty.ListVal(elems), nil
}
}
@@ -441,6 +469,7 @@ func conversionUnifyCollectionElements(elems map[string]cty.Value, path cty.Path
}
unifiedType, _ := unify(elemTypes, unsafe)
if unifiedType == cty.NilType {
+ return nil, path.NewErrorf("collection elements cannot be unified")
}
unifiedElems := make(map[string]cty.Value)
@@ -486,3 +515,37 @@ func conversionCheckMapElementTypes(elems map[string]cty.Value, path cty.Path) e
return nil
}
+
+func conversionUnifyListElements(elems []cty.Value, path cty.Path, unsafe bool) ([]cty.Value, error) {
+ elemTypes := make([]cty.Type, len(elems))
+ for i, elem := range elems {
+ elemTypes[i] = elem.Type()
+ }
+ unifiedType, _ := unify(elemTypes, unsafe)
+ if unifiedType == cty.NilType {
+ return nil, path.NewErrorf("collection elements cannot be unified")
+ }
+
+ ret := make([]cty.Value, len(elems))
+ elemPath := append(path.Copy(), nil)
+
+ for i, elem := range elems {
+ if elem.Type().Equals(unifiedType) {
+ ret[i] = elem
+ continue
+ }
+ conv := getConversion(elem.Type(), unifiedType, unsafe)
+ if conv == nil {
+ return nil, path.NewErrorf("collection elements cannot be unified")
+ }
+ elemPath[len(elemPath)-1] = cty.IndexStep{
+ Key: cty.NumberIntVal(int64(i)),
+ }
+ val, err := conv(elem, elemPath)
+ if err != nil {
+ return nil, err
+ }
+ ret[i] = val
+ }
+
+ return ret, nil
+}
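The behaviour targeted by the unification changes can be seen with a small conversion (a sketch that imports the vendored `cty` and `convert` packages directly; the exact converted value is not asserted here):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/go-cty/cty"
	"github.com/hashicorp/go-cty/cty/convert"
)

func main() {
	// A tuple whose elements have different primitive types.
	tup := cty.TupleVal([]cty.Value{
		cty.StringVal("a"),
		cty.NumberIntVal(2),
	})

	// Converting to list(dynamic) asks the package to unify the element
	// types; with the hunks above the unification result is checked and the
	// elements are converted to the unified type before the list is built.
	got, err := convert.Convert(tup, cty.List(cty.DynamicPseudoType))
	fmt.Println(got, err)
}
```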
diff --git a/vendor/github.com/hashicorp/go-cty/cty/path_set.go b/vendor/github.com/hashicorp/go-cty/cty/path_set.go
index 977523de57..599b1a4836 100644
--- a/vendor/github.com/hashicorp/go-cty/cty/path_set.go
+++ b/vendor/github.com/hashicorp/go-cty/cty/path_set.go
@@ -196,3 +196,9 @@ func (r pathSetRules) Equivalent(a, b interface{}) bool {
return true
}
+
+// SameRules is true if both Rules instances are pathSetRules structs.
+func (r pathSetRules) SameRules(other set.Rules) bool {
+ _, ok := other.(pathSetRules)
+ return ok
+}
diff --git a/vendor/github.com/hashicorp/go-cty/cty/set/rules.go b/vendor/github.com/hashicorp/go-cty/cty/set/rules.go
index 51f744b5e9..03ecd25b97 100644
--- a/vendor/github.com/hashicorp/go-cty/cty/set/rules.go
+++ b/vendor/github.com/hashicorp/go-cty/cty/set/rules.go
@@ -22,6 +22,10 @@ type Rules interface {
// though it is *not* required that two values with the same hash value
// be equivalent.
Equivalent(interface{}, interface{}) bool
+
+ // SameRules returns true if the instance is equivalent to another Rules
+ // instance.
+ SameRules(Rules) bool
}
// OrderedRules is an extension of Rules that can apply a partial order to
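Any custom `Rules` implementation now has to provide `SameRules` as well; a toy implementation outside go-cty might look like this (hypothetical type, shown only to illustrate the extended interface):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/go-cty/cty/set"
)

// stringRules is a hypothetical Rules implementation for plain strings.
type stringRules struct{}

func (stringRules) Hash(v interface{}) int {
	s, _ := v.(string)
	h := 0
	for _, r := range s {
		h = h*31 + int(r)
	}
	return h
}

func (stringRules) Equivalent(a, b interface{}) bool { return a == b }

// SameRules satisfies the new interface method: every stringRules value
// behaves identically, so a concrete type check is sufficient.
func (stringRules) SameRules(other set.Rules) bool {
	_, ok := other.(stringRules)
	return ok
}

func main() {
	s1 := set.NewSet(stringRules{})
	s2 := set.NewSet(stringRules{})
	// HasRules now compares via SameRules rather than interface equality.
	fmt.Println(s1.HasRules(s2.Rules()))
}
```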
diff --git a/vendor/github.com/hashicorp/go-cty/cty/set/set.go b/vendor/github.com/hashicorp/go-cty/cty/set/set.go
index b4fb316f1c..15a76638f5 100644
--- a/vendor/github.com/hashicorp/go-cty/cty/set/set.go
+++ b/vendor/github.com/hashicorp/go-cty/cty/set/set.go
@@ -41,7 +41,7 @@ func NewSetFromSlice(rules Rules, vals []interface{}) Set {
}
func sameRules(s1 Set, s2 Set) bool {
- return s1.rules == s2.rules
+ return s1.rules.SameRules(s2.rules)
}
func mustHaveSameRules(s1 Set, s2 Set) {
@@ -53,7 +53,7 @@ func mustHaveSameRules(s1 Set, s2 Set) {
// HasRules returns true if and only if the receiving set has the given rules
// instance as its rules.
func (s Set) HasRules(rules Rules) bool {
- return s.rules == rules
+ return s.rules.SameRules(rules)
}
// Rules returns the receiving set's rules instance.
diff --git a/vendor/github.com/hashicorp/go-cty/cty/set_internals.go b/vendor/github.com/hashicorp/go-cty/cty/set_internals.go
index 4080198097..0b98a0b2d7 100644
--- a/vendor/github.com/hashicorp/go-cty/cty/set_internals.go
+++ b/vendor/github.com/hashicorp/go-cty/cty/set_internals.go
@@ -65,6 +65,17 @@ func (r setRules) Equivalent(v1 interface{}, v2 interface{}) bool {
return eqv.v == true
}
+// SameRules is only true if the other Rules instance is also a setRules struct,
+// and the types are considered equal.
+func (r setRules) SameRules(other set.Rules) bool {
+ rules, ok := other.(setRules)
+ if !ok {
+ return false
+ }
+
+ return r.Type.Equals(rules.Type)
+}
+
// Less is an implementation of set.OrderedRules so that we can iterate over
// set elements in a consistent order, where such an order is possible.
func (r setRules) Less(v1, v2 interface{}) bool {
diff --git a/vendor/github.com/hashicorp/go-cty/cty/type.go b/vendor/github.com/hashicorp/go-cty/cty/type.go
index 730cb9862e..5f7813e832 100644
--- a/vendor/github.com/hashicorp/go-cty/cty/type.go
+++ b/vendor/github.com/hashicorp/go-cty/cty/type.go
@@ -87,7 +87,7 @@ func (t Type) HasDynamicTypes() bool {
case t.IsPrimitiveType():
return false
case t.IsCollectionType():
- return false
+ return t.ElementType().HasDynamicTypes()
case t.IsObjectType():
attrTypes := t.AttributeTypes()
for _, at := range attrTypes {
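A quick check of the new recursive behaviour (outputs follow the updated switch case above):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/go-cty/cty"
)

func main() {
	// Previously a collection type always reported false here; the element
	// type is now inspected recursively.
	fmt.Println(cty.List(cty.DynamicPseudoType).HasDynamicTypes())          // true
	fmt.Println(cty.Map(cty.List(cty.DynamicPseudoType)).HasDynamicTypes()) // true
	fmt.Println(cty.Set(cty.String).HasDynamicTypes())                      // false
}
```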
diff --git a/vendor/github.com/hashicorp/go-cty/cty/value.go b/vendor/github.com/hashicorp/go-cty/cty/value.go
index 1025ba82eb..f6a25ddef9 100644
--- a/vendor/github.com/hashicorp/go-cty/cty/value.go
+++ b/vendor/github.com/hashicorp/go-cty/cty/value.go
@@ -106,3 +106,37 @@ func (val Value) IsWhollyKnown() bool {
return true
}
}
+
+// HasWhollyKnownType checks if the value is dynamic, or contains any nested
+// DynamicVal. This implies that both the value is not known, and the final
+// type may change.
+func (val Value) HasWhollyKnownType() bool {
+ // a null dynamic type is known
+ if val.IsNull() {
+ return true
+ }
+
+ // an unknown DynamicPseudoType is a DynamicVal, but we don't want to
+ // check that value for equality here, since this method is used within the
+ // equality check.
+ if !val.IsKnown() && val.ty == DynamicPseudoType {
+ return false
+ }
+
+ if val.CanIterateElements() {
+ // if the value is not known, then we can look directly at the internal
+ // types
+ if !val.IsKnown() {
+ return !val.ty.HasDynamicTypes()
+ }
+
+ for it := val.ElementIterator(); it.Next(); {
+ _, ev := it.Element()
+ if !ev.HasWhollyKnownType() {
+ return false
+ }
+ }
+ }
+
+ return true
+}
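A short illustration of the new helper (a sketch; only the broad true/false behaviour described in the doc comment is assumed):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/go-cty/cty"
)

func main() {
	known := cty.ListVal([]cty.Value{cty.StringVal("a")})
	nested := cty.TupleVal([]cty.Value{cty.StringVal("a"), cty.DynamicVal})

	fmt.Println(known.HasWhollyKnownType())  // true: no nested DynamicVal
	fmt.Println(nested.HasWhollyKnownType()) // false: one element is DynamicVal
	// Null values of dynamic type are treated as known.
	fmt.Println(cty.NullVal(cty.DynamicPseudoType).HasWhollyKnownType()) // true
}
```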
diff --git a/vendor/github.com/hashicorp/go-cty/cty/value_ops.go b/vendor/github.com/hashicorp/go-cty/cty/value_ops.go
index 69e5a8abbf..804db6cb69 100644
--- a/vendor/github.com/hashicorp/go-cty/cty/value_ops.go
+++ b/vendor/github.com/hashicorp/go-cty/cty/value_ops.go
@@ -133,9 +133,9 @@ func (val Value) Equals(other Value) Value {
case val.IsKnown() && !other.IsKnown():
switch {
case val.IsNull(), other.ty.HasDynamicTypes():
- // If known is Null, we need to wait for the unkown value since
+ // If known is Null, we need to wait for the unknown value since
// nulls of any type are equal.
- // An unkown with a dynamic type compares as unknown, which we need
+ // An unknown with a dynamic type compares as unknown, which we need
// to check before the type comparison below.
return UnknownVal(Bool)
case !val.ty.Equals(other.ty):
@@ -148,9 +148,9 @@ func (val Value) Equals(other Value) Value {
case other.IsKnown() && !val.IsKnown():
switch {
case other.IsNull(), val.ty.HasDynamicTypes():
- // If known is Null, we need to wait for the unkown value since
+ // If known is Null, we need to wait for the unknown value since
// nulls of any type are equal.
- // An unkown with a dynamic type compares as unknown, which we need
+ // An unknown with a dynamic type compares as unknown, which we need
// to check before the type comparison below.
return UnknownVal(Bool)
case !other.ty.Equals(val.ty):
@@ -171,7 +171,15 @@ func (val Value) Equals(other Value) Value {
return BoolVal(false)
}
- if val.ty.HasDynamicTypes() || other.ty.HasDynamicTypes() {
+ // Check if there are any nested dynamic values making this comparison
+ // unknown.
+ if !val.HasWhollyKnownType() || !other.HasWhollyKnownType() {
+ // Even if we have dynamic values, we can still determine inequality if
+ // there is no way the types could later conform.
+ if val.ty.TestConformance(other.ty) != nil && other.ty.TestConformance(val.ty) != nil {
+ return BoolVal(false)
+ }
+
return UnknownVal(Bool)
}
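The intent of the `Equals` change, sketched with object values (the expected results follow the comment in the hunk: inequality can be decided when the two types can never conform):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/go-cty/cty"
)

func main() {
	a := cty.ObjectVal(map[string]cty.Value{"x": cty.DynamicVal})
	b := cty.ObjectVal(map[string]cty.Value{"y": cty.DynamicVal})
	c := cty.ObjectVal(map[string]cty.Value{"x": cty.DynamicVal})

	// a and b carry nested dynamic values, but their object types can never
	// conform to each other, so the comparison can now resolve to False.
	fmt.Println(a.Equals(b))

	// a and c could still turn out equal once the dynamic values are known,
	// so the comparison stays unknown.
	fmt.Println(a.Equals(c))
}
```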
diff --git a/vendor/github.com/hashicorp/go-hclog/README.md b/vendor/github.com/hashicorp/go-hclog/README.md
index 21a17c5af3..983d44c7db 100644
--- a/vendor/github.com/hashicorp/go-hclog/README.md
+++ b/vendor/github.com/hashicorp/go-hclog/README.md
@@ -140,9 +140,10 @@ log.Printf("[DEBUG] %d", 42)
... [DEBUG] my-app: 42
```
-Notice that if `appLogger` is initialized with the `INFO` log level _and_ you
+Notice that if `appLogger` is initialized with the `INFO` log level, _and_ you
specify `InferLevels: true`, you will not see any output here. You must change
`appLogger` to `DEBUG` to see output. See the docs for more information.
If the log lines start with a timestamp you can use the
-`InferLevelsWithTimestamp` option to try and ignore them.
+`InferLevelsWithTimestamp` option to try and ignore them. Please note that in order
+for `InferLevelsWithTimestamp` to be relevant, `InferLevels` must be set to `true`.
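A minimal sketch of the documented interaction (assuming nothing beyond the options shown in this README and in logger.go below):

```go
package main

import (
	"os"

	"github.com/hashicorp/go-hclog"
)

func main() {
	appLogger := hclog.New(&hclog.LoggerOptions{
		Name:   "my-app",
		Level:  hclog.Debug, // must be DEBUG (or lower) for the line below to appear
		Output: os.Stderr,
	})

	// InferLevelsWithTimestamp only has an effect when InferLevels is true.
	stdLogger := appLogger.StandardLogger(&hclog.StandardLoggerOptions{
		InferLevels:              true,
		InferLevelsWithTimestamp: true,
	})

	// The leading timestamp is stripped and "[DEBUG]" selects the level.
	stdLogger.Printf("2024/01/02 15:04:05 [DEBUG] %d", 42)
}
```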
diff --git a/vendor/github.com/hashicorp/go-hclog/intlogger.go b/vendor/github.com/hashicorp/go-hclog/intlogger.go
index b45064acf1..272a710c04 100644
--- a/vendor/github.com/hashicorp/go-hclog/intlogger.go
+++ b/vendor/github.com/hashicorp/go-hclog/intlogger.go
@@ -55,23 +55,38 @@ var (
faintBoldColor = color.New(color.Faint, color.Bold)
faintColor = color.New(color.Faint)
- faintMultiLinePrefix = faintColor.Sprint(" | ")
- faintFieldSeparator = faintColor.Sprint("=")
- faintFieldSeparatorWithNewLine = faintColor.Sprint("=\n")
+ faintMultiLinePrefix string
+ faintFieldSeparator string
+ faintFieldSeparatorWithNewLine string
)
+func init() {
+ // Force all the colors to enabled because we do our own detection of color usage.
+ for _, c := range _levelToColor {
+ c.EnableColor()
+ }
+
+ faintBoldColor.EnableColor()
+ faintColor.EnableColor()
+
+ faintMultiLinePrefix = faintColor.Sprint(" | ")
+ faintFieldSeparator = faintColor.Sprint("=")
+ faintFieldSeparatorWithNewLine = faintColor.Sprint("=\n")
+}
+
// Make sure that intLogger is a Logger
var _ Logger = &intLogger{}
// intLogger is an internal logger implementation. Internal in that it is
// defined entirely by this package.
type intLogger struct {
- json bool
- callerOffset int
- name string
- timeFormat string
- timeFn TimeFunction
- disableTime bool
+ json bool
+ jsonEscapeEnabled bool
+ callerOffset int
+ name string
+ timeFormat string
+ timeFn TimeFunction
+ disableTime bool
// This is an interface so that it's shared by any derived loggers, since
// those derived loggers share the bufio.Writer as well.
@@ -79,6 +94,19 @@ type intLogger struct {
writer *writer
level *int32
+ // The value of curEpoch when our level was set
+ setEpoch uint64
+
+ // The value of curEpoch the last time we performed the level sync process
+ ownEpoch uint64
+
+ // Shared amongst all the loggers created in this hierarchy, used to determine
+ // if the level sync process should be run by comparing it with ownEpoch
+ curEpoch *uint64
+
+ // The logger this one was created from. Only set when syncParentLevel is set
+ parent *intLogger
+
headerColor ColorOption
fieldColor ColorOption
@@ -88,6 +116,7 @@ type intLogger struct {
// create subloggers with their own level setting
independentLevels bool
+ syncParentLevel bool
subloggerHook func(sub Logger) Logger
}
@@ -129,9 +158,9 @@ func newLogger(opts *LoggerOptions) *intLogger {
}
var (
- primaryColor ColorOption = ColorOff
- headerColor ColorOption = ColorOff
- fieldColor ColorOption = ColorOff
+ primaryColor = ColorOff
+ headerColor = ColorOff
+ fieldColor = ColorOff
)
switch {
case opts.ColorHeaderOnly:
@@ -145,6 +174,7 @@ func newLogger(opts *LoggerOptions) *intLogger {
l := &intLogger{
json: opts.JSONFormat,
+ jsonEscapeEnabled: !opts.JSONEscapeDisabled,
name: opts.Name,
timeFormat: TimeFormat,
timeFn: time.Now,
@@ -152,8 +182,10 @@ func newLogger(opts *LoggerOptions) *intLogger {
mutex: mutex,
writer: newWriter(output, primaryColor),
level: new(int32),
+ curEpoch: new(uint64),
exclude: opts.Exclude,
independentLevels: opts.IndependentLevels,
+ syncParentLevel: opts.SyncParentLevel,
headerColor: headerColor,
fieldColor: fieldColor,
subloggerHook: opts.SubloggerHook,
@@ -194,7 +226,7 @@ const offsetIntLogger = 3
// Log a message and a set of key/value pairs if the given level is at
// or more severe that the threshold configured in the Logger.
func (l *intLogger) log(name string, level Level, msg string, args ...interface{}) {
- if level < Level(atomic.LoadInt32(l.level)) {
+ if level < l.GetLevel() {
return
}
@@ -597,7 +629,7 @@ func (l *intLogger) logJSON(t time.Time, name string, level Level, msg string, a
vals := l.jsonMapEntry(t, name, level, msg)
args = append(l.implied, args...)
- if args != nil && len(args) > 0 {
+ if len(args) > 0 {
if len(args)%2 != 0 {
cs, ok := args[len(args)-1].(CapturedStacktrace)
if ok {
@@ -637,13 +669,17 @@ func (l *intLogger) logJSON(t time.Time, name string, level Level, msg string, a
}
}
- err := json.NewEncoder(l.writer).Encode(vals)
+ encoder := json.NewEncoder(l.writer)
+ encoder.SetEscapeHTML(l.jsonEscapeEnabled)
+ err := encoder.Encode(vals)
if err != nil {
if _, ok := err.(*json.UnsupportedTypeError); ok {
plainVal := l.jsonMapEntry(t, name, level, msg)
plainVal["@warn"] = errJsonUnsupportedTypeMsg
- json.NewEncoder(l.writer).Encode(plainVal)
+ errEncoder := json.NewEncoder(l.writer)
+ errEncoder.SetEscapeHTML(l.jsonEscapeEnabled)
+ errEncoder.Encode(plainVal)
}
}
}
@@ -718,27 +754,27 @@ func (l *intLogger) Error(msg string, args ...interface{}) {
// Indicate that the logger would emit TRACE level logs
func (l *intLogger) IsTrace() bool {
- return Level(atomic.LoadInt32(l.level)) == Trace
+ return l.GetLevel() == Trace
}
// Indicate that the logger would emit DEBUG level logs
func (l *intLogger) IsDebug() bool {
- return Level(atomic.LoadInt32(l.level)) <= Debug
+ return l.GetLevel() <= Debug
}
// Indicate that the logger would emit INFO level logs
func (l *intLogger) IsInfo() bool {
- return Level(atomic.LoadInt32(l.level)) <= Info
+ return l.GetLevel() <= Info
}
// Indicate that the logger would emit WARN level logs
func (l *intLogger) IsWarn() bool {
- return Level(atomic.LoadInt32(l.level)) <= Warn
+ return l.GetLevel() <= Warn
}
// Indicate that the logger would emit ERROR level logs
func (l *intLogger) IsError() bool {
- return Level(atomic.LoadInt32(l.level)) <= Error
+ return l.GetLevel() <= Error
}
const MissingKey = "EXTRA_VALUE_AT_END"
@@ -854,12 +890,63 @@ func (l *intLogger) resetOutput(opts *LoggerOptions) error {
// Update the logging level on-the-fly. This will affect all subloggers as
// well.
func (l *intLogger) SetLevel(level Level) {
- atomic.StoreInt32(l.level, int32(level))
+ if !l.syncParentLevel {
+ atomic.StoreInt32(l.level, int32(level))
+ return
+ }
+
+ nsl := new(int32)
+ *nsl = int32(level)
+
+ l.level = nsl
+
+ l.ownEpoch = atomic.AddUint64(l.curEpoch, 1)
+ l.setEpoch = l.ownEpoch
+}
+
+func (l *intLogger) searchLevelPtr() *int32 {
+ p := l.parent
+
+ ptr := l.level
+
+ max := l.setEpoch
+
+ for p != nil {
+ if p.setEpoch > max {
+ max = p.setEpoch
+ ptr = p.level
+ }
+
+ p = p.parent
+ }
+
+ return ptr
}
// Returns the current level
func (l *intLogger) GetLevel() Level {
- return Level(atomic.LoadInt32(l.level))
+ // We perform the loads immediately to keep the CPU pipeline busy, which
+ // effectively makes the second load cost nothing. Once loaded into registers
+ // the comparison returns the already loaded value. The comparison is almost
+ // always true, so the branch predictor should hit consistently with it.
+ var (
+ curEpoch = atomic.LoadUint64(l.curEpoch)
+ level = Level(atomic.LoadInt32(l.level))
+ own = l.ownEpoch
+ )
+
+ if curEpoch == own {
+ return level
+ }
+
+ // Perform the level sync process. We'll avoid doing this next time by seeing the
+ // epoch as current.
+
+ ptr := l.searchLevelPtr()
+ l.level = ptr
+ l.ownEpoch = curEpoch
+
+ return Level(atomic.LoadInt32(ptr))
}
// Create a *log.Logger that will send it's data through this Logger. This
@@ -912,6 +999,8 @@ func (l *intLogger) copy() *intLogger {
if l.independentLevels {
sl.level = new(int32)
*sl.level = *l.level
+ } else if l.syncParentLevel {
+ sl.parent = l
}
return &sl
diff --git a/vendor/github.com/hashicorp/go-hclog/logger.go b/vendor/github.com/hashicorp/go-hclog/logger.go
index 947ac0c9af..ad17544f55 100644
--- a/vendor/github.com/hashicorp/go-hclog/logger.go
+++ b/vendor/github.com/hashicorp/go-hclog/logger.go
@@ -233,6 +233,7 @@ type StandardLoggerOptions struct {
// [DEBUG] and strip it off before reapplying it.
// The timestamp detection may result in false positives and incomplete
// string outputs.
+ // InferLevelsWithTimestamp is only relevant if InferLevels is true.
InferLevelsWithTimestamp bool
// ForceLevel is used to force all output from the standard logger to be at
@@ -263,6 +264,9 @@ type LoggerOptions struct {
// Control if the output should be in JSON.
JSONFormat bool
+ // Control the escape switch of json.Encoder
+ JSONEscapeDisabled bool
+
// Include file and line information in each log line
IncludeLocation bool
@@ -303,6 +307,24 @@ type LoggerOptions struct {
// will not affect the parent or sibling loggers.
IndependentLevels bool
+ // When set, changing the level of a logger effects only it's direct sub-loggers
+ // rather than all sub-loggers. For example:
+ // a := logger.Named("a")
+ // a.SetLevel(Error)
+ // b := a.Named("b")
+ // c := a.Named("c")
+ // b.GetLevel() => Error
+ // c.GetLevel() => Error
+ // b.SetLevel(Info)
+ // a.GetLevel() => Error
+ // b.GetLevel() => Info
+ // c.GetLevel() => Error
+ // a.SetLevel(Warn)
+ // a.GetLevel() => Warn
+ // b.GetLevel() => Warn
+ // c.GetLevel() => Warn
+ SyncParentLevel bool
+
// SubloggerHook registers a function that is called when a sublogger via
// Named, With, or ResetNamed is created. If defined, the function is passed
// the newly created Logger and the returned Logger is returned from the
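Both new logger options in one sketch (based only on the option docs above; output formatting is not asserted):

```go
package main

import (
	"os"

	"github.com/hashicorp/go-hclog"
)

func main() {
	// JSONEscapeDisabled keeps characters such as <, > and & literal in JSON
	// output rather than applying the encoder's default HTML escaping.
	jsonLogger := hclog.New(&hclog.LoggerOptions{
		Name:               "api",
		JSONFormat:         true,
		JSONEscapeDisabled: true,
		Output:             os.Stderr,
	})
	jsonLogger.Info("request", "query", "a=1&b=<2>")

	// With SyncParentLevel, a later SetLevel on the parent propagates to
	// sub-loggers that have not set their own level since.
	parent := hclog.New(&hclog.LoggerOptions{
		Name:            "a",
		Level:           hclog.Info,
		SyncParentLevel: true,
	})
	child := parent.Named("b")
	parent.SetLevel(hclog.Error)
	_ = child.IsError() // true: the child now follows the parent's Error level
}
```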
diff --git a/vendor/github.com/hashicorp/go-immutable-radix/v2/.gitignore b/vendor/github.com/hashicorp/go-immutable-radix/v2/.gitignore
new file mode 100644
index 0000000000..daf913b1b3
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-immutable-radix/v2/.gitignore
@@ -0,0 +1,24 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
+*.prof
diff --git a/vendor/github.com/hashicorp/go-immutable-radix/v2/CHANGELOG.md b/vendor/github.com/hashicorp/go-immutable-radix/v2/CHANGELOG.md
new file mode 100644
index 0000000000..556f1a67b1
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-immutable-radix/v2/CHANGELOG.md
@@ -0,0 +1,27 @@
+# UNRELEASED
+
+# 2.0.0 (December 15th, 2022)
+
+* Update API to use generics [[GH-43](https://github.com/hashicorp/go-immutable-radix/pull/43)]
+
+# 1.3.0 (September 17th, 2020)
+
+FEATURES
+
+* Add reverse tree traversal [[GH-30](https://github.com/hashicorp/go-immutable-radix/pull/30)]
+
+# 1.2.0 (March 18th, 2020)
+
+FEATURES
+
+* Adds a `Clone` method to `Txn` allowing transactions to be split into two independently mutable trees. [[GH-26](https://github.com/hashicorp/go-immutable-radix/pull/26)]
+
+# 1.1.0 (May 22nd, 2019)
+
+FEATURES
+
+* Add `SeekLowerBound` to allow for range scans. [[GH-24](https://github.com/hashicorp/go-immutable-radix/pull/24)]
+
+# 1.0.0 (August 30th, 2018)
+
+* go mod adopted
diff --git a/vendor/github.com/hashicorp/go-immutable-radix/v2/LICENSE b/vendor/github.com/hashicorp/go-immutable-radix/v2/LICENSE
new file mode 100644
index 0000000000..f4f97ee585
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-immutable-radix/v2/LICENSE
@@ -0,0 +1,365 @@
+Copyright (c) 2015 HashiCorp, Inc.
+
+Mozilla Public License, version 2.0
+
+1. Definitions
+
+1.1. "Contributor"
+
+ means each individual or legal entity that creates, contributes to the
+ creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+
+ means the combination of the Contributions of others (if any) used by a
+ Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+
+ means Source Code Form to which the initial Contributor has attached the
+ notice in Exhibit A, the Executable Form of such Source Code Form, and
+ Modifications of such Source Code Form, in each case including portions
+ thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ a. that the initial Contributor has attached the notice described in
+ Exhibit B to the Covered Software; or
+
+ b. that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the terms of
+ a Secondary License.
+
+1.6. "Executable Form"
+
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+
+ means a work that combines Covered Software with other material, in a
+ separate file or files, that is not Covered Software.
+
+1.8. "License"
+
+ means this document.
+
+1.9. "Licensable"
+
+ means having the right to grant, to the maximum extent possible, whether
+ at the time of the initial grant or subsequently, any and all of the
+ rights conveyed by this License.
+
+1.10. "Modifications"
+
+ means any of the following:
+
+ a. any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered Software; or
+
+ b. any new file in Source Code Form that contains any Covered Software.
+
+1.11. "Patent Claims" of a Contributor
+
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the License,
+ by the making, using, selling, offering for sale, having made, import,
+ or transfer of either its Contributions or its Contributor Version.
+
+1.12. "Secondary License"
+
+ means either the GNU General Public License, Version 2.0, the GNU Lesser
+ General Public License, Version 2.1, the GNU Affero General Public
+ License, Version 3.0, or any later versions of those licenses.
+
+1.13. "Source Code Form"
+
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that controls, is
+ controlled by, or is under common control with You. For purposes of this
+ definition, "control" means (a) the power, direct or indirect, to cause
+ the direction or management of such entity, whether by contract or
+ otherwise, or (b) ownership of more than fifty percent (50%) of the
+ outstanding shares or beneficial ownership of such entity.
+
+
+2. License Grants and Conditions
+
+2.1. Grants
+
+ Each Contributor hereby grants You a world-wide, royalty-free,
+ non-exclusive license:
+
+ a. under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+ b. under Patent Claims of such Contributor to make, use, sell, offer for
+ sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+ The licenses granted in Section 2.1 with respect to any Contribution
+ become effective for each Contribution on the date the Contributor first
+ distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+ The licenses granted in this Section 2 are the only rights granted under
+ this License. No additional rights or licenses will be implied from the
+ distribution or licensing of Covered Software under this License.
+ Notwithstanding Section 2.1(b) above, no patent license is granted by a
+ Contributor:
+
+ a. for any code that a Contributor has removed from Covered Software; or
+
+ b. for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+ c. under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+ This License does not grant any rights in the trademarks, service marks,
+ or logos of any Contributor (except as may be necessary to comply with
+ the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+ No Contributor makes additional grants as a result of Your choice to
+ distribute the Covered Software under a subsequent version of this
+ License (see Section 10.2) or under the terms of a Secondary License (if
+ permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+ Each Contributor represents that the Contributor believes its
+ Contributions are its original creation(s) or it has sufficient rights to
+ grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+ This License is not intended to limit any rights You have under
+ applicable copyright doctrines of fair use, fair dealing, or other
+ equivalents.
+
+2.7. Conditions
+
+ Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
+ Section 2.1.
+
+
+3. Responsibilities
+
+3.1. Distribution of Source Form
+
+ All distribution of Covered Software in Source Code Form, including any
+ Modifications that You create or to which You contribute, must be under
+ the terms of this License. You must inform recipients that the Source
+ Code Form of the Covered Software is governed by the terms of this
+ License, and how they can obtain a copy of this License. You may not
+ attempt to alter or restrict the recipients' rights in the Source Code
+ Form.
+
+3.2. Distribution of Executable Form
+
+ If You distribute Covered Software in Executable Form then:
+
+ a. such Covered Software must also be made available in Source Code Form,
+ as described in Section 3.1, and You must inform recipients of the
+ Executable Form how they can obtain a copy of such Source Code Form by
+ reasonable means in a timely manner, at a charge no more than the cost
+ of distribution to the recipient; and
+
+ b. You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter the
+ recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+ You may create and distribute a Larger Work under terms of Your choice,
+ provided that You also comply with the requirements of this License for
+ the Covered Software. If the Larger Work is a combination of Covered
+ Software with a work governed by one or more Secondary Licenses, and the
+ Covered Software is not Incompatible With Secondary Licenses, this
+ License permits You to additionally distribute such Covered Software
+ under the terms of such Secondary License(s), so that the recipient of
+ the Larger Work may, at their option, further distribute the Covered
+ Software under the terms of either this License or such Secondary
+ License(s).
+
+3.4. Notices
+
+ You may not remove or alter the substance of any license notices
+ (including copyright notices, patent notices, disclaimers of warranty, or
+ limitations of liability) contained within the Source Code Form of the
+ Covered Software, except that You may alter any license notices to the
+ extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+ You may choose to offer, and to charge a fee for, warranty, support,
+ indemnity or liability obligations to one or more recipients of Covered
+ Software. However, You may do so only on Your own behalf, and not on
+ behalf of any Contributor. You must make it absolutely clear that any
+ such warranty, support, indemnity, or liability obligation is offered by
+ You alone, and You hereby agree to indemnify every Contributor for any
+ liability incurred by such Contributor as a result of warranty, support,
+ indemnity or liability terms You offer. You may include additional
+ disclaimers of warranty and limitations of liability specific to any
+ jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+
+ If it is impossible for You to comply with any of the terms of this License
+ with respect to some or all of the Covered Software due to statute,
+ judicial order, or regulation then You must: (a) comply with the terms of
+ this License to the maximum extent possible; and (b) describe the
+ limitations and the code they affect. Such description must be placed in a
+ text file included with all distributions of the Covered Software under
+ this License. Except to the extent prohibited by statute or regulation,
+ such description must be sufficiently detailed for a recipient of ordinary
+ skill to be able to understand it.
+
+5. Termination
+
+5.1. The rights granted under this License will terminate automatically if You
+ fail to comply with any of its terms. However, if You become compliant,
+ then the rights granted under this License from a particular Contributor
+ are reinstated (a) provisionally, unless and until such Contributor
+ explicitly and finally terminates Your grants, and (b) on an ongoing
+ basis, if such Contributor fails to notify You of the non-compliance by
+ some reasonable means prior to 60 days after You have come back into
+ compliance. Moreover, Your grants from a particular Contributor are
+ reinstated on an ongoing basis if such Contributor notifies You of the
+ non-compliance by some reasonable means, this is the first time You have
+ received notice of non-compliance with this License from such
+ Contributor, and You become compliant prior to 30 days after Your receipt
+ of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+ infringement claim (excluding declaratory judgment actions,
+ counter-claims, and cross-claims) alleging that a Contributor Version
+ directly or indirectly infringes any patent, then the rights granted to
+ You by any and all Contributors for the Covered Software under Section
+ 2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
+ license agreements (excluding distributors and resellers) which have been
+ validly granted by You or Your distributors under this License prior to
+ termination shall survive termination.
+
+6. Disclaimer of Warranty
+
+ Covered Software is provided under this License on an "as is" basis,
+ without warranty of any kind, either expressed, implied, or statutory,
+ including, without limitation, warranties that the Covered Software is free
+ of defects, merchantable, fit for a particular purpose or non-infringing.
+ The entire risk as to the quality and performance of the Covered Software
+ is with You. Should any Covered Software prove defective in any respect,
+ You (not any Contributor) assume the cost of any necessary servicing,
+ repair, or correction. This disclaimer of warranty constitutes an essential
+ part of this License. No use of any Covered Software is authorized under
+ this License except under this disclaimer.
+
+7. Limitation of Liability
+
+ Under no circumstances and under no legal theory, whether tort (including
+ negligence), contract, or otherwise, shall any Contributor, or anyone who
+ distributes Covered Software as permitted above, be liable to You for any
+ direct, indirect, special, incidental, or consequential damages of any
+ character including, without limitation, damages for lost profits, loss of
+ goodwill, work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses, even if such party shall have been
+ informed of the possibility of such damages. This limitation of liability
+ shall not apply to liability for death or personal injury resulting from
+ such party's negligence to the extent applicable law prohibits such
+ limitation. Some jurisdictions do not allow the exclusion or limitation of
+ incidental or consequential damages, so this exclusion and limitation may
+ not apply to You.
+
+8. Litigation
+
+ Any litigation relating to this License may be brought only in the courts
+ of a jurisdiction where the defendant maintains its principal place of
+ business and such litigation shall be governed by laws of that
+ jurisdiction, without reference to its conflict-of-law provisions. Nothing
+ in this Section shall prevent a party's ability to bring cross-claims or
+ counter-claims.
+
+9. Miscellaneous
+
+ This License represents the complete agreement concerning the subject
+ matter hereof. If any provision of this License is held to be
+ unenforceable, such provision shall be reformed only to the extent
+ necessary to make it enforceable. Any law or regulation which provides that
+ the language of a contract shall be construed against the drafter shall not
+ be used to construe this License against a Contributor.
+
+
+10. Versions of the License
+
+10.1. New Versions
+
+ Mozilla Foundation is the license steward. Except as provided in Section
+ 10.3, no one other than the license steward has the right to modify or
+ publish new versions of this License. Each version will be given a
+ distinguishing version number.
+
+10.2. Effect of New Versions
+
+ You may distribute the Covered Software under the terms of the version
+ of the License under which You originally received the Covered Software,
+ or under the terms of any subsequent version published by the license
+ steward.
+
+10.3. Modified Versions
+
+ If you create software not governed by this License, and you want to
+ create a new license for such software, you may create and use a
+ modified version of this License if you rename the license and remove
+ any references to the name of the license steward (except to note that
+ such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+ Licenses If You choose to distribute Source Code Form that is
+ Incompatible With Secondary Licenses under the terms of this version of
+ the License, the notice described in Exhibit B of this License must be
+ attached.
+
+Exhibit A - Source Code Form License Notice
+
+ This Source Code Form is subject to the
+ terms of the Mozilla Public License, v.
+ 2.0. If a copy of the MPL was not
+ distributed with this file, You can
+ obtain one at
+ http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular file,
+then You may include the notice in a location (such as a LICENSE file in a
+relevant directory) where a recipient would be likely to look for such a
+notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+
+ This Source Code Form is "Incompatible
+ With Secondary Licenses", as defined by
+ the Mozilla Public License, v. 2.0.
+
diff --git a/vendor/github.com/hashicorp/go-immutable-radix/v2/README.md b/vendor/github.com/hashicorp/go-immutable-radix/v2/README.md
new file mode 100644
index 0000000000..e17ccf4d11
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-immutable-radix/v2/README.md
@@ -0,0 +1,73 @@
+go-immutable-radix [](https://github.com/hashicorp/go-immutable-radix/actions/workflows/ci.yaml)
+=========
+
+Provides the `iradix` package that implements an immutable [radix tree](http://en.wikipedia.org/wiki/Radix_tree).
+The package only provides a single `Tree` implementation, optimized for sparse nodes.
+
+As a radix tree, it provides the following:
+ * O(k) operations. In many cases, this can be faster than a hash table since
+ the hash function is an O(k) operation, and hash tables have very poor cache locality.
+ * Minimum / Maximum value lookups
+ * Ordered iteration
+
+A tree supports using a transaction to batch multiple updates (insert, delete)
+in a more efficient manner than performing each operation one at a time.
+
+For a mutable variant, see [go-radix](https://github.com/armon/go-radix).
+
+V2
+==
+
+The v2 of go-immutable-radix introduces generics to improve compile-time type
+safety for users of the package. The module name for v2 is
+`github.com/hashicorp/go-immutable-radix/v2`.
+
+Documentation
+=============
+
+The full documentation is available on [Godoc](http://godoc.org/github.com/hashicorp/go-immutable-radix).
+
+Example
+=======
+
+Below is a simple example of usage
+
+```go
+// Create a tree
+r := iradix.New[int]()
+r, _, _ = r.Insert([]byte("foo"), 1)
+r, _, _ = r.Insert([]byte("bar"), 2)
+r, _, _ = r.Insert([]byte("foobar"), 2)
+
+// Find the longest prefix match
+m, _, _ := r.Root().LongestPrefix([]byte("foozip"))
+if string(m) != "foo" {
+ panic("should be foo")
+}
+```
+
+Here is an example of performing a range scan of the keys.
+
+```go
+// Create a tree
+r := iradix.New[int]()
+r, _, _ = r.Insert([]byte("001"), 1)
+r, _, _ = r.Insert([]byte("002"), 2)
+r, _, _ = r.Insert([]byte("005"), 5)
+r, _, _ = r.Insert([]byte("010"), 10)
+r, _, _ = r.Insert([]byte("100"), 10)
+
+// Range scan over the keys that sort lexicographically between [003, 050)
+it := r.Root().Iterator()
+it.SeekLowerBound([]byte("003"))
+for key, _, ok := it.Next(); ok; key, _, ok = it.Next() {
+ if string(key) >= "050" {
+ break
+ }
+ fmt.Println(string(key))
+}
+// Output:
+// 005
+// 010
+```
+
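Since the README mentions batching updates in a transaction without showing one, here is a small sketch of the v2 transaction API (as declared in iradix.go below):

```go
package main

import (
	"fmt"

	iradix "github.com/hashicorp/go-immutable-radix/v2"
)

func main() {
	t := iradix.New[int]()

	// Batch several writes and commit them as a single new tree.
	txn := t.Txn()
	txn.Insert([]byte("alpha"), 1)
	txn.Insert([]byte("beta"), 2)
	txn.Delete([]byte("alpha"))
	t2 := txn.Commit()

	fmt.Println(t.Len(), t2.Len()) // 0 1: the original tree is unchanged
}
```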
diff --git a/vendor/github.com/hashicorp/go-immutable-radix/v2/edges.go b/vendor/github.com/hashicorp/go-immutable-radix/v2/edges.go
new file mode 100644
index 0000000000..2e452f3e6f
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-immutable-radix/v2/edges.go
@@ -0,0 +1,21 @@
+package iradix
+
+import "sort"
+
+type edges[T any] []edge[T]
+
+func (e edges[T]) Len() int {
+ return len(e)
+}
+
+func (e edges[T]) Less(i, j int) bool {
+ return e[i].label < e[j].label
+}
+
+func (e edges[T]) Swap(i, j int) {
+ e[i], e[j] = e[j], e[i]
+}
+
+func (e edges[T]) Sort() {
+ sort.Sort(e)
+}
diff --git a/vendor/github.com/hashicorp/go-immutable-radix/v2/iradix.go b/vendor/github.com/hashicorp/go-immutable-radix/v2/iradix.go
new file mode 100644
index 0000000000..8774020bcc
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-immutable-radix/v2/iradix.go
@@ -0,0 +1,679 @@
+package iradix
+
+import (
+ "bytes"
+ "strings"
+
+ "github.com/hashicorp/golang-lru/v2/simplelru"
+)
+
+const (
+ // defaultModifiedCache is the default size of the modified node
+ // cache used per transaction. This is used to cache the updates
+ // to the nodes near the root, while the leaves do not need to be
+ // cached. This is important for very large transactions to prevent
+ // the modified cache from growing to be enormous. This is also used
+ // to set the max size of the mutation notify maps since those should
+ // also be bounded in a similar way.
+ defaultModifiedCache = 8192
+)
+
+// Tree implements an immutable radix tree. This can be treated as a
+// Dictionary abstract data type. The main advantage over a standard
+// hash map is prefix-based lookups and ordered iteration. The immutability
+// means that it is safe to concurrently read from a Tree without any
+// coordination.
+type Tree[T any] struct {
+ root *Node[T]
+ size int
+}
+
+// New returns an empty Tree
+func New[T any]() *Tree[T] {
+ t := &Tree[T]{
+ root: &Node[T]{
+ mutateCh: make(chan struct{}),
+ },
+ }
+ return t
+}
+
+// Len is used to return the number of elements in the tree
+func (t *Tree[T]) Len() int {
+ return t.size
+}
+
+// Txn is a transaction on the tree. This transaction is applied
+// atomically and returns a new tree when committed. A transaction
+// is not thread safe, and should only be used by a single goroutine.
+type Txn[T any] struct {
+ // root is the modified root for the transaction.
+ root *Node[T]
+
+ // snap is a snapshot of the root node for use if we have to run the
+ // slow notify algorithm.
+ snap *Node[T]
+
+ // size tracks the size of the tree as it is modified during the
+ // transaction.
+ size int
+
+ // writable is a cache of writable nodes that have been created during
+ // the course of the transaction. This allows us to re-use the same
+ // nodes for further writes and avoid unnecessary copies of nodes that
+ // have never been exposed outside the transaction. This will only hold
+ // up to defaultModifiedCache number of entries.
+ writable *simplelru.LRU[*Node[T], any]
+
+ // trackChannels is used to hold channels that need to be notified to
+ // signal mutation of the tree. This will only hold up to
+ // defaultModifiedCache number of entries, after which we will set the
+ // trackOverflow flag, which will cause us to use a more expensive
+ // algorithm to perform the notifications. Mutation tracking is only
+ // performed if trackMutate is true.
+ trackChannels map[chan struct{}]struct{}
+ trackOverflow bool
+ trackMutate bool
+}
+
+// Txn starts a new transaction that can be used to mutate the tree
+func (t *Tree[T]) Txn() *Txn[T] {
+ txn := &Txn[T]{
+ root: t.root,
+ snap: t.root,
+ size: t.size,
+ }
+ return txn
+}
+
+// Clone makes an independent copy of the transaction. The new transaction
+// does not track any nodes and has TrackMutate turned off. The cloned
+// transaction will contain any uncommitted writes in the original transaction
+// but further mutations to either will be independent and result in different
+// radix trees on Commit. A cloned transaction may be passed to another
+// goroutine and mutated there independently, however each transaction may
+// only be mutated in a single thread.
+func (t *Txn[T]) Clone() *Txn[T] {
+ // reset the writable node cache to avoid leaking future writes into the clone
+ t.writable = nil
+
+ txn := &Txn[T]{
+ root: t.root,
+ snap: t.snap,
+ size: t.size,
+ }
+ return txn
+}
+
+// TrackMutate can be used to toggle if mutations are tracked. If this is enabled
+// then notifications will be issued for affected internal nodes and leaves when
+// the transaction is committed.
+func (t *Txn[T]) TrackMutate(track bool) {
+ t.trackMutate = track
+}
+
+// trackChannel safely attempts to track the given mutation channel, setting the
+// overflow flag if we can no longer track any more. This limits the amount of
+// state that will accumulate during a transaction and we have a slower algorithm
+// to switch to if we overflow.
+func (t *Txn[T]) trackChannel(ch chan struct{}) {
+ // In overflow, make sure we don't store any more objects.
+ if t.trackOverflow {
+ return
+ }
+
+ // If this would overflow the state we reject it and set the flag (since
+ // we aren't tracking everything that's required any longer).
+ if len(t.trackChannels) >= defaultModifiedCache {
+ // Mark that we are in the overflow state
+ t.trackOverflow = true
+
+ // Clear the map so that the channels can be garbage collected. It is
+ // safe to do this since we have already overflowed and will be using
+ // the slow notify algorithm.
+ t.trackChannels = nil
+ return
+ }
+
+ // Create the map on the fly when we need it.
+ if t.trackChannels == nil {
+ t.trackChannels = make(map[chan struct{}]struct{})
+ }
+
+ // Otherwise we are good to track it.
+ t.trackChannels[ch] = struct{}{}
+}
+
+// writeNode returns a node to be modified, if the current node has already been
+// modified during the course of the transaction, it is used in-place. Set
+// forLeafUpdate to true if you are getting a write node to update the leaf,
+// which will set leaf mutation tracking appropriately as well.
+func (t *Txn[T]) writeNode(n *Node[T], forLeafUpdate bool) *Node[T] {
+ // Ensure the writable set exists.
+ if t.writable == nil {
+ lru, err := simplelru.NewLRU[*Node[T], any](defaultModifiedCache, nil)
+ if err != nil {
+ panic(err)
+ }
+ t.writable = lru
+ }
+
+ // If this node has already been modified, we can continue to use it
+ // during this transaction. We know that we don't need to track it for
+ // a node update since the node is writable, but if this is for a leaf
+ // update we track it, in case the initial write to this node didn't
+ // update the leaf.
+ if _, ok := t.writable.Get(n); ok {
+ if t.trackMutate && forLeafUpdate && n.leaf != nil {
+ t.trackChannel(n.leaf.mutateCh)
+ }
+ return n
+ }
+
+ // Mark this node as being mutated.
+ if t.trackMutate {
+ t.trackChannel(n.mutateCh)
+ }
+
+ // Mark its leaf as being mutated, if appropriate.
+ if t.trackMutate && forLeafUpdate && n.leaf != nil {
+ t.trackChannel(n.leaf.mutateCh)
+ }
+
+ // Copy the existing node. If you have set forLeafUpdate it will be
+ // safe to replace this leaf with another after you get your node for
+ // writing. You MUST replace it, because the channel associated with
+ // this leaf will be closed when this transaction is committed.
+ nc := &Node[T]{
+ mutateCh: make(chan struct{}),
+ leaf: n.leaf,
+ }
+ if n.prefix != nil {
+ nc.prefix = make([]byte, len(n.prefix))
+ copy(nc.prefix, n.prefix)
+ }
+ if len(n.edges) != 0 {
+ nc.edges = make([]edge[T], len(n.edges))
+ copy(nc.edges, n.edges)
+ }
+
+ // Mark this node as writable.
+ t.writable.Add(nc, nil)
+ return nc
+}
+
+// Visit all the nodes in the tree under n, and add their mutateChannels to the transaction
+// Returns the size of the subtree visited
+func (t *Txn[T]) trackChannelsAndCount(n *Node[T]) int {
+ // Count only leaf nodes
+ leaves := 0
+ if n.leaf != nil {
+ leaves = 1
+ }
+ // Mark this node as being mutated.
+ if t.trackMutate {
+ t.trackChannel(n.mutateCh)
+ }
+
+ // Mark its leaf as being mutated, if appropriate.
+ if t.trackMutate && n.leaf != nil {
+ t.trackChannel(n.leaf.mutateCh)
+ }
+
+ // Recurse on the children
+ for _, e := range n.edges {
+ leaves += t.trackChannelsAndCount(e.node)
+ }
+ return leaves
+}
+
+// mergeChild is called to collapse the given node with its child. This is only
+// called when the given node is not a leaf and has a single edge.
+func (t *Txn[T]) mergeChild(n *Node[T]) {
+ // Mark the child node as being mutated since we are about to abandon
+ // it. We don't need to mark the leaf since we are retaining it if it
+ // is there.
+ e := n.edges[0]
+ child := e.node
+ if t.trackMutate {
+ t.trackChannel(child.mutateCh)
+ }
+
+ // Merge the nodes.
+ n.prefix = concat(n.prefix, child.prefix)
+ n.leaf = child.leaf
+ if len(child.edges) != 0 {
+ n.edges = make([]edge[T], len(child.edges))
+ copy(n.edges, child.edges)
+ } else {
+ n.edges = nil
+ }
+}
+
+// insert does a recursive insertion
+func (t *Txn[T]) insert(n *Node[T], k, search []byte, v T) (*Node[T], T, bool) {
+ var zero T
+
+ // Handle key exhaustion
+ if len(search) == 0 {
+ var oldVal T
+ didUpdate := false
+ if n.isLeaf() {
+ oldVal = n.leaf.val
+ didUpdate = true
+ }
+
+ nc := t.writeNode(n, true)
+ nc.leaf = &leafNode[T]{
+ mutateCh: make(chan struct{}),
+ key: k,
+ val: v,
+ }
+ return nc, oldVal, didUpdate
+ }
+
+ // Look for the edge
+ idx, child := n.getEdge(search[0])
+
+ // No edge, create one
+ if child == nil {
+ e := edge[T]{
+ label: search[0],
+ node: &Node[T]{
+ mutateCh: make(chan struct{}),
+ leaf: &leafNode[T]{
+ mutateCh: make(chan struct{}),
+ key: k,
+ val: v,
+ },
+ prefix: search,
+ },
+ }
+ nc := t.writeNode(n, false)
+ nc.addEdge(e)
+ return nc, zero, false
+ }
+
+ // Determine longest prefix of the search key on match
+ commonPrefix := longestPrefix(search, child.prefix)
+ if commonPrefix == len(child.prefix) {
+ search = search[commonPrefix:]
+ newChild, oldVal, didUpdate := t.insert(child, k, search, v)
+ if newChild != nil {
+ nc := t.writeNode(n, false)
+ nc.edges[idx].node = newChild
+ return nc, oldVal, didUpdate
+ }
+ return nil, oldVal, didUpdate
+ }
+
+ // Split the node
+ nc := t.writeNode(n, false)
+ splitNode := &Node[T]{
+ mutateCh: make(chan struct{}),
+ prefix: search[:commonPrefix],
+ }
+ nc.replaceEdge(edge[T]{
+ label: search[0],
+ node: splitNode,
+ })
+
+ // Restore the existing child node
+ modChild := t.writeNode(child, false)
+ splitNode.addEdge(edge[T]{
+ label: modChild.prefix[commonPrefix],
+ node: modChild,
+ })
+ modChild.prefix = modChild.prefix[commonPrefix:]
+
+ // Create a new leaf node
+ leaf := &leafNode[T]{
+ mutateCh: make(chan struct{}),
+ key: k,
+ val: v,
+ }
+
+ // If the new key is a subset, add it to this node
+ search = search[commonPrefix:]
+ if len(search) == 0 {
+ splitNode.leaf = leaf
+ return nc, zero, false
+ }
+
+ // Create a new edge for the node
+ splitNode.addEdge(edge[T]{
+ label: search[0],
+ node: &Node[T]{
+ mutateCh: make(chan struct{}),
+ leaf: leaf,
+ prefix: search,
+ },
+ })
+ return nc, zero, false
+}
+
+// delete does a recursive deletion
+func (t *Txn[T]) delete(n *Node[T], search []byte) (*Node[T], *leafNode[T]) {
+ // Check for key exhaustion
+ if len(search) == 0 {
+ if !n.isLeaf() {
+ return nil, nil
+ }
+ // Copy the pointer in case we are in a transaction that already
+ // modified this node since the node will be reused. Any changes
+ // made to the node will not affect returning the original leaf
+ // value.
+ oldLeaf := n.leaf
+
+ // Remove the leaf node
+ nc := t.writeNode(n, true)
+ nc.leaf = nil
+
+ // Check if this node should be merged
+ if n != t.root && len(nc.edges) == 1 {
+ t.mergeChild(nc)
+ }
+ return nc, oldLeaf
+ }
+
+ // Look for an edge
+ label := search[0]
+ idx, child := n.getEdge(label)
+ if child == nil || !bytes.HasPrefix(search, child.prefix) {
+ return nil, nil
+ }
+
+ // Consume the search prefix
+ search = search[len(child.prefix):]
+ newChild, leaf := t.delete(child, search)
+ if newChild == nil {
+ return nil, nil
+ }
+
+ // Copy this node. WATCH OUT - it's safe to pass "false" here because we
+ // will only ADD a leaf via nc.mergeChild() if there isn't one due to
+ // the !nc.isLeaf() check in the logic just below. This is pretty subtle,
+ // so be careful if you change any of the logic here.
+ nc := t.writeNode(n, false)
+
+ // Delete the edge if the node has no edges
+ if newChild.leaf == nil && len(newChild.edges) == 0 {
+ nc.delEdge(label)
+ if n != t.root && len(nc.edges) == 1 && !nc.isLeaf() {
+ t.mergeChild(nc)
+ }
+ } else {
+ nc.edges[idx].node = newChild
+ }
+ return nc, leaf
+}
+
+// deletePrefix does a recursive deletion of all keys under the given prefix
+func (t *Txn[T]) deletePrefix(n *Node[T], search []byte) (*Node[T], int) {
+ // Check for key exhaustion
+ if len(search) == 0 {
+ nc := t.writeNode(n, true)
+ if n.isLeaf() {
+ nc.leaf = nil
+ }
+ nc.edges = nil
+ return nc, t.trackChannelsAndCount(n)
+ }
+
+ // Look for an edge
+ label := search[0]
+ idx, child := n.getEdge(label)
+ // We make sure that either the child node's prefix starts with the search term, or the search term starts with the child node's prefix
+ // Need to do both so that we can delete prefixes that don't correspond to any node in the tree
+ if child == nil || (!bytes.HasPrefix(child.prefix, search) && !bytes.HasPrefix(search, child.prefix)) {
+ return nil, 0
+ }
+
+ // Consume the search prefix
+ if len(child.prefix) > len(search) {
+ search = []byte("")
+ } else {
+ search = search[len(child.prefix):]
+ }
+ newChild, numDeletions := t.deletePrefix(child, search)
+ if newChild == nil {
+ return nil, 0
+ }
+ // Copy this node. WATCH OUT - it's safe to pass "false" here because we
+ // will only ADD a leaf via nc.mergeChild() if there isn't one due to
+ // the !nc.isLeaf() check in the logic just below. This is pretty subtle,
+ // so be careful if you change any of the logic here.
+ nc := t.writeNode(n, false)
+
+ // Delete the edge if the node has no edges
+ if newChild.leaf == nil && len(newChild.edges) == 0 {
+ nc.delEdge(label)
+ if n != t.root && len(nc.edges) == 1 && !nc.isLeaf() {
+ t.mergeChild(nc)
+ }
+ } else {
+ nc.edges[idx].node = newChild
+ }
+ return nc, numDeletions
+}
+
+// Insert is used to add or update a given key. The return provides
+// the previous value and a bool indicating if any was set.
+func (t *Txn[T]) Insert(k []byte, v T) (T, bool) {
+ newRoot, oldVal, didUpdate := t.insert(t.root, k, k, v)
+ if newRoot != nil {
+ t.root = newRoot
+ }
+ if !didUpdate {
+ t.size++
+ }
+ return oldVal, didUpdate
+}
+
+// Delete is used to delete a given key. Returns the old value if any,
+// and a bool indicating if the key was set.
+func (t *Txn[T]) Delete(k []byte) (T, bool) {
+ var zero T
+ newRoot, leaf := t.delete(t.root, k)
+ if newRoot != nil {
+ t.root = newRoot
+ }
+ if leaf != nil {
+ t.size--
+ return leaf.val, true
+ }
+ return zero, false
+}
+
+// DeletePrefix is used to delete an entire subtree that matches the prefix
+// This will delete all nodes under that prefix
+func (t *Txn[T]) DeletePrefix(prefix []byte) bool {
+ newRoot, numDeletions := t.deletePrefix(t.root, prefix)
+ if newRoot != nil {
+ t.root = newRoot
+ t.size = t.size - numDeletions
+ return true
+ }
+ return false
+}
+
+// Root returns the current root of the radix tree within this
+// transaction. The root is not safe across insert and delete operations,
+// but can be used to read the current state during a transaction.
+func (t *Txn[T]) Root() *Node[T] {
+ return t.root
+}
+
+// Get is used to lookup a specific key, returning
+// the value and if it was found
+func (t *Txn[T]) Get(k []byte) (T, bool) {
+ return t.root.Get(k)
+}
+
+// GetWatch is used to lookup a specific key, returning
+// the watch channel, value and if it was found
+func (t *Txn[T]) GetWatch(k []byte) (<-chan struct{}, T, bool) {
+ return t.root.GetWatch(k)
+}
+
+// Commit is used to finalize the transaction and return a new tree. If mutation
+// tracking is turned on then notifications will also be issued.
+func (t *Txn[T]) Commit() *Tree[T] {
+ nt := t.CommitOnly()
+ if t.trackMutate {
+ t.Notify()
+ }
+ return nt
+}
+
+// CommitOnly is used to finalize the transaction and return a new tree, but
+// does not issue any notifications until Notify is called.
+func (t *Txn[T]) CommitOnly() *Tree[T] {
+ nt := &Tree[T]{t.root, t.size}
+ t.writable = nil
+ return nt
+}
+
+// slowNotify does a complete comparison of the before and after trees in order
+// to trigger notifications. This doesn't require any additional state but it
+// is very expensive to compute.
+func (t *Txn[T]) slowNotify() {
+ snapIter := t.snap.rawIterator()
+ rootIter := t.root.rawIterator()
+ for snapIter.Front() != nil || rootIter.Front() != nil {
+ // If we've exhausted the nodes in the old snapshot, we know
+ // there's nothing remaining to notify.
+ if snapIter.Front() == nil {
+ return
+ }
+ snapElem := snapIter.Front()
+
+ // If we've exhausted the nodes in the new root, we know we need
+ // to invalidate everything that remains in the old snapshot. We
+ // know from the loop condition there's something in the old
+ // snapshot.
+ if rootIter.Front() == nil {
+ close(snapElem.mutateCh)
+ if snapElem.isLeaf() {
+ close(snapElem.leaf.mutateCh)
+ }
+ snapIter.Next()
+ continue
+ }
+
+ // Do one string compare so we can check the various conditions
+ // below without repeating the compare.
+ cmp := strings.Compare(snapIter.Path(), rootIter.Path())
+
+ // If the snapshot is behind the root, then we must have deleted
+ // this node during the transaction.
+ if cmp < 0 {
+ close(snapElem.mutateCh)
+ if snapElem.isLeaf() {
+ close(snapElem.leaf.mutateCh)
+ }
+ snapIter.Next()
+ continue
+ }
+
+ // If the snapshot is ahead of the root, then we must have added
+ // this node during the transaction.
+ if cmp > 0 {
+ rootIter.Next()
+ continue
+ }
+
+ // If we have the same path, then we need to see if we mutated a
+ // node and possibly the leaf.
+ rootElem := rootIter.Front()
+ if snapElem != rootElem {
+ close(snapElem.mutateCh)
+ if snapElem.leaf != nil && (snapElem.leaf != rootElem.leaf) {
+ close(snapElem.leaf.mutateCh)
+ }
+ }
+ snapIter.Next()
+ rootIter.Next()
+ }
+}
+
+// Notify is used along with TrackMutate to trigger notifications. This must
+// only be done once a transaction is committed via CommitOnly, and it is called
+// automatically by Commit.
+func (t *Txn[T]) Notify() {
+ if !t.trackMutate {
+ return
+ }
+
+ // If we've overflowed the tracking state we can't use it in any way and
+ // need to do a full tree compare.
+ if t.trackOverflow {
+ t.slowNotify()
+ } else {
+ for ch := range t.trackChannels {
+ close(ch)
+ }
+ }
+
+ // Clean up the tracking state so that a re-notify is safe (will trigger
+ // the else clause above which will be a no-op).
+ t.trackChannels = nil
+ t.trackOverflow = false
+}
+
+// Insert is used to add or update a given key. The return provides
+// the new tree, previous value and a bool indicating if any was set.
+func (t *Tree[T]) Insert(k []byte, v T) (*Tree[T], T, bool) {
+ txn := t.Txn()
+ old, ok := txn.Insert(k, v)
+ return txn.Commit(), old, ok
+}
+
+// Delete is used to delete a given key. Returns the new tree,
+// old value if any, and a bool indicating if the key was set.
+func (t *Tree[T]) Delete(k []byte) (*Tree[T], T, bool) {
+ txn := t.Txn()
+ old, ok := txn.Delete(k)
+ return txn.Commit(), old, ok
+}
+
+// DeletePrefix is used to delete all nodes starting with a given prefix. Returns the new tree,
+// and a bool indicating if the prefix matched any nodes
+func (t *Tree[T]) DeletePrefix(k []byte) (*Tree[T], bool) {
+ txn := t.Txn()
+ ok := txn.DeletePrefix(k)
+ return txn.Commit(), ok
+}
+
+// Root returns the root node of the tree which can be used for richer
+// query operations.
+func (t *Tree[T]) Root() *Node[T] {
+ return t.root
+}
+
+// Get is used to lookup a specific key, returning
+// the value and if it was found
+func (t *Tree[T]) Get(k []byte) (T, bool) {
+ return t.root.Get(k)
+}
+
+// longestPrefix finds the length of the shared prefix
+// of two byte slices
+func longestPrefix(k1, k2 []byte) int {
+ max := len(k1)
+ if l := len(k2); l < max {
+ max = l
+ }
+ var i int
+ for i = 0; i < max; i++ {
+ if k1[i] != k2[i] {
+ break
+ }
+ }
+ return i
+}
+
+// concat two byte slices, returning a third new copy
+func concat(a, b []byte) []byte {
+ c := make([]byte, len(a)+len(b))
+ copy(c, a)
+ copy(c[len(a):], b)
+ return c
+}
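
The transactional API above (copy-on-write via writeNode, Insert/Delete/DeletePrefix, Commit/Notify) is easiest to follow with a small end-to-end sketch. The example below is illustrative only and not part of the vendored file; it assumes the iradix.New[T] constructor, Tree.Txn, and the Txn.TrackMutate setter that are defined earlier in this package but outside this hunk.

package main

import (
	"fmt"

	iradix "github.com/hashicorp/go-immutable-radix/v2"
)

func main() {
	// Start from an empty immutable tree and batch edits in a transaction.
	r := iradix.New[int]() // assumed constructor from earlier in the package
	txn := r.Txn()
	txn.TrackMutate(true) // assumed setter for the trackMutate flag used above

	// Watch a key against the pre-transaction root; on an empty tree this
	// returns the root's mutate channel as the closest watch point.
	watch, _, _ := r.Root().GetWatch([]byte("foo"))

	txn.Insert([]byte("foo"), 1)
	txn.Insert([]byte("foobar"), 2)
	txn.Delete([]byte("missing")) // no-op: zero value, false

	r2 := txn.Commit() // Commit also calls Notify when tracking is on

	select {
	case <-watch:
		fmt.Println("watched subtree was modified")
	default:
		fmt.Println("no notification")
	}

	// The original tree is untouched; only the committed tree sees the writes.
	_, okOld := r.Get([]byte("foo"))
	v, okNew := r2.Get([]byte("foo"))
	fmt.Println(okOld, okNew, v) // false true 1
}
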
diff --git a/vendor/github.com/hashicorp/go-immutable-radix/v2/iter.go b/vendor/github.com/hashicorp/go-immutable-radix/v2/iter.go
new file mode 100644
index 0000000000..ffd2721c1a
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-immutable-radix/v2/iter.go
@@ -0,0 +1,205 @@
+package iradix
+
+import (
+ "bytes"
+)
+
+// Iterator is used to iterate over a set of nodes
+// in pre-order
+type Iterator[T any] struct {
+ node *Node[T]
+ stack []edges[T]
+}
+
+// SeekPrefixWatch is used to seek the iterator to a given prefix
+// and returns the watch channel of the finest granularity
+func (i *Iterator[T]) SeekPrefixWatch(prefix []byte) (watch <-chan struct{}) {
+ // Wipe the stack
+ i.stack = nil
+ n := i.node
+ watch = n.mutateCh
+ search := prefix
+ for {
+ // Check for key exhaustion
+ if len(search) == 0 {
+ i.node = n
+ return
+ }
+
+ // Look for an edge
+ _, n = n.getEdge(search[0])
+ if n == nil {
+ i.node = nil
+ return
+ }
+
+ // Update to the finest granularity as the search makes progress
+ watch = n.mutateCh
+
+ // Consume the search prefix
+ if bytes.HasPrefix(search, n.prefix) {
+ search = search[len(n.prefix):]
+
+ } else if bytes.HasPrefix(n.prefix, search) {
+ i.node = n
+ return
+ } else {
+ i.node = nil
+ return
+ }
+ }
+}
+
+// SeekPrefix is used to seek the iterator to a given prefix
+func (i *Iterator[T]) SeekPrefix(prefix []byte) {
+ i.SeekPrefixWatch(prefix)
+}
+
+func (i *Iterator[T]) recurseMin(n *Node[T]) *Node[T] {
+ // Traverse to the minimum child
+ if n.leaf != nil {
+ return n
+ }
+ nEdges := len(n.edges)
+ if nEdges > 1 {
+ // Add all the other edges to the stack (the min node will be added as
+ // we recurse)
+ i.stack = append(i.stack, n.edges[1:])
+ }
+ if nEdges > 0 {
+ return i.recurseMin(n.edges[0].node)
+ }
+ // Shouldn't be possible
+ return nil
+}
+
+// SeekLowerBound is used to seek the iterator to the smallest key that is
+// greater or equal to the given key. There is no watch variant as it's hard to
+// predict based on the radix structure which node(s) changes might affect the
+// result.
+func (i *Iterator[T]) SeekLowerBound(key []byte) {
+ // Wipe the stack. Unlike Prefix iteration, we need to build the stack as we
+ // go because we need only a subset of edges of many nodes in the path to the
+ // leaf with the lower bound. Note that the iterator will still recurse into
+ // children that we don't traverse on the way to the reverse lower bound as it
+ // walks the stack.
+ i.stack = []edges[T]{}
+ // i.node starts off in the common case as pointing to the root node of the
+ // tree. By the time we return we have either found a lower bound and setup
+ // the stack to traverse all larger keys, or we have not and the stack and
+ // node should both be nil to prevent the iterator from assuming it is just
+ // iterating the whole tree from the root node. Either way this needs to end
+ // up as nil so just set it here.
+ n := i.node
+ i.node = nil
+ search := key
+
+ found := func(n *Node[T]) {
+ i.stack = append(
+ i.stack,
+ edges[T]{edge[T]{node: n}},
+ )
+ }
+
+ findMin := func(n *Node[T]) {
+ n = i.recurseMin(n)
+ if n != nil {
+ found(n)
+ return
+ }
+ }
+
+ for {
+ // Compare current prefix with the search key's same-length prefix.
+ var prefixCmp int
+ if len(n.prefix) < len(search) {
+ prefixCmp = bytes.Compare(n.prefix, search[0:len(n.prefix)])
+ } else {
+ prefixCmp = bytes.Compare(n.prefix, search)
+ }
+
+ if prefixCmp > 0 {
+ // Prefix is larger, that means the lower bound is greater than the search
+ // and from now on we need to follow the minimum path to the smallest
+ // leaf under this subtree.
+ findMin(n)
+ return
+ }
+
+ if prefixCmp < 0 {
+ // Prefix is smaller than search prefix, that means there is no lower
+ // bound
+ i.node = nil
+ return
+ }
+
+ // Prefix is equal, we are still heading for an exact match. If this is a
+ // leaf and an exact match we're done.
+ if n.leaf != nil && bytes.Equal(n.leaf.key, key) {
+ found(n)
+ return
+ }
+
+ // Consume the search prefix if the current node has one. Note that this is
+ // safe because if n.prefix is longer than the search slice prefixCmp would
+ // have been > 0 above and the method would have already returned.
+ search = search[len(n.prefix):]
+
+ if len(search) == 0 {
+ // We've exhausted the search key, but the current node is not an exact
+ // match or not a leaf. That means the leaf value (if it exists) and all
+ // child nodes must be strictly greater than the search key, so the
+ // smallest key in this subtree is the lower bound.
+ findMin(n)
+ return
+ }
+
+ // Otherwise, take the lower bound next edge.
+ idx, lbNode := n.getLowerBoundEdge(search[0])
+ if lbNode == nil {
+ return
+ }
+
+ // Create stack edges for all the strictly higher edges in this node.
+ if idx+1 < len(n.edges) {
+ i.stack = append(i.stack, n.edges[idx+1:])
+ }
+
+ // Recurse
+ n = lbNode
+ }
+}
+
+// Next returns the next node in order
+func (i *Iterator[T]) Next() ([]byte, T, bool) {
+ var zero T
+ // Initialize our stack if needed
+ if i.stack == nil && i.node != nil {
+ i.stack = []edges[T]{{edge[T]{node: i.node}}}
+ }
+
+ for len(i.stack) > 0 {
+ // Inspect the last element of the stack
+ n := len(i.stack)
+ last := i.stack[n-1]
+ elem := last[0].node
+
+ // Update the stack
+ if len(last) > 1 {
+ i.stack[n-1] = last[1:]
+ } else {
+ i.stack = i.stack[:n-1]
+ }
+
+ // Push the edges onto the frontier
+ if len(elem.edges) > 0 {
+ i.stack = append(i.stack, elem.edges)
+ }
+
+ // Return the leaf values if any
+ if elem.leaf != nil {
+ return elem.leaf.key, elem.leaf.val, true
+ }
+ }
+ return nil, zero, false
+}
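
As a usage note for the iterator above: SeekLowerBound drives ordered range scans and SeekPrefix restricts iteration to a subtree. The sketch below is illustrative and assumes the iradix.New[T] constructor from earlier in the package.

package main

import (
	"fmt"

	iradix "github.com/hashicorp/go-immutable-radix/v2"
)

func main() {
	r := iradix.New[int]() // assumed constructor
	for i, k := range []string{"apple", "banana", "cherry"} {
		r, _, _ = r.Insert([]byte(k), i)
	}

	// Walk all keys >= "b" in order: banana, cherry.
	it := r.Root().Iterator()
	it.SeekLowerBound([]byte("b"))
	for key, val, ok := it.Next(); ok; key, val, ok = it.Next() {
		fmt.Printf("%s=%d\n", key, val)
	}

	// SeekPrefix restricts iteration to the subtree under a prefix: banana.
	it = r.Root().Iterator()
	it.SeekPrefix([]byte("ba"))
	for key, _, ok := it.Next(); ok; key, _, ok = it.Next() {
		fmt.Printf("%s\n", key)
	}
}
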
diff --git a/vendor/github.com/hashicorp/go-immutable-radix/v2/node.go b/vendor/github.com/hashicorp/go-immutable-radix/v2/node.go
new file mode 100644
index 0000000000..1be963922f
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-immutable-radix/v2/node.go
@@ -0,0 +1,326 @@
+package iradix
+
+import (
+ "bytes"
+ "sort"
+)
+
+// WalkFn is used when walking the tree. Takes a
+// key and value, returning if iteration should
+// be terminated.
+type WalkFn[T any] func(k []byte, v T) bool
+
+// leafNode is used to represent a value
+type leafNode[T any] struct {
+ mutateCh chan struct{}
+ key []byte
+ val T
+}
+
+// edge is used to represent an edge node
+type edge[T any] struct {
+ label byte
+ node *Node[T]
+}
+
+// Node is an immutable node in the radix tree
+type Node[T any] struct {
+ // mutateCh is closed if this node is modified
+ mutateCh chan struct{}
+
+ // leaf is used to store possible leaf
+ leaf *leafNode[T]
+
+ // prefix is the common prefix we ignore
+ prefix []byte
+
+ // Edges should be stored in-order for iteration.
+ // We avoid a fully materialized 256-entry slice (indexed by label byte)
+ // to save memory, since in most cases we expect the edge set to be sparse
+ edges edges[T]
+}
+
+func (n *Node[T]) isLeaf() bool {
+ return n.leaf != nil
+}
+
+func (n *Node[T]) addEdge(e edge[T]) {
+ num := len(n.edges)
+ idx := sort.Search(num, func(i int) bool {
+ return n.edges[i].label >= e.label
+ })
+ n.edges = append(n.edges, e)
+ if idx != num {
+ copy(n.edges[idx+1:], n.edges[idx:num])
+ n.edges[idx] = e
+ }
+}
+
+func (n *Node[T]) replaceEdge(e edge[T]) {
+ num := len(n.edges)
+ idx := sort.Search(num, func(i int) bool {
+ return n.edges[i].label >= e.label
+ })
+ if idx < num && n.edges[idx].label == e.label {
+ n.edges[idx].node = e.node
+ return
+ }
+ panic("replacing missing edge")
+}
+
+func (n *Node[T]) getEdge(label byte) (int, *Node[T]) {
+ num := len(n.edges)
+ idx := sort.Search(num, func(i int) bool {
+ return n.edges[i].label >= label
+ })
+ if idx < num && n.edges[idx].label == label {
+ return idx, n.edges[idx].node
+ }
+ return -1, nil
+}
+
+func (n *Node[T]) getLowerBoundEdge(label byte) (int, *Node[T]) {
+ num := len(n.edges)
+ idx := sort.Search(num, func(i int) bool {
+ return n.edges[i].label >= label
+ })
+ // we want lower bound behavior so return even if it's not an exact match
+ if idx < num {
+ return idx, n.edges[idx].node
+ }
+ return -1, nil
+}
+
+func (n *Node[T]) delEdge(label byte) {
+ num := len(n.edges)
+ idx := sort.Search(num, func(i int) bool {
+ return n.edges[i].label >= label
+ })
+ if idx < num && n.edges[idx].label == label {
+ copy(n.edges[idx:], n.edges[idx+1:])
+ n.edges[len(n.edges)-1] = edge[T]{}
+ n.edges = n.edges[:len(n.edges)-1]
+ }
+}
+
+func (n *Node[T]) GetWatch(k []byte) (<-chan struct{}, T, bool) {
+ search := k
+ watch := n.mutateCh
+ for {
+ // Check for key exhaustion
+ if len(search) == 0 {
+ if n.isLeaf() {
+ return n.leaf.mutateCh, n.leaf.val, true
+ }
+ break
+ }
+
+ // Look for an edge
+ _, n = n.getEdge(search[0])
+ if n == nil {
+ break
+ }
+
+ // Update to the finest granularity as the search makes progress
+ watch = n.mutateCh
+
+ // Consume the search prefix
+ if bytes.HasPrefix(search, n.prefix) {
+ search = search[len(n.prefix):]
+ } else {
+ break
+ }
+ }
+ var zero T
+ return watch, zero, false
+}
+
+func (n *Node[T]) Get(k []byte) (T, bool) {
+ _, val, ok := n.GetWatch(k)
+ return val, ok
+}
+
+// LongestPrefix is like Get, but instead of an
+// exact match, it will return the longest prefix match.
+func (n *Node[T]) LongestPrefix(k []byte) ([]byte, T, bool) {
+ var last *leafNode[T]
+ search := k
+ for {
+ // Look for a leaf node
+ if n.isLeaf() {
+ last = n.leaf
+ }
+
+ // Check for key exhaustion
+ if len(search) == 0 {
+ break
+ }
+
+ // Look for an edge
+ _, n = n.getEdge(search[0])
+ if n == nil {
+ break
+ }
+
+ // Consume the search prefix
+ if bytes.HasPrefix(search, n.prefix) {
+ search = search[len(n.prefix):]
+ } else {
+ break
+ }
+ }
+ if last != nil {
+ return last.key, last.val, true
+ }
+ var zero T
+ return nil, zero, false
+}
+
+// Minimum is used to return the minimum value in the tree
+func (n *Node[T]) Minimum() ([]byte, T, bool) {
+ for {
+ if n.isLeaf() {
+ return n.leaf.key, n.leaf.val, true
+ }
+ if len(n.edges) > 0 {
+ n = n.edges[0].node
+ } else {
+ break
+ }
+ }
+ var zero T
+ return nil, zero, false
+}
+
+// Maximum is used to return the maximum value in the tree
+func (n *Node[T]) Maximum() ([]byte, T, bool) {
+ for {
+ if num := len(n.edges); num > 0 {
+ n = n.edges[num-1].node // always descend the largest edge
+ continue
+ }
+ if n.isLeaf() {
+ return n.leaf.key, n.leaf.val, true
+ } else {
+ break
+ }
+ }
+ var zero T
+ return nil, zero, false
+}
+
+// Iterator is used to return an iterator at
+// the given node to walk the tree
+func (n *Node[T]) Iterator() *Iterator[T] {
+ return &Iterator[T]{node: n}
+}
+
+// ReverseIterator is used to return an iterator at
+// the given node to walk the tree backwards
+func (n *Node[T]) ReverseIterator() *ReverseIterator[T] {
+ return NewReverseIterator(n)
+}
+
+// PathIterator is used to return an iterator at
+// the given node to walk the tree down to the given path
+func (n *Node[T]) PathIterator(path []byte) *PathIterator[T] {
+ return &PathIterator[T]{node: n, path: path}
+}
+
+// rawIterator is used to return a raw iterator at the given node to walk the
+// tree.
+func (n *Node[T]) rawIterator() *rawIterator[T] {
+ iter := &rawIterator[T]{node: n}
+ iter.Next()
+ return iter
+}
+
+// Walk is used to walk the tree
+func (n *Node[T]) Walk(fn WalkFn[T]) {
+ recursiveWalk(n, fn)
+}
+
+// WalkBackwards is used to walk the tree in reverse order
+func (n *Node[T]) WalkBackwards(fn WalkFn[T]) {
+ reverseRecursiveWalk(n, fn)
+}
+
+// WalkPrefix is used to walk the tree under a prefix
+func (n *Node[T]) WalkPrefix(prefix []byte, fn WalkFn[T]) {
+ search := prefix
+ for {
+ // Check for key exhaustion
+ if len(search) == 0 {
+ recursiveWalk(n, fn)
+ return
+ }
+
+ // Look for an edge
+ _, n = n.getEdge(search[0])
+ if n == nil {
+ break
+ }
+
+ // Consume the search prefix
+ if bytes.HasPrefix(search, n.prefix) {
+ search = search[len(n.prefix):]
+
+ } else if bytes.HasPrefix(n.prefix, search) {
+ // Child may be under our search prefix
+ recursiveWalk(n, fn)
+ return
+ } else {
+ break
+ }
+ }
+}
+
+// WalkPath is used to walk the tree, but only visiting nodes
+// from the root down to a given leaf. Where WalkPrefix walks
+// all the entries *under* the given prefix, this walks the
+// entries *above* the given prefix.
+func (n *Node[T]) WalkPath(path []byte, fn WalkFn[T]) {
+ i := n.PathIterator(path)
+
+ for path, val, ok := i.Next(); ok; path, val, ok = i.Next() {
+ if fn(path, val) {
+ return
+ }
+ }
+}
+
+// recursiveWalk is used to do a pre-order walk of a node
+// recursively. Returns true if the walk should be aborted
+func recursiveWalk[T any](n *Node[T], fn WalkFn[T]) bool {
+ // Visit the leaf values if any
+ if n.leaf != nil && fn(n.leaf.key, n.leaf.val) {
+ return true
+ }
+
+ // Recurse on the children
+ for _, e := range n.edges {
+ if recursiveWalk(e.node, fn) {
+ return true
+ }
+ }
+ return false
+}
+
+// reverseRecursiveWalk is used to do a reverse pre-order
+// walk of a node recursively. Returns true if the walk
+// should be aborted
+func reverseRecursiveWalk[T any](n *Node[T], fn WalkFn[T]) bool {
+ // Visit the leaf values if any
+ if n.leaf != nil && fn(n.leaf.key, n.leaf.val) {
+ return true
+ }
+
+ // Recurse on the children in reverse order
+ for i := len(n.edges) - 1; i >= 0; i-- {
+ e := n.edges[i]
+ if reverseRecursiveWalk(e.node, fn) {
+ return true
+ }
+ }
+ return false
+}
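
The read-side Node API above (Get, LongestPrefix, WalkPrefix) composes as in the following sketch; the keys and the iradix.New[string] constructor are illustrative assumptions, not upstream code.

package main

import (
	"fmt"

	iradix "github.com/hashicorp/go-immutable-radix/v2"
)

func main() {
	r := iradix.New[string]() // assumed constructor
	r, _, _ = r.Insert([]byte("/api"), "api root")
	r, _, _ = r.Insert([]byte("/api/users"), "users")
	r, _, _ = r.Insert([]byte("/static"), "static")

	// Exact lookup.
	if v, ok := r.Get([]byte("/api/users")); ok {
		fmt.Println(v) // users
	}

	// Longest-prefix match, e.g. for route dispatch.
	k, v, _ := r.Root().LongestPrefix([]byte("/api/users/42"))
	fmt.Printf("%s -> %s\n", k, v) // /api/users -> users

	// Pre-order walk of everything under a prefix.
	r.Root().WalkPrefix([]byte("/api"), func(k []byte, v string) bool {
		fmt.Printf("%s=%s\n", k, v)
		return false // keep walking
	})
}
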
diff --git a/vendor/github.com/hashicorp/go-immutable-radix/v2/path_iter.go b/vendor/github.com/hashicorp/go-immutable-radix/v2/path_iter.go
new file mode 100644
index 0000000000..21942afc8a
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-immutable-radix/v2/path_iter.go
@@ -0,0 +1,59 @@
+package iradix
+
+import "bytes"
+
+// PathIterator is used to iterate over a set of nodes from the root
+// down to a specified path. This will iterate over the same values that
+// the Node.WalkPath method will.
+type PathIterator[T any] struct {
+ node *Node[T]
+ path []byte
+ done bool
+}
+
+// Next returns the next node in order
+func (i *PathIterator[T]) Next() ([]byte, T, bool) {
+ // This is mostly just an iterative (resumable) implementation of the
+ // WalkPath method on the node.
+ var zero T
+ var leaf *leafNode[T]
+
+ for leaf == nil && i.node != nil {
+ // visit the leaf values if any
+ if i.node.leaf != nil {
+ leaf = i.node.leaf
+ }
+
+ i.iterate()
+ }
+
+ if leaf != nil {
+ return leaf.key, leaf.val, true
+ }
+
+ return nil, zero, false
+}
+
+func (i *PathIterator[T]) iterate() {
+ // Check for key exhaustion
+ if len(i.path) == 0 {
+ i.node = nil
+ return
+ }
+
+ // Look for an edge
+ _, i.node = i.node.getEdge(i.path[0])
+ if i.node == nil {
+ return
+ }
+
+ // Consume the search prefix
+ if bytes.HasPrefix(i.path, i.node.prefix) {
+ i.path = i.path[len(i.node.prefix):]
+ } else {
+ // there are no more nodes to iterate through so
+ // nil out the node to prevent returning results
+ // for subsequent calls to Next()
+ i.node = nil
+ }
+}
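
To contrast the path iterator above with prefix iteration: WalkPath/PathIterator visit the stored keys that are prefixes of the given path (the nodes on the way down), while WalkPrefix visits the keys underneath a prefix. A minimal illustrative sketch, assuming iradix.New[T] from earlier in the package:

package main

import (
	"fmt"

	iradix "github.com/hashicorp/go-immutable-radix/v2"
)

func main() {
	r := iradix.New[int]() // assumed constructor
	r, _, _ = r.Insert([]byte("a"), 1)
	r, _, _ = r.Insert([]byte("ab"), 2)
	r, _, _ = r.Insert([]byte("abc"), 3)
	r, _, _ = r.Insert([]byte("abcd"), 4)

	// Visits "a", "ab", "abc": every stored key that is a prefix of "abc".
	it := r.Root().PathIterator([]byte("abc"))
	for k, v, ok := it.Next(); ok; k, v, ok = it.Next() {
		fmt.Printf("%s=%d\n", k, v)
	}
}
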
diff --git a/vendor/github.com/hashicorp/go-immutable-radix/v2/raw_iter.go b/vendor/github.com/hashicorp/go-immutable-radix/v2/raw_iter.go
new file mode 100644
index 0000000000..dd84f089d7
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-immutable-radix/v2/raw_iter.go
@@ -0,0 +1,78 @@
+package iradix
+
+// rawIterator visits each of the nodes in the tree, even the ones that are not
+// leaves. It keeps track of the effective path (what a leaf at a given node
+// would be called), which is useful for comparing trees.
+type rawIterator[T any] struct {
+ // node is the starting node in the tree for the iterator.
+ node *Node[T]
+
+ // stack keeps track of edges in the frontier.
+ stack []rawStackEntry[T]
+
+ // pos is the current position of the iterator.
+ pos *Node[T]
+
+ // path is the effective path of the current iterator position,
+ // regardless of whether the current node is a leaf.
+ path string
+}
+
+// rawStackEntry is used to keep track of the cumulative common path as well as
+// its associated edges in the frontier.
+type rawStackEntry[T any] struct {
+ path string
+ edges edges[T]
+}
+
+// Front returns the current node that has been iterated to.
+func (i *rawIterator[T]) Front() *Node[T] {
+ return i.pos
+}
+
+// Path returns the effective path of the current node, even if it's not actually
+// a leaf.
+func (i *rawIterator[T]) Path() string {
+ return i.path
+}
+
+// Next advances the iterator to the next node.
+func (i *rawIterator[T]) Next() {
+ // Initialize our stack if needed.
+ if i.stack == nil && i.node != nil {
+ i.stack = []rawStackEntry[T]{
+ {
+ edges: edges[T]{
+ edge[T]{node: i.node},
+ },
+ },
+ }
+ }
+
+ for len(i.stack) > 0 {
+ // Inspect the last element of the stack.
+ n := len(i.stack)
+ last := i.stack[n-1]
+ elem := last.edges[0].node
+
+ // Update the stack.
+ if len(last.edges) > 1 {
+ i.stack[n-1].edges = last.edges[1:]
+ } else {
+ i.stack = i.stack[:n-1]
+ }
+
+ // Push the edges onto the frontier.
+ if len(elem.edges) > 0 {
+ path := last.path + string(elem.prefix)
+ i.stack = append(i.stack, rawStackEntry[T]{path, elem.edges})
+ }
+
+ i.pos = elem
+ i.path = last.path + string(elem.prefix)
+ return
+ }
+
+ i.pos = nil
+ i.path = ""
+}
diff --git a/vendor/github.com/hashicorp/go-immutable-radix/v2/reverse_iter.go b/vendor/github.com/hashicorp/go-immutable-radix/v2/reverse_iter.go
new file mode 100644
index 0000000000..2a06cde7cb
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-immutable-radix/v2/reverse_iter.go
@@ -0,0 +1,240 @@
+package iradix
+
+import (
+ "bytes"
+)
+
+// ReverseIterator is used to iterate over a set of nodes
+// in reverse in-order
+type ReverseIterator[T any] struct {
+ i *Iterator[T]
+
+ // expandedParents stores the set of parent nodes whose relevant children have
+ // already been pushed into the stack. This can happen during seek or during
+ // iteration.
+ //
+ // Unlike forward iteration we need to recurse into children before we can
+ // output the value stored in an internal leaf since all children are greater.
+ // We use this to track whether we have already ensured all the children are
+ // in the stack.
+ expandedParents map[*Node[T]]struct{}
+}
+
+// NewReverseIterator returns a new ReverseIterator at a node
+func NewReverseIterator[T any](n *Node[T]) *ReverseIterator[T] {
+ return &ReverseIterator[T]{
+ i: &Iterator[T]{node: n},
+ }
+}
+
+// SeekPrefixWatch is used to seek the iterator to a given prefix
+// and returns the watch channel of the finest granularity
+func (ri *ReverseIterator[T]) SeekPrefixWatch(prefix []byte) (watch <-chan struct{}) {
+ return ri.i.SeekPrefixWatch(prefix)
+}
+
+// SeekPrefix is used to seek the iterator to a given prefix
+func (ri *ReverseIterator[T]) SeekPrefix(prefix []byte) {
+ ri.i.SeekPrefixWatch(prefix)
+}
+
+// SeekReverseLowerBound is used to seek the iterator to the largest key that is
+// lower or equal to the given key. There is no watch variant as it's hard to
+// predict based on the radix structure which node(s) changes might affect the
+// result.
+func (ri *ReverseIterator[T]) SeekReverseLowerBound(key []byte) {
+ // Wipe the stack. Unlike Prefix iteration, we need to build the stack as we
+ // go because we need only a subset of edges of many nodes in the path to the
+ // leaf with the lower bound. Note that the iterator will still recurse into
+ // children that we don't traverse on the way to the reverse lower bound as it
+ // walks the stack.
+ ri.i.stack = []edges[T]{}
+ // ri.i.node starts off in the common case as pointing to the root node of the
+ // tree. By the time we return we have either found a lower bound and setup
+ // the stack to traverse all larger keys, or we have not and the stack and
+ // node should both be nil to prevent the iterator from assuming it is just
+ // iterating the whole tree from the root node. Either way this needs to end
+ // up as nil so just set it here.
+ n := ri.i.node
+ ri.i.node = nil
+ search := key
+
+ if ri.expandedParents == nil {
+ ri.expandedParents = make(map[*Node[T]]struct{})
+ }
+
+ found := func(n *Node[T]) {
+ ri.i.stack = append(ri.i.stack, edges[T]{edge[T]{node: n}})
+ // We need to mark this node as expanded in advance too otherwise the
+ // iterator will attempt to walk all of its children even though they are
+ // greater than the lower bound we have found. We've expanded it in the
+ // sense that all of its children that we want to walk are already in the
+ // stack (i.e. none of them).
+ ri.expandedParents[n] = struct{}{}
+ }
+
+ for {
+ // Compare current prefix with the search key's same-length prefix.
+ var prefixCmp int
+ if len(n.prefix) < len(search) {
+ prefixCmp = bytes.Compare(n.prefix, search[0:len(n.prefix)])
+ } else {
+ prefixCmp = bytes.Compare(n.prefix, search)
+ }
+
+ if prefixCmp < 0 {
+ // Prefix is smaller than search prefix, that means there is no exact
+ // match for the search key. But we are looking in reverse, so the reverse
+ // lower bound will be the largest leaf under this subtree, since it is
+ // the value that would come right before the current search key if it
+ // were in the tree. So we need to follow the maximum path in this subtree
+ // to find it. Note that this is exactly what the iterator will already do
+ // if it finds a node in the stack that has _not_ been marked as expanded
+ // so in this one case we don't call `found` and instead let the iterator
+ // do the expansion and recursion through all the children.
+ ri.i.stack = append(ri.i.stack, edges[T]{edge[T]{node: n}})
+ return
+ }
+
+ if prefixCmp > 0 {
+ // Prefix is larger than search prefix, or there is no prefix but we've
+ // also exhausted the search key. Either way, that means there is no
+ // reverse lower bound since nothing comes before our current search
+ // prefix.
+ return
+ }
+
+ // If this is a leaf, something needs to happen! Note that if it's a leaf
+ // and prefixCmp was zero (which it must be to get here) then the leaf value
+ // is either an exact match for the search, or it's lower. It can't be
+ // greater.
+ if n.isLeaf() {
+
+ // Firstly, if it's an exact match, we're done!
+ if bytes.Equal(n.leaf.key, key) {
+ found(n)
+ return
+ }
+
+ // It's not so this node's leaf value must be lower and could still be a
+ // valid contender for reverse lower bound.
+
+ // If it has no children then we are also done.
+ if len(n.edges) == 0 {
+ // This leaf is the lower bound.
+ found(n)
+ return
+ }
+
+ // Finally, this leaf is internal (has children) so we'll keep searching,
+ // but we need to add it to the iterator's stack since it has a leaf value
+ // that needs to be iterated over. It needs to be added to the stack
+ // before its children below as it comes first.
+ ri.i.stack = append(ri.i.stack, edges[T]{edge[T]{node: n}})
+ // We also need to mark it as expanded since we'll be adding any of its
+ // relevant children below and so don't want the iterator to re-add them
+ // on its way back up the stack.
+ ri.expandedParents[n] = struct{}{}
+ }
+
+ // Consume the search prefix. Note that this is safe because if n.prefix is
+ // longer than the search slice prefixCmp would have been > 0 above and the
+ // method would have already returned.
+ search = search[len(n.prefix):]
+
+ if len(search) == 0 {
+ // We've exhausted the search key but we are not at a leaf. That means all
+ // children are greater than the search key so a reverse lower bound
+ // doesn't exist in this subtree. Note that there might still be one in
+ // the whole radix tree by following a different path somewhere further
+ // up. If that's the case then the iterator's stack will contain all the
+ // smaller nodes already and Previous will walk through them correctly.
+ return
+ }
+
+ // Otherwise, take the lower bound next edge.
+ idx, lbNode := n.getLowerBoundEdge(search[0])
+
+ // From here, we need to update the stack with all values lower than
+ // the lower bound edge. Since getLowerBoundEdge() returns -1 when the
+ // search prefix is larger than all edges, we set idx past the last edge
+ // index so that they can all be placed in the stack, since they all
+ // come before our search prefix.
+ if idx == -1 {
+ idx = len(n.edges)
+ }
+
+ // Create stack edges for all the strictly lower edges in this node.
+ if len(n.edges[:idx]) > 0 {
+ ri.i.stack = append(ri.i.stack, n.edges[:idx])
+ }
+
+ // Exit if there's no lower bound edge. The stack will have the previous
+ // nodes already.
+ if lbNode == nil {
+ return
+ }
+
+ // Recurse
+ n = lbNode
+ }
+}
+
+// Previous returns the previous node in reverse order
+func (ri *ReverseIterator[T]) Previous() ([]byte, T, bool) {
+ // Initialize our stack if needed
+ if ri.i.stack == nil && ri.i.node != nil {
+ ri.i.stack = []edges[T]{
+ {
+ edge[T]{node: ri.i.node},
+ },
+ }
+ }
+
+ if ri.expandedParents == nil {
+ ri.expandedParents = make(map[*Node[T]]struct{})
+ }
+
+ for len(ri.i.stack) > 0 {
+ // Inspect the last element of the stack
+ n := len(ri.i.stack)
+ last := ri.i.stack[n-1]
+ m := len(last)
+ elem := last[m-1].node
+
+ _, alreadyExpanded := ri.expandedParents[elem]
+
+ // If this is an internal node and we've not seen it already, we need to
+ // leave it in the stack so we can return its possible leaf value _after_
+ // we've recursed through all its children.
+ if len(elem.edges) > 0 && !alreadyExpanded {
+ // record that we've seen this node!
+ ri.expandedParents[elem] = struct{}{}
+ // push child edges onto stack and skip the rest of the loop to recurse
+ // into the largest one.
+ ri.i.stack = append(ri.i.stack, elem.edges)
+ continue
+ }
+
+ // Remove the node from the stack
+ if m > 1 {
+ ri.i.stack[n-1] = last[:m-1]
+ } else {
+ ri.i.stack = ri.i.stack[:n-1]
+ }
+ // We don't need this state any more as it's no longer in the stack so we
+ // won't visit it again
+ if alreadyExpanded {
+ delete(ri.expandedParents, elem)
+ }
+
+ // If this is a leaf, return it
+ if elem.leaf != nil {
+ return elem.leaf.key, elem.leaf.val, true
+ }
+
+ // it's not a leaf so keep walking the stack to find the previous leaf
+ }
+ var zero T
+ return nil, zero, false
+}
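
A minimal sketch of reverse iteration with SeekReverseLowerBound, again assuming the iradix.New[T] constructor from earlier in the package:

package main

import (
	"fmt"

	iradix "github.com/hashicorp/go-immutable-radix/v2"
)

func main() {
	r := iradix.New[int]() // assumed constructor
	for i, k := range []string{"a", "b", "c", "d"} {
		r, _, _ = r.Insert([]byte(k), i)
	}

	// Walk keys <= "c" from largest to smallest: c, b, a.
	it := r.Root().ReverseIterator()
	it.SeekReverseLowerBound([]byte("c"))
	for k, v, ok := it.Previous(); ok; k, v, ok = it.Previous() {
		fmt.Printf("%s=%d\n", k, v)
	}
}
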
diff --git a/vendor/github.com/hashicorp/go-plugin/CHANGELOG.md b/vendor/github.com/hashicorp/go-plugin/CHANGELOG.md
index 3d0379c500..ca16273c1f 100644
--- a/vendor/github.com/hashicorp/go-plugin/CHANGELOG.md
+++ b/vendor/github.com/hashicorp/go-plugin/CHANGELOG.md
@@ -1,9 +1,29 @@
+## v1.6.2
+
+ENHANCEMENTS:
+
+* Added support for gRPC dial options to the `Dial` API [[GH-257](https://github.com/hashicorp/go-plugin/pull/257)]
+
+BUGS:
+
+* Fixed a bug where reattaching to a plugin that exits could kill an unrelated process [[GH-320](https://github.com/hashicorp/go-plugin/pull/320)]
+
+## v1.6.1
+
+BUGS:
+
+* Suppress spurious `os.ErrClosed` on plugin shutdown [[GH-299](https://github.com/hashicorp/go-plugin/pull/299)]
+
+ENHANCEMENTS:
+
+* deps: bump google.golang.org/grpc to v1.58.3 [[GH-296](https://github.com/hashicorp/go-plugin/pull/296)]
+
## v1.6.0
CHANGES:
* plugin: Plugins written in other languages can optionally start to advertise whether they support gRPC broker multiplexing.
- If the environment variable `PLUGIN_MULTIPLEX_GRPC` is set, it is safe to include a seventh field containing a boolean
+ If the environment variable `PLUGIN_MULTIPLEX_GRPC` is set, it is safe to include a seventh field containing a boolean
value in the `|`-separated protocol negotiation line.
ENHANCEMENTS:
diff --git a/vendor/github.com/hashicorp/go-plugin/client.go b/vendor/github.com/hashicorp/go-plugin/client.go
index 73f6b35151..df2fd22c82 100644
--- a/vendor/github.com/hashicorp/go-plugin/client.go
+++ b/vendor/github.com/hashicorp/go-plugin/client.go
@@ -82,7 +82,7 @@ const defaultPluginLogBufferSize = 64 * 1024
//
// Plugin hosts should use one Client for each plugin executable. To
// dispense a plugin type, use the `Client.Client` function, and then
-// cal `Dispense`. This awkward API is mostly historical but is used to split
+// call `Dispense`. This awkward API is mostly historical but is used to split
// the client that deals with subprocess management and the client that
// does RPC management.
//
@@ -104,9 +104,9 @@ type Client struct {
// goroutines.
clientWaitGroup sync.WaitGroup
- // stderrWaitGroup is used to prevent the command's Wait() function from
- // being called before we've finished reading from the stderr pipe.
- stderrWaitGroup sync.WaitGroup
+ // pipesWaitGroup is used to prevent the command's Wait() function from
+ // being called before we've finished reading from the stdout and stderr pipe.
+ pipesWaitGroup sync.WaitGroup
// processKilled is used for testing only, to flag when the process was
// forcefully killed.
@@ -756,8 +756,8 @@ func (c *Client) Start() (addr net.Addr, err error) {
// Start goroutine that logs the stderr
c.clientWaitGroup.Add(1)
- c.stderrWaitGroup.Add(1)
- // logStderr calls Done()
+ c.pipesWaitGroup.Add(1)
+ // logStderr calls c.pipesWaitGroup.Done()
go c.logStderr(runner.Name(), runner.Stderr())
c.clientWaitGroup.Add(1)
@@ -767,9 +767,9 @@ func (c *Client) Start() (addr net.Addr, err error) {
defer c.clientWaitGroup.Done()
- // wait to finish reading from stderr since the stderr pipe reader
+ // wait to finish reading from stdout/stderr since the stdout/stderr pipe readers
// will be closed by the subsequent call to cmd.Wait().
- c.stderrWaitGroup.Wait()
+ c.pipesWaitGroup.Wait()
// Wait for the command to end.
err := runner.Wait(context.Background())
@@ -792,8 +792,10 @@ func (c *Client) Start() (addr net.Addr, err error) {
// out of stdout
linesCh := make(chan string)
c.clientWaitGroup.Add(1)
+ c.pipesWaitGroup.Add(1)
go func() {
defer c.clientWaitGroup.Done()
+ defer c.pipesWaitGroup.Done()
defer close(linesCh)
scanner := bufio.NewScanner(runner.Stdout())
@@ -1159,7 +1161,7 @@ func (c *Client) getGRPCMuxer(addr net.Addr) (*grpcmux.GRPCClientMuxer, error) {
func (c *Client) logStderr(name string, r io.Reader) {
defer c.clientWaitGroup.Done()
- defer c.stderrWaitGroup.Done()
+ defer c.pipesWaitGroup.Done()
l := c.logger.Named(filepath.Base(name))
reader := bufio.NewReaderSize(r, c.config.PluginLogBufferSize)
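
The change above widens the old stderr-only wait group into pipesWaitGroup so that runner.Wait (which closes the pipes) is never called while either the stdout or the stderr reader is still draining. The sketch below shows the same ordering with plain os/exec; it is not go-plugin code, just the underlying idiom.

package main

import (
	"bufio"
	"fmt"
	"io"
	"os/exec"
	"sync"
)

func main() {
	cmd := exec.Command("sh", "-c", "echo out; echo err 1>&2")
	stdout, _ := cmd.StdoutPipe() // error handling elided for brevity
	stderr, _ := cmd.StderrPipe()

	if err := cmd.Start(); err != nil {
		panic(err)
	}

	var pipes sync.WaitGroup // plays the role of pipesWaitGroup
	drain := func(name string, r io.Reader) {
		defer pipes.Done()
		sc := bufio.NewScanner(r)
		for sc.Scan() {
			fmt.Printf("%s: %s\n", name, sc.Text())
		}
	}
	pipes.Add(2)
	go drain("stdout", stdout)
	go drain("stderr", stderr)

	// Only call Wait once both readers have finished, since Wait closes the pipes.
	pipes.Wait()
	if err := cmd.Wait(); err != nil {
		panic(err)
	}
}
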
diff --git a/vendor/github.com/hashicorp/go-plugin/grpc_broker.go b/vendor/github.com/hashicorp/go-plugin/grpc_broker.go
index 5b17e37fef..4bac0a5f7d 100644
--- a/vendor/github.com/hashicorp/go-plugin/grpc_broker.go
+++ b/vendor/github.com/hashicorp/go-plugin/grpc_broker.go
@@ -523,9 +523,12 @@ func (b *GRPCBroker) muxDial(id uint32) func(string, time.Duration) (net.Conn, e
}
// Dial opens a connection by ID.
-func (b *GRPCBroker) Dial(id uint32) (conn *grpc.ClientConn, err error) {
+func (b *GRPCBroker) Dial(id uint32) (conn *grpc.ClientConn, err error) { return b.DialWithOptions(id) }
+
+// DialWithOptions opens a connection by ID, applying any extra gRPC dial options.
+func (b *GRPCBroker) DialWithOptions(id uint32, opts ...grpc.DialOption) (conn *grpc.ClientConn, err error) {
if b.muxer.Enabled() {
- return dialGRPCConn(b.tls, b.muxDial(id))
+ return dialGRPCConn(b.tls, b.muxDial(id), opts...)
}
var c *plugin.ConnInfo
@@ -560,7 +563,7 @@ func (b *GRPCBroker) Dial(id uint32) (conn *grpc.ClientConn, err error) {
return nil, err
}
- return dialGRPCConn(b.tls, netAddrDialer(addr))
+ return dialGRPCConn(b.tls, netAddrDialer(addr), opts...)
}
// NextId returns a unique ID to use next.
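
DialWithOptions lets brokered connections carry extra gRPC dial options while Dial keeps its old no-option behaviour. A hedged sketch of a call site follows; the helper name is hypothetical, and the option shown is a standard google.golang.org/grpc call option rather than anything required by go-plugin.

package example

import (
	plugin "github.com/hashicorp/go-plugin"
	"google.golang.org/grpc"
)

// dialBrokered is a hypothetical helper showing the new option-passing path.
func dialBrokered(broker *plugin.GRPCBroker, id uint32) (*grpc.ClientConn, error) {
	// Dial(id) remains equivalent to DialWithOptions(id) with no extra options.
	return broker.DialWithOptions(id,
		grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(16*1024*1024)),
	)
}
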
diff --git a/vendor/github.com/hashicorp/go-plugin/internal/cmdrunner/cmd_reattach.go b/vendor/github.com/hashicorp/go-plugin/internal/cmdrunner/cmd_reattach.go
index dce1a86a88..984c875db9 100644
--- a/vendor/github.com/hashicorp/go-plugin/internal/cmdrunner/cmd_reattach.go
+++ b/vendor/github.com/hashicorp/go-plugin/internal/cmdrunner/cmd_reattach.go
@@ -28,7 +28,6 @@ func ReattachFunc(pid int, addr net.Addr) runner.ReattachFunc {
// doesn't actually return an error if it can't find the process.
conn, err := net.Dial(addr.Network(), addr.String())
if err != nil {
- p.Kill()
return nil, ErrProcessNotFound
}
conn.Close()
diff --git a/vendor/github.com/hashicorp/go-plugin/testing.go b/vendor/github.com/hashicorp/go-plugin/testing.go
index a8735dfc8c..1abd402a35 100644
--- a/vendor/github.com/hashicorp/go-plugin/testing.go
+++ b/vendor/github.com/hashicorp/go-plugin/testing.go
@@ -9,10 +9,10 @@ import (
"io"
"net"
"net/rpc"
+ "testing"
hclog "github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-plugin/internal/grpcmux"
- "github.com/mitchellh/go-testing-interface"
"google.golang.org/grpc"
)
@@ -33,7 +33,7 @@ type TestOptions struct {
// TestConn is a helper function for returning a client and server
// net.Conn connected to each other.
-func TestConn(t testing.T) (net.Conn, net.Conn) {
+func TestConn(t testing.TB) (net.Conn, net.Conn) {
// Listen to any local port. This listener will be closed
// after a single connection is established.
l, err := net.Listen("tcp", "127.0.0.1:0")
@@ -67,7 +67,7 @@ func TestConn(t testing.T) (net.Conn, net.Conn) {
}
// TestRPCConn returns a rpc client and server connected to each other.
-func TestRPCConn(t testing.T) (*rpc.Client, *rpc.Server) {
+func TestRPCConn(t testing.TB) (*rpc.Client, *rpc.Server) {
clientConn, serverConn := TestConn(t)
server := rpc.NewServer()
@@ -79,7 +79,7 @@ func TestRPCConn(t testing.T) (*rpc.Client, *rpc.Server) {
// TestPluginRPCConn returns a plugin RPC client and server that are connected
// together and configured.
-func TestPluginRPCConn(t testing.T, ps map[string]Plugin, opts *TestOptions) (*RPCClient, *RPCServer) {
+func TestPluginRPCConn(t testing.TB, ps map[string]Plugin, opts *TestOptions) (*RPCClient, *RPCServer) {
// Create two net.Conns we can use to shuttle our control connection
clientConn, serverConn := TestConn(t)
@@ -107,7 +107,7 @@ func TestPluginRPCConn(t testing.T, ps map[string]Plugin, opts *TestOptions) (*R
// TestGRPCConn returns a gRPC client conn and grpc server that are connected
// together and configured. The register function is used to register services
// prior to the Serve call. This is used to test gRPC connections.
-func TestGRPCConn(t testing.T, register func(*grpc.Server)) (*grpc.ClientConn, *grpc.Server) {
+func TestGRPCConn(t testing.TB, register func(*grpc.Server)) (*grpc.ClientConn, *grpc.Server) {
// Create a listener
l, err := net.Listen("tcp", "127.0.0.1:0")
if err != nil {
@@ -135,7 +135,7 @@ func TestGRPCConn(t testing.T, register func(*grpc.Server)) (*grpc.ClientConn, *
// TestPluginGRPCConn returns a plugin gRPC client and server that are connected
// together and configured. This is used to test gRPC connections.
-func TestPluginGRPCConn(t testing.T, multiplex bool, ps map[string]Plugin) (*GRPCClient, *GRPCServer) {
+func TestPluginGRPCConn(t testing.TB, multiplex bool, ps map[string]Plugin) (*GRPCClient, *GRPCServer) {
// Create a listener
ln, err := serverListener(UnixSocketConfig{})
if err != nil {
diff --git a/vendor/github.com/hashicorp/go-retryablehttp/.gitignore b/vendor/github.com/hashicorp/go-retryablehttp/.gitignore
new file mode 100644
index 0000000000..4e309e0b32
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-retryablehttp/.gitignore
@@ -0,0 +1,4 @@
+.idea/
+*.iml
+*.test
+.vscode/
\ No newline at end of file
diff --git a/vendor/github.com/hashicorp/go-retryablehttp/.go-version b/vendor/github.com/hashicorp/go-retryablehttp/.go-version
new file mode 100644
index 0000000000..6fee2fedb0
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-retryablehttp/.go-version
@@ -0,0 +1 @@
+1.22.2
diff --git a/vendor/github.com/hashicorp/go-retryablehttp/CHANGELOG.md b/vendor/github.com/hashicorp/go-retryablehttp/CHANGELOG.md
new file mode 100644
index 0000000000..68a627c6d9
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-retryablehttp/CHANGELOG.md
@@ -0,0 +1,33 @@
+## 0.7.7 (May 30, 2024)
+
+BUG FIXES:
+
+- client: avoid potentially leaking URL-embedded basic authentication credentials in logs (#158)
+
+## 0.7.6 (May 9, 2024)
+
+ENHANCEMENTS:
+
+- client: support a `RetryPrepare` function for modifying the request before retrying (#216)
+- client: support HTTP-date values for `Retry-After` header value (#138)
+- client: avoid reading entire body when the body is a `*bytes.Reader` (#197)
+
+BUG FIXES:
+
+- client: fix a broken check for invalid server certificate in go 1.20+ (#210)
+
+## 0.7.5 (Nov 8, 2023)
+
+BUG FIXES:
+
+- client: fixes an issue where the request body is not preserved on temporary redirects or re-established HTTP/2 connections (#207)
+
+## 0.7.4 (Jun 6, 2023)
+
+BUG FIXES:
+
+- client: fixing an issue where the Content-Type header wouldn't be sent with an empty payload when using HTTP/2 (#194)
+
+## 0.7.3 (May 15, 2023)
+
+Initial release
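
For context on the client this changelog tracks, a minimal usage sketch follows, using the library's long-standing surface (NewClient, RetryMax, Get); treat it as illustrative rather than a pinned API reference.

package main

import (
	"fmt"

	retryablehttp "github.com/hashicorp/go-retryablehttp"
)

func main() {
	client := retryablehttp.NewClient()
	client.RetryMax = 3 // retry up to 3 times with the default backoff

	resp, err := client.Get("https://example.com/")
	if err != nil {
		fmt.Println("request failed after retries:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
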
diff --git a/vendor/github.com/hashicorp/go-retryablehttp/CODEOWNERS b/vendor/github.com/hashicorp/go-retryablehttp/CODEOWNERS
new file mode 100644
index 0000000000..d6dd78a2dd
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-retryablehttp/CODEOWNERS
@@ -0,0 +1 @@
+* @hashicorp/go-retryablehttp-maintainers
diff --git a/vendor/github.com/hashicorp/go-retryablehttp/LICENSE b/vendor/github.com/hashicorp/go-retryablehttp/LICENSE
new file mode 100644
index 0000000000..f4f97ee585
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-retryablehttp/LICENSE
@@ -0,0 +1,365 @@
+Copyright (c) 2015 HashiCorp, Inc.
+
+Mozilla Public License, version 2.0
+
+1. Definitions
+
+1.1. "Contributor"
+
+ means each individual or legal entity that creates, contributes to the
+ creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+
+ means the combination of the Contributions of others (if any) used by a
+ Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+
+ means Source Code Form to which the initial Contributor has attached the
+ notice in Exhibit A, the Executable Form of such Source Code Form, and
+ Modifications of such Source Code Form, in each case including portions
+ thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ a. that the initial Contributor has attached the notice described in
+ Exhibit B to the Covered Software; or
+
+ b. that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the terms of
+ a Secondary License.
+
+1.6. "Executable Form"
+
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+
+ means a work that combines Covered Software with other material, in a
+ separate file or files, that is not Covered Software.
+
+1.8. "License"
+
+ means this document.
+
+1.9. "Licensable"
+
+ means having the right to grant, to the maximum extent possible, whether
+ at the time of the initial grant or subsequently, any and all of the
+ rights conveyed by this License.
+
+1.10. "Modifications"
+
+ means any of the following:
+
+ a. any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered Software; or
+
+ b. any new file in Source Code Form that contains any Covered Software.
+
+1.11. "Patent Claims" of a Contributor
+
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the License,
+ by the making, using, selling, offering for sale, having made, import,
+ or transfer of either its Contributions or its Contributor Version.
+
+1.12. "Secondary License"
+
+ means either the GNU General Public License, Version 2.0, the GNU Lesser
+ General Public License, Version 2.1, the GNU Affero General Public
+ License, Version 3.0, or any later versions of those licenses.
+
+1.13. "Source Code Form"
+
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that controls, is
+ controlled by, or is under common control with You. For purposes of this
+ definition, "control" means (a) the power, direct or indirect, to cause
+ the direction or management of such entity, whether by contract or
+ otherwise, or (b) ownership of more than fifty percent (50%) of the
+ outstanding shares or beneficial ownership of such entity.
+
+
+2. License Grants and Conditions
+
+2.1. Grants
+
+ Each Contributor hereby grants You a world-wide, royalty-free,
+ non-exclusive license:
+
+ a. under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+ b. under Patent Claims of such Contributor to make, use, sell, offer for
+ sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+ The licenses granted in Section 2.1 with respect to any Contribution
+ become effective for each Contribution on the date the Contributor first
+ distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+ The licenses granted in this Section 2 are the only rights granted under
+ this License. No additional rights or licenses will be implied from the
+ distribution or licensing of Covered Software under this License.
+ Notwithstanding Section 2.1(b) above, no patent license is granted by a
+ Contributor:
+
+ a. for any code that a Contributor has removed from Covered Software; or
+
+ b. for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+ c. under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+ This License does not grant any rights in the trademarks, service marks,
+ or logos of any Contributor (except as may be necessary to comply with
+ the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+ No Contributor makes additional grants as a result of Your choice to
+ distribute the Covered Software under a subsequent version of this
+ License (see Section 10.2) or under the terms of a Secondary License (if
+ permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+ Each Contributor represents that the Contributor believes its
+ Contributions are its original creation(s) or it has sufficient rights to
+ grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+ This License is not intended to limit any rights You have under
+ applicable copyright doctrines of fair use, fair dealing, or other
+ equivalents.
+
+2.7. Conditions
+
+ Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
+ Section 2.1.
+
+
+3. Responsibilities
+
+3.1. Distribution of Source Form
+
+ All distribution of Covered Software in Source Code Form, including any
+ Modifications that You create or to which You contribute, must be under
+ the terms of this License. You must inform recipients that the Source
+ Code Form of the Covered Software is governed by the terms of this
+ License, and how they can obtain a copy of this License. You may not
+ attempt to alter or restrict the recipients' rights in the Source Code
+ Form.
+
+3.2. Distribution of Executable Form
+
+ If You distribute Covered Software in Executable Form then:
+
+ a. such Covered Software must also be made available in Source Code Form,
+ as described in Section 3.1, and You must inform recipients of the
+ Executable Form how they can obtain a copy of such Source Code Form by
+ reasonable means in a timely manner, at a charge no more than the cost
+ of distribution to the recipient; and
+
+ b. You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter the
+ recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+ You may create and distribute a Larger Work under terms of Your choice,
+ provided that You also comply with the requirements of this License for
+ the Covered Software. If the Larger Work is a combination of Covered
+ Software with a work governed by one or more Secondary Licenses, and the
+ Covered Software is not Incompatible With Secondary Licenses, this
+ License permits You to additionally distribute such Covered Software
+ under the terms of such Secondary License(s), so that the recipient of
+ the Larger Work may, at their option, further distribute the Covered
+ Software under the terms of either this License or such Secondary
+ License(s).
+
+3.4. Notices
+
+ You may not remove or alter the substance of any license notices
+ (including copyright notices, patent notices, disclaimers of warranty, or
+ limitations of liability) contained within the Source Code Form of the
+ Covered Software, except that You may alter any license notices to the
+ extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+ You may choose to offer, and to charge a fee for, warranty, support,
+ indemnity or liability obligations to one or more recipients of Covered
+ Software. However, You may do so only on Your own behalf, and not on
+ behalf of any Contributor. You must make it absolutely clear that any
+ such warranty, support, indemnity, or liability obligation is offered by
+ You alone, and You hereby agree to indemnify every Contributor for any
+ liability incurred by such Contributor as a result of warranty, support,
+ indemnity or liability terms You offer. You may include additional
+ disclaimers of warranty and limitations of liability specific to any
+ jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+
+ If it is impossible for You to comply with any of the terms of this License
+ with respect to some or all of the Covered Software due to statute,
+ judicial order, or regulation then You must: (a) comply with the terms of
+ this License to the maximum extent possible; and (b) describe the
+ limitations and the code they affect. Such description must be placed in a
+ text file included with all distributions of the Covered Software under
+ this License. Except to the extent prohibited by statute or regulation,
+ such description must be sufficiently detailed for a recipient of ordinary
+ skill to be able to understand it.
+
+5. Termination
+
+5.1. The rights granted under this License will terminate automatically if You
+ fail to comply with any of its terms. However, if You become compliant,
+ then the rights granted under this License from a particular Contributor
+ are reinstated (a) provisionally, unless and until such Contributor
+ explicitly and finally terminates Your grants, and (b) on an ongoing
+ basis, if such Contributor fails to notify You of the non-compliance by
+ some reasonable means prior to 60 days after You have come back into
+ compliance. Moreover, Your grants from a particular Contributor are
+ reinstated on an ongoing basis if such Contributor notifies You of the
+ non-compliance by some reasonable means, this is the first time You have
+ received notice of non-compliance with this License from such
+ Contributor, and You become compliant prior to 30 days after Your receipt
+ of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+ infringement claim (excluding declaratory judgment actions,
+ counter-claims, and cross-claims) alleging that a Contributor Version
+ directly or indirectly infringes any patent, then the rights granted to
+ You by any and all Contributors for the Covered Software under Section
+ 2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
+ license agreements (excluding distributors and resellers) which have been
+ validly granted by You or Your distributors under this License prior to
+ termination shall survive termination.
+
+6. Disclaimer of Warranty
+
+ Covered Software is provided under this License on an "as is" basis,
+ without warranty of any kind, either expressed, implied, or statutory,
+ including, without limitation, warranties that the Covered Software is free
+ of defects, merchantable, fit for a particular purpose or non-infringing.
+ The entire risk as to the quality and performance of the Covered Software
+ is with You. Should any Covered Software prove defective in any respect,
+ You (not any Contributor) assume the cost of any necessary servicing,
+ repair, or correction. This disclaimer of warranty constitutes an essential
+ part of this License. No use of any Covered Software is authorized under
+ this License except under this disclaimer.
+
+7. Limitation of Liability
+
+ Under no circumstances and under no legal theory, whether tort (including
+ negligence), contract, or otherwise, shall any Contributor, or anyone who
+ distributes Covered Software as permitted above, be liable to You for any
+ direct, indirect, special, incidental, or consequential damages of any
+ character including, without limitation, damages for lost profits, loss of
+ goodwill, work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses, even if such party shall have been
+ informed of the possibility of such damages. This limitation of liability
+ shall not apply to liability for death or personal injury resulting from
+ such party's negligence to the extent applicable law prohibits such
+ limitation. Some jurisdictions do not allow the exclusion or limitation of
+ incidental or consequential damages, so this exclusion and limitation may
+ not apply to You.
+
+8. Litigation
+
+ Any litigation relating to this License may be brought only in the courts
+ of a jurisdiction where the defendant maintains its principal place of
+ business and such litigation shall be governed by laws of that
+ jurisdiction, without reference to its conflict-of-law provisions. Nothing
+ in this Section shall prevent a party's ability to bring cross-claims or
+ counter-claims.
+
+9. Miscellaneous
+
+ This License represents the complete agreement concerning the subject
+ matter hereof. If any provision of this License is held to be
+ unenforceable, such provision shall be reformed only to the extent
+ necessary to make it enforceable. Any law or regulation which provides that
+ the language of a contract shall be construed against the drafter shall not
+ be used to construe this License against a Contributor.
+
+
+10. Versions of the License
+
+10.1. New Versions
+
+ Mozilla Foundation is the license steward. Except as provided in Section
+ 10.3, no one other than the license steward has the right to modify or
+ publish new versions of this License. Each version will be given a
+ distinguishing version number.
+
+10.2. Effect of New Versions
+
+ You may distribute the Covered Software under the terms of the version
+ of the License under which You originally received the Covered Software,
+ or under the terms of any subsequent version published by the license
+ steward.
+
+10.3. Modified Versions
+
+ If you create software not governed by this License, and you want to
+ create a new license for such software, you may create and use a
+ modified version of this License if you rename the license and remove
+ any references to the name of the license steward (except to note that
+ such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+ Licenses If You choose to distribute Source Code Form that is
+ Incompatible With Secondary Licenses under the terms of this version of
+ the License, the notice described in Exhibit B of this License must be
+ attached.
+
+Exhibit A - Source Code Form License Notice
+
+ This Source Code Form is subject to the
+ terms of the Mozilla Public License, v.
+ 2.0. If a copy of the MPL was not
+ distributed with this file, You can
+ obtain one at
+ http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular file,
+then You may include the notice in a location (such as a LICENSE file in a
+relevant directory) where a recipient would be likely to look for such a
+notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+
+ This Source Code Form is "Incompatible
+ With Secondary Licenses", as defined by
+ the Mozilla Public License, v. 2.0.
+
diff --git a/vendor/github.com/hashicorp/go-retryablehttp/Makefile b/vendor/github.com/hashicorp/go-retryablehttp/Makefile
new file mode 100644
index 0000000000..5255241961
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-retryablehttp/Makefile
@@ -0,0 +1,11 @@
+default: test
+
+test:
+ go vet ./...
+ go test -v -race ./...
+
+updatedeps:
+ go get -f -t -u ./...
+ go get -f -u ./...
+
+.PHONY: default test updatedeps
diff --git a/vendor/github.com/hashicorp/go-retryablehttp/README.md b/vendor/github.com/hashicorp/go-retryablehttp/README.md
new file mode 100644
index 0000000000..145a62f218
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-retryablehttp/README.md
@@ -0,0 +1,62 @@
+go-retryablehttp
+================
+
+[Build Status][travis]
+[Go Documentation][godocs]
+
+[travis]: http://travis-ci.org/hashicorp/go-retryablehttp
+[godocs]: http://godoc.org/github.com/hashicorp/go-retryablehttp
+
+The `retryablehttp` package provides a familiar HTTP client interface with
+automatic retries and exponential backoff. It is a thin wrapper over the
+standard `net/http` client library and exposes nearly the same public API. This
+makes `retryablehttp` very easy to drop into existing programs.
+
+`retryablehttp` performs automatic retries under certain conditions. Mainly, if
+an error is returned by the client (connection errors, etc.), or if a 500-range
+response code is received (except 501), then a retry is invoked after a wait
+period. Otherwise, the response is returned and left to the caller to
+interpret.
+
+The main difference from `net/http` is that requests which take a request body
+(POST/PUT et al.) can have the body provided in a number of ways (some more or
+less efficient) that allow "rewinding" the request body if the initial request
+fails so that the full request can be attempted again. See the
+[godoc](http://godoc.org/github.com/hashicorp/go-retryablehttp) for more
+details.
+
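+For instance (a minimal sketch; the endpoint URL is illustrative), a raw `[]byte`
+body can be re-read on every retry without any extra work:
+
+```go
+body := []byte(`{"name":"example"}`)
+
+resp, err := retryablehttp.Post("https://api.example.com/items", "application/json", body)
+if err != nil {
+ panic(err)
+}
+defer resp.Body.Close()
+```
+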
+Version 0.6.0 and before are compatible with Go prior to 1.12. From 0.6.1 onward, Go 1.12+ is required.
+From 0.6.7 onward, Go 1.13+ is required.
+
+Example Use
+===========
+
+Using this library should look almost identical to what you would do with
+`net/http`. The simplest example of a GET request is shown below:
+
+```go
+resp, err := retryablehttp.Get("/foo")
+if err != nil {
+ panic(err)
+}
+```
+
+The returned response object is an `*http.Response`, the same thing you would
+usually get from `net/http`. If the request fails one or more times, the call
+above blocks and retries with exponential backoff before returning.
+
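+Retries and backoff can also be tuned on a dedicated client before issuing
+requests. A sketch (the URL below is illustrative):
+
+```go
+client := retryablehttp.NewClient()
+client.RetryMax = 5
+client.RetryWaitMin = 200 * time.Millisecond
+client.RetryWaitMax = 5 * time.Second
+
+req, err := retryablehttp.NewRequest("GET", "https://api.example.com/health", nil)
+if err != nil {
+ panic(err)
+}
+
+resp, err := client.Do(req)
+if err != nil {
+ panic(err)
+}
+defer resp.Body.Close()
+```
+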
+## Getting a stdlib `*http.Client` with retries
+
+It's possible to convert a `*retryablehttp.Client` directly to a `*http.Client`.
+This makes retryablehttp broadly applicable with minimal effort. Simply
+configure a `*retryablehttp.Client` as you wish, and then call `StandardClient()`:
+
+```go
+retryClient := retryablehttp.NewClient()
+retryClient.RetryMax = 10
+
+standardClient := retryClient.StandardClient() // *http.Client
+```
+
+For more usage and examples see the
+[pkg.go.dev](https://pkg.go.dev/github.com/hashicorp/go-retryablehttp).
diff --git a/vendor/github.com/hashicorp/go-retryablehttp/cert_error_go119.go b/vendor/github.com/hashicorp/go-retryablehttp/cert_error_go119.go
new file mode 100644
index 0000000000..b2b27e8722
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-retryablehttp/cert_error_go119.go
@@ -0,0 +1,14 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+//go:build !go1.20
+// +build !go1.20
+
+package retryablehttp
+
+import "crypto/x509"
+
+func isCertError(err error) bool {
+ _, ok := err.(x509.UnknownAuthorityError)
+ return ok
+}
diff --git a/vendor/github.com/hashicorp/go-retryablehttp/cert_error_go120.go b/vendor/github.com/hashicorp/go-retryablehttp/cert_error_go120.go
new file mode 100644
index 0000000000..a3cd315a28
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-retryablehttp/cert_error_go120.go
@@ -0,0 +1,14 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+//go:build go1.20
+// +build go1.20
+
+package retryablehttp
+
+import "crypto/tls"
+
+func isCertError(err error) bool {
+ _, ok := err.(*tls.CertificateVerificationError)
+ return ok
+}
diff --git a/vendor/github.com/hashicorp/go-retryablehttp/client.go b/vendor/github.com/hashicorp/go-retryablehttp/client.go
new file mode 100644
index 0000000000..efee53c400
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-retryablehttp/client.go
@@ -0,0 +1,919 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+// Package retryablehttp provides a familiar HTTP client interface with
+// automatic retries and exponential backoff. It is a thin wrapper over the
+// standard net/http client library and exposes nearly the same public API.
+// This makes retryablehttp very easy to drop into existing programs.
+//
+// retryablehttp performs automatic retries under certain conditions. Mainly, if
+// an error is returned by the client (connection errors, etc.), or if a 500-range
+// response is received, then a retry is invoked. Otherwise, the response is
+// returned and left to the caller to interpret.
+//
+// Requests which take a request body should provide a non-nil function
+// parameter. The best choice is to provide either a function satisfying
+// ReaderFunc which provides multiple io.Readers in an efficient manner, a
+// *bytes.Buffer (the underlying raw byte slice will be used) or a raw byte
+// slice. As it is a reference type, and we will wrap it as needed by readers,
+// we can efficiently re-use the request body without needing to copy it. If an
+// io.Reader (such as a *bytes.Reader) is provided, the full body will be read
+// prior to the first request, and will be efficiently re-used for any retries.
+// ReadSeeker can be used, but some users have observed occasional data races
+// between the net/http library and the Seek functionality of some
+// implementations of ReadSeeker, so it should be avoided if possible.
+package retryablehttp
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "io"
+ "log"
+ "math"
+ "math/rand"
+ "net/http"
+ "net/url"
+ "os"
+ "regexp"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ cleanhttp "github.com/hashicorp/go-cleanhttp"
+)
+
+var (
+ // Default retry configuration
+ defaultRetryWaitMin = 1 * time.Second
+ defaultRetryWaitMax = 30 * time.Second
+ defaultRetryMax = 4
+
+ // defaultLogger is the logger provided with defaultClient
+ defaultLogger = log.New(os.Stderr, "", log.LstdFlags)
+
+ // defaultClient is used for performing requests without explicitly making
+ // a new client. It is purposely private to avoid modifications.
+ defaultClient = NewClient()
+
+ // We need to consume response bodies to maintain http connections, but
+ // limit the size we consume to respReadLimit.
+ respReadLimit = int64(4096)
+
+ // timeNow sets the function that returns the current time.
+ // This defaults to time.Now. Changes to this should only be done in tests.
+ timeNow = time.Now
+
+ // A regular expression to match the error returned by net/http when the
+ // configured number of redirects is exhausted. This error isn't typed
+ // specifically so we resort to matching on the error string.
+ redirectsErrorRe = regexp.MustCompile(`stopped after \d+ redirects\z`)
+
+ // A regular expression to match the error returned by net/http when the
+ // scheme specified in the URL is invalid. This error isn't typed
+ // specifically so we resort to matching on the error string.
+ schemeErrorRe = regexp.MustCompile(`unsupported protocol scheme`)
+
+ // A regular expression to match the error returned by net/http when a
+ // request header or value is invalid. This error isn't typed
+ // specifically so we resort to matching on the error string.
+ invalidHeaderErrorRe = regexp.MustCompile(`invalid header`)
+
+ // A regular expression to match the error returned by net/http when the
+ // TLS certificate is not trusted. This error isn't typed
+ // specifically so we resort to matching on the error string.
+ notTrustedErrorRe = regexp.MustCompile(`certificate is not trusted`)
+)
+
+// ReaderFunc is the type of function that can be given natively to NewRequest
+type ReaderFunc func() (io.Reader, error)
+
+// ResponseHandlerFunc is a type of function that takes in a Response, and does something with it.
+// The ResponseHandlerFunc is called when the HTTP client successfully receives a response and the
+// CheckRetry function indicates that a retry of the base request is not necessary.
+// If an error is returned from this function, the CheckRetry policy will be used to determine
+// whether to retry the whole request (including this handler).
+//
+// Make sure to check status codes! Even if the request was completed it may have a non-2xx status code.
+//
+// The response body is not automatically closed. It must be closed either by the ResponseHandlerFunc or
+// by the caller out-of-band. Failure to do so will result in a memory leak.
+type ResponseHandlerFunc func(*http.Response) error
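+
+// As a small illustration (not part of the package itself), a handler could turn
+// a specific status code into an error so that CheckRetry sees it; req below is a
+// *Request built by the caller, and the body is left for the caller to close:
+//
+//	req.SetResponseHandler(func(resp *http.Response) error {
+//		if resp.StatusCode == http.StatusNotFound {
+//			return fmt.Errorf("resource not found")
+//		}
+//		return nil
+//	})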
+
+// LenReader is an interface implemented by many in-memory io.Readers. It is
+// used for automatically sending the right Content-Length header when possible.
+type LenReader interface {
+ Len() int
+}
+
+// Request wraps the metadata needed to create HTTP requests.
+type Request struct {
+ // body is a seekable reader over the request body payload. This is
+ // used to rewind the request data in between retries.
+ body ReaderFunc
+
+ responseHandler ResponseHandlerFunc
+
+ // Embed an HTTP request directly. This makes a *Request act exactly
+ // like an *http.Request so that all meta methods are supported.
+ *http.Request
+}
+
+// WithContext returns a wrapped Request with a shallow copy of the underlying
+// *http.Request whose context is changed to ctx. The provided ctx must be non-nil.
+func (r *Request) WithContext(ctx context.Context) *Request {
+ return &Request{
+ body: r.body,
+ responseHandler: r.responseHandler,
+ Request: r.Request.WithContext(ctx),
+ }
+}
+
+// SetResponseHandler allows setting the response handler.
+func (r *Request) SetResponseHandler(fn ResponseHandlerFunc) {
+ r.responseHandler = fn
+}
+
+// BodyBytes allows accessing the request body. It is an analogue to
+// http.Request's Body variable, but it returns a copy of the underlying data
+// rather than consuming it.
+//
+// This function is not thread-safe; do not call it at the same time as another
+// call, or at the same time this request is being used with Client.Do.
+func (r *Request) BodyBytes() ([]byte, error) {
+ if r.body == nil {
+ return nil, nil
+ }
+ body, err := r.body()
+ if err != nil {
+ return nil, err
+ }
+ buf := new(bytes.Buffer)
+ _, err = buf.ReadFrom(body)
+ if err != nil {
+ return nil, err
+ }
+ return buf.Bytes(), nil
+}
+
+// SetBody allows setting the request body.
+//
+// It is useful if a new body needs to be set without constructing a new Request.
+func (r *Request) SetBody(rawBody interface{}) error {
+ bodyReader, contentLength, err := getBodyReaderAndContentLength(rawBody)
+ if err != nil {
+ return err
+ }
+ r.body = bodyReader
+ r.ContentLength = contentLength
+ if bodyReader != nil {
+ r.GetBody = func() (io.ReadCloser, error) {
+ body, err := bodyReader()
+ if err != nil {
+ return nil, err
+ }
+ if rc, ok := body.(io.ReadCloser); ok {
+ return rc, nil
+ }
+ return io.NopCloser(body), nil
+ }
+ } else {
+ r.GetBody = func() (io.ReadCloser, error) { return http.NoBody, nil }
+ }
+ return nil
+}
+
+// WriteTo allows copying the request body into a writer.
+//
+// It writes data to w until there's no more data to write or
+// when an error occurs. The returned int64 value is the number of bytes
+// written. Any error encountered during the write is also returned.
+// The signature matches io.WriterTo interface.
+func (r *Request) WriteTo(w io.Writer) (int64, error) {
+ body, err := r.body()
+ if err != nil {
+ return 0, err
+ }
+ if c, ok := body.(io.Closer); ok {
+ defer c.Close()
+ }
+ return io.Copy(w, body)
+}
+
+func getBodyReaderAndContentLength(rawBody interface{}) (ReaderFunc, int64, error) {
+ var bodyReader ReaderFunc
+ var contentLength int64
+
+ switch body := rawBody.(type) {
+ // If they gave us a function already, great! Use it.
+ case ReaderFunc:
+ bodyReader = body
+ tmp, err := body()
+ if err != nil {
+ return nil, 0, err
+ }
+ if lr, ok := tmp.(LenReader); ok {
+ contentLength = int64(lr.Len())
+ }
+ if c, ok := tmp.(io.Closer); ok {
+ c.Close()
+ }
+
+ case func() (io.Reader, error):
+ bodyReader = body
+ tmp, err := body()
+ if err != nil {
+ return nil, 0, err
+ }
+ if lr, ok := tmp.(LenReader); ok {
+ contentLength = int64(lr.Len())
+ }
+ if c, ok := tmp.(io.Closer); ok {
+ c.Close()
+ }
+
+ // If a regular byte slice, we can read it over and over via new
+ // readers
+ case []byte:
+ buf := body
+ bodyReader = func() (io.Reader, error) {
+ return bytes.NewReader(buf), nil
+ }
+ contentLength = int64(len(buf))
+
+ // If a bytes.Buffer we can read the underlying byte slice over and
+ // over
+ case *bytes.Buffer:
+ buf := body
+ bodyReader = func() (io.Reader, error) {
+ return bytes.NewReader(buf.Bytes()), nil
+ }
+ contentLength = int64(buf.Len())
+
+ // We prioritize *bytes.Reader here because we don't really want to
+ // deal with it seeking, so we want it to match here instead of the
+ // io.ReadSeeker case.
+ case *bytes.Reader:
+ snapshot := *body
+ bodyReader = func() (io.Reader, error) {
+ r := snapshot
+ return &r, nil
+ }
+ contentLength = int64(body.Len())
+
+ // Compat case
+ case io.ReadSeeker:
+ raw := body
+ bodyReader = func() (io.Reader, error) {
+ _, err := raw.Seek(0, 0)
+ return io.NopCloser(raw), err
+ }
+ if lr, ok := raw.(LenReader); ok {
+ contentLength = int64(lr.Len())
+ }
+
+ // Read all in so we can reset
+ case io.Reader:
+ buf, err := io.ReadAll(body)
+ if err != nil {
+ return nil, 0, err
+ }
+ if len(buf) == 0 {
+ bodyReader = func() (io.Reader, error) {
+ return http.NoBody, nil
+ }
+ contentLength = 0
+ } else {
+ bodyReader = func() (io.Reader, error) {
+ return bytes.NewReader(buf), nil
+ }
+ contentLength = int64(len(buf))
+ }
+
+ // No body provided, nothing to do
+ case nil:
+
+ // Unrecognized type
+ default:
+ return nil, 0, fmt.Errorf("cannot handle type %T", rawBody)
+ }
+ return bodyReader, contentLength, nil
+}
+
+// FromRequest wraps an http.Request in a retryablehttp.Request
+func FromRequest(r *http.Request) (*Request, error) {
+ bodyReader, _, err := getBodyReaderAndContentLength(r.Body)
+ if err != nil {
+ return nil, err
+ }
+ // Could assert contentLength == r.ContentLength
+ return &Request{body: bodyReader, Request: r}, nil
+}
+
+// NewRequest creates a new wrapped request.
+func NewRequest(method, url string, rawBody interface{}) (*Request, error) {
+ return NewRequestWithContext(context.Background(), method, url, rawBody)
+}
+
+// NewRequestWithContext creates a new wrapped request with the provided context.
+//
+// The context controls the entire lifetime of a request and its response:
+// obtaining a connection, sending the request, and reading the response headers and body.
+func NewRequestWithContext(ctx context.Context, method, url string, rawBody interface{}) (*Request, error) {
+ httpReq, err := http.NewRequestWithContext(ctx, method, url, nil)
+ if err != nil {
+ return nil, err
+ }
+
+ req := &Request{
+ Request: httpReq,
+ }
+ if err := req.SetBody(rawBody); err != nil {
+ return nil, err
+ }
+
+ return req, nil
+}
+
+// Logger is an interface that allows using loggers other than the
+// standard log.Logger.
+type Logger interface {
+ Printf(string, ...interface{})
+}
+
+// LeveledLogger is an interface that can be implemented by any logger or a
+// logger wrapper to provide leveled logging. The methods accept a message
+// string and a variadic number of key-value pairs. For log.Printf style
+// formatting, where the message string contains a format specifier, use the
+// Logger interface.
+type LeveledLogger interface {
+ Error(msg string, keysAndValues ...interface{})
+ Info(msg string, keysAndValues ...interface{})
+ Debug(msg string, keysAndValues ...interface{})
+ Warn(msg string, keysAndValues ...interface{})
+}
+
+// hookLogger adapts a LeveledLogger to Logger for use by the existing hook functions
+// without changing the API.
+type hookLogger struct {
+ LeveledLogger
+}
+
+func (h hookLogger) Printf(s string, args ...interface{}) {
+ h.Info(fmt.Sprintf(s, args...))
+}
+
+// RequestLogHook allows a function to run before each retry. The HTTP
+// request that will be made and the retry number (0 for the initial
+// request) are available to users. The internal logger is exposed to
+// consumers.
+type RequestLogHook func(Logger, *http.Request, int)
+
+// ResponseLogHook is like RequestLogHook, but allows running a function
+// on each HTTP response. This function will be invoked at the end of
+// every HTTP request executed, regardless of whether a subsequent retry
+// needs to be performed or not. If the response body is read or closed
+// from this method, this will affect the response returned from Do().
+type ResponseLogHook func(Logger, *http.Response)
+
+// CheckRetry specifies a policy for handling retries. It is called
+// following each request with the response and error values returned by
+// the http.Client. If CheckRetry returns false, the Client stops retrying
+// and returns the response to the caller. If CheckRetry returns an error,
+// that error value is returned in lieu of the error from the request. The
+// Client will close any response body when retrying, but if the retry is
+// aborted it is up to the CheckRetry callback to properly close any
+// response body before returning.
+type CheckRetry func(ctx context.Context, resp *http.Response, err error) (bool, error)
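+
+// As an illustration (not shipped with this package), a caller could wrap the
+// default policy to also retry on HTTP 404 responses; "client" is a *Client the
+// caller has constructed:
+//
+//	client.CheckRetry = func(ctx context.Context, resp *http.Response, err error) (bool, error) {
+//		if resp != nil && resp.StatusCode == http.StatusNotFound {
+//			return true, nil
+//		}
+//		return DefaultRetryPolicy(ctx, resp, err)
+//	}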
+
+// Backoff specifies a policy for how long to wait between retries.
+// It is called after a failing request to determine the amount of time
+// that should pass before trying again.
+type Backoff func(min, max time.Duration, attemptNum int, resp *http.Response) time.Duration
+
+// ErrorHandler is called when retries are exhausted, and receives the last
+// response and error from the http library. If not specified, the default behavior is
+// to close the body and return an error indicating how many tries were
+// attempted. If overriding this, be sure to close the body if needed.
+type ErrorHandler func(resp *http.Response, err error, numTries int) (*http.Response, error)
+
+// PrepareRetry is called before a retry operation. It can be used, for example, to re-sign the request.
+type PrepareRetry func(req *http.Request) error
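+
+// For illustration only (the token helper below is hypothetical), a PrepareRetry
+// hook might refresh an authorization header before the request is re-sent:
+//
+//	client.PrepareRetry = func(req *http.Request) error {
+//		token, err := fetchFreshToken() // hypothetical helper supplied by the caller
+//		if err != nil {
+//			return err
+//		}
+//		req.Header.Set("Authorization", "Bearer "+token)
+//		return nil
+//	}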
+
+// Client is used to make HTTP requests. It adds additional functionality
+// like automatic retries to tolerate minor outages.
+type Client struct {
+ HTTPClient *http.Client // Internal HTTP client.
+ Logger interface{} // Custom logger instance. Can be either Logger or LeveledLogger
+
+ RetryWaitMin time.Duration // Minimum time to wait
+ RetryWaitMax time.Duration // Maximum time to wait
+ RetryMax int // Maximum number of retries
+
+ // RequestLogHook allows a user-supplied function to be called
+ // before each retry.
+ RequestLogHook RequestLogHook
+
+ // ResponseLogHook allows a user-supplied function to be called
+ // with the response from each HTTP request executed.
+ ResponseLogHook ResponseLogHook
+
+ // CheckRetry specifies the policy for handling retries, and is called
+ // after each request. The default policy is DefaultRetryPolicy.
+ CheckRetry CheckRetry
+
+ // Backoff specifies the policy for how long to wait between retries
+ Backoff Backoff
+
+ // ErrorHandler specifies the custom error handler to use, if any
+ ErrorHandler ErrorHandler
+
+ // PrepareRetry can prepare the request for retry operation, for example re-sign it
+ PrepareRetry PrepareRetry
+
+ loggerInit sync.Once
+ clientInit sync.Once
+}
+
+// NewClient creates a new Client with default settings.
+func NewClient() *Client {
+ return &Client{
+ HTTPClient: cleanhttp.DefaultPooledClient(),
+ Logger: defaultLogger,
+ RetryWaitMin: defaultRetryWaitMin,
+ RetryWaitMax: defaultRetryWaitMax,
+ RetryMax: defaultRetryMax,
+ CheckRetry: DefaultRetryPolicy,
+ Backoff: DefaultBackoff,
+ }
+}
+
+func (c *Client) logger() interface{} {
+ c.loggerInit.Do(func() {
+ if c.Logger == nil {
+ return
+ }
+
+ switch c.Logger.(type) {
+ case Logger, LeveledLogger:
+ // ok
+ default:
+ // This should only happen in development, while the Logger is being set up, not in production.
+ panic(fmt.Sprintf("invalid logger type passed, must be Logger or LeveledLogger, was %T", c.Logger))
+ }
+ })
+
+ return c.Logger
+}
+
+// DefaultRetryPolicy provides a default callback for Client.CheckRetry, which
+// will retry on connection errors and server errors.
+func DefaultRetryPolicy(ctx context.Context, resp *http.Response, err error) (bool, error) {
+ // do not retry on context.Canceled or context.DeadlineExceeded
+ if ctx.Err() != nil {
+ return false, ctx.Err()
+ }
+
+ // don't propagate other errors
+ shouldRetry, _ := baseRetryPolicy(resp, err)
+ return shouldRetry, nil
+}
+
+// ErrorPropagatedRetryPolicy is the same as DefaultRetryPolicy, except it
+// propagates errors back instead of returning nil. This allows you to inspect
+// why it decided to retry or not.
+func ErrorPropagatedRetryPolicy(ctx context.Context, resp *http.Response, err error) (bool, error) {
+ // do not retry on context.Canceled or context.DeadlineExceeded
+ if ctx.Err() != nil {
+ return false, ctx.Err()
+ }
+
+ return baseRetryPolicy(resp, err)
+}
+
+func baseRetryPolicy(resp *http.Response, err error) (bool, error) {
+ if err != nil {
+ if v, ok := err.(*url.Error); ok {
+ // Don't retry if the error was due to too many redirects.
+ if redirectsErrorRe.MatchString(v.Error()) {
+ return false, v
+ }
+
+ // Don't retry if the error was due to an invalid protocol scheme.
+ if schemeErrorRe.MatchString(v.Error()) {
+ return false, v
+ }
+
+ // Don't retry if the error was due to an invalid header.
+ if invalidHeaderErrorRe.MatchString(v.Error()) {
+ return false, v
+ }
+
+ // Don't retry if the error was due to TLS cert verification failure.
+ if notTrustedErrorRe.MatchString(v.Error()) {
+ return false, v
+ }
+ if isCertError(v.Err) {
+ return false, v
+ }
+ }
+
+ // The error is likely recoverable so retry.
+ return true, nil
+ }
+
+ // 429 Too Many Requests is recoverable. Sometimes the server sets
+ // a Retry-After response header to indicate when it will be
+ // available to start processing requests from the client.
+ if resp.StatusCode == http.StatusTooManyRequests {
+ return true, nil
+ }
+
+ // Check the response code. We retry on 500-range responses to allow
+ // the server time to recover, as 500's are typically not permanent
+ // errors and may relate to outages on the server side. This will catch
+ // invalid response codes as well, like 0 and 999.
+ if resp.StatusCode == 0 || (resp.StatusCode >= 500 && resp.StatusCode != http.StatusNotImplemented) {
+ return true, fmt.Errorf("unexpected HTTP status %s", resp.Status)
+ }
+
+ return false, nil
+}
+
+// DefaultBackoff provides a default callback for Client.Backoff which
+// will perform exponential backoff based on the attempt number and limited
+// by the provided minimum and maximum durations.
+//
+// It also tries to parse the Retry-After response header when an http.StatusTooManyRequests
+// (HTTP 429) or http.StatusServiceUnavailable (HTTP 503) response is found in the resp
+// parameter. In that case it returns the wait duration indicated by the server before it
+// is ready to process more requests from this client.
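+//
+// To illustrate the exponential arithmetic with the package defaults (min = 1s,
+// max = 30s): attempt 0 waits 2^0 * 1s = 1s, attempt 1 waits 2s, attempt 2 waits
+// 4s, and so on, until the computed value exceeds max, after which 30s is used.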
+func DefaultBackoff(min, max time.Duration, attemptNum int, resp *http.Response) time.Duration {
+ if resp != nil {
+ if resp.StatusCode == http.StatusTooManyRequests || resp.StatusCode == http.StatusServiceUnavailable {
+ if sleep, ok := parseRetryAfterHeader(resp.Header["Retry-After"]); ok {
+ return sleep
+ }
+ }
+ }
+
+ mult := math.Pow(2, float64(attemptNum)) * float64(min)
+ sleep := time.Duration(mult)
+ if float64(sleep) != mult || sleep > max {
+ sleep = max
+ }
+ return sleep
+}
+
+// parseRetryAfterHeader parses the Retry-After header and returns the
+// delay duration according to the spec: https://httpwg.org/specs/rfc7231.html#header.retry-after
+// The bool returned will be true if the header was successfully parsed.
+// Otherwise, the header was either not present, or was not parseable according to the spec.
+//
+// Retry-After headers come in two flavors: Seconds or HTTP-Date
+//
+// Examples:
+// * Retry-After: Fri, 31 Dec 1999 23:59:59 GMT
+// * Retry-After: 120
+func parseRetryAfterHeader(headers []string) (time.Duration, bool) {
+ if len(headers) == 0 || headers[0] == "" {
+ return 0, false
+ }
+ header := headers[0]
+ // Retry-After: 120
+ if sleep, err := strconv.ParseInt(header, 10, 64); err == nil {
+ if sleep < 0 { // a negative sleep doesn't make sense
+ return 0, false
+ }
+ return time.Second * time.Duration(sleep), true
+ }
+
+ // Retry-After: Fri, 31 Dec 1999 23:59:59 GMT
+ retryTime, err := time.Parse(time.RFC1123, header)
+ if err != nil {
+ return 0, false
+ }
+ if until := retryTime.Sub(timeNow()); until > 0 {
+ return until, true
+ }
+ // date is in the past
+ return 0, true
+}
+
+// LinearJitterBackoff provides a callback for Client.Backoff which will
+// perform linear backoff based on the attempt number and with jitter to
+// prevent a thundering herd.
+//
+// min and max here are *not* absolute values. The number to be multiplied by
+// the attempt number will be chosen at random from between them, thus they are
+// bounding the jitter.
+//
+// For instance:
+// * To get strictly linear backoff of one second increasing each retry, set
+// both to one second (1s, 2s, 3s, 4s, ...)
+// * To get a small amount of jitter centered around one second increasing each
+// retry, set to around one second, such as a min of 800ms and max of 1200ms
+// (892ms, 2102ms, 2945ms, 4312ms, ...)
+// * To get extreme jitter, set to a very wide spread, such as a min of 100ms
+// and a max of 20s (15382ms, 292ms, 51321ms, 35234ms, ...)
+func LinearJitterBackoff(min, max time.Duration, attemptNum int, resp *http.Response) time.Duration {
+ // attemptNum always starts at zero but we want to start at 1 for multiplication
+ attemptNum++
+
+ if max <= min {
+ // The range is empty (or min and max are equal), so fall back to
+ // min * attemptNum
+ return min * time.Duration(attemptNum)
+ }
+
+ // Seed rand; doing this every time is fine
+ source := rand.New(rand.NewSource(int64(time.Now().Nanosecond())))
+
+ // Pick a random number that lies somewhere between the min and max and
+ // multiply by the attemptNum. attemptNum starts at zero so we always
+ // increment here. We first get a random percentage, then apply that to the
+ // difference between min and max, and add to min.
+ jitter := source.Float64() * float64(max-min)
+ jitterMin := int64(jitter) + int64(min)
+ return time.Duration(jitterMin * int64(attemptNum))
+}
+
+// PassthroughErrorHandler is an ErrorHandler that directly passes through the
+// values from the net/http library for the final request. The body is not
+// closed.
+func PassthroughErrorHandler(resp *http.Response, err error, _ int) (*http.Response, error) {
+ return resp, err
+}
+
+// Do wraps calling an HTTP method with retries.
+func (c *Client) Do(req *Request) (*http.Response, error) {
+ c.clientInit.Do(func() {
+ if c.HTTPClient == nil {
+ c.HTTPClient = cleanhttp.DefaultPooledClient()
+ }
+ })
+
+ logger := c.logger()
+
+ if logger != nil {
+ switch v := logger.(type) {
+ case LeveledLogger:
+ v.Debug("performing request", "method", req.Method, "url", redactURL(req.URL))
+ case Logger:
+ v.Printf("[DEBUG] %s %s", req.Method, redactURL(req.URL))
+ }
+ }
+
+ var resp *http.Response
+ var attempt int
+ var shouldRetry bool
+ var doErr, respErr, checkErr, prepareErr error
+
+ for i := 0; ; i++ {
+ doErr, respErr, prepareErr = nil, nil, nil
+ attempt++
+
+ // Always rewind the request body when non-nil.
+ if req.body != nil {
+ body, err := req.body()
+ if err != nil {
+ c.HTTPClient.CloseIdleConnections()
+ return resp, err
+ }
+ if c, ok := body.(io.ReadCloser); ok {
+ req.Body = c
+ } else {
+ req.Body = io.NopCloser(body)
+ }
+ }
+
+ if c.RequestLogHook != nil {
+ switch v := logger.(type) {
+ case LeveledLogger:
+ c.RequestLogHook(hookLogger{v}, req.Request, i)
+ case Logger:
+ c.RequestLogHook(v, req.Request, i)
+ default:
+ c.RequestLogHook(nil, req.Request, i)
+ }
+ }
+
+ // Attempt the request
+ resp, doErr = c.HTTPClient.Do(req.Request)
+
+ // Check if we should continue with retries.
+ shouldRetry, checkErr = c.CheckRetry(req.Context(), resp, doErr)
+ if !shouldRetry && doErr == nil && req.responseHandler != nil {
+ respErr = req.responseHandler(resp)
+ shouldRetry, checkErr = c.CheckRetry(req.Context(), resp, respErr)
+ }
+
+ err := doErr
+ if respErr != nil {
+ err = respErr
+ }
+ if err != nil {
+ switch v := logger.(type) {
+ case LeveledLogger:
+ v.Error("request failed", "error", err, "method", req.Method, "url", redactURL(req.URL))
+ case Logger:
+ v.Printf("[ERR] %s %s request failed: %v", req.Method, redactURL(req.URL), err)
+ }
+ } else {
+ // Call this here to maintain the behavior of logging all requests,
+ // even if CheckRetry signals to stop.
+ if c.ResponseLogHook != nil {
+ // Call the response logger function if provided.
+ switch v := logger.(type) {
+ case LeveledLogger:
+ c.ResponseLogHook(hookLogger{v}, resp)
+ case Logger:
+ c.ResponseLogHook(v, resp)
+ default:
+ c.ResponseLogHook(nil, resp)
+ }
+ }
+ }
+
+ if !shouldRetry {
+ break
+ }
+
+ // We do this before drainBody because there's no need for the I/O if
+ // we're breaking out
+ remain := c.RetryMax - i
+ if remain <= 0 {
+ break
+ }
+
+ // We're going to retry, consume any response to reuse the connection.
+ if doErr == nil {
+ c.drainBody(resp.Body)
+ }
+
+ wait := c.Backoff(c.RetryWaitMin, c.RetryWaitMax, i, resp)
+ if logger != nil {
+ desc := fmt.Sprintf("%s %s", req.Method, redactURL(req.URL))
+ if resp != nil {
+ desc = fmt.Sprintf("%s (status: %d)", desc, resp.StatusCode)
+ }
+ switch v := logger.(type) {
+ case LeveledLogger:
+ v.Debug("retrying request", "request", desc, "timeout", wait, "remaining", remain)
+ case Logger:
+ v.Printf("[DEBUG] %s: retrying in %s (%d left)", desc, wait, remain)
+ }
+ }
+ timer := time.NewTimer(wait)
+ select {
+ case <-req.Context().Done():
+ timer.Stop()
+ c.HTTPClient.CloseIdleConnections()
+ return nil, req.Context().Err()
+ case <-timer.C:
+ }
+
+ // Make shallow copy of http Request so that we can modify its body
+ // without racing against the closeBody call in persistConn.writeLoop.
+ httpreq := *req.Request
+ req.Request = &httpreq
+
+ if c.PrepareRetry != nil {
+ if err := c.PrepareRetry(req.Request); err != nil {
+ prepareErr = err
+ break
+ }
+ }
+ }
+
+ // this is the closest we have to success criteria
+ if doErr == nil && respErr == nil && checkErr == nil && prepareErr == nil && !shouldRetry {
+ return resp, nil
+ }
+
+ defer c.HTTPClient.CloseIdleConnections()
+
+ var err error
+ if prepareErr != nil {
+ err = prepareErr
+ } else if checkErr != nil {
+ err = checkErr
+ } else if respErr != nil {
+ err = respErr
+ } else {
+ err = doErr
+ }
+
+ if c.ErrorHandler != nil {
+ return c.ErrorHandler(resp, err, attempt)
+ }
+
+ // By default, we close the response body and return an error without
+ // returning the response
+ if resp != nil {
+ c.drainBody(resp.Body)
+ }
+
+ // this means CheckRetry thought the request was a failure, but didn't
+ // communicate why
+ if err == nil {
+ return nil, fmt.Errorf("%s %s giving up after %d attempt(s)",
+ req.Method, redactURL(req.URL), attempt)
+ }
+
+ return nil, fmt.Errorf("%s %s giving up after %d attempt(s): %w",
+ req.Method, redactURL(req.URL), attempt, err)
+}
+
+// Try to read the response body so we can reuse this connection.
+func (c *Client) drainBody(body io.ReadCloser) {
+ defer body.Close()
+ _, err := io.Copy(io.Discard, io.LimitReader(body, respReadLimit))
+ if err != nil {
+ if c.logger() != nil {
+ switch v := c.logger().(type) {
+ case LeveledLogger:
+ v.Error("error reading response body", "error", err)
+ case Logger:
+ v.Printf("[ERR] error reading response body: %v", err)
+ }
+ }
+ }
+}
+
+// Get is a shortcut for doing a GET request without making a new client.
+func Get(url string) (*http.Response, error) {
+ return defaultClient.Get(url)
+}
+
+// Get is a convenience helper for doing simple GET requests.
+func (c *Client) Get(url string) (*http.Response, error) {
+ req, err := NewRequest("GET", url, nil)
+ if err != nil {
+ return nil, err
+ }
+ return c.Do(req)
+}
+
+// Head is a shortcut for doing a HEAD request without making a new client.
+func Head(url string) (*http.Response, error) {
+ return defaultClient.Head(url)
+}
+
+// Head is a convenience method for doing simple HEAD requests.
+func (c *Client) Head(url string) (*http.Response, error) {
+ req, err := NewRequest("HEAD", url, nil)
+ if err != nil {
+ return nil, err
+ }
+ return c.Do(req)
+}
+
+// Post is a shortcut for doing a POST request without making a new client.
+func Post(url, bodyType string, body interface{}) (*http.Response, error) {
+ return defaultClient.Post(url, bodyType, body)
+}
+
+// Post is a convenience method for doing simple POST requests.
+func (c *Client) Post(url, bodyType string, body interface{}) (*http.Response, error) {
+ req, err := NewRequest("POST", url, body)
+ if err != nil {
+ return nil, err
+ }
+ req.Header.Set("Content-Type", bodyType)
+ return c.Do(req)
+}
+
+// PostForm is a shortcut to perform a POST with form data without creating
+// a new client.
+func PostForm(url string, data url.Values) (*http.Response, error) {
+ return defaultClient.PostForm(url, data)
+}
+
+// PostForm is a convenience method for doing simple POST operations using
+// pre-filled url.Values form data.
+func (c *Client) PostForm(url string, data url.Values) (*http.Response, error) {
+ return c.Post(url, "application/x-www-form-urlencoded", strings.NewReader(data.Encode()))
+}
+
+// StandardClient returns a stdlib *http.Client with a custom Transport, which
+// shims in a *retryablehttp.Client for added retries.
+func (c *Client) StandardClient() *http.Client {
+ return &http.Client{
+ Transport: &RoundTripper{Client: c},
+ }
+}
+
+// Taken from url.URL#Redacted() which was introduced in go 1.15.
+// We can switch to using it directly once we bump the minimum required Go version.
+func redactURL(u *url.URL) string {
+ if u == nil {
+ return ""
+ }
+
+ ru := *u
+ if _, has := ru.User.Password(); has {
+ ru.User = url.UserPassword(ru.User.Username(), "xxxxx")
+ }
+ return ru.String()
+}
diff --git a/vendor/github.com/hashicorp/go-retryablehttp/roundtripper.go b/vendor/github.com/hashicorp/go-retryablehttp/roundtripper.go
new file mode 100644
index 0000000000..8c407adb3b
--- /dev/null
+++ b/vendor/github.com/hashicorp/go-retryablehttp/roundtripper.go
@@ -0,0 +1,55 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package retryablehttp
+
+import (
+ "errors"
+ "net/http"
+ "net/url"
+ "sync"
+)
+
+// RoundTripper implements the http.RoundTripper interface, using a retrying
+// HTTP client to execute requests.
+//
+// It is important to note that retryablehttp doesn't always act exactly as a
+// RoundTripper should. This is highly dependent on the retryable client's
+// configuration.
+type RoundTripper struct {
+ // The client to use during requests. If nil, the default retryablehttp
+ // client and settings will be used.
+ Client *Client
+
+ // once ensures that the logic to initialize the default client runs at
+ // most once, in a single thread.
+ once sync.Once
+}
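+
+// A minimal sketch of wiring a RoundTripper into a standard *http.Client
+// (equivalent to calling StandardClient on a freshly configured retryable client):
+//
+//	httpClient := &http.Client{
+//		Transport: &retryablehttp.RoundTripper{Client: retryablehttp.NewClient()},
+//	}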
+
+// init initializes the underlying retryable client.
+func (rt *RoundTripper) init() {
+ if rt.Client == nil {
+ rt.Client = NewClient()
+ }
+}
+
+// RoundTrip satisfies the http.RoundTripper interface.
+func (rt *RoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
+ rt.once.Do(rt.init)
+
+ // Convert the request to be retryable.
+ retryableReq, err := FromRequest(req)
+ if err != nil {
+ return nil, err
+ }
+
+ // Execute the request.
+ resp, err := rt.Client.Do(retryableReq)
+ // If we got an error returned by the standard library's `Do` method, unwrap it;
+ // otherwise we will wind up erroneously re-nesting the error.
+ if _, ok := err.(*url.Error); ok {
+ return resp, errors.Unwrap(err)
+ }
+
+ return resp, err
+}
diff --git a/vendor/github.com/hashicorp/golang-lru/v2/LICENSE b/vendor/github.com/hashicorp/golang-lru/v2/LICENSE
new file mode 100644
index 0000000000..0e5d580e0e
--- /dev/null
+++ b/vendor/github.com/hashicorp/golang-lru/v2/LICENSE
@@ -0,0 +1,364 @@
+Copyright (c) 2014 HashiCorp, Inc.
+
+Mozilla Public License, version 2.0
+
+1. Definitions
+
+1.1. "Contributor"
+
+ means each individual or legal entity that creates, contributes to the
+ creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+
+ means the combination of the Contributions of others (if any) used by a
+ Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+
+ means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+
+ means Source Code Form to which the initial Contributor has attached the
+ notice in Exhibit A, the Executable Form of such Source Code Form, and
+ Modifications of such Source Code Form, in each case including portions
+ thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+ means
+
+ a. that the initial Contributor has attached the notice described in
+ Exhibit B to the Covered Software; or
+
+ b. that the Covered Software was made available under the terms of
+ version 1.1 or earlier of the License, but not also under the terms of
+ a Secondary License.
+
+1.6. "Executable Form"
+
+ means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+
+ means a work that combines Covered Software with other material, in a
+ separate file or files, that is not Covered Software.
+
+1.8. "License"
+
+ means this document.
+
+1.9. "Licensable"
+
+ means having the right to grant, to the maximum extent possible, whether
+ at the time of the initial grant or subsequently, any and all of the
+ rights conveyed by this License.
+
+1.10. "Modifications"
+
+ means any of the following:
+
+ a. any file in Source Code Form that results from an addition to,
+ deletion from, or modification of the contents of Covered Software; or
+
+ b. any new file in Source Code Form that contains any Covered Software.
+
+1.11. "Patent Claims" of a Contributor
+
+ means any patent claim(s), including without limitation, method,
+ process, and apparatus claims, in any patent Licensable by such
+ Contributor that would be infringed, but for the grant of the License,
+ by the making, using, selling, offering for sale, having made, import,
+ or transfer of either its Contributions or its Contributor Version.
+
+1.12. "Secondary License"
+
+ means either the GNU General Public License, Version 2.0, the GNU Lesser
+ General Public License, Version 2.1, the GNU Affero General Public
+ License, Version 3.0, or any later versions of those licenses.
+
+1.13. "Source Code Form"
+
+ means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+
+ means an individual or a legal entity exercising rights under this
+ License. For legal entities, "You" includes any entity that controls, is
+ controlled by, or is under common control with You. For purposes of this
+ definition, "control" means (a) the power, direct or indirect, to cause
+ the direction or management of such entity, whether by contract or
+ otherwise, or (b) ownership of more than fifty percent (50%) of the
+ outstanding shares or beneficial ownership of such entity.
+
+
+2. License Grants and Conditions
+
+2.1. Grants
+
+ Each Contributor hereby grants You a world-wide, royalty-free,
+ non-exclusive license:
+
+ a. under intellectual property rights (other than patent or trademark)
+ Licensable by such Contributor to use, reproduce, make available,
+ modify, display, perform, distribute, and otherwise exploit its
+ Contributions, either on an unmodified basis, with Modifications, or
+ as part of a Larger Work; and
+
+ b. under Patent Claims of such Contributor to make, use, sell, offer for
+ sale, have made, import, and otherwise transfer either its
+ Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+ The licenses granted in Section 2.1 with respect to any Contribution
+ become effective for each Contribution on the date the Contributor first
+ distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+ The licenses granted in this Section 2 are the only rights granted under
+ this License. No additional rights or licenses will be implied from the
+ distribution or licensing of Covered Software under this License.
+ Notwithstanding Section 2.1(b) above, no patent license is granted by a
+ Contributor:
+
+ a. for any code that a Contributor has removed from Covered Software; or
+
+ b. for infringements caused by: (i) Your and any other third party's
+ modifications of Covered Software, or (ii) the combination of its
+ Contributions with other software (except as part of its Contributor
+ Version); or
+
+ c. under Patent Claims infringed by Covered Software in the absence of
+ its Contributions.
+
+ This License does not grant any rights in the trademarks, service marks,
+ or logos of any Contributor (except as may be necessary to comply with
+ the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+ No Contributor makes additional grants as a result of Your choice to
+ distribute the Covered Software under a subsequent version of this
+ License (see Section 10.2) or under the terms of a Secondary License (if
+ permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+ Each Contributor represents that the Contributor believes its
+ Contributions are its original creation(s) or it has sufficient rights to
+ grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+ This License is not intended to limit any rights You have under
+ applicable copyright doctrines of fair use, fair dealing, or other
+ equivalents.
+
+2.7. Conditions
+
+ Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
+ Section 2.1.
+
+
+3. Responsibilities
+
+3.1. Distribution of Source Form
+
+ All distribution of Covered Software in Source Code Form, including any
+ Modifications that You create or to which You contribute, must be under
+ the terms of this License. You must inform recipients that the Source
+ Code Form of the Covered Software is governed by the terms of this
+ License, and how they can obtain a copy of this License. You may not
+ attempt to alter or restrict the recipients' rights in the Source Code
+ Form.
+
+3.2. Distribution of Executable Form
+
+ If You distribute Covered Software in Executable Form then:
+
+ a. such Covered Software must also be made available in Source Code Form,
+ as described in Section 3.1, and You must inform recipients of the
+ Executable Form how they can obtain a copy of such Source Code Form by
+ reasonable means in a timely manner, at a charge no more than the cost
+ of distribution to the recipient; and
+
+ b. You may distribute such Executable Form under the terms of this
+ License, or sublicense it under different terms, provided that the
+ license for the Executable Form does not attempt to limit or alter the
+ recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+ You may create and distribute a Larger Work under terms of Your choice,
+ provided that You also comply with the requirements of this License for
+ the Covered Software. If the Larger Work is a combination of Covered
+ Software with a work governed by one or more Secondary Licenses, and the
+ Covered Software is not Incompatible With Secondary Licenses, this
+ License permits You to additionally distribute such Covered Software
+ under the terms of such Secondary License(s), so that the recipient of
+ the Larger Work may, at their option, further distribute the Covered
+ Software under the terms of either this License or such Secondary
+ License(s).
+
+3.4. Notices
+
+ You may not remove or alter the substance of any license notices
+ (including copyright notices, patent notices, disclaimers of warranty, or
+ limitations of liability) contained within the Source Code Form of the
+ Covered Software, except that You may alter any license notices to the
+ extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+ You may choose to offer, and to charge a fee for, warranty, support,
+ indemnity or liability obligations to one or more recipients of Covered
+ Software. However, You may do so only on Your own behalf, and not on
+ behalf of any Contributor. You must make it absolutely clear that any
+ such warranty, support, indemnity, or liability obligation is offered by
+ You alone, and You hereby agree to indemnify every Contributor for any
+ liability incurred by such Contributor as a result of warranty, support,
+ indemnity or liability terms You offer. You may include additional
+ disclaimers of warranty and limitations of liability specific to any
+ jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+
+ If it is impossible for You to comply with any of the terms of this License
+ with respect to some or all of the Covered Software due to statute,
+ judicial order, or regulation then You must: (a) comply with the terms of
+ this License to the maximum extent possible; and (b) describe the
+ limitations and the code they affect. Such description must be placed in a
+ text file included with all distributions of the Covered Software under
+ this License. Except to the extent prohibited by statute or regulation,
+ such description must be sufficiently detailed for a recipient of ordinary
+ skill to be able to understand it.
+
+5. Termination
+
+5.1. The rights granted under this License will terminate automatically if You
+ fail to comply with any of its terms. However, if You become compliant,
+ then the rights granted under this License from a particular Contributor
+ are reinstated (a) provisionally, unless and until such Contributor
+ explicitly and finally terminates Your grants, and (b) on an ongoing
+ basis, if such Contributor fails to notify You of the non-compliance by
+ some reasonable means prior to 60 days after You have come back into
+ compliance. Moreover, Your grants from a particular Contributor are
+ reinstated on an ongoing basis if such Contributor notifies You of the
+ non-compliance by some reasonable means, this is the first time You have
+ received notice of non-compliance with this License from such
+ Contributor, and You become compliant prior to 30 days after Your receipt
+ of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+ infringement claim (excluding declaratory judgment actions,
+ counter-claims, and cross-claims) alleging that a Contributor Version
+ directly or indirectly infringes any patent, then the rights granted to
+ You by any and all Contributors for the Covered Software under Section
+ 2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
+ license agreements (excluding distributors and resellers) which have been
+ validly granted by You or Your distributors under this License prior to
+ termination shall survive termination.
+
+6. Disclaimer of Warranty
+
+ Covered Software is provided under this License on an "as is" basis,
+ without warranty of any kind, either expressed, implied, or statutory,
+ including, without limitation, warranties that the Covered Software is free
+ of defects, merchantable, fit for a particular purpose or non-infringing.
+ The entire risk as to the quality and performance of the Covered Software
+ is with You. Should any Covered Software prove defective in any respect,
+ You (not any Contributor) assume the cost of any necessary servicing,
+ repair, or correction. This disclaimer of warranty constitutes an essential
+ part of this License. No use of any Covered Software is authorized under
+ this License except under this disclaimer.
+
+7. Limitation of Liability
+
+ Under no circumstances and under no legal theory, whether tort (including
+ negligence), contract, or otherwise, shall any Contributor, or anyone who
+ distributes Covered Software as permitted above, be liable to You for any
+ direct, indirect, special, incidental, or consequential damages of any
+ character including, without limitation, damages for lost profits, loss of
+ goodwill, work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses, even if such party shall have been
+ informed of the possibility of such damages. This limitation of liability
+ shall not apply to liability for death or personal injury resulting from
+ such party's negligence to the extent applicable law prohibits such
+ limitation. Some jurisdictions do not allow the exclusion or limitation of
+ incidental or consequential damages, so this exclusion and limitation may
+ not apply to You.
+
+8. Litigation
+
+ Any litigation relating to this License may be brought only in the courts
+ of a jurisdiction where the defendant maintains its principal place of
+ business and such litigation shall be governed by laws of that
+ jurisdiction, without reference to its conflict-of-law provisions. Nothing
+ in this Section shall prevent a party's ability to bring cross-claims or
+ counter-claims.
+
+9. Miscellaneous
+
+ This License represents the complete agreement concerning the subject
+ matter hereof. If any provision of this License is held to be
+ unenforceable, such provision shall be reformed only to the extent
+ necessary to make it enforceable. Any law or regulation which provides that
+ the language of a contract shall be construed against the drafter shall not
+ be used to construe this License against a Contributor.
+
+
+10. Versions of the License
+
+10.1. New Versions
+
+ Mozilla Foundation is the license steward. Except as provided in Section
+ 10.3, no one other than the license steward has the right to modify or
+ publish new versions of this License. Each version will be given a
+ distinguishing version number.
+
+10.2. Effect of New Versions
+
+ You may distribute the Covered Software under the terms of the version
+ of the License under which You originally received the Covered Software,
+ or under the terms of any subsequent version published by the license
+ steward.
+
+10.3. Modified Versions
+
+ If you create software not governed by this License, and you want to
+ create a new license for such software, you may create and use a
+ modified version of this License if you rename the license and remove
+ any references to the name of the license steward (except to note that
+ such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+ Licenses If You choose to distribute Source Code Form that is
+ Incompatible With Secondary Licenses under the terms of this version of
+ the License, the notice described in Exhibit B of this License must be
+ attached.
+
+Exhibit A - Source Code Form License Notice
+
+ This Source Code Form is subject to the
+ terms of the Mozilla Public License, v.
+ 2.0. If a copy of the MPL was not
+ distributed with this file, You can
+ obtain one at
+ http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular file,
+then You may include the notice in a location (such as a LICENSE file in a
+relevant directory) where a recipient would be likely to look for such a
+notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+
+ This Source Code Form is "Incompatible
+ With Secondary Licenses", as defined by
+ the Mozilla Public License, v. 2.0.
diff --git a/vendor/github.com/hashicorp/golang-lru/v2/internal/list.go b/vendor/github.com/hashicorp/golang-lru/v2/internal/list.go
new file mode 100644
index 0000000000..5cd74a0343
--- /dev/null
+++ b/vendor/github.com/hashicorp/golang-lru/v2/internal/list.go
@@ -0,0 +1,142 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE_list file.
+
+package internal
+
+import "time"
+
+// Entry is an LRU Entry
+type Entry[K comparable, V any] struct {
+ // Next and previous pointers in the doubly-linked list of elements.
+ // To simplify the implementation, internally a list l is implemented
+ // as a ring, such that &l.root is both the next element of the last
+ // list element (l.Back()) and the previous element of the first list
+ // element (l.Front()).
+ next, prev *Entry[K, V]
+
+ // The list to which this element belongs.
+ list *LruList[K, V]
+
+ // The LRU Key of this element.
+ Key K
+
+ // The Value stored with this element.
+ Value V
+
+ // The time this element would be cleaned up, optional
+ ExpiresAt time.Time
+
+ // The expiry bucket item was put in, optional
+ ExpireBucket uint8
+}
+
+// PrevEntry returns the previous list element or nil.
+func (e *Entry[K, V]) PrevEntry() *Entry[K, V] {
+ if p := e.prev; e.list != nil && p != &e.list.root {
+ return p
+ }
+ return nil
+}
+
+// LruList represents a doubly linked list.
+// The zero Value for LruList is an empty list ready to use.
+type LruList[K comparable, V any] struct {
+ root Entry[K, V] // sentinel list element, only &root, root.prev, and root.next are used
+ len int // current list Length excluding (this) sentinel element
+}
+
+// Init initializes or clears list l.
+func (l *LruList[K, V]) Init() *LruList[K, V] {
+ l.root.next = &l.root
+ l.root.prev = &l.root
+ l.len = 0
+ return l
+}
+
+// NewList returns an initialized list.
+func NewList[K comparable, V any]() *LruList[K, V] { return new(LruList[K, V]).Init() }
+
+// Length returns the number of elements of list l.
+// The complexity is O(1).
+func (l *LruList[K, V]) Length() int { return l.len }
+
+// Back returns the last element of list l or nil if the list is empty.
+func (l *LruList[K, V]) Back() *Entry[K, V] {
+ if l.len == 0 {
+ return nil
+ }
+ return l.root.prev
+}
+
+// lazyInit lazily initializes a zero List Value.
+func (l *LruList[K, V]) lazyInit() {
+ if l.root.next == nil {
+ l.Init()
+ }
+}
+
+// insert inserts e after at, increments l.len, and returns e.
+func (l *LruList[K, V]) insert(e, at *Entry[K, V]) *Entry[K, V] {
+ e.prev = at
+ e.next = at.next
+ e.prev.next = e
+ e.next.prev = e
+ e.list = l
+ l.len++
+ return e
+}
+
+// insertValue is a convenience wrapper for insert(&Entry{Value: v, ExpiresAt: ExpiresAt}, at).
+func (l *LruList[K, V]) insertValue(k K, v V, expiresAt time.Time, at *Entry[K, V]) *Entry[K, V] {
+ return l.insert(&Entry[K, V]{Value: v, Key: k, ExpiresAt: expiresAt}, at)
+}
+
+// Remove removes e from its list, decrements l.len
+func (l *LruList[K, V]) Remove(e *Entry[K, V]) V {
+ e.prev.next = e.next
+ e.next.prev = e.prev
+ e.next = nil // avoid memory leaks
+ e.prev = nil // avoid memory leaks
+ e.list = nil
+ l.len--
+
+ return e.Value
+}
+
+// move moves e to next to at.
+func (l *LruList[K, V]) move(e, at *Entry[K, V]) {
+ if e == at {
+ return
+ }
+ e.prev.next = e.next
+ e.next.prev = e.prev
+
+ e.prev = at
+ e.next = at.next
+ e.prev.next = e
+ e.next.prev = e
+}
+
+// PushFront inserts a new element e with value v at the front of list l and returns e.
+func (l *LruList[K, V]) PushFront(k K, v V) *Entry[K, V] {
+ l.lazyInit()
+ return l.insertValue(k, v, time.Time{}, &l.root)
+}
+
+// PushFrontExpirable inserts a new expirable element e with Value v at the front of list l and returns e.
+func (l *LruList[K, V]) PushFrontExpirable(k K, v V, expiresAt time.Time) *Entry[K, V] {
+ l.lazyInit()
+ return l.insertValue(k, v, expiresAt, &l.root)
+}
+
+// MoveToFront moves element e to the front of list l.
+// If e is not an element of l, the list is not modified.
+// The element must not be nil.
+func (l *LruList[K, V]) MoveToFront(e *Entry[K, V]) {
+ if e.list != l || l.root.next == e {
+ return
+ }
+ // see comment in List.Remove about initialization of l
+ l.move(e, &l.root)
+}
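Note: the `Entry`/`LruList` types above use the classic sentinel-ring form of a doubly linked list, where `&l.root` closes the ring so insert and remove never special-case the ends. Below is a minimal standalone sketch of that technique for context; it is not the vendored package itself (its `internal` import path cannot be imported from outside the module), and the names are hypothetical.

```go
package main

import "fmt"

// node mirrors the Entry layout above: the list is a ring whose sentinel
// (root) sits both "before" the front and "after" the back, so insertion
// and removal never need nil checks at the ends.
type node[V any] struct {
	next, prev *node[V]
	val        V
}

type ring[V any] struct {
	root node[V]
	size int
}

func (r *ring[V]) init() {
	r.root.next = &r.root
	r.root.prev = &r.root
}

// pushFront inserts right after the sentinel, like LruList.PushFront.
func (r *ring[V]) pushFront(v V) *node[V] {
	n := &node[V]{val: v, prev: &r.root, next: r.root.next}
	n.prev.next = n
	n.next.prev = n
	r.size++
	return n
}

// back returns root.prev, the least recently inserted element.
func (r *ring[V]) back() *node[V] {
	if r.size == 0 {
		return nil
	}
	return r.root.prev
}

func main() {
	var r ring[string]
	r.init()
	r.pushFront("oldest")
	r.pushFront("newest")
	fmt.Println(r.back().val) // "oldest": eviction candidates come off the back
}
```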
diff --git a/vendor/github.com/hashicorp/golang-lru/v2/simplelru/LICENSE_list b/vendor/github.com/hashicorp/golang-lru/v2/simplelru/LICENSE_list
new file mode 100644
index 0000000000..c4764e6b2f
--- /dev/null
+++ b/vendor/github.com/hashicorp/golang-lru/v2/simplelru/LICENSE_list
@@ -0,0 +1,29 @@
+This license applies to simplelru/list.go
+
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/hashicorp/golang-lru/v2/simplelru/lru.go b/vendor/github.com/hashicorp/golang-lru/v2/simplelru/lru.go
new file mode 100644
index 0000000000..f69792388c
--- /dev/null
+++ b/vendor/github.com/hashicorp/golang-lru/v2/simplelru/lru.go
@@ -0,0 +1,177 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package simplelru
+
+import (
+ "errors"
+
+ "github.com/hashicorp/golang-lru/v2/internal"
+)
+
+// EvictCallback is used to get a callback when a cache entry is evicted
+type EvictCallback[K comparable, V any] func(key K, value V)
+
+// LRU implements a non-thread safe fixed size LRU cache
+type LRU[K comparable, V any] struct {
+ size int
+ evictList *internal.LruList[K, V]
+ items map[K]*internal.Entry[K, V]
+ onEvict EvictCallback[K, V]
+}
+
+// NewLRU constructs an LRU of the given size
+func NewLRU[K comparable, V any](size int, onEvict EvictCallback[K, V]) (*LRU[K, V], error) {
+ if size <= 0 {
+ return nil, errors.New("must provide a positive size")
+ }
+
+ c := &LRU[K, V]{
+ size: size,
+ evictList: internal.NewList[K, V](),
+ items: make(map[K]*internal.Entry[K, V]),
+ onEvict: onEvict,
+ }
+ return c, nil
+}
+
+// Purge is used to completely clear the cache.
+func (c *LRU[K, V]) Purge() {
+ for k, v := range c.items {
+ if c.onEvict != nil {
+ c.onEvict(k, v.Value)
+ }
+ delete(c.items, k)
+ }
+ c.evictList.Init()
+}
+
+// Add adds a value to the cache. Returns true if an eviction occurred.
+func (c *LRU[K, V]) Add(key K, value V) (evicted bool) {
+ // Check for existing item
+ if ent, ok := c.items[key]; ok {
+ c.evictList.MoveToFront(ent)
+ ent.Value = value
+ return false
+ }
+
+ // Add new item
+ ent := c.evictList.PushFront(key, value)
+ c.items[key] = ent
+
+ evict := c.evictList.Length() > c.size
+ // Verify size not exceeded
+ if evict {
+ c.removeOldest()
+ }
+ return evict
+}
+
+// Get looks up a key's value from the cache.
+func (c *LRU[K, V]) Get(key K) (value V, ok bool) {
+ if ent, ok := c.items[key]; ok {
+ c.evictList.MoveToFront(ent)
+ return ent.Value, true
+ }
+ return
+}
+
+// Contains checks if a key is in the cache, without updating the recent-ness
+// or deleting it for being stale.
+func (c *LRU[K, V]) Contains(key K) (ok bool) {
+ _, ok = c.items[key]
+ return ok
+}
+
+// Peek returns the key value (or undefined if not found) without updating
+// the "recently used"-ness of the key.
+func (c *LRU[K, V]) Peek(key K) (value V, ok bool) {
+ var ent *internal.Entry[K, V]
+ if ent, ok = c.items[key]; ok {
+ return ent.Value, true
+ }
+ return
+}
+
+// Remove removes the provided key from the cache, returning if the
+// key was contained.
+func (c *LRU[K, V]) Remove(key K) (present bool) {
+ if ent, ok := c.items[key]; ok {
+ c.removeElement(ent)
+ return true
+ }
+ return false
+}
+
+// RemoveOldest removes the oldest item from the cache.
+func (c *LRU[K, V]) RemoveOldest() (key K, value V, ok bool) {
+ if ent := c.evictList.Back(); ent != nil {
+ c.removeElement(ent)
+ return ent.Key, ent.Value, true
+ }
+ return
+}
+
+// GetOldest returns the oldest entry
+func (c *LRU[K, V]) GetOldest() (key K, value V, ok bool) {
+ if ent := c.evictList.Back(); ent != nil {
+ return ent.Key, ent.Value, true
+ }
+ return
+}
+
+// Keys returns a slice of the keys in the cache, from oldest to newest.
+func (c *LRU[K, V]) Keys() []K {
+ keys := make([]K, c.evictList.Length())
+ i := 0
+ for ent := c.evictList.Back(); ent != nil; ent = ent.PrevEntry() {
+ keys[i] = ent.Key
+ i++
+ }
+ return keys
+}
+
+// Values returns a slice of the values in the cache, from oldest to newest.
+func (c *LRU[K, V]) Values() []V {
+ values := make([]V, len(c.items))
+ i := 0
+ for ent := c.evictList.Back(); ent != nil; ent = ent.PrevEntry() {
+ values[i] = ent.Value
+ i++
+ }
+ return values
+}
+
+// Len returns the number of items in the cache.
+func (c *LRU[K, V]) Len() int {
+ return c.evictList.Length()
+}
+
+// Resize changes the cache size.
+func (c *LRU[K, V]) Resize(size int) (evicted int) {
+ diff := c.Len() - size
+ if diff < 0 {
+ diff = 0
+ }
+ for i := 0; i < diff; i++ {
+ c.removeOldest()
+ }
+ c.size = size
+ return diff
+}
+
+// removeOldest removes the oldest item from the cache.
+func (c *LRU[K, V]) removeOldest() {
+ if ent := c.evictList.Back(); ent != nil {
+ c.removeElement(ent)
+ }
+}
+
+// removeElement is used to remove a given list element from the cache
+func (c *LRU[K, V]) removeElement(e *internal.Entry[K, V]) {
+ c.evictList.Remove(e)
+ delete(c.items, e.Key)
+ if c.onEvict != nil {
+ c.onEvict(e.Key, e.Value)
+ }
+}
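For reference, a hedged usage sketch of the exported `simplelru` API added above. The constructor, `Add`/`Get`/`Keys` and the eviction callback are the ones defined in this file; running it standalone assumes the `github.com/hashicorp/golang-lru/v2` module is resolvable outside this vendor tree.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/golang-lru/v2/simplelru"
)

func main() {
	// onEvict fires whenever Add pushes the cache over its size,
	// mirroring removeElement above.
	cache, err := simplelru.NewLRU[string, int](2, func(k string, v int) {
		fmt.Printf("evicted %s=%d\n", k, v)
	})
	if err != nil {
		panic(err)
	}

	cache.Add("a", 1)
	cache.Add("b", 2)
	cache.Get("a")    // marks "a" as most recently used
	cache.Add("c", 3) // evicts "b", the least recently used entry

	fmt.Println(cache.Keys()) // oldest to newest: [a c]
}
```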
diff --git a/vendor/github.com/hashicorp/golang-lru/v2/simplelru/lru_interface.go b/vendor/github.com/hashicorp/golang-lru/v2/simplelru/lru_interface.go
new file mode 100644
index 0000000000..043b8bcc3f
--- /dev/null
+++ b/vendor/github.com/hashicorp/golang-lru/v2/simplelru/lru_interface.go
@@ -0,0 +1,46 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+// Package simplelru provides a simple LRU implementation based on the built-in container/list.
+package simplelru
+
+// LRUCache is the interface for simple LRU cache.
+type LRUCache[K comparable, V any] interface {
+ // Adds a value to the cache, returns true if an eviction occurred and
+ // updates the "recently used"-ness of the key.
+ Add(key K, value V) bool
+
+ // Returns key's value from the cache and
+ // updates the "recently used"-ness of the key. #value, isFound
+ Get(key K) (value V, ok bool)
+
+ // Checks if a key exists in cache without updating the recent-ness.
+ Contains(key K) (ok bool)
+
+ // Returns key's value without updating the "recently used"-ness of the key.
+ Peek(key K) (value V, ok bool)
+
+ // Removes a key from the cache.
+ Remove(key K) bool
+
+ // Removes the oldest entry from cache.
+ RemoveOldest() (K, V, bool)
+
+ // Returns the oldest entry from the cache. #key, value, isFound
+ GetOldest() (K, V, bool)
+
+ // Returns a slice of the keys in the cache, from oldest to newest.
+ Keys() []K
+
+ // Values returns a slice of the values in the cache, from oldest to newest.
+ Values() []V
+
+ // Returns the number of items in the cache.
+ Len() int
+
+ // Clears all cache entries.
+ Purge()
+
+ // Resizes cache, returning number evicted
+ Resize(int) int
+}
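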
diff --git a/vendor/github.com/hashicorp/hc-install/.go-version b/vendor/github.com/hashicorp/hc-install/.go-version
index ce2dd53570..2a0ba77cc5 100644
--- a/vendor/github.com/hashicorp/hc-install/.go-version
+++ b/vendor/github.com/hashicorp/hc-install/.go-version
@@ -1 +1 @@
-1.21.5
+1.22.4
diff --git a/vendor/github.com/hashicorp/hc-install/README.md b/vendor/github.com/hashicorp/hc-install/README.md
index 6e78b5a610..0d55191bf2 100644
--- a/vendor/github.com/hashicorp/hc-install/README.md
+++ b/vendor/github.com/hashicorp/hc-install/README.md
@@ -14,55 +14,55 @@ the library in ad-hoc or CI shell scripting outside of Go.
`hc-install` does **not**:
- - Determine suitable installation path based on target system. e.g. in `/usr/bin` or `/usr/local/bin` on Unix based system.
- - Deal with execution of installed binaries (via service files or otherwise).
- - Upgrade existing binaries on your system.
- - Add nor link downloaded binaries to your `$PATH`.
+- Determine suitable installation path based on target system. e.g. in `/usr/bin` or `/usr/local/bin` on Unix based system.
+- Deal with execution of installed binaries (via service files or otherwise).
+- Upgrade existing binaries on your system.
+- Add nor link downloaded binaries to your `$PATH`.
## API
The `Installer` offers a few high-level methods:
- - `Ensure(context.Context, []src.Source)` to find, install, or build a product version
- - `Install(context.Context, []src.Installable)` to install a product version
+- `Ensure(context.Context, []src.Source)` to find, install, or build a product version
+- `Install(context.Context, []src.Installable)` to install a product version
### Sources
The `Installer` methods accept number of different `Source` types.
Each comes with different trade-offs described below.
- - `fs.{AnyVersion,ExactVersion,Version}` - Finds a binary in `$PATH` (or additional paths)
- - **Pros:**
- - This is most convenient when you already have the product installed on your system
+- `fs.{AnyVersion,ExactVersion,Version}` - Finds a binary in `$PATH` (or additional paths)
+ - **Pros:**
+ - This is most convenient when you already have the product installed on your system
which you already manage.
- - **Cons:**
- - Only relies on a single version, expects _you_ to manage the installation
- - _Not recommended_ for any environment where product installation is not controlled or managed by you (e.g. default GitHub Actions image managed by GitHub)
- - `releases.{LatestVersion,ExactVersion}` - Downloads, verifies & installs any known product from `releases.hashicorp.com`
- - **Pros:**
- - Fast and reliable way of obtaining any pre-built version of any product
- - Allows installation of enterprise versions
- - **Cons:**
- - Installation may consume some bandwidth, disk space and a little time
- - Potentially less stable builds (see `checkpoint` below)
- - `checkpoint.LatestVersion` - Downloads, verifies & installs any known product available in HashiCorp Checkpoint
- - **Pros:**
- - Checkpoint typically contains only product versions considered stable
- - **Cons:**
- - Installation may consume some bandwidth, disk space and a little time
- - Currently doesn't allow installation of old versions or enterprise versions (see `releases` above)
- - `build.GitRevision` - Clones raw source code and builds the product from it
- - **Pros:**
- - Useful for catching bugs and incompatibilities as early as possible (prior to product release).
- - **Cons:**
- - Building from scratch can consume significant amount of time & resources (CPU, memory, bandwith, disk space)
- - There are no guarantees that build instructions will always be up-to-date
- - There's increased likelihood of build containing bugs prior to release
- - Any CI builds relying on this are likely to be fragile
+ - **Cons:**
+ - Only relies on a single version, expects _you_ to manage the installation
+ - _Not recommended_ for any environment where product installation is not controlled or managed by you (e.g. default GitHub Actions image managed by GitHub)
+- `releases.{LatestVersion,ExactVersion}` - Downloads, verifies & installs any known product from `releases.hashicorp.com`
+ - **Pros:**
+ - Fast and reliable way of obtaining any pre-built version of any product
+ - Allows installation of enterprise versions
+ - **Cons:**
+ - Installation may consume some bandwidth, disk space and a little time
+ - Potentially less stable builds (see `checkpoint` below)
+- `checkpoint.LatestVersion` - Downloads, verifies & installs any known product available in HashiCorp Checkpoint
+ - **Pros:**
+ - Checkpoint typically contains only product versions considered stable
+ - **Cons:**
+ - Installation may consume some bandwidth, disk space and a little time
+ - Currently doesn't allow installation of old versions or enterprise versions (see `releases` above)
+- `build.GitRevision` - Clones raw source code and builds the product from it
+ - **Pros:**
+ - Useful for catching bugs and incompatibilities as early as possible (prior to product release).
+ - **Cons:**
+ - Building from scratch can consume significant amount of time & resources (CPU, memory, bandwidth, disk space)
+ - There are no guarantees that build instructions will always be up-to-date
+ - There's increased likelihood of build containing bugs prior to release
+ - Any CI builds relying on this are likely to be fragile
## Example Usage
-See examples at https://pkg.go.dev/github.com/hashicorp/hc-install#example-Installer.
+See examples at <https://pkg.go.dev/github.com/hashicorp/hc-install#example-Installer>.
## CLI
@@ -70,9 +70,9 @@ In addition to the Go library, which is the intended primary use case of `hc-ins
The CLI comes with some trade-offs:
- - more limited interface compared to the flexible Go API (installs specific versions of products via `releases.ExactVersion`)
- - minimal environment pre-requisites (no need to compile Go code)
- - see ["hc-install is not a package manager"](https://github.com/hashicorp/hc-install#hc-install-is-not-a-package-manager)
+- more limited interface compared to the flexible Go API (installs specific versions of products via `releases.ExactVersion`)
+- minimal environment pre-requisites (no need to compile Go code)
+- see ["hc-install is not a package manager"](https://github.com/hashicorp/hc-install#hc-install-is-not-a-package-manager)
### Installation
@@ -82,7 +82,7 @@ Given that one of the key roles of the CLI/library is integrity checking, you sh
[Homebrew](https://brew.sh)
-```
+```sh
brew install hashicorp/tap/hc-install
```
@@ -102,19 +102,23 @@ You can follow the instructions in the [Official Packaging Guide](https://www.ha
### Usage
-```
+```text
Usage: hc-install install [options] -version <version> <product>
This command installs a HashiCorp product.
Options:
-version [REQUIRED] Version of product to install.
- -path Path to directory where the product will be installed. Defaults
- to current working directory.
+ -path Path to directory where the product will be installed.
+ Defaults to current working directory.
+ -log-file Path to file where logs will be written. /dev/stdout
+ or /dev/stderr can be used to log to STDOUT/STDERR.
```
+
```sh
hc-install install -version 1.3.7 terraform
```
-```
+
+```sh
hc-install: will install terraform@1.3.7
installed terraform@1.3.7 to /current/working/dir/terraform
```
diff --git a/vendor/github.com/hashicorp/hc-install/catalog-info.yaml b/vendor/github.com/hashicorp/hc-install/catalog-info.yaml
new file mode 100644
index 0000000000..be5c06e9d1
--- /dev/null
+++ b/vendor/github.com/hashicorp/hc-install/catalog-info.yaml
@@ -0,0 +1,17 @@
+# Copyright (c) HashiCorp, Inc.
+# SPDX-License-Identifier: MPL-2.0
+#
+# Intended for internal HashiCorp use only
+apiVersion: backstage.io/v1alpha1
+kind: Component
+metadata:
+ name: hc-install
+ description: Go module for downloading or locating HashiCorp binaries
+ annotations:
+ github.com/project-slug: hashicorp/hc-install
+ jira/project-key: TF
+ jira/label: hc-install
+spec:
+ type: library
+ owner: terraform-core
+ lifecycle: production
diff --git a/vendor/github.com/hashicorp/hc-install/checkpoint/latest_version.go b/vendor/github.com/hashicorp/hc-install/checkpoint/latest_version.go
index 2cd5379fb8..a382cb1066 100644
--- a/vendor/github.com/hashicorp/hc-install/checkpoint/latest_version.go
+++ b/vendor/github.com/hashicorp/hc-install/checkpoint/latest_version.go
@@ -6,7 +6,7 @@ package checkpoint
import (
"context"
"fmt"
- "io/ioutil"
+ "io"
"log"
"os"
"path/filepath"
@@ -24,7 +24,7 @@ import (
var (
defaultTimeout = 30 * time.Second
- discardLogger = log.New(ioutil.Discard, "", 0)
+ discardLogger = log.New(io.Discard, "", 0)
)
// LatestVersion installs the latest version known to Checkpoint
@@ -35,6 +35,10 @@ type LatestVersion struct {
SkipChecksumVerification bool
InstallDir string
+ // LicenseDir represents directory path where to install license files.
+ // If empty, license files will be placed in the same directory as the binary.
+ LicenseDir string
+
// ArmoredPublicKey is a public PGP key in ASCII/armor format to use
// instead of built-in pubkey to verify signature of downloaded checksums
ArmoredPublicKey string
@@ -101,7 +105,7 @@ func (lv *LatestVersion) Install(ctx context.Context) (string, error) {
if dstDir == "" {
var err error
dirName := fmt.Sprintf("%s_*", lv.Product.Name)
- dstDir, err = ioutil.TempDir("", dirName)
+ dstDir, err = os.MkdirTemp("", dirName)
if err != nil {
return "", err
}
@@ -126,9 +130,11 @@ func (lv *LatestVersion) Install(ctx context.Context) (string, error) {
if lv.ArmoredPublicKey != "" {
d.ArmoredPublicKey = lv.ArmoredPublicKey
}
- zipFilePath, err := d.DownloadAndUnpack(ctx, pv, dstDir, "")
- if zipFilePath != "" {
- lv.pathsToRemove = append(lv.pathsToRemove, zipFilePath)
+
+ licenseDir := lv.LicenseDir
+ up, err := d.DownloadAndUnpack(ctx, pv, dstDir, licenseDir)
+ if up != nil {
+ lv.pathsToRemove = append(lv.pathsToRemove, up.PathsToRemove...)
}
if err != nil {
return "", err
diff --git a/vendor/github.com/hashicorp/hc-install/fs/fs.go b/vendor/github.com/hashicorp/hc-install/fs/fs.go
index 216df2c2cd..ac6f5cf9cd 100644
--- a/vendor/github.com/hashicorp/hc-install/fs/fs.go
+++ b/vendor/github.com/hashicorp/hc-install/fs/fs.go
@@ -4,14 +4,14 @@
package fs
import (
- "io/ioutil"
+ "io"
"log"
"time"
)
var (
defaultTimeout = 10 * time.Second
- discardLogger = log.New(ioutil.Discard, "", 0)
+ discardLogger = log.New(io.Discard, "", 0)
)
type fileCheckFunc func(path string) error
diff --git a/vendor/github.com/hashicorp/hc-install/fs/fs_unix.go b/vendor/github.com/hashicorp/hc-install/fs/fs_unix.go
index eebd98b82c..5aed844484 100644
--- a/vendor/github.com/hashicorp/hc-install/fs/fs_unix.go
+++ b/vendor/github.com/hashicorp/hc-install/fs/fs_unix.go
@@ -16,9 +16,7 @@ import (
func lookupDirs(extraDirs []string) []string {
pathVar := os.Getenv("PATH")
dirs := filepath.SplitList(pathVar)
- for _, ep := range extraDirs {
- dirs = append(dirs, ep)
- }
+ dirs = append(dirs, extraDirs...)
return dirs
}
diff --git a/vendor/github.com/hashicorp/hc-install/installer.go b/vendor/github.com/hashicorp/hc-install/installer.go
index 6c704eede3..01c1fdeed9 100644
--- a/vendor/github.com/hashicorp/hc-install/installer.go
+++ b/vendor/github.com/hashicorp/hc-install/installer.go
@@ -6,7 +6,7 @@ package install
import (
"context"
"fmt"
- "io/ioutil"
+ "io"
"log"
"github.com/hashicorp/go-multierror"
@@ -23,7 +23,7 @@ type Installer struct {
type RemoveFunc func(ctx context.Context) error
func NewInstaller() *Installer {
- discardLogger := log.New(ioutil.Discard, "", 0)
+ discardLogger := log.New(io.Discard, "", 0)
return &Installer{
logger: discardLogger,
}
diff --git a/vendor/github.com/hashicorp/hc-install/internal/build/go_build.go b/vendor/github.com/hashicorp/hc-install/internal/build/go_build.go
index 504bf45a30..6eef755bdb 100644
--- a/vendor/github.com/hashicorp/hc-install/internal/build/go_build.go
+++ b/vendor/github.com/hashicorp/hc-install/internal/build/go_build.go
@@ -7,7 +7,7 @@ import (
"bytes"
"context"
"fmt"
- "io/ioutil"
+ "io"
"log"
"os"
"os/exec"
@@ -17,7 +17,7 @@ import (
"golang.org/x/mod/modfile"
)
-var discardLogger = log.New(ioutil.Discard, "", 0)
+var discardLogger = log.New(io.Discard, "", 0)
// GoBuild represents a Go builder (to run "go build")
type GoBuild struct {
@@ -161,7 +161,7 @@ type CleanupFunc func(context.Context)
func guessRequiredGoVersion(repoDir string) (*version.Version, bool) {
goEnvFile := filepath.Join(repoDir, ".go-version")
if fi, err := os.Stat(goEnvFile); err == nil && !fi.IsDir() {
- b, err := ioutil.ReadFile(goEnvFile)
+ b, err := os.ReadFile(goEnvFile)
if err != nil {
return nil, false
}
@@ -174,7 +174,7 @@ func guessRequiredGoVersion(repoDir string) (*version.Version, bool) {
goModFile := filepath.Join(repoDir, "go.mod")
if fi, err := os.Stat(goModFile); err == nil && !fi.IsDir() {
- b, err := ioutil.ReadFile(goModFile)
+ b, err := os.ReadFile(goModFile)
if err != nil {
return nil, false
}
diff --git a/vendor/github.com/hashicorp/hc-install/internal/httpclient/httpclient.go b/vendor/github.com/hashicorp/hc-install/internal/httpclient/httpclient.go
index a9503dfdb8..7cdcf5b15f 100644
--- a/vendor/github.com/hashicorp/hc-install/internal/httpclient/httpclient.go
+++ b/vendor/github.com/hashicorp/hc-install/internal/httpclient/httpclient.go
@@ -5,25 +5,23 @@ package httpclient
import (
"fmt"
+ "log"
"net/http"
- "github.com/hashicorp/go-cleanhttp"
+ "github.com/hashicorp/go-retryablehttp"
"github.com/hashicorp/hc-install/version"
)
// NewHTTPClient provides a pre-configured http.Client
// e.g. with relevant User-Agent header
-func NewHTTPClient() *http.Client {
- client := cleanhttp.DefaultClient()
-
- userAgent := fmt.Sprintf("hc-install/%s", version.Version())
-
- cli := cleanhttp.DefaultPooledClient()
- cli.Transport = &userAgentRoundTripper{
- userAgent: userAgent,
- inner: cli.Transport,
+func NewHTTPClient(logger *log.Logger) *http.Client {
+ rc := retryablehttp.NewClient()
+ rc.Logger = logger
+ client := rc.StandardClient()
+ client.Transport = &userAgentRoundTripper{
+ userAgent: fmt.Sprintf("hc-install/%s", version.Version()),
+ inner: client.Transport,
}
-
return client
}
diff --git a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/checksum_downloader.go b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/checksum_downloader.go
index 843de8cdfa..264801b809 100644
--- a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/checksum_downloader.go
+++ b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/checksum_downloader.go
@@ -52,10 +52,10 @@ func (cd *ChecksumDownloader) DownloadAndVerifyChecksums(ctx context.Context) (C
return nil, err
}
- client := httpclient.NewHTTPClient()
+ client := httpclient.NewHTTPClient(cd.Logger)
sigURL := fmt.Sprintf("%s/%s/%s/%s", cd.BaseURL,
url.PathEscape(cd.ProductVersion.Name),
- url.PathEscape(cd.ProductVersion.RawVersion),
+ url.PathEscape(cd.ProductVersion.Version.String()),
url.PathEscape(sigFilename))
cd.Logger.Printf("downloading signature from %s", sigURL)
@@ -76,7 +76,7 @@ func (cd *ChecksumDownloader) DownloadAndVerifyChecksums(ctx context.Context) (C
shasumsURL := fmt.Sprintf("%s/%s/%s/%s", cd.BaseURL,
url.PathEscape(cd.ProductVersion.Name),
- url.PathEscape(cd.ProductVersion.RawVersion),
+ url.PathEscape(cd.ProductVersion.Version.String()),
url.PathEscape(cd.ProductVersion.SHASUMS))
cd.Logger.Printf("downloading checksums from %s", shasumsURL)
diff --git a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/downloader.go b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/downloader.go
index 146c1cf029..708915ea65 100644
--- a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/downloader.go
+++ b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/downloader.go
@@ -10,7 +10,6 @@ import (
"crypto/sha256"
"fmt"
"io"
- "io/ioutil"
"log"
"net/http"
"net/url"
@@ -29,14 +28,18 @@ type Downloader struct {
BaseURL string
}
-func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, binDir string, licenseDir string) (zipFilePath string, err error) {
+type UnpackedProduct struct {
+ PathsToRemove []string
+}
+
+func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, binDir string, licenseDir string) (up *UnpackedProduct, err error) {
if len(pv.Builds) == 0 {
- return "", fmt.Errorf("no builds found for %s %s", pv.Name, pv.Version)
+ return nil, fmt.Errorf("no builds found for %s %s", pv.Name, pv.Version)
}
pb, ok := pv.Builds.FilterBuild(runtime.GOOS, runtime.GOARCH, "zip")
if !ok {
- return "", fmt.Errorf("no ZIP archive found for %s %s %s/%s",
+ return nil, fmt.Errorf("no ZIP archive found for %s %s %s/%s",
pv.Name, pv.Version, runtime.GOOS, runtime.GOARCH)
}
@@ -50,48 +53,35 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion,
}
verifiedChecksums, err := v.DownloadAndVerifyChecksums(ctx)
if err != nil {
- return "", err
+ return nil, err
}
var ok bool
verifiedChecksum, ok = verifiedChecksums[pb.Filename]
if !ok {
- return "", fmt.Errorf("no checksum found for %q", pb.Filename)
+ return nil, fmt.Errorf("no checksum found for %q", pb.Filename)
}
}
- client := httpclient.NewHTTPClient()
+ client := httpclient.NewHTTPClient(d.Logger)
- archiveURL := pb.URL
- if d.BaseURL != "" {
- // ensure that absolute download links from mocked responses
- // are still pointing to the mock server if one is set
- baseURL, err := url.Parse(d.BaseURL)
- if err != nil {
- return "", err
- }
-
- u, err := url.Parse(archiveURL)
- if err != nil {
- return "", err
- }
- u.Scheme = baseURL.Scheme
- u.Host = baseURL.Host
- archiveURL = u.String()
+ archiveURL, err := determineArchiveURL(pb.URL, d.BaseURL)
+ if err != nil {
+ return nil, err
}
d.Logger.Printf("downloading archive from %s", archiveURL)
req, err := http.NewRequestWithContext(ctx, http.MethodGet, archiveURL, nil)
if err != nil {
- return "", fmt.Errorf("failed to create request for %q: %w", archiveURL, err)
+ return nil, fmt.Errorf("failed to create request for %q: %w", archiveURL, err)
}
resp, err := client.Do(req)
if err != nil {
- return "", err
+ return nil, err
}
if resp.StatusCode != 200 {
- return "", fmt.Errorf("failed to download ZIP archive from %q: %s", archiveURL, resp.Status)
+ return nil, fmt.Errorf("failed to download ZIP archive from %q: %s", archiveURL, resp.Status)
}
defer resp.Body.Close()
@@ -100,19 +90,22 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion,
contentType := resp.Header.Get("content-type")
if !contentTypeIsZip(contentType) {
- return "", fmt.Errorf("unexpected content-type: %s (expected any of %q)",
+ return nil, fmt.Errorf("unexpected content-type: %s (expected any of %q)",
contentType, zipMimeTypes)
}
expectedSize := resp.ContentLength
- pkgFile, err := ioutil.TempFile("", pb.Filename)
+ pkgFile, err := os.CreateTemp("", pb.Filename)
if err != nil {
- return "", err
+ return nil, err
}
defer pkgFile.Close()
pkgFilePath, err := filepath.Abs(pkgFile.Name())
+ up = &UnpackedProduct{}
+ up.PathsToRemove = append(up.PathsToRemove, pkgFilePath)
+
d.Logger.Printf("copying %q (%d bytes) to %s", pb.Filename, expectedSize, pkgFile.Name())
var bytesCopied int64
@@ -123,12 +116,12 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion,
bytesCopied, err = io.Copy(h, r)
if err != nil {
- return "", err
+ return nil, err
}
calculatedSum := h.Sum(nil)
if !bytes.Equal(calculatedSum, verifiedChecksum) {
- return pkgFilePath, fmt.Errorf(
+ return up, fmt.Errorf(
"checksum mismatch (expected: %x, got: %x)",
verifiedChecksum, calculatedSum,
)
@@ -136,14 +129,14 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion,
} else {
bytesCopied, err = io.Copy(pkgFile, pkgReader)
if err != nil {
- return pkgFilePath, err
+ return up, err
}
}
d.Logger.Printf("copied %d bytes to %s", bytesCopied, pkgFile.Name())
if expectedSize != 0 && bytesCopied != int64(expectedSize) {
- return pkgFilePath, fmt.Errorf(
+ return up, fmt.Errorf(
"unexpected size (downloaded: %d, expected: %d)",
bytesCopied, expectedSize,
)
@@ -151,7 +144,7 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion,
r, err := zip.OpenReader(pkgFile.Name())
if err != nil {
- return pkgFilePath, err
+ return up, err
}
defer r.Close()
@@ -163,31 +156,37 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion,
}
srcFile, err := f.Open()
if err != nil {
- return pkgFilePath, err
+ return up, err
}
// Determine the appropriate destination file path
dstDir := binDir
+ // for license files, use binDir if licenseDir is not set
if isLicenseFile(f.Name) && licenseDir != "" {
dstDir = licenseDir
}
d.Logger.Printf("unpacking %s to %s", f.Name, dstDir)
dstPath := filepath.Join(dstDir, f.Name)
+
+ if isLicenseFile(f.Name) {
+ up.PathsToRemove = append(up.PathsToRemove, dstPath)
+ }
+
dstFile, err := os.Create(dstPath)
if err != nil {
- return pkgFilePath, err
+ return up, err
}
_, err = io.Copy(dstFile, srcFile)
if err != nil {
- return pkgFilePath, err
+ return up, err
}
srcFile.Close()
dstFile.Close()
}
- return pkgFilePath, nil
+ return up, nil
}
// The production release site uses consistent single mime type
@@ -207,11 +206,13 @@ func contentTypeIsZip(contentType string) bool {
return false
}
-// Enterprise products have a few additional license files
-// that need to be extracted to a separate directory
+// Product archives may have a few license files
+// which may be extracted to a separate directory
+// and may need to be tracked for later cleanup.
var licenseFiles = []string{
"EULA.txt",
"TermsOfEvaluation.txt",
+ "LICENSE.txt",
}
func isLicenseFile(filename string) bool {
@@ -222,3 +223,28 @@ func isLicenseFile(filename string) bool {
}
return false
}
+
+// determineArchiveURL determines the archive URL based on the base URL provided.
+func determineArchiveURL(archiveURL, baseURL string) (string, error) {
+ // If custom URL is set, use that instead of the one from the JSON.
+ // Also ensures that absolute download links from mocked responses
+ // are still pointing to the mock server if one is set.
+ if baseURL == "" {
+ return archiveURL, nil
+ }
+
+ base, err := url.Parse(baseURL)
+ if err != nil {
+ return "", err
+ }
+
+ u, err := url.Parse(archiveURL)
+ if err != nil {
+ return "", err
+ }
+
+ // Use base URL path and append the path from the archive URL.
+ newArchiveURL := base.JoinPath(u.Path)
+
+ return newArchiveURL.String(), nil
+}
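The new `determineArchiveURL` helper above rewrites absolute archive links onto a custom base URL via `url.JoinPath`, keeping only the path of the published link. A small standalone illustration of that rewrite (stdlib only, Go 1.19+; the function name here is hypothetical):

```go
package main

import (
	"fmt"
	"net/url"
)

// rewriteOntoBase mirrors the determineArchiveURL logic above: keep the
// path from the published archive URL, but serve it from baseURL instead
// (e.g. a local mock of releases.hashicorp.com).
func rewriteOntoBase(archiveURL, baseURL string) (string, error) {
	if baseURL == "" {
		return archiveURL, nil
	}
	base, err := url.Parse(baseURL)
	if err != nil {
		return "", err
	}
	u, err := url.Parse(archiveURL)
	if err != nil {
		return "", err
	}
	return base.JoinPath(u.Path).String(), nil
}

func main() {
	out, err := rewriteOntoBase(
		"https://releases.hashicorp.com/terraform/1.3.7/terraform_1.3.7_linux_amd64.zip",
		"http://127.0.0.1:8080",
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // http://127.0.0.1:8080/terraform/1.3.7/terraform_1.3.7_linux_amd64.zip
}
```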
diff --git a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/product_version.go b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/product_version.go
index 99b811a645..94152b131a 100644
--- a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/product_version.go
+++ b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/product_version.go
@@ -9,8 +9,7 @@ import "github.com/hashicorp/go-version"
// "consul 0.5.1". A ProductVersion may have one or more builds.
type ProductVersion struct {
Name string `json:"name"`
- RawVersion string `json:"version"`
- Version *version.Version `json:"-"`
+ Version *version.Version `json:"version"`
SHASUMS string `json:"shasums,omitempty"`
SHASUMSSig string `json:"shasums_signature,omitempty"`
SHASUMSSigs []string `json:"shasums_signatures,omitempty"`
diff --git a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/releases.go b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/releases.go
index 755019f2f2..cae7f53024 100644
--- a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/releases.go
+++ b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/releases.go
@@ -7,7 +7,7 @@ import (
"context"
"encoding/json"
"fmt"
- "io/ioutil"
+ "io"
"log"
"net/http"
"net/url"
@@ -55,7 +55,7 @@ type Releases struct {
func NewReleases() *Releases {
return &Releases{
- logger: log.New(ioutil.Discard, "", 0),
+ logger: log.New(io.Discard, "", 0),
BaseURL: defaultBaseURL,
}
}
@@ -65,7 +65,7 @@ func (r *Releases) SetLogger(logger *log.Logger) {
}
func (r *Releases) ListProductVersions(ctx context.Context, productName string) (ProductVersionsMap, error) {
- client := httpclient.NewHTTPClient()
+ client := httpclient.NewHTTPClient(r.logger)
productIndexURL := fmt.Sprintf("%s/%s/index.json",
r.BaseURL,
@@ -95,7 +95,7 @@ func (r *Releases) ListProductVersions(ctx context.Context, productName string)
r.logger.Printf("received %s", resp.Status)
- body, err := ioutil.ReadAll(resp.Body)
+ body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
@@ -122,7 +122,7 @@ func (r *Releases) ListProductVersions(ctx context.Context, productName string)
}
func (r *Releases) GetProductVersion(ctx context.Context, product string, version *version.Version) (*ProductVersion, error) {
- client := httpclient.NewHTTPClient()
+ client := httpclient.NewHTTPClient(r.logger)
indexURL := fmt.Sprintf("%s/%s/%s/index.json",
r.BaseURL,
@@ -153,7 +153,7 @@ func (r *Releases) GetProductVersion(ctx context.Context, product string, versio
r.logger.Printf("received %s", resp.Status)
- body, err := ioutil.ReadAll(resp.Body)
+ body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
diff --git a/vendor/github.com/hashicorp/hc-install/product/consul.go b/vendor/github.com/hashicorp/hc-install/product/consul.go
index 9789d7c318..03b0326057 100644
--- a/vendor/github.com/hashicorp/hc-install/product/consul.go
+++ b/vendor/github.com/hashicorp/hc-install/product/consul.go
@@ -17,10 +17,6 @@ import (
var consulVersionOutputRe = regexp.MustCompile(`Consul ` + simpleVersionRe)
-var (
- v1_18 = version.Must(version.NewVersion("1.18"))
-)
-
var Consul = Product{
Name: "consul",
BinaryName: func() string {
diff --git a/vendor/github.com/hashicorp/hc-install/releases/enterprise.go b/vendor/github.com/hashicorp/hc-install/releases/enterprise.go
index 179d40d1cd..cfef088afb 100644
--- a/vendor/github.com/hashicorp/hc-install/releases/enterprise.go
+++ b/vendor/github.com/hashicorp/hc-install/releases/enterprise.go
@@ -6,9 +6,6 @@ package releases
import "fmt"
type EnterpriseOptions struct {
- // LicenseDir represents directory path where to install license files (required)
- LicenseDir string
-
// Meta represents optional version metadata (e.g. hsm, fips1402)
Meta string
}
@@ -25,12 +22,12 @@ func enterpriseVersionMetadata(eo *EnterpriseOptions) string {
return metadata
}
-func validateEnterpriseOptions(eo *EnterpriseOptions) error {
+func validateEnterpriseOptions(eo *EnterpriseOptions, licenseDir string) error {
if eo == nil {
return nil
}
- if eo.LicenseDir == "" {
+ if licenseDir == "" {
return fmt.Errorf("LicenseDir must be provided when requesting enterprise versions")
}
diff --git a/vendor/github.com/hashicorp/hc-install/releases/exact_version.go b/vendor/github.com/hashicorp/hc-install/releases/exact_version.go
index e42f4d239f..597e9ae362 100644
--- a/vendor/github.com/hashicorp/hc-install/releases/exact_version.go
+++ b/vendor/github.com/hashicorp/hc-install/releases/exact_version.go
@@ -6,7 +6,6 @@ package releases
import (
"context"
"fmt"
- "io/ioutil"
"log"
"os"
"path/filepath"
@@ -28,6 +27,10 @@ type ExactVersion struct {
InstallDir string
Timeout time.Duration
+ // LicenseDir represents directory path where to install license files
+ // (required for enterprise versions, optional for Community editions).
+ LicenseDir string
+
// Enterprise indicates installation of enterprise version (leave nil for Community editions)
Enterprise *EnterpriseOptions
@@ -37,7 +40,10 @@ type ExactVersion struct {
// instead of built-in pubkey to verify signature of downloaded checksums
ArmoredPublicKey string
- apiBaseURL string
+ // ApiBaseURL is an optional field that specifies a custom URL to download the product from.
+ // If ApiBaseURL is set, the product will be downloaded from this base URL instead of the default site.
+ // Note: The directory structure of the custom URL must match the HashiCorp releases site (including the index.json files).
+ ApiBaseURL string
logger *log.Logger
pathsToRemove []string
}
@@ -70,7 +76,7 @@ func (ev *ExactVersion) Validate() error {
return fmt.Errorf("unknown version")
}
- if err := validateEnterpriseOptions(ev.Enterprise); err != nil {
+ if err := validateEnterpriseOptions(ev.Enterprise, ev.LicenseDir); err != nil {
return err
}
@@ -93,7 +99,7 @@ func (ev *ExactVersion) Install(ctx context.Context) (string, error) {
if dstDir == "" {
var err error
dirName := fmt.Sprintf("%s_*", ev.Product.Name)
- dstDir, err = ioutil.TempDir("", dirName)
+ dstDir, err = os.MkdirTemp("", dirName)
if err != nil {
return "", err
}
@@ -103,8 +109,8 @@ func (ev *ExactVersion) Install(ctx context.Context) (string, error) {
ev.log().Printf("will install into dir at %s", dstDir)
rels := rjson.NewReleases()
- if ev.apiBaseURL != "" {
- rels.BaseURL = ev.apiBaseURL
+ if ev.ApiBaseURL != "" {
+ rels.BaseURL = ev.ApiBaseURL
}
rels.SetLogger(ev.log())
installVersion := ev.Version
@@ -125,17 +131,14 @@ func (ev *ExactVersion) Install(ctx context.Context) (string, error) {
if ev.ArmoredPublicKey != "" {
d.ArmoredPublicKey = ev.ArmoredPublicKey
}
- if ev.apiBaseURL != "" {
- d.BaseURL = ev.apiBaseURL
+ if ev.ApiBaseURL != "" {
+ d.BaseURL = ev.ApiBaseURL
}
- licenseDir := ""
- if ev.Enterprise != nil {
- licenseDir = ev.Enterprise.LicenseDir
- }
- zipFilePath, err := d.DownloadAndUnpack(ctx, pv, dstDir, licenseDir)
- if zipFilePath != "" {
- ev.pathsToRemove = append(ev.pathsToRemove, zipFilePath)
+ licenseDir := ev.LicenseDir
+ up, err := d.DownloadAndUnpack(ctx, pv, dstDir, licenseDir)
+ if up != nil {
+ ev.pathsToRemove = append(ev.pathsToRemove, up.PathsToRemove...)
}
if err != nil {
return "", err
diff --git a/vendor/github.com/hashicorp/hc-install/releases/latest_version.go b/vendor/github.com/hashicorp/hc-install/releases/latest_version.go
index 9893b223ad..ee70782b29 100644
--- a/vendor/github.com/hashicorp/hc-install/releases/latest_version.go
+++ b/vendor/github.com/hashicorp/hc-install/releases/latest_version.go
@@ -6,7 +6,6 @@ package releases
import (
"context"
"fmt"
- "io/ioutil"
"log"
"os"
"path/filepath"
@@ -28,6 +27,10 @@ type LatestVersion struct {
Timeout time.Duration
IncludePrereleases bool
+ // LicenseDir represents directory path where to install license files
+ // (required for enterprise versions, optional for Community editions).
+ LicenseDir string
+
// Enterprise indicates installation of enterprise version (leave nil for Community editions)
Enterprise *EnterpriseOptions
@@ -37,7 +40,10 @@ type LatestVersion struct {
// instead of built-in pubkey to verify signature of downloaded checksums
ArmoredPublicKey string
- apiBaseURL string
+ // ApiBaseURL is an optional field that specifies a custom URL to download the product from.
+ // If ApiBaseURL is set, the product will be downloaded from this base URL instead of the default site.
+ // Note: The directory structure of the custom URL must match the HashiCorp releases site (including the index.json files).
+ ApiBaseURL string
logger *log.Logger
pathsToRemove []string
}
@@ -66,7 +72,7 @@ func (lv *LatestVersion) Validate() error {
return fmt.Errorf("invalid binary name: %q", lv.Product.BinaryName())
}
- if err := validateEnterpriseOptions(lv.Enterprise); err != nil {
+ if err := validateEnterpriseOptions(lv.Enterprise, lv.LicenseDir); err != nil {
return err
}
@@ -89,7 +95,7 @@ func (lv *LatestVersion) Install(ctx context.Context) (string, error) {
if dstDir == "" {
var err error
dirName := fmt.Sprintf("%s_*", lv.Product.Name)
- dstDir, err = ioutil.TempDir("", dirName)
+ dstDir, err = os.MkdirTemp("", dirName)
if err != nil {
return "", err
}
@@ -99,8 +105,8 @@ func (lv *LatestVersion) Install(ctx context.Context) (string, error) {
lv.log().Printf("will install into dir at %s", dstDir)
rels := rjson.NewReleases()
- if lv.apiBaseURL != "" {
- rels.BaseURL = lv.apiBaseURL
+ if lv.ApiBaseURL != "" {
+ rels.BaseURL = lv.ApiBaseURL
}
rels.SetLogger(lv.log())
versions, err := rels.ListProductVersions(ctx, lv.Product.Name)
@@ -126,16 +132,13 @@ func (lv *LatestVersion) Install(ctx context.Context) (string, error) {
if lv.ArmoredPublicKey != "" {
d.ArmoredPublicKey = lv.ArmoredPublicKey
}
- if lv.apiBaseURL != "" {
- d.BaseURL = lv.apiBaseURL
- }
- licenseDir := ""
- if lv.Enterprise != nil {
- licenseDir = lv.Enterprise.LicenseDir
+ if lv.ApiBaseURL != "" {
+ d.BaseURL = lv.ApiBaseURL
}
- zipFilePath, err := d.DownloadAndUnpack(ctx, versionToInstall, dstDir, licenseDir)
- if zipFilePath != "" {
- lv.pathsToRemove = append(lv.pathsToRemove, zipFilePath)
+ licenseDir := lv.LicenseDir
+ up, err := d.DownloadAndUnpack(ctx, versionToInstall, dstDir, licenseDir)
+ if up != nil {
+ lv.pathsToRemove = append(lv.pathsToRemove, up.PathsToRemove...)
}
if err != nil {
return "", err
diff --git a/vendor/github.com/hashicorp/hc-install/releases/releases.go b/vendor/github.com/hashicorp/hc-install/releases/releases.go
index 7bef49ba30..a24db6c63e 100644
--- a/vendor/github.com/hashicorp/hc-install/releases/releases.go
+++ b/vendor/github.com/hashicorp/hc-install/releases/releases.go
@@ -4,7 +4,7 @@
package releases
import (
- "io/ioutil"
+ "io"
"log"
"time"
)
@@ -12,5 +12,5 @@ import (
var (
defaultInstallTimeout = 30 * time.Second
defaultListTimeout = 10 * time.Second
- discardLogger = log.New(ioutil.Discard, "", 0)
+ discardLogger = log.New(io.Discard, "", 0)
)
diff --git a/vendor/github.com/hashicorp/hc-install/releases/versions.go b/vendor/github.com/hashicorp/hc-install/releases/versions.go
index 49b1af78ca..a4316090ec 100644
--- a/vendor/github.com/hashicorp/hc-install/releases/versions.go
+++ b/vendor/github.com/hashicorp/hc-install/releases/versions.go
@@ -30,8 +30,9 @@ type Versions struct {
}
type InstallationOptions struct {
- Timeout time.Duration
- Dir string
+ Timeout time.Duration
+ Dir string
+ LicenseDir string
SkipChecksumVerification bool
@@ -46,7 +47,7 @@ func (v *Versions) List(ctx context.Context) ([]src.Source, error) {
return nil, fmt.Errorf("invalid product name: %q", v.Product.Name)
}
- if err := validateEnterpriseOptions(v.Enterprise); err != nil {
+ if err := validateEnterpriseOptions(v.Enterprise, v.Install.LicenseDir); err != nil {
return nil, err
}
@@ -85,6 +86,7 @@ func (v *Versions) List(ctx context.Context) ([]src.Source, error) {
Version: pv.Version,
InstallDir: v.Install.Dir,
Timeout: v.Install.Timeout,
+ LicenseDir: v.Install.LicenseDir,
ArmoredPublicKey: v.Install.ArmoredPublicKey,
SkipChecksumVerification: v.Install.SkipChecksumVerification,
@@ -92,8 +94,7 @@ func (v *Versions) List(ctx context.Context) ([]src.Source, error) {
if v.Enterprise != nil {
ev.Enterprise = &EnterpriseOptions{
- Meta: v.Enterprise.Meta,
- LicenseDir: v.Enterprise.LicenseDir,
+ Meta: v.Enterprise.Meta,
}
}
diff --git a/vendor/github.com/hashicorp/hc-install/version/VERSION b/vendor/github.com/hashicorp/hc-install/version/VERSION
index d2b13eb644..f374f6662e 100644
--- a/vendor/github.com/hashicorp/hc-install/version/VERSION
+++ b/vendor/github.com/hashicorp/hc-install/version/VERSION
@@ -1 +1 @@
-0.6.4
+0.9.1
diff --git a/vendor/github.com/hashicorp/hcl/v2/CHANGELOG.md b/vendor/github.com/hashicorp/hcl/v2/CHANGELOG.md
index 2eebedbc76..b2ff2631d2 100644
--- a/vendor/github.com/hashicorp/hcl/v2/CHANGELOG.md
+++ b/vendor/github.com/hashicorp/hcl/v2/CHANGELOG.md
@@ -1,5 +1,22 @@
# HCL Changelog
+## v2.22.0 (August 26, 2024)
+
+### Enhancements
+
+* feat: return an ExprSyntaxError for invalid references that end in a dot ([#692](https://github.com/hashicorp/hcl/pull/692))
+
+## v2.21.0 (June 19, 2024)
+
+### Enhancements
+
+* Introduce `ParseTraversalPartial`, which allows traversals that include the splat (`[*]`) index operator. ([#673](https://github.com/hashicorp/hcl/pull/673))
+* ext/dynblock: Now accepts marked values in `for_each`, and will transfer those marks (as much as technically possible) to values in the generated blocks. ([#679](https://github.com/hashicorp/hcl/pull/679))
+
+### Bugs Fixed
+
+* Expression evaluation will no longer panic if the splat operator is applied to an unknown value that has cty marks. ([#678](https://github.com/hashicorp/hcl/pull/678))
+
## v2.20.1 (March 26, 2024)
### Bugs Fixed
diff --git a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression.go b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression.go
index 815973996b..f4c3a6d79b 100644
--- a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression.go
+++ b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression.go
@@ -788,21 +788,24 @@ func (e *ConditionalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostic
})
return cty.UnknownVal(resultType), diags
}
- if !condResult.IsKnown() {
- // we use the unmarked values throughout the unknown branch
- _, condResultMarks := condResult.Unmark()
- trueResult, trueResultMarks := trueResult.Unmark()
- falseResult, falseResultMarks := falseResult.Unmark()
- // use a value to merge marks
- _, resMarks := cty.DynamicVal.WithMarks(condResultMarks, trueResultMarks, falseResultMarks).Unmark()
+ // Now that we have all three values, collect all the marks for the result.
+ // Since it's possible that a condition value could be unknown, and the
+ // consumer needs to deal with any marks from either branch anyway, we must
+ // always combine them for consistent results.
+ condResult, condResultMarks := condResult.Unmark()
+ trueResult, trueResultMarks := trueResult.Unmark()
+ falseResult, falseResultMarks := falseResult.Unmark()
+ var resMarks []cty.ValueMarks
+ resMarks = append(resMarks, condResultMarks, trueResultMarks, falseResultMarks)
+ if !condResult.IsKnown() {
trueRange := trueResult.Range()
falseRange := falseResult.Range()
// if both branches are known to be null, then the result must still be null
if trueResult.IsNull() && falseResult.IsNull() {
- return cty.NullVal(resultType).WithMarks(resMarks), diags
+ return cty.NullVal(resultType).WithMarks(resMarks...), diags
}
// We might be able to offer a refined range for the result based on
@@ -841,7 +844,7 @@ func (e *ConditionalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostic
ref = ref.NumberRangeUpperBound(hi, hiInc)
}
- return ref.NewValue().WithMarks(resMarks), diags
+ return ref.NewValue().WithMarks(resMarks...), diags
}
if trueResult.Type().IsCollectionType() && falseResult.Type().IsCollectionType() {
@@ -867,7 +870,7 @@ func (e *ConditionalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostic
}
ref = ref.CollectionLengthLowerBound(lo).CollectionLengthUpperBound(hi)
- return ref.NewValue().WithMarks(resMarks), diags
+ return ref.NewValue().WithMarks(resMarks...), diags
}
}
@@ -875,7 +878,7 @@ func (e *ConditionalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostic
if trueRange.DefinitelyNotNull() && falseRange.DefinitelyNotNull() {
ret = ret.RefineNotNull()
}
- return ret.WithMarks(resMarks), diags
+ return ret.WithMarks(resMarks...), diags
}
condResult, err := convert.Convert(condResult, cty.Bool)
@@ -892,8 +895,6 @@ func (e *ConditionalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostic
return cty.UnknownVal(resultType), diags
}
- // Unmark result before testing for truthiness
- condResult, _ = condResult.UnmarkDeep()
if condResult.True() {
diags = append(diags, trueDiags...)
if convs[0] != nil {
@@ -916,7 +917,7 @@ func (e *ConditionalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostic
trueResult = cty.UnknownVal(resultType)
}
}
- return trueResult, diags
+ return trueResult.WithMarks(resMarks...), diags
} else {
diags = append(diags, falseDiags...)
if convs[1] != nil {
@@ -939,7 +940,7 @@ func (e *ConditionalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostic
falseResult = cty.UnknownVal(resultType)
}
}
- return falseResult, diags
+ return falseResult.WithMarks(resMarks...), diags
}
}
@@ -1429,9 +1430,9 @@ func (e *ForExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
})
return cty.DynamicVal, diags
}
- if !collVal.IsKnown() {
- return cty.DynamicVal, diags
- }
+
+ // Grab the CondExpr marks when we're returning early with an unknown
+ var condMarks cty.ValueMarks
// Before we start we'll do an early check to see if any CondExpr we've
// been given is of the wrong type. This isn't 100% reliable (it may
@@ -1459,6 +1460,9 @@ func (e *ForExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
})
return cty.DynamicVal, diags
}
+
+ _, condMarks = result.Unmark()
+
_, err := convert.Convert(result, cty.Bool)
if err != nil {
diags = append(diags, &hcl.Diagnostic{
@@ -1477,6 +1481,10 @@ func (e *ForExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
}
}
+ if !collVal.IsKnown() {
+ return cty.DynamicVal.WithMarks(append(marks, condMarks)...), diags
+ }
+
if e.KeyExpr != nil {
// Producing an object
var vals map[string]cty.Value
@@ -1517,6 +1525,12 @@ func (e *ForExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
known = false
continue
}
+
+ // Extract and merge marks from the include expression into the
+ // main set of marks
+ _, includeMarks := includeRaw.Unmark()
+ marks = append(marks, includeMarks)
+
include, err := convert.Convert(includeRaw, cty.Bool)
if err != nil {
if known {
@@ -1540,7 +1554,7 @@ func (e *ForExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
// Extract and merge marks from the include expression into the
// main set of marks
- includeUnmarked, includeMarks := include.Unmark()
+ includeUnmarked, _ := include.Unmark()
marks = append(marks, includeMarks)
if includeUnmarked.False() {
// Skip this element
@@ -1565,6 +1579,10 @@ func (e *ForExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
known = false
continue
}
+
+ _, keyMarks := keyRaw.Unmark()
+ marks = append(marks, keyMarks)
+
if !keyRaw.IsKnown() {
known = false
continue
@@ -1587,8 +1605,7 @@ func (e *ForExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
continue
}
- key, keyMarks := key.Unmark()
- marks = append(marks, keyMarks)
+ key, _ = key.Unmark()
val, valDiags := e.ValExpr.Value(childCtx)
diags = append(diags, valDiags...)
@@ -1618,7 +1635,7 @@ func (e *ForExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
}
if !known {
- return cty.DynamicVal, diags
+ return cty.DynamicVal.WithMarks(marks...), diags
}
if e.Group {
@@ -1664,6 +1681,12 @@ func (e *ForExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
known = false
continue
}
+
+ // Extract and merge marks from the include expression into the
+ // main set of marks
+ _, includeMarks := includeRaw.Unmark()
+ marks = append(marks, includeMarks)
+
if !includeRaw.IsKnown() {
// We will eventually return DynamicVal, but we'll continue
// iterating in case there are other diagnostics to gather
@@ -1689,10 +1712,7 @@ func (e *ForExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
continue
}
- // Extract and merge marks from the include expression into the
- // main set of marks
- includeUnmarked, includeMarks := include.Unmark()
- marks = append(marks, includeMarks)
+ includeUnmarked, _ := include.Unmark()
if includeUnmarked.False() {
// Skip this element
continue
@@ -1705,7 +1725,7 @@ func (e *ForExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
}
if !known {
- return cty.DynamicVal, diags
+ return cty.DynamicVal.WithMarks(marks...), diags
}
return cty.TupleVal(vals).WithMarks(marks...), diags
@@ -1780,7 +1800,7 @@ func (e *SplatExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
if sourceVal.IsNull() {
if autoUpgrade {
- return cty.EmptyTupleVal, diags
+ return cty.EmptyTupleVal.WithSameMarks(sourceVal), diags
}
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
@@ -1798,7 +1818,7 @@ func (e *SplatExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
// If we don't even know the _type_ of our source value yet then
// we'll need to defer all processing, since we can't decide our
// result type either.
- return cty.DynamicVal, diags
+ return cty.DynamicVal.WithSameMarks(sourceVal), diags
}
upgradedUnknown := false
@@ -1813,13 +1833,14 @@ func (e *SplatExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
// list of a single attribute, but we still need to check if that
// attribute actually exists.
if !sourceVal.IsKnown() {
- sourceRng := sourceVal.Range()
+ unmarkedVal, _ := sourceVal.Unmark()
+ sourceRng := unmarkedVal.Range()
if sourceRng.CouldBeNull() {
upgradedUnknown = true
}
}
- sourceVal = cty.TupleVal([]cty.Value{sourceVal})
+ sourceVal = cty.TupleVal([]cty.Value{sourceVal}).WithSameMarks(sourceVal)
sourceTy = sourceVal.Type()
}
@@ -1900,14 +1921,14 @@ func (e *SplatExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
e.Item.clearValue(ctx) // clean up our temporary value
if upgradedUnknown {
- return cty.DynamicVal, diags
+ return cty.DynamicVal.WithMarks(marks), diags
}
if !isKnown {
// We'll ignore the resultTy diagnostics in this case since they
// will just be the same errors we saw while iterating above.
ty, _ := resultTy()
- return cty.UnknownVal(ty), diags
+ return cty.UnknownVal(ty).WithMarks(marks), diags
}
switch {
@@ -1915,7 +1936,7 @@ func (e *SplatExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
if len(vals) == 0 {
ty, tyDiags := resultTy()
diags = append(diags, tyDiags...)
- return cty.ListValEmpty(ty.ElementType()), diags
+ return cty.ListValEmpty(ty.ElementType()).WithMarks(marks), diags
}
return cty.ListVal(vals).WithMarks(marks), diags
default:
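
Taken together, the `ForExpr`, `SplatExpr`, and `ConditionalExpr` hunks above change early-return paths so that cty marks survive even when evaluation bails out with an unknown result. A minimal sketch of the observable behavior, assuming a marked unknown collection (the "sensitive" mark string is only illustrative):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	// A for expression over a collection whose value is not yet known.
	expr, diags := hclsyntax.ParseExpression(
		[]byte(`[for s in coll : s]`), "example.hcl", hcl.InitialPos)
	if diags.HasErrors() {
		panic(diags.Error())
	}

	ctx := &hcl.EvalContext{
		Variables: map[string]cty.Value{
			// An unknown list carrying a mark, e.g. a sensitivity marker.
			"coll": cty.UnknownVal(cty.List(cty.String)).Mark("sensitive"),
		},
	}

	val, diags := expr.Value(ctx)
	if diags.HasErrors() {
		panic(diags.Error())
	}

	// Before this change the unknown result dropped the mark; with it the
	// mark on the collection is expected to be carried through.
	fmt.Println(val.IsKnown())            // false
	fmt.Println(val.HasMark("sensitive")) // expected: true
}
```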
diff --git a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/parser.go b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/parser.go
index ce96ae35b4..fec7861a29 100644
--- a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/parser.go
+++ b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/parser.go
@@ -811,9 +811,16 @@ Traversal:
// will probably be misparsed until we hit something that
// allows us to re-sync.
//
- // We will probably need to do something better here eventually
- // in order to support autocomplete triggered by typing a
- // period.
+ // Returning an ExprSyntaxError allows us to pass more information
+ // about the invalid expression to the caller, which can then
+ // use this, for example, for completions that happen after typing
+ // a dot in an editor.
+ ret = &ExprSyntaxError{
+ Placeholder: cty.DynamicVal,
+ ParseDiags: diags,
+ SrcRange: hcl.RangeBetween(from.Range(), dot.Range),
+ }
+
p.setRecovery()
}
@@ -1516,6 +1523,16 @@ func (p *parser) parseObjectCons() (Expression, hcl.Diagnostics) {
diags = append(diags, valueDiags...)
if p.recovery && valueDiags.HasErrors() {
+ // If the value is an ExprSyntaxError, we can add an item with it, even though we will recover afterwards
+ // This allows downstream consumers to still retrieve this first invalid item, even though following items
+ // won't be parsed. This is useful for supplying completions.
+ if exprSyntaxError, ok := value.(*ExprSyntaxError); ok {
+ items = append(items, ObjectConsItem{
+ KeyExpr: key,
+ ValueExpr: exprSyntaxError,
+ })
+ }
+
// If expression parsing failed then we are probably in a strange
// place in the token stream, so we'll bail out and try to reset
// to after our closing brace to allow parsing to continue.
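
Because the dangling-dot case now yields an `ExprSyntaxError` placeholder and `parseObjectCons` keeps the partially parsed item, a consumer such as an editor integration can still see the incomplete item. A hypothetical sketch of inspecting it; the exact recovery behavior depends on the input, so treat this as illustrative only:

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/hclsyntax"
)

func main() {
	// An object constructor whose value ends in a dangling dot, as it
	// would while someone is typing in an editor.
	src := []byte(`{ name = var. }`)
	expr, _ := hclsyntax.ParseExpression(src, "editor.hcl", hcl.InitialPos)

	obj, ok := expr.(*hclsyntax.ObjectConsExpr)
	if !ok {
		return
	}
	for _, item := range obj.Items {
		if syntaxErr, ok := item.ValueExpr.(*hclsyntax.ExprSyntaxError); ok {
			// The placeholder keeps the source range of the invalid
			// expression, which can drive completions after the dot.
			fmt.Println("incomplete expression at", syntaxErr.SrcRange)
		}
	}
}
```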
diff --git a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/parser_traversal.go b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/parser_traversal.go
index 3afa6ab064..f7d4062f09 100644
--- a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/parser_traversal.go
+++ b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/parser_traversal.go
@@ -4,8 +4,9 @@
package hclsyntax
import (
- "github.com/hashicorp/hcl/v2"
"github.com/zclconf/go-cty/cty"
+
+ "github.com/hashicorp/hcl/v2"
)
// ParseTraversalAbs parses an absolute traversal that is assumed to consume
@@ -13,6 +14,26 @@ import (
// behavior is not supported here because traversals are not expected to
// be parsed as part of a larger program.
func (p *parser) ParseTraversalAbs() (hcl.Traversal, hcl.Diagnostics) {
+ return p.parseTraversal(false)
+}
+
+// ParseTraversalPartial parses an absolute traversal that is permitted
+// to contain splat ([*]) expressions. Only splat expressions within square
+// brackets are permitted ([*]); splat expressions within attribute names are
+// not permitted (.*).
+//
+// The meaning of partial here is that the traversal may be incomplete, in that
+// any splat expression indicates reference to a potentially unknown number of
+// elements.
+//
+// Traversals that include splats cannot be automatically traversed by HCL using
+// the TraversalAbs or TraversalRel methods. Instead, the caller must handle
+// the traversals manually.
+func (p *parser) ParseTraversalPartial() (hcl.Traversal, hcl.Diagnostics) {
+ return p.parseTraversal(true)
+}
+
+func (p *parser) parseTraversal(allowSplats bool) (hcl.Traversal, hcl.Diagnostics) {
var ret hcl.Traversal
var diags hcl.Diagnostics
@@ -127,6 +148,34 @@ func (p *parser) ParseTraversalAbs() (hcl.Traversal, hcl.Diagnostics) {
return ret, diags
}
+ case TokenStar:
+ if allowSplats {
+
+ p.Read() // Eat the star.
+ close := p.Read()
+ if close.Type != TokenCBrack {
+ diags = append(diags, &hcl.Diagnostic{
+ Severity: hcl.DiagError,
+ Summary: "Unclosed index brackets",
+ Detail: "Index key must be followed by a closing bracket.",
+ Subject: &close.Range,
+ Context: hcl.RangeBetween(open.Range, close.Range).Ptr(),
+ })
+ }
+
+ ret = append(ret, hcl.TraverseSplat{
+ SrcRange: hcl.RangeBetween(open.Range, close.Range),
+ })
+
+ if diags.HasErrors() {
+ return ret, diags
+ }
+
+ continue
+ }
+
+ // Otherwise, return the error below for the star.
+ fallthrough
default:
if next.Type == TokenStar {
diags = append(diags, &hcl.Diagnostic{
diff --git a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/public.go b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/public.go
index d56f8e50be..17dc1ed419 100644
--- a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/public.go
+++ b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/public.go
@@ -118,6 +118,37 @@ func ParseTraversalAbs(src []byte, filename string, start hcl.Pos) (hcl.Traversa
return expr, diags
}
+// ParseTraversalPartial matches the behavior of ParseTraversalAbs except
+// that it allows splat expressions ([*]) to appear in the traversal.
+//
+// The returned traversals are "partial" in that the splat expression indicates
+// an unknown value for the index.
+//
+// Traversals that include splats cannot be automatically traversed by HCL using
+// the TraversalAbs or TraversalRel methods. Instead, the caller must handle
+// the traversals manually.
+func ParseTraversalPartial(src []byte, filename string, start hcl.Pos) (hcl.Traversal, hcl.Diagnostics) {
+ tokens, diags := LexExpression(src, filename, start)
+ peeker := newPeeker(tokens, false)
+ parser := &parser{peeker: peeker}
+
+ // Bare traversals are always parsed in "ignore newlines" mode, as if
+ // they were wrapped in parentheses.
+ parser.PushIncludeNewlines(false)
+
+ expr, parseDiags := parser.ParseTraversalPartial()
+ diags = append(diags, parseDiags...)
+
+ parser.PopIncludeNewlines()
+
+ // Panic if the parser uses incorrect stack discipline with the peeker's
+ // newlines stack, since otherwise it will produce confusing downstream
+ // errors.
+ peeker.AssertEmptyIncludeNewlinesStack()
+
+ return expr, diags
+}
+
// LexConfig performs lexical analysis on the given buffer, treating it as a
// whole HCL config file, and returns the resulting tokens.
//
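
A brief usage sketch for the new `ParseTraversalPartial` entry point, parsing an address-like reference that contains a splat; the example input is made up and error handling is minimal:

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/hclsyntax"
)

func main() {
	traversal, diags := hclsyntax.ParseTraversalPartial(
		[]byte(`aws_instance.example[*].id`), "address.hcl", hcl.InitialPos)
	if diags.HasErrors() {
		panic(diags.Error())
	}

	// Splat steps cannot be traversed automatically, so callers are
	// expected to branch on the step type themselves.
	for _, step := range traversal {
		switch s := step.(type) {
		case hcl.TraverseRoot:
			fmt.Println("root:", s.Name)
		case hcl.TraverseAttr:
			fmt.Println("attr:", s.Name)
		case hcl.TraverseIndex:
			fmt.Println("index:", s.Key.GoString())
		case hcl.TraverseSplat:
			fmt.Println("splat: [*]")
		}
	}
}
```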
diff --git a/vendor/github.com/hashicorp/hcl/v2/ops.go b/vendor/github.com/hashicorp/hcl/v2/ops.go
index bdf23614d6..3cd7b205ef 100644
--- a/vendor/github.com/hashicorp/hcl/v2/ops.go
+++ b/vendor/github.com/hashicorp/hcl/v2/ops.go
@@ -49,7 +49,7 @@ func Index(collection, key cty.Value, srcRange *Range) (cty.Value, Diagnostics)
ty := collection.Type()
kty := key.Type()
if kty == cty.DynamicPseudoType || ty == cty.DynamicPseudoType {
- return cty.DynamicVal, nil
+ return cty.DynamicVal.WithSameMarks(collection), nil
}
switch {
@@ -87,9 +87,9 @@ func Index(collection, key cty.Value, srcRange *Range) (cty.Value, Diagnostics)
has, _ := collection.HasIndex(key).Unmark()
if !has.IsKnown() {
if ty.IsTupleType() {
- return cty.DynamicVal, nil
+ return cty.DynamicVal.WithSameMarks(collection), nil
} else {
- return cty.UnknownVal(ty.ElementType()), nil
+ return cty.UnknownVal(ty.ElementType()).WithSameMarks(collection), nil
}
}
if has.False() {
@@ -196,10 +196,10 @@ func Index(collection, key cty.Value, srcRange *Range) (cty.Value, Diagnostics)
}
}
if !collection.IsKnown() {
- return cty.DynamicVal, nil
+ return cty.DynamicVal.WithSameMarks(collection), nil
}
if !key.IsKnown() {
- return cty.DynamicVal, nil
+ return cty.DynamicVal.WithSameMarks(collection), nil
}
key, _ = key.Unmark()
@@ -291,13 +291,13 @@ func GetAttr(obj cty.Value, attrName string, srcRange *Range) (cty.Value, Diagno
}
if !obj.IsKnown() {
- return cty.UnknownVal(ty.AttributeType(attrName)), nil
+ return cty.UnknownVal(ty.AttributeType(attrName)).WithSameMarks(obj), nil
}
return obj.GetAttr(attrName), nil
case ty.IsMapType():
if !obj.IsKnown() {
- return cty.UnknownVal(ty.ElementType()), nil
+ return cty.UnknownVal(ty.ElementType()).WithSameMarks(obj), nil
}
idx := cty.StringVal(attrName)
@@ -319,7 +319,7 @@ func GetAttr(obj cty.Value, attrName string, srcRange *Range) (cty.Value, Diagno
return obj.Index(idx), nil
case ty == cty.DynamicPseudoType:
- return cty.DynamicVal, nil
+ return cty.DynamicVal.WithSameMarks(obj), nil
case ty.IsListType() && ty.ElementType().IsObjectType():
// It seems a common mistake to try to access attributes on a whole
// list of objects rather than on a specific individual element, so
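
The `Index` and `GetAttr` changes above rely on `WithSameMarks` so that unknown or dynamic results inherit the marks of the value they were derived from. A small illustration with a made-up mark:

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	// A marked map whose contents are not known yet.
	coll := cty.UnknownVal(cty.Map(cty.String)).Mark("sensitive")
	key := cty.StringVal("password")

	val, diags := hcl.Index(coll, key, nil)
	if diags.HasErrors() {
		panic(diags.Error())
	}

	// Previously the unknown element lost the mark; now it is expected
	// to be carried over from the collection.
	fmt.Println(val.IsKnown())            // false
	fmt.Println(val.HasMark("sensitive")) // expected: true
}
```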
diff --git a/vendor/github.com/hashicorp/hcl/v2/spec.md b/vendor/github.com/hashicorp/hcl/v2/spec.md
index 97ef613182..d52ed70bb5 100644
--- a/vendor/github.com/hashicorp/hcl/v2/spec.md
+++ b/vendor/github.com/hashicorp/hcl/v2/spec.md
@@ -96,7 +96,7 @@ of the implementation language.
### _Dynamic Attributes_ Processing
The _schema-driven_ processing model is useful when the expected structure
-of a body is known a priori by the calling application. Some blocks are
+of a body is known by the calling application. Some blocks are
instead more free-form, such as a user-provided set of arbitrary key/value
pairs.
diff --git a/vendor/github.com/hashicorp/terraform-exec/internal/version/version.go b/vendor/github.com/hashicorp/terraform-exec/internal/version/version.go
index 235d561265..bcb308e015 100644
--- a/vendor/github.com/hashicorp/terraform-exec/internal/version/version.go
+++ b/vendor/github.com/hashicorp/terraform-exec/internal/version/version.go
@@ -3,7 +3,7 @@
package version
-const version = "0.21.0"
+const version = "0.22.0"
// ModuleVersion returns the current version of the github.com/hashicorp/terraform-exec Go module.
// This is a function to allow for future possible enhancement using debug.BuildInfo.
diff --git a/vendor/github.com/hashicorp/terraform-exec/tfexec/init.go b/vendor/github.com/hashicorp/terraform-exec/tfexec/init.go
index c292fdc0f1..ac5eea5508 100644
--- a/vendor/github.com/hashicorp/terraform-exec/tfexec/init.go
+++ b/vendor/github.com/hashicorp/terraform-exec/tfexec/init.go
@@ -6,6 +6,7 @@ package tfexec
import (
"context"
"fmt"
+ "io"
"os/exec"
)
@@ -99,6 +100,21 @@ func (opt *VerifyPluginsOption) configureInit(conf *initConfig) {
conf.verifyPlugins = opt.verifyPlugins
}
+func (tf *Terraform) configureInitOptions(ctx context.Context, c *initConfig, opts ...InitOption) error {
+ for _, o := range opts {
+ switch o.(type) {
+ case *LockOption, *LockTimeoutOption, *VerifyPluginsOption, *GetPluginsOption:
+ err := tf.compatible(ctx, nil, tf0_15_0)
+ if err != nil {
+ return fmt.Errorf("-lock, -lock-timeout, -verify-plugins, and -get-plugins options are no longer available as of Terraform 0.15: %w", err)
+ }
+ }
+
+ o.configureInit(c)
+ }
+ return nil
+}
+
// Init represents the terraform init subcommand.
func (tf *Terraform) Init(ctx context.Context, opts ...InitOption) error {
cmd, err := tf.initCmd(ctx, opts...)
@@ -108,21 +124,71 @@ func (tf *Terraform) Init(ctx context.Context, opts ...InitOption) error {
return tf.runTerraformCmd(ctx, cmd)
}
+// InitJSON represents the terraform init subcommand with the `-json` flag.
+// Using the `-json` flag will result in
+// [machine-readable](https://developer.hashicorp.com/terraform/internals/machine-readable-ui)
+// JSON being written to the supplied `io.Writer`.
+func (tf *Terraform) InitJSON(ctx context.Context, w io.Writer, opts ...InitOption) error {
+ err := tf.compatible(ctx, tf1_9_0, nil)
+ if err != nil {
+ return fmt.Errorf("terraform init -json was added in 1.9.0: %w", err)
+ }
+
+ tf.SetStdout(w)
+
+ cmd, err := tf.initJSONCmd(ctx, opts...)
+ if err != nil {
+ return err
+ }
+
+ return tf.runTerraformCmd(ctx, cmd)
+}
+
func (tf *Terraform) initCmd(ctx context.Context, opts ...InitOption) (*exec.Cmd, error) {
c := defaultInitOptions
- for _, o := range opts {
- switch o.(type) {
- case *LockOption, *LockTimeoutOption, *VerifyPluginsOption, *GetPluginsOption:
- err := tf.compatible(ctx, nil, tf0_15_0)
- if err != nil {
- return nil, fmt.Errorf("-lock, -lock-timeout, -verify-plugins, and -get-plugins options are no longer available as of Terraform 0.15: %w", err)
- }
- }
+ err := tf.configureInitOptions(ctx, &c, opts...)
+ if err != nil {
+ return nil, err
+ }
+
+ args, err := tf.buildInitArgs(ctx, c)
+ if err != nil {
+ return nil, err
+ }
+
+ // Optional positional argument; must be last as flags precede positional arguments.
+ if c.dir != "" {
+ args = append(args, c.dir)
+ }
+
+ return tf.buildInitCmd(ctx, c, args)
+}
+
+func (tf *Terraform) initJSONCmd(ctx context.Context, opts ...InitOption) (*exec.Cmd, error) {
+ c := defaultInitOptions
+
+ err := tf.configureInitOptions(ctx, &c, opts...)
+ if err != nil {
+ return nil, err
+ }
- o.configureInit(&c)
+ args, err := tf.buildInitArgs(ctx, c)
+ if err != nil {
+ return nil, err
+ }
+
+ args = append(args, "-json")
+
+ // Optional positional argument; must be last as flags precede positional arguments.
+ if c.dir != "" {
+ args = append(args, c.dir)
}
+ return tf.buildInitCmd(ctx, c, args)
+}
+
+func (tf *Terraform) buildInitArgs(ctx context.Context, c initConfig) ([]string, error) {
args := []string{"init", "-no-color", "-input=false"}
// string opts: only pass if set
@@ -172,11 +238,10 @@ func (tf *Terraform) initCmd(ctx context.Context, opts ...InitOption) (*exec.Cmd
}
}
- // optional positional argument
- if c.dir != "" {
- args = append(args, c.dir)
- }
+ return args, nil
+}
+func (tf *Terraform) buildInitCmd(ctx context.Context, c initConfig, args []string) (*exec.Cmd, error) {
mergeEnv := map[string]string{}
if c.reattachInfo != nil {
reattachStr, err := c.reattachInfo.marshalString()
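
A hedged usage sketch for the new `InitJSON` method, which streams the machine-readable output of `terraform init -json` (Terraform 1.9+) to a caller-supplied writer; the working directory and binary path below are placeholders:

```go
package main

import (
	"context"
	"log"
	"os"

	"github.com/hashicorp/terraform-exec/tfexec"
)

func main() {
	// Placeholder paths; adjust for your environment.
	tf, err := tfexec.NewTerraform("/path/to/working-dir", "/usr/local/bin/terraform")
	if err != nil {
		log.Fatal(err)
	}

	// Each line written to os.Stdout is a JSON object describing init
	// progress, per Terraform's machine-readable UI format.
	if err := tf.InitJSON(context.Background(), os.Stdout, tfexec.Upgrade(true)); err != nil {
		log.Fatal(err)
	}
}
```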
diff --git a/vendor/github.com/hashicorp/terraform-json/CONTRIBUTING.md b/vendor/github.com/hashicorp/terraform-json/CONTRIBUTING.md
new file mode 100644
index 0000000000..07b5fd5c8f
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-json/CONTRIBUTING.md
@@ -0,0 +1,22 @@
+# Contributing to terraform-json
+
+## Versioning
+
+The `github.com/hashicorp/terraform-json` Go module in its entirety is versioned according to [Go module versioning](https://golang.org/ref/mod#versions) with Git tags.
+
+There is currently no firm plan for releasing v1.
+
+## Releases
+
+Releases are made on a reasonably regular basis by the Terraform team, using our custom CI workflows. There is currently no set release schedule and no requirement for _contributors_ to write changelog entries.
+
+The following notes are only relevant to maintainers.
+
+[Create new release](https://github.com/hashicorp/terraform-json/releases/new) via GitHub UI to point to the new tag and use GitHub to generate the changelog (`Generate release notes` button).
+
+You can format the generated changelog before publishing - e.g. ensure entries are grouped into categories such as `ENHANCEMENTS`, `BUG FIXES` and `INTERNAL`.
+
+## Security vulnerabilities
+
+Please disclose security vulnerabilities by following the procedure
+described at https://www.hashicorp.com/security#vulnerability-reporting.
diff --git a/vendor/github.com/hashicorp/terraform-json/action.go b/vendor/github.com/hashicorp/terraform-json/action.go
index c74f7e68a3..9dcab8569e 100644
--- a/vendor/github.com/hashicorp/terraform-json/action.go
+++ b/vendor/github.com/hashicorp/terraform-json/action.go
@@ -26,6 +26,9 @@ const (
// ActionDelete denotes a delete operation.
ActionDelete Action = "delete"
+
+ // ActionForget denotes a forget operation.
+ ActionForget Action = "forget"
)
// Actions denotes a valid change type.
@@ -105,3 +108,12 @@ func (a Actions) CreateBeforeDestroy() bool {
func (a Actions) Replace() bool {
return a.DestroyBeforeCreate() || a.CreateBeforeDestroy()
}
+
+// Forget is true if this set of Actions denotes a forget operation.
+func (a Actions) Forget() bool {
+ if len(a) != 1 {
+ return false
+ }
+
+ return a[0] == ActionForget
+}
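
A short sketch of checking for the new forget action in a parsed plan; the `plan.json` path is a placeholder for output produced by `terraform show -json`:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"os"

	tfjson "github.com/hashicorp/terraform-json"
)

func main() {
	// Assumes plan.json was produced by `terraform show -json plan.tfplan`.
	raw, err := os.ReadFile("plan.json")
	if err != nil {
		log.Fatal(err)
	}

	var plan tfjson.Plan
	if err := json.Unmarshal(raw, &plan); err != nil {
		log.Fatal(err)
	}

	for _, rc := range plan.ResourceChanges {
		if rc.Change != nil && rc.Change.Actions.Forget() {
			fmt.Printf("%s will be forgotten (removed from state, not destroyed)\n", rc.Address)
		}
	}
}
```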
diff --git a/vendor/github.com/hashicorp/terraform-json/catalog-info.yaml b/vendor/github.com/hashicorp/terraform-json/catalog-info.yaml
new file mode 100644
index 0000000000..984285471b
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-json/catalog-info.yaml
@@ -0,0 +1,17 @@
+# Copyright (c) HashiCorp, Inc.
+# SPDX-License-Identifier: MPL-2.0
+#
+# Intended for internal HashiCorp use only
+apiVersion: backstage.io/v1alpha1
+kind: Component
+metadata:
+ name: terraform-json
+ description: Helper types for the Terraform external data representation
+ annotations:
+ github.com/project-slug: hashicorp/terraform-json
+ jira/project-key: TF
+ jira/label: terraform-json
+spec:
+ type: library
+ owner: terraform-core
+ lifecycle: production
diff --git a/vendor/github.com/hashicorp/terraform-json/schemas.go b/vendor/github.com/hashicorp/terraform-json/schemas.go
index a2918ef480..13d0d38562 100644
--- a/vendor/github.com/hashicorp/terraform-json/schemas.go
+++ b/vendor/github.com/hashicorp/terraform-json/schemas.go
@@ -87,6 +87,9 @@ type ProviderSchema struct {
// The schemas for any data sources in this provider.
DataSourceSchemas map[string]*Schema `json:"data_source_schemas,omitempty"`
+ // The schemas for any ephemeral resources in this provider.
+ EphemeralResourceSchemas map[string]*Schema `json:"ephemeral_resource_schemas,omitempty"`
+
// The definitions for any functions in this provider.
Functions map[string]*FunctionSignature `json:"functions,omitempty"`
}
@@ -227,6 +230,10 @@ type SchemaAttribute struct {
// in logs. Future versions of Terraform may encrypt or otherwise
// treat these values with greater care than non-sensitive fields.
Sensitive bool `json:"sensitive,omitempty"`
+
+ // If true, this attribute is write only and its value will not be
+ // persisted in artifacts such as plan files or state.
+ WriteOnly bool `json:"write_only,omitempty"`
}
// jsonSchemaAttribute describes an attribute within a schema block
@@ -246,6 +253,7 @@ type jsonSchemaAttribute struct {
Optional bool `json:"optional,omitempty"`
Computed bool `json:"computed,omitempty"`
Sensitive bool `json:"sensitive,omitempty"`
+ WriteOnly bool `json:"write_only,omitempty"`
}
func (as *SchemaAttribute) MarshalJSON() ([]byte, error) {
@@ -258,6 +266,7 @@ func (as *SchemaAttribute) MarshalJSON() ([]byte, error) {
Optional: as.Optional,
Computed: as.Computed,
Sensitive: as.Sensitive,
+ WriteOnly: as.WriteOnly,
}
if as.AttributeType != cty.NilType {
attrTy, _ := as.AttributeType.MarshalJSON()
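
A minimal sketch showing how the new `WriteOnly` field round-trips through the custom JSON marshaling above:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	tfjson "github.com/hashicorp/terraform-json"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	attr := &tfjson.SchemaAttribute{
		AttributeType: cty.String,
		Required:      true,
		// Write-only: accepted in configuration but never persisted to
		// plan files or state.
		WriteOnly: true,
	}

	out, err := json.Marshal(attr)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(out)) // ... "write_only":true ...
}
```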
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/internal/logging/context.go b/vendor/github.com/hashicorp/terraform-plugin-go/internal/logging/context.go
index d99e19796c..67ddfe3f15 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/internal/logging/context.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/internal/logging/context.go
@@ -82,6 +82,15 @@ func ResourceContext(ctx context.Context, resource string) context.Context {
return ctx
}
+// EphemeralResourceContext injects the ephemeral resource type into logger contexts.
+func EphemeralResourceContext(ctx context.Context, ephemeralResource string) context.Context {
+ ctx = tfsdklog.SetField(ctx, KeyEphemeralResourceType, ephemeralResource)
+ ctx = tfsdklog.SubsystemSetField(ctx, SubsystemProto, KeyEphemeralResourceType, ephemeralResource)
+ ctx = tflog.SetField(ctx, KeyEphemeralResourceType, ephemeralResource)
+
+ return ctx
+}
+
// RpcContext injects the RPC name into logger contexts.
func RpcContext(ctx context.Context, rpc string) context.Context {
ctx = tfsdklog.SetField(ctx, KeyRPC, rpc)
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/internal/logging/keys.go b/vendor/github.com/hashicorp/terraform-plugin-go/internal/logging/keys.go
index fb82144294..afa76f9d3e 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/internal/logging/keys.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/internal/logging/keys.go
@@ -57,6 +57,9 @@ const (
// The type of data source being operated on, such as "archive_file"
KeyDataSourceType = "tf_data_source_type"
+ // The type of ephemeral resource being operated on, such as "random_password"
+ KeyEphemeralResourceType = "tf_ephemeral_resource_type"
+
// Path to protocol data file, such as "/tmp/example.json"
KeyProtocolDataFile = "tf_proto_data_file"
@@ -69,9 +72,15 @@ const (
// Whether the GetProviderSchemaOptional server capability is enabled
KeyServerCapabilityGetProviderSchemaOptional = "tf_server_capability_get_provider_schema_optional"
+ // Whether the MoveResourceState server capability is enabled
+ KeyServerCapabilityMoveResourceState = "tf_server_capability_move_resource_state"
+
// Whether the PlanDestroy server capability is enabled
KeyServerCapabilityPlanDestroy = "tf_server_capability_plan_destroy"
// Whether the DeferralAllowed client capability is enabled
KeyClientCapabilityDeferralAllowed = "tf_client_capability_deferral_allowed"
+
+ // Whether the WriteOnlyAttributesAllowed client capability is enabled
+ KeyClientCapabilityWriteOnlyAttributesAllowed = "tf_client_capability_write_only_attributes_allowed"
)
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/client_capabilities.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/client_capabilities.go
index ba01cd8b8f..8a177724b9 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/client_capabilities.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/client_capabilities.go
@@ -3,6 +3,15 @@
package tfprotov5
+// ValidateResourceTypeConfigClientCapabilities allows Terraform to publish information
+// regarding optionally supported protocol features for the ValidateResourceTypeConfig RPC,
+// such as forward-compatible Terraform behavior changes.
+type ValidateResourceTypeConfigClientCapabilities struct {
+ // WriteOnlyAttributesAllowed signals that the client is able to
+ // handle write_only attributes for managed resources.
+ WriteOnlyAttributesAllowed bool
+}
+
// ConfigureProviderClientCapabilities allows Terraform to publish information
// regarding optionally supported protocol features for the ConfigureProvider RPC,
// such as forward-compatible Terraform behavior changes.
@@ -47,3 +56,12 @@ type ImportResourceStateClientCapabilities struct {
// handle deferred responses from the provider.
DeferralAllowed bool
}
+
+// OpenEphemeralResourceClientCapabilities allows Terraform to publish information
+// regarding optionally supported protocol features for the OpenEphemeralResource RPC,
+// such as forward-compatible Terraform behavior changes.
+type OpenEphemeralResourceClientCapabilities struct {
+ // DeferralAllowed signals that the request from Terraform is able to
+ // handle deferred responses from the provider.
+ DeferralAllowed bool
+}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/ephemeral_resource.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/ephemeral_resource.go
new file mode 100644
index 0000000000..1794f91b43
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/ephemeral_resource.go
@@ -0,0 +1,185 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package tfprotov5
+
+import (
+ "context"
+ "time"
+)
+
+// EphemeralResourceMetadata describes metadata for an ephemeral resource in the GetMetadata
+// RPC.
+type EphemeralResourceMetadata struct {
+ // TypeName is the name of the ephemeral resource.
+ TypeName string
+}
+
+// EphemeralResourceServer is an interface containing the methods an ephemeral resource
+// implementation needs to fill.
+type EphemeralResourceServer interface {
+ // ValidateEphemeralResourceConfig is called when Terraform is checking that an
+ // ephemeral resource configuration is valid. It is guaranteed to have types
+ // conforming to your schema, but it is not guaranteed that all values
+ // will be known. This is your opportunity to do custom or advanced
+ // validation prior to an ephemeral resource being opened.
+ ValidateEphemeralResourceConfig(context.Context, *ValidateEphemeralResourceConfigRequest) (*ValidateEphemeralResourceConfigResponse, error)
+
+ // OpenEphemeralResource is called when Terraform wants to open the ephemeral resource,
+ // usually during planning. If the config for the ephemeral resource contains unknown
+ // values, Terraform will defer the OpenEphemeralResource call until apply.
+ OpenEphemeralResource(context.Context, *OpenEphemeralResourceRequest) (*OpenEphemeralResourceResponse, error)
+
+ // RenewEphemeralResource is called when Terraform detects that the previously specified
+ // RenewAt timestamp has passed. The RenewAt timestamp is supplied either from the
+ // OpenEphemeralResource call or a previous RenewEphemeralResource call.
+ RenewEphemeralResource(context.Context, *RenewEphemeralResourceRequest) (*RenewEphemeralResourceResponse, error)
+
+ // CloseEphemeralResource is called when Terraform is closing the ephemeral resource.
+ CloseEphemeralResource(context.Context, *CloseEphemeralResourceRequest) (*CloseEphemeralResourceResponse, error)
+}
+
+// ValidateEphemeralResourceConfigRequest is the request Terraform sends when it
+// wants to validate an ephemeral resource's configuration.
+type ValidateEphemeralResourceConfigRequest struct {
+ // TypeName is the type of resource Terraform is validating.
+ TypeName string
+
+ // Config is the configuration the user supplied for that ephemeral resource. See
+ // the documentation on `DynamicValue` for more information about
+ // safely accessing the configuration.
+ //
+ // The configuration is represented as a tftypes.Object, with each
+ // attribute and nested block getting its own key and value.
+ //
+ // This configuration may contain unknown values if a user uses
+ // interpolation or other functionality that would prevent Terraform
+ // from knowing the value at request time. Any attributes not directly
+ // set in the configuration will be null.
+ Config *DynamicValue
+}
+
+// ValidateEphemeralResourceConfigResponse is the response from the provider about
+// the validity of an ephemeral resource's configuration.
+type ValidateEphemeralResourceConfigResponse struct {
+ // Diagnostics report errors or warnings related to the given
+ // configuration. Returning an empty slice indicates a successful
+ // validation with no warnings or errors generated.
+ Diagnostics []*Diagnostic
+}
+
+// OpenEphemeralResourceRequest is the request Terraform sends when it
+// wants to open an ephemeral resource.
+type OpenEphemeralResourceRequest struct {
+ // TypeName is the type of resource Terraform is opening.
+ TypeName string
+
+ // Config is the configuration the user supplied for that ephemeral resource. See
+ // the documentation on `DynamicValue` for more information about
+ // safely accessing the configuration.
+ //
+ // The configuration is represented as a tftypes.Object, with each
+ // attribute and nested block getting its own key and value.
+ //
+ // This configuration will always be fully known. If Config contains unknown values,
+ // Terraform will defer the OpenEphemeralResource RPC until apply.
+ Config *DynamicValue
+
+ // ClientCapabilities defines optionally supported protocol features for the
+ // OpenEphemeralResource RPC, such as forward-compatible Terraform behavior changes.
+ ClientCapabilities *OpenEphemeralResourceClientCapabilities
+}
+
+// OpenEphemeralResourceResponse is the response from the provider about the current
+// state of the opened ephemeral resource.
+type OpenEphemeralResourceResponse struct {
+ // Result is the provider's understanding of what the ephemeral resource's
+ // data is after it has been opened, represented as a `DynamicValue`.
+ // See the documentation for `DynamicValue` for information about
+ // safely creating the `DynamicValue`.
+ //
+ // Any attribute, whether computed or not, that has a known value in
+ // the Config in the OpenEphemeralResourceRequest must be preserved
+ // exactly as it was in Result.
+ //
+ // Any attribute in the Config in the OpenEphemeralResourceRequest
+ // that is unknown must take on a known value at this time. No unknown
+ // values are allowed in the Result.
+ //
+ // The result should be represented as a tftypes.Object, with each
+ // attribute and nested block getting its own key and value.
+ Result *DynamicValue
+
+ // Diagnostics report errors or warnings related to opening the
+ // requested ephemeral resource. Returning an empty slice
+ // indicates a successful creation with no warnings or errors
+ // generated.
+ Diagnostics []*Diagnostic
+
+ // Private should be set to any private data that the provider would like to be
+ // sent to the next Renew or Close call.
+ Private []byte
+
+ // RenewAt indicates to Terraform that the ephemeral resource
+ // needs to be renewed at the specified time. Terraform will
+ // call the RenewEphemeralResource RPC when the specified time has passed.
+ RenewAt time.Time
+
+ // Deferred is used to indicate to Terraform that the OpenEphemeralResource operation
+ // needs to be deferred for a reason.
+ Deferred *Deferred
+}
+
+// RenewEphemeralResourceRequest is the request Terraform sends when it
+// wants to renew an ephemeral resource.
+type RenewEphemeralResourceRequest struct {
+ // TypeName is the type of resource Terraform is renewing.
+ TypeName string
+
+ // Private is any provider-defined private data stored with the
+ // ephemeral resource from the most recent Open or Renew call.
+ //
+ // To ensure private data is preserved, copy any necessary data to
+ // the RenewEphemeralResourceResponse type Private field.
+ Private []byte
+}
+
+// RenewEphemeralResourceResponse is the response from the provider after an ephemeral resource
+// has been renewed.
+type RenewEphemeralResourceResponse struct {
+ // Diagnostics report errors or warnings related to renewing the
+ // requested ephemeral resource. Returning an empty slice
+ // indicates a successful renewal with no warnings or errors
+ // generated.
+ Diagnostics []*Diagnostic
+
+ // Private should be set to any private data that the provider would like to be
+ // sent to the next Renew or Close call.
+ Private []byte
+
+ // RenewAt indicates to Terraform that the ephemeral resource
+ // needs to be renewed at the specified time. Terraform will
+ // call the RenewEphemeralResource RPC when the specified time has passed.
+ RenewAt time.Time
+}
+
+// CloseEphemeralResourceRequest is the request Terraform sends when it
+// wants to close an ephemeral resource.
+type CloseEphemeralResourceRequest struct {
+ // TypeName is the type of resource Terraform is closing.
+ TypeName string
+
+ // Private is any provider-defined private data stored with the
+ // ephemeral resource from the most recent Open or Renew call.
+ Private []byte
+}
+
+// CloseEphemeralResourceResponse is the response from the provider about
+// the closed ephemeral resource.
+type CloseEphemeralResourceResponse struct {
+ // Diagnostics report errors or warnings related to closing the
+ // requested ephemeral resource. Returning an empty slice
+ // indicates a successful close with no warnings or errors
+ // generated.
+ Diagnostics []*Diagnostic
+}
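
For orientation, a bare-bones sketch of a type satisfying the new `EphemeralResourceServer` interface. Everything except the interface and its request/response types is hypothetical, and the bodies only show the expected shape (Open returns private data and an optional `RenewAt`; Renew and Close receive the opaque `Private` blob back):

```go
package example

import (
	"context"
	"time"

	"github.com/hashicorp/terraform-plugin-go/tfprotov5"
)

// tokenServer is a hypothetical ephemeral resource that mints short-lived tokens.
type tokenServer struct{}

var _ tfprotov5.EphemeralResourceServer = tokenServer{}

func (tokenServer) ValidateEphemeralResourceConfig(ctx context.Context, req *tfprotov5.ValidateEphemeralResourceConfigRequest) (*tfprotov5.ValidateEphemeralResourceConfigResponse, error) {
	// No custom validation; returning empty diagnostics means success.
	return &tfprotov5.ValidateEphemeralResourceConfigResponse{}, nil
}

func (tokenServer) OpenEphemeralResource(ctx context.Context, req *tfprotov5.OpenEphemeralResourceRequest) (*tfprotov5.OpenEphemeralResourceResponse, error) {
	// A real implementation would decode req.Config, acquire the token,
	// and encode it into the Result field as a DynamicValue.
	return &tfprotov5.OpenEphemeralResourceResponse{
		Private: []byte(`{"lease":"example"}`),
		RenewAt: time.Now().Add(30 * time.Minute),
	}, nil
}

func (tokenServer) RenewEphemeralResource(ctx context.Context, req *tfprotov5.RenewEphemeralResourceRequest) (*tfprotov5.RenewEphemeralResourceResponse, error) {
	// req.Private carries whatever Open (or a previous Renew) stored.
	return &tfprotov5.RenewEphemeralResourceResponse{
		Private: req.Private,
		RenewAt: time.Now().Add(30 * time.Minute),
	}, nil
}

func (tokenServer) CloseEphemeralResource(ctx context.Context, req *tfprotov5.CloseEphemeralResourceRequest) (*tfprotov5.CloseEphemeralResourceResponse, error) {
	// Revoke or release the underlying credential here.
	return &tfprotov5.CloseEphemeralResourceResponse{}, nil
}
```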
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/fromproto/client_capabilities.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/fromproto/client_capabilities.go
index 94ddc3d435..3bff55c1a2 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/fromproto/client_capabilities.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/fromproto/client_capabilities.go
@@ -8,6 +8,18 @@ import (
"github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5"
)
+func ValidateResourceTypeConfigClientCapabilities(in *tfplugin5.ClientCapabilities) *tfprotov5.ValidateResourceTypeConfigClientCapabilities {
+ if in == nil {
+ return nil
+ }
+
+ resp := &tfprotov5.ValidateResourceTypeConfigClientCapabilities{
+ WriteOnlyAttributesAllowed: in.WriteOnlyAttributesAllowed,
+ }
+
+ return resp
+}
+
func ConfigureProviderClientCapabilities(in *tfplugin5.ClientCapabilities) *tfprotov5.ConfigureProviderClientCapabilities {
if in == nil {
return nil
@@ -67,3 +79,15 @@ func ImportResourceStateClientCapabilities(in *tfplugin5.ClientCapabilities) *tf
return resp
}
+
+func OpenEphemeralResourceClientCapabilities(in *tfplugin5.ClientCapabilities) *tfprotov5.OpenEphemeralResourceClientCapabilities {
+ if in == nil {
+ return nil
+ }
+
+ resp := &tfprotov5.OpenEphemeralResourceClientCapabilities{
+ DeferralAllowed: in.DeferralAllowed,
+ }
+
+ return resp
+}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/fromproto/ephemeral_resource.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/fromproto/ephemeral_resource.go
new file mode 100644
index 0000000000..bb8c123005
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/fromproto/ephemeral_resource.go
@@ -0,0 +1,54 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package fromproto
+
+import (
+ "github.com/hashicorp/terraform-plugin-go/tfprotov5"
+ "github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5"
+)
+
+func ValidateEphemeralResourceConfigRequest(in *tfplugin5.ValidateEphemeralResourceConfig_Request) *tfprotov5.ValidateEphemeralResourceConfigRequest {
+ if in == nil {
+ return nil
+ }
+
+ return &tfprotov5.ValidateEphemeralResourceConfigRequest{
+ TypeName: in.TypeName,
+ Config: DynamicValue(in.Config),
+ }
+}
+
+func OpenEphemeralResourceRequest(in *tfplugin5.OpenEphemeralResource_Request) *tfprotov5.OpenEphemeralResourceRequest {
+ if in == nil {
+ return nil
+ }
+
+ return &tfprotov5.OpenEphemeralResourceRequest{
+ TypeName: in.TypeName,
+ Config: DynamicValue(in.Config),
+ ClientCapabilities: OpenEphemeralResourceClientCapabilities(in.ClientCapabilities),
+ }
+}
+
+func RenewEphemeralResourceRequest(in *tfplugin5.RenewEphemeralResource_Request) *tfprotov5.RenewEphemeralResourceRequest {
+ if in == nil {
+ return nil
+ }
+
+ return &tfprotov5.RenewEphemeralResourceRequest{
+ TypeName: in.TypeName,
+ Private: in.Private,
+ }
+}
+
+func CloseEphemeralResourceRequest(in *tfplugin5.CloseEphemeralResource_Request) *tfprotov5.CloseEphemeralResourceRequest {
+ if in == nil {
+ return nil
+ }
+
+ return &tfprotov5.CloseEphemeralResourceRequest{
+ TypeName: in.TypeName,
+ Private: in.Private,
+ }
+}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/fromproto/resource.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/fromproto/resource.go
index f531b48707..02a09dc9f2 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/fromproto/resource.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/fromproto/resource.go
@@ -14,8 +14,9 @@ func ValidateResourceTypeConfigRequest(in *tfplugin5.ValidateResourceTypeConfig_
}
resp := &tfprotov5.ValidateResourceTypeConfigRequest{
- Config: DynamicValue(in.Config),
- TypeName: in.TypeName,
+ ClientCapabilities: ValidateResourceTypeConfigClientCapabilities(in.ClientCapabilities),
+ Config: DynamicValue(in.Config),
+ TypeName: in.TypeName,
}
return resp
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tf5serverlogging/client_capabilities.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tf5serverlogging/client_capabilities.go
index d64557b83a..6aa0741c95 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tf5serverlogging/client_capabilities.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tf5serverlogging/client_capabilities.go
@@ -10,6 +10,20 @@ import (
"github.com/hashicorp/terraform-plugin-go/tfprotov5"
)
+// ValidateResourceTypeConfigClientCapabilities generates a TRACE "Announced client capabilities" log.
+func ValidateResourceTypeConfigClientCapabilities(ctx context.Context, capabilities *tfprotov5.ValidateResourceTypeConfigClientCapabilities) {
+ if capabilities == nil {
+ logging.ProtocolTrace(ctx, "No announced client capabilities", map[string]interface{}{})
+ return
+ }
+
+ responseFields := map[string]interface{}{
+ logging.KeyClientCapabilityWriteOnlyAttributesAllowed: capabilities.WriteOnlyAttributesAllowed,
+ }
+
+ logging.ProtocolTrace(ctx, "Announced client capabilities", responseFields)
+}
+
// ConfigureProviderClientCapabilities generates a TRACE "Announced client capabilities" log.
func ConfigureProviderClientCapabilities(ctx context.Context, capabilities *tfprotov5.ConfigureProviderClientCapabilities) {
if capabilities == nil {
@@ -79,3 +93,17 @@ func ImportResourceStateClientCapabilities(ctx context.Context, capabilities *tf
logging.ProtocolTrace(ctx, "Announced client capabilities", responseFields)
}
+
+// OpenEphemeralResourceClientCapabilities generates a TRACE "Announced client capabilities" log.
+func OpenEphemeralResourceClientCapabilities(ctx context.Context, capabilities *tfprotov5.OpenEphemeralResourceClientCapabilities) {
+ if capabilities == nil {
+ logging.ProtocolTrace(ctx, "No announced client capabilities", map[string]interface{}{})
+ return
+ }
+
+ responseFields := map[string]interface{}{
+ logging.KeyClientCapabilityDeferralAllowed: capabilities.DeferralAllowed,
+ }
+
+ logging.ProtocolTrace(ctx, "Announced client capabilities", responseFields)
+}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tf5serverlogging/server_capabilities.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tf5serverlogging/server_capabilities.go
index d0f86c8427..aff78da75a 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tf5serverlogging/server_capabilities.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tf5serverlogging/server_capabilities.go
@@ -14,11 +14,13 @@ import (
func ServerCapabilities(ctx context.Context, capabilities *tfprotov5.ServerCapabilities) {
responseFields := map[string]interface{}{
logging.KeyServerCapabilityGetProviderSchemaOptional: false,
+ logging.KeyServerCapabilityMoveResourceState: false,
logging.KeyServerCapabilityPlanDestroy: false,
}
if capabilities != nil {
responseFields[logging.KeyServerCapabilityGetProviderSchemaOptional] = capabilities.GetProviderSchemaOptional
+ responseFields[logging.KeyServerCapabilityMoveResourceState] = capabilities.MoveResourceState
responseFields[logging.KeyServerCapabilityPlanDestroy] = capabilities.PlanDestroy
}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5/tfplugin5.pb.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5/tfplugin5.pb.go
index 46ce948a15..0edbb22352 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5/tfplugin5.pb.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5/tfplugin5.pb.go
@@ -1,9 +1,9 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0
-// Terraform Plugin RPC protocol version 5.6
+// Terraform Plugin RPC protocol version 5.8
//
-// This file defines version 5.6 of the RPC protocol. To implement a plugin
+// This file defines version 5.8 of the RPC protocol. To implement a plugin
// against this protocol, copy this definition into your own codebase and
// use protoc to generate stubs for your target language.
//
@@ -22,8 +22,8 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.34.0
-// protoc v5.26.1
+// protoc-gen-go v1.36.3
+// protoc v5.29.3
// source: tfplugin5.proto
package tfplugin5
@@ -31,6 +31,7 @@ package tfplugin5
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
+ timestamppb "google.golang.org/protobuf/types/known/timestamppb"
reflect "reflect"
sync "sync"
)
@@ -257,21 +258,18 @@ func (Deferred_Reason) EnumDescriptor() ([]byte, []int) {
// DynamicValue is an opaque encoding of terraform data, with the field name
// indicating the encoding scheme used.
type DynamicValue struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Msgpack []byte `protobuf:"bytes,1,opt,name=msgpack,proto3" json:"msgpack,omitempty"`
+ Json []byte `protobuf:"bytes,2,opt,name=json,proto3" json:"json,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Msgpack []byte `protobuf:"bytes,1,opt,name=msgpack,proto3" json:"msgpack,omitempty"`
- Json []byte `protobuf:"bytes,2,opt,name=json,proto3" json:"json,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *DynamicValue) Reset() {
*x = DynamicValue{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *DynamicValue) String() string {
@@ -282,7 +280,7 @@ func (*DynamicValue) ProtoMessage() {}
func (x *DynamicValue) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[0]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -312,23 +310,20 @@ func (x *DynamicValue) GetJson() []byte {
}
type Diagnostic struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Severity Diagnostic_Severity `protobuf:"varint,1,opt,name=severity,proto3,enum=tfplugin5.Diagnostic_Severity" json:"severity,omitempty"`
+ Summary string `protobuf:"bytes,2,opt,name=summary,proto3" json:"summary,omitempty"`
+ Detail string `protobuf:"bytes,3,opt,name=detail,proto3" json:"detail,omitempty"`
+ Attribute *AttributePath `protobuf:"bytes,4,opt,name=attribute,proto3" json:"attribute,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Severity Diagnostic_Severity `protobuf:"varint,1,opt,name=severity,proto3,enum=tfplugin5.Diagnostic_Severity" json:"severity,omitempty"`
- Summary string `protobuf:"bytes,2,opt,name=summary,proto3" json:"summary,omitempty"`
- Detail string `protobuf:"bytes,3,opt,name=detail,proto3" json:"detail,omitempty"`
- Attribute *AttributePath `protobuf:"bytes,4,opt,name=attribute,proto3" json:"attribute,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *Diagnostic) Reset() {
*x = Diagnostic{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Diagnostic) String() string {
@@ -339,7 +334,7 @@ func (*Diagnostic) ProtoMessage() {}
func (x *Diagnostic) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[1]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -383,23 +378,20 @@ func (x *Diagnostic) GetAttribute() *AttributePath {
}
type FunctionError struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"`
// The optional function_argument records the index position of the
// argument which caused the error.
FunctionArgument *int64 `protobuf:"varint,2,opt,name=function_argument,json=functionArgument,proto3,oneof" json:"function_argument,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *FunctionError) Reset() {
*x = FunctionError{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[2]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *FunctionError) String() string {
@@ -410,7 +402,7 @@ func (*FunctionError) ProtoMessage() {}
func (x *FunctionError) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[2]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -440,20 +432,17 @@ func (x *FunctionError) GetFunctionArgument() int64 {
}
type AttributePath struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Steps []*AttributePath_Step `protobuf:"bytes,1,rep,name=steps,proto3" json:"steps,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Steps []*AttributePath_Step `protobuf:"bytes,1,rep,name=steps,proto3" json:"steps,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *AttributePath) Reset() {
*x = AttributePath{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[3]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[3]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *AttributePath) String() string {
@@ -464,7 +453,7 @@ func (*AttributePath) ProtoMessage() {}
func (x *AttributePath) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[3]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -487,18 +476,16 @@ func (x *AttributePath) GetSteps() []*AttributePath_Step {
}
type Stop struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *Stop) Reset() {
*x = Stop{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[4]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[4]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Stop) String() string {
@@ -509,7 +496,7 @@ func (*Stop) ProtoMessage() {}
func (x *Stop) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[4]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -528,21 +515,18 @@ func (*Stop) Descriptor() ([]byte, []int) {
// provider. It can be in one of two formats, the current json encoded format
// in bytes, or the legacy flatmap format as a map of strings.
type RawState struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Json []byte `protobuf:"bytes,1,opt,name=json,proto3" json:"json,omitempty"`
+ Flatmap map[string]string `protobuf:"bytes,2,rep,name=flatmap,proto3" json:"flatmap,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
unknownFields protoimpl.UnknownFields
-
- Json []byte `protobuf:"bytes,1,opt,name=json,proto3" json:"json,omitempty"`
- Flatmap map[string]string `protobuf:"bytes,2,rep,name=flatmap,proto3" json:"flatmap,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
+ sizeCache protoimpl.SizeCache
}
func (x *RawState) Reset() {
*x = RawState{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[5]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[5]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *RawState) String() string {
@@ -553,7 +537,7 @@ func (*RawState) ProtoMessage() {}
func (x *RawState) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[5]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -584,25 +568,22 @@ func (x *RawState) GetFlatmap() map[string]string {
// Schema is the configuration schema for a Resource, Provider, or Provisioner.
type Schema struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// The version of the schema.
// Schemas are versioned, so that providers can upgrade a saved resource
// state when the schema is changed.
Version int64 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"`
// Block is the top level configuration block for this schema.
- Block *Schema_Block `protobuf:"bytes,2,opt,name=block,proto3" json:"block,omitempty"`
+ Block *Schema_Block `protobuf:"bytes,2,opt,name=block,proto3" json:"block,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *Schema) Reset() {
*x = Schema{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[6]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[6]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Schema) String() string {
@@ -613,7 +594,7 @@ func (*Schema) ProtoMessage() {}
func (x *Schema) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[6]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -647,10 +628,7 @@ func (x *Schema) GetBlock() *Schema_Block {
// availability of certain forward-compatible changes which may be optional
// in a major protocol version, but cannot be tested for directly.
type ServerCapabilities struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// The plan_destroy capability signals that a provider expects a call
// to PlanResourceChange when a resource is going to be destroyed.
PlanDestroy bool `protobuf:"varint,1,opt,name=plan_destroy,json=planDestroy,proto3" json:"plan_destroy,omitempty"`
@@ -662,15 +640,15 @@ type ServerCapabilities struct {
// The move_resource_state capability signals that a provider supports the
// MoveResourceState RPC.
MoveResourceState bool `protobuf:"varint,3,opt,name=move_resource_state,json=moveResourceState,proto3" json:"move_resource_state,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ServerCapabilities) Reset() {
*x = ServerCapabilities{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[7]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[7]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ServerCapabilities) String() string {
@@ -681,7 +659,7 @@ func (*ServerCapabilities) ProtoMessage() {}
func (x *ServerCapabilities) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[7]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -722,22 +700,22 @@ func (x *ServerCapabilities) GetMoveResourceState() bool {
// certain forward-compatible changes which may be optional in a major
// protocol version, but cannot be tested for directly.
type ClientCapabilities struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// The deferral_allowed capability signals that the client is able to
// handle deferred responses from the provider.
DeferralAllowed bool `protobuf:"varint,1,opt,name=deferral_allowed,json=deferralAllowed,proto3" json:"deferral_allowed,omitempty"`
+ // The write_only_attributes_allowed capability signals that the client
+ // is able to handle write_only attributes for managed resources.
+ WriteOnlyAttributesAllowed bool `protobuf:"varint,2,opt,name=write_only_attributes_allowed,json=writeOnlyAttributesAllowed,proto3" json:"write_only_attributes_allowed,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ClientCapabilities) Reset() {
*x = ClientCapabilities{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[8]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[8]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ClientCapabilities) String() string {
@@ -748,7 +726,7 @@ func (*ClientCapabilities) ProtoMessage() {}
func (x *ClientCapabilities) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[8]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -770,11 +748,15 @@ func (x *ClientCapabilities) GetDeferralAllowed() bool {
return false
}
-type Function struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
+func (x *ClientCapabilities) GetWriteOnlyAttributesAllowed() bool {
+ if x != nil {
+ return x.WriteOnlyAttributesAllowed
+ }
+ return false
+}
+type Function struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
// parameters is the ordered list of positional function parameters.
Parameters []*Function_Parameter `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty"`
// variadic_parameter is an optional final parameter which accepts
@@ -792,15 +774,15 @@ type Function struct {
// deprecation_message is human-readable documentation if the
// function is deprecated.
DeprecationMessage string `protobuf:"bytes,7,opt,name=deprecation_message,json=deprecationMessage,proto3" json:"deprecation_message,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *Function) Reset() {
*x = Function{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[9]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[9]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Function) String() string {
@@ -811,7 +793,7 @@ func (*Function) ProtoMessage() {}
func (x *Function) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[9]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -877,21 +859,18 @@ func (x *Function) GetDeprecationMessage() string {
// Deferred is a message that indicates that change is deferred for a reason.
type Deferred struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// reason is the reason for deferring the change.
- Reason Deferred_Reason `protobuf:"varint,1,opt,name=reason,proto3,enum=tfplugin5.Deferred_Reason" json:"reason,omitempty"`
+ Reason Deferred_Reason `protobuf:"varint,1,opt,name=reason,proto3,enum=tfplugin5.Deferred_Reason" json:"reason,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *Deferred) Reset() {
*x = Deferred{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[10]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[10]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Deferred) String() string {
@@ -902,7 +881,7 @@ func (*Deferred) ProtoMessage() {}
func (x *Deferred) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[10]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -925,18 +904,16 @@ func (x *Deferred) GetReason() Deferred_Reason {
}
type GetMetadata struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetMetadata) Reset() {
*x = GetMetadata{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[11]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[11]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetMetadata) String() string {
@@ -947,7 +924,7 @@ func (*GetMetadata) ProtoMessage() {}
func (x *GetMetadata) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[11]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -963,18 +940,16 @@ func (*GetMetadata) Descriptor() ([]byte, []int) {
}
type GetProviderSchema struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetProviderSchema) Reset() {
*x = GetProviderSchema{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[12]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[12]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetProviderSchema) String() string {
@@ -985,7 +960,7 @@ func (*GetProviderSchema) ProtoMessage() {}
func (x *GetProviderSchema) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[12]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1001,18 +976,16 @@ func (*GetProviderSchema) Descriptor() ([]byte, []int) {
}
type PrepareProviderConfig struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *PrepareProviderConfig) Reset() {
*x = PrepareProviderConfig{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[13]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[13]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *PrepareProviderConfig) String() string {
@@ -1023,7 +996,7 @@ func (*PrepareProviderConfig) ProtoMessage() {}
func (x *PrepareProviderConfig) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[13]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1039,18 +1012,16 @@ func (*PrepareProviderConfig) Descriptor() ([]byte, []int) {
}
type UpgradeResourceState struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *UpgradeResourceState) Reset() {
*x = UpgradeResourceState{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[14]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[14]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *UpgradeResourceState) String() string {
@@ -1061,7 +1032,7 @@ func (*UpgradeResourceState) ProtoMessage() {}
func (x *UpgradeResourceState) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[14]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1077,18 +1048,16 @@ func (*UpgradeResourceState) Descriptor() ([]byte, []int) {
}
type ValidateResourceTypeConfig struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateResourceTypeConfig) Reset() {
*x = ValidateResourceTypeConfig{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[15]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[15]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateResourceTypeConfig) String() string {
@@ -1099,7 +1068,7 @@ func (*ValidateResourceTypeConfig) ProtoMessage() {}
func (x *ValidateResourceTypeConfig) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[15]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1115,18 +1084,16 @@ func (*ValidateResourceTypeConfig) Descriptor() ([]byte, []int) {
}
type ValidateDataSourceConfig struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateDataSourceConfig) Reset() {
*x = ValidateDataSourceConfig{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[16]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[16]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateDataSourceConfig) String() string {
@@ -1137,7 +1104,7 @@ func (*ValidateDataSourceConfig) ProtoMessage() {}
func (x *ValidateDataSourceConfig) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[16]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1153,18 +1120,16 @@ func (*ValidateDataSourceConfig) Descriptor() ([]byte, []int) {
}
type Configure struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *Configure) Reset() {
*x = Configure{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[17]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[17]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Configure) String() string {
@@ -1175,7 +1140,7 @@ func (*Configure) ProtoMessage() {}
func (x *Configure) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[17]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1191,18 +1156,16 @@ func (*Configure) Descriptor() ([]byte, []int) {
}
type ReadResource struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ReadResource) Reset() {
*x = ReadResource{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[18]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[18]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ReadResource) String() string {
@@ -1213,7 +1176,7 @@ func (*ReadResource) ProtoMessage() {}
func (x *ReadResource) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[18]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1229,18 +1192,16 @@ func (*ReadResource) Descriptor() ([]byte, []int) {
}
type PlanResourceChange struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *PlanResourceChange) Reset() {
*x = PlanResourceChange{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[19]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[19]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *PlanResourceChange) String() string {
@@ -1251,7 +1212,7 @@ func (*PlanResourceChange) ProtoMessage() {}
func (x *PlanResourceChange) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[19]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1267,18 +1228,16 @@ func (*PlanResourceChange) Descriptor() ([]byte, []int) {
}
type ApplyResourceChange struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ApplyResourceChange) Reset() {
*x = ApplyResourceChange{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[20]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[20]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ApplyResourceChange) String() string {
@@ -1289,7 +1248,7 @@ func (*ApplyResourceChange) ProtoMessage() {}
func (x *ApplyResourceChange) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[20]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1305,18 +1264,16 @@ func (*ApplyResourceChange) Descriptor() ([]byte, []int) {
}
type ImportResourceState struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ImportResourceState) Reset() {
*x = ImportResourceState{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[21]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[21]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ImportResourceState) String() string {
@@ -1327,7 +1284,7 @@ func (*ImportResourceState) ProtoMessage() {}
func (x *ImportResourceState) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[21]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1343,18 +1300,16 @@ func (*ImportResourceState) Descriptor() ([]byte, []int) {
}
type MoveResourceState struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *MoveResourceState) Reset() {
*x = MoveResourceState{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[22]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[22]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *MoveResourceState) String() string {
@@ -1365,7 +1320,7 @@ func (*MoveResourceState) ProtoMessage() {}
func (x *MoveResourceState) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[22]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1381,18 +1336,16 @@ func (*MoveResourceState) Descriptor() ([]byte, []int) {
}
type ReadDataSource struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ReadDataSource) Reset() {
*x = ReadDataSource{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[23]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[23]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ReadDataSource) String() string {
@@ -1403,7 +1356,7 @@ func (*ReadDataSource) ProtoMessage() {}
func (x *ReadDataSource) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[23]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1419,18 +1372,16 @@ func (*ReadDataSource) Descriptor() ([]byte, []int) {
}
type GetProvisionerSchema struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetProvisionerSchema) Reset() {
*x = GetProvisionerSchema{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[24]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[24]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetProvisionerSchema) String() string {
@@ -1441,7 +1392,7 @@ func (*GetProvisionerSchema) ProtoMessage() {}
func (x *GetProvisionerSchema) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[24]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1457,18 +1408,16 @@ func (*GetProvisionerSchema) Descriptor() ([]byte, []int) {
}
type ValidateProvisionerConfig struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateProvisionerConfig) Reset() {
*x = ValidateProvisionerConfig{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[25]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[25]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateProvisionerConfig) String() string {
@@ -1479,7 +1428,7 @@ func (*ValidateProvisionerConfig) ProtoMessage() {}
func (x *ValidateProvisionerConfig) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[25]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1495,18 +1444,16 @@ func (*ValidateProvisionerConfig) Descriptor() ([]byte, []int) {
}
type ProvisionResource struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ProvisionResource) Reset() {
*x = ProvisionResource{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[26]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[26]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ProvisionResource) String() string {
@@ -1517,7 +1464,7 @@ func (*ProvisionResource) ProtoMessage() {}
func (x *ProvisionResource) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[26]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1533,18 +1480,16 @@ func (*ProvisionResource) Descriptor() ([]byte, []int) {
}
type GetFunctions struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetFunctions) Reset() {
*x = GetFunctions{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[27]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[27]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetFunctions) String() string {
@@ -1555,7 +1500,7 @@ func (*GetFunctions) ProtoMessage() {}
func (x *GetFunctions) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[27]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1571,18 +1516,16 @@ func (*GetFunctions) Descriptor() ([]byte, []int) {
}
type CallFunction struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *CallFunction) Reset() {
*x = CallFunction{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[28]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[28]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *CallFunction) String() string {
@@ -1593,7 +1536,7 @@ func (*CallFunction) ProtoMessage() {}
func (x *CallFunction) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[28]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1608,37 +1551,28 @@ func (*CallFunction) Descriptor() ([]byte, []int) {
return file_tfplugin5_proto_rawDescGZIP(), []int{28}
}
-type AttributePath_Step struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+type ValidateEphemeralResourceConfig struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
-
- // Types that are assignable to Selector:
- //
- // *AttributePath_Step_AttributeName
- // *AttributePath_Step_ElementKeyString
- // *AttributePath_Step_ElementKeyInt
- Selector isAttributePath_Step_Selector `protobuf_oneof:"selector"`
+ sizeCache protoimpl.SizeCache
}
-func (x *AttributePath_Step) Reset() {
- *x = AttributePath_Step{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[29]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+func (x *ValidateEphemeralResourceConfig) Reset() {
+ *x = ValidateEphemeralResourceConfig{}
+ mi := &file_tfplugin5_proto_msgTypes[29]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
-func (x *AttributePath_Step) String() string {
+func (x *ValidateEphemeralResourceConfig) String() string {
return protoimpl.X.MessageStringOf(x)
}
-func (*AttributePath_Step) ProtoMessage() {}
+func (*ValidateEphemeralResourceConfig) ProtoMessage() {}
-func (x *AttributePath_Step) ProtoReflect() protoreflect.Message {
+func (x *ValidateEphemeralResourceConfig) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin5_proto_msgTypes[29]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1648,78 +1582,232 @@ func (x *AttributePath_Step) ProtoReflect() protoreflect.Message {
return mi.MessageOf(x)
}
-// Deprecated: Use AttributePath_Step.ProtoReflect.Descriptor instead.
-func (*AttributePath_Step) Descriptor() ([]byte, []int) {
- return file_tfplugin5_proto_rawDescGZIP(), []int{3, 0}
+// Deprecated: Use ValidateEphemeralResourceConfig.ProtoReflect.Descriptor instead.
+func (*ValidateEphemeralResourceConfig) Descriptor() ([]byte, []int) {
+ return file_tfplugin5_proto_rawDescGZIP(), []int{29}
}
-func (m *AttributePath_Step) GetSelector() isAttributePath_Step_Selector {
- if m != nil {
- return m.Selector
- }
- return nil
+type OpenEphemeralResource struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
-func (x *AttributePath_Step) GetAttributeName() string {
- if x, ok := x.GetSelector().(*AttributePath_Step_AttributeName); ok {
- return x.AttributeName
- }
- return ""
+func (x *OpenEphemeralResource) Reset() {
+ *x = OpenEphemeralResource{}
+ mi := &file_tfplugin5_proto_msgTypes[30]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
-func (x *AttributePath_Step) GetElementKeyString() string {
- if x, ok := x.GetSelector().(*AttributePath_Step_ElementKeyString); ok {
- return x.ElementKeyString
- }
- return ""
+func (x *OpenEphemeralResource) String() string {
+ return protoimpl.X.MessageStringOf(x)
}
-func (x *AttributePath_Step) GetElementKeyInt() int64 {
- if x, ok := x.GetSelector().(*AttributePath_Step_ElementKeyInt); ok {
- return x.ElementKeyInt
+func (*OpenEphemeralResource) ProtoMessage() {}
+
+func (x *OpenEphemeralResource) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin5_proto_msgTypes[30]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
}
- return 0
+ return mi.MessageOf(x)
}
-type isAttributePath_Step_Selector interface {
- isAttributePath_Step_Selector()
+// Deprecated: Use OpenEphemeralResource.ProtoReflect.Descriptor instead.
+func (*OpenEphemeralResource) Descriptor() ([]byte, []int) {
+ return file_tfplugin5_proto_rawDescGZIP(), []int{30}
}
-type AttributePath_Step_AttributeName struct {
- // Set "attribute_name" to represent looking up an attribute
- // in the current object value.
- AttributeName string `protobuf:"bytes,1,opt,name=attribute_name,json=attributeName,proto3,oneof"`
+type RenewEphemeralResource struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
-type AttributePath_Step_ElementKeyString struct {
- // Set "element_key_*" to represent looking up an element in
- // an indexable collection type.
- ElementKeyString string `protobuf:"bytes,2,opt,name=element_key_string,json=elementKeyString,proto3,oneof"`
+func (x *RenewEphemeralResource) Reset() {
+ *x = RenewEphemeralResource{}
+ mi := &file_tfplugin5_proto_msgTypes[31]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
-type AttributePath_Step_ElementKeyInt struct {
- ElementKeyInt int64 `protobuf:"varint,3,opt,name=element_key_int,json=elementKeyInt,proto3,oneof"`
+func (x *RenewEphemeralResource) String() string {
+ return protoimpl.X.MessageStringOf(x)
}
-func (*AttributePath_Step_AttributeName) isAttributePath_Step_Selector() {}
-
-func (*AttributePath_Step_ElementKeyString) isAttributePath_Step_Selector() {}
+func (*RenewEphemeralResource) ProtoMessage() {}
-func (*AttributePath_Step_ElementKeyInt) isAttributePath_Step_Selector() {}
+func (x *RenewEphemeralResource) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin5_proto_msgTypes[31]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
-type Stop_Request struct {
- state protoimpl.MessageState
+// Deprecated: Use RenewEphemeralResource.ProtoReflect.Descriptor instead.
+func (*RenewEphemeralResource) Descriptor() ([]byte, []int) {
+ return file_tfplugin5_proto_rawDescGZIP(), []int{31}
+}
+
+type CloseEphemeralResource struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
+}
+
+func (x *CloseEphemeralResource) Reset() {
+ *x = CloseEphemeralResource{}
+ mi := &file_tfplugin5_proto_msgTypes[32]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *CloseEphemeralResource) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*CloseEphemeralResource) ProtoMessage() {}
+
+func (x *CloseEphemeralResource) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin5_proto_msgTypes[32]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use CloseEphemeralResource.ProtoReflect.Descriptor instead.
+func (*CloseEphemeralResource) Descriptor() ([]byte, []int) {
+ return file_tfplugin5_proto_rawDescGZIP(), []int{32}
+}
+
+type AttributePath_Step struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ // Types that are valid to be assigned to Selector:
+ //
+ // *AttributePath_Step_AttributeName
+ // *AttributePath_Step_ElementKeyString
+ // *AttributePath_Step_ElementKeyInt
+ Selector isAttributePath_Step_Selector `protobuf_oneof:"selector"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
-func (x *Stop_Request) Reset() {
- *x = Stop_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[30]
+func (x *AttributePath_Step) Reset() {
+ *x = AttributePath_Step{}
+ mi := &file_tfplugin5_proto_msgTypes[33]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *AttributePath_Step) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*AttributePath_Step) ProtoMessage() {}
+
+func (x *AttributePath_Step) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin5_proto_msgTypes[33]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use AttributePath_Step.ProtoReflect.Descriptor instead.
+func (*AttributePath_Step) Descriptor() ([]byte, []int) {
+ return file_tfplugin5_proto_rawDescGZIP(), []int{3, 0}
+}
+
+func (x *AttributePath_Step) GetSelector() isAttributePath_Step_Selector {
+ if x != nil {
+ return x.Selector
+ }
+ return nil
+}
+
+func (x *AttributePath_Step) GetAttributeName() string {
+ if x != nil {
+ if x, ok := x.Selector.(*AttributePath_Step_AttributeName); ok {
+ return x.AttributeName
+ }
+ }
+ return ""
+}
+
+func (x *AttributePath_Step) GetElementKeyString() string {
+ if x != nil {
+ if x, ok := x.Selector.(*AttributePath_Step_ElementKeyString); ok {
+ return x.ElementKeyString
+ }
+ }
+ return ""
+}
+
+func (x *AttributePath_Step) GetElementKeyInt() int64 {
+ if x != nil {
+ if x, ok := x.Selector.(*AttributePath_Step_ElementKeyInt); ok {
+ return x.ElementKeyInt
+ }
}
+ return 0
+}
+
+type isAttributePath_Step_Selector interface {
+ isAttributePath_Step_Selector()
+}
+
+type AttributePath_Step_AttributeName struct {
+ // Set "attribute_name" to represent looking up an attribute
+ // in the current object value.
+ AttributeName string `protobuf:"bytes,1,opt,name=attribute_name,json=attributeName,proto3,oneof"`
+}
+
+type AttributePath_Step_ElementKeyString struct {
+ // Set "element_key_*" to represent looking up an element in
+ // an indexable collection type.
+ ElementKeyString string `protobuf:"bytes,2,opt,name=element_key_string,json=elementKeyString,proto3,oneof"`
+}
+
+type AttributePath_Step_ElementKeyInt struct {
+ ElementKeyInt int64 `protobuf:"varint,3,opt,name=element_key_int,json=elementKeyInt,proto3,oneof"`
+}
+
+func (*AttributePath_Step_AttributeName) isAttributePath_Step_Selector() {}
+
+func (*AttributePath_Step_ElementKeyString) isAttributePath_Step_Selector() {}
+
+func (*AttributePath_Step_ElementKeyInt) isAttributePath_Step_Selector() {}
+
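
Editorial sketch (not generated code): the Selector oneof is populated by assigning exactly one of the wrapper types above and read back through the nil-safe getters; the attribute name used here is illustrative.

func exampleStep() *AttributePath_Step {
	step := &AttributePath_Step{
		Selector: &AttributePath_Step_AttributeName{AttributeName: "name"},
	}
	_ = step.GetAttributeName()    // "name"
	_ = step.GetElementKeyString() // "" — a different selector variant is set
	return step
}
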
+type Stop_Request struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *Stop_Request) Reset() {
+ *x = Stop_Request{}
+ mi := &file_tfplugin5_proto_msgTypes[34]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Stop_Request) String() string {
@@ -1729,8 +1817,8 @@ func (x *Stop_Request) String() string {
func (*Stop_Request) ProtoMessage() {}
func (x *Stop_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[30]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[34]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1746,20 +1834,17 @@ func (*Stop_Request) Descriptor() ([]byte, []int) {
}
type Stop_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Error string `protobuf:"bytes,1,opt,name=Error,proto3" json:"Error,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Error string `protobuf:"bytes,1,opt,name=Error,proto3" json:"Error,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *Stop_Response) Reset() {
*x = Stop_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[31]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[35]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Stop_Response) String() string {
@@ -1769,8 +1854,8 @@ func (x *Stop_Response) String() string {
func (*Stop_Response) ProtoMessage() {}
func (x *Stop_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[31]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[35]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1793,25 +1878,22 @@ func (x *Stop_Response) GetError() string {
}
type Schema_Block struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- Version int64 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"`
- Attributes []*Schema_Attribute `protobuf:"bytes,2,rep,name=attributes,proto3" json:"attributes,omitempty"`
- BlockTypes []*Schema_NestedBlock `protobuf:"bytes,3,rep,name=block_types,json=blockTypes,proto3" json:"block_types,omitempty"`
- Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"`
- DescriptionKind StringKind `protobuf:"varint,5,opt,name=description_kind,json=descriptionKind,proto3,enum=tfplugin5.StringKind" json:"description_kind,omitempty"`
- Deprecated bool `protobuf:"varint,6,opt,name=deprecated,proto3" json:"deprecated,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Version int64 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"`
+ Attributes []*Schema_Attribute `protobuf:"bytes,2,rep,name=attributes,proto3" json:"attributes,omitempty"`
+ BlockTypes []*Schema_NestedBlock `protobuf:"bytes,3,rep,name=block_types,json=blockTypes,proto3" json:"block_types,omitempty"`
+ Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"`
+ DescriptionKind StringKind `protobuf:"varint,5,opt,name=description_kind,json=descriptionKind,proto3,enum=tfplugin5.StringKind" json:"description_kind,omitempty"`
+ Deprecated bool `protobuf:"varint,6,opt,name=deprecated,proto3" json:"deprecated,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *Schema_Block) Reset() {
*x = Schema_Block{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[33]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[37]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Schema_Block) String() string {
@@ -1821,8 +1903,8 @@ func (x *Schema_Block) String() string {
func (*Schema_Block) ProtoMessage() {}
func (x *Schema_Block) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[33]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[37]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1880,28 +1962,30 @@ func (x *Schema_Block) GetDeprecated() bool {
}
type Schema_Attribute struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+ Type []byte `protobuf:"bytes,2,opt,name=type,proto3" json:"type,omitempty"`
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"`
+ Required bool `protobuf:"varint,4,opt,name=required,proto3" json:"required,omitempty"`
+ Optional bool `protobuf:"varint,5,opt,name=optional,proto3" json:"optional,omitempty"`
+ Computed bool `protobuf:"varint,6,opt,name=computed,proto3" json:"computed,omitempty"`
+ Sensitive bool `protobuf:"varint,7,opt,name=sensitive,proto3" json:"sensitive,omitempty"`
+ DescriptionKind StringKind `protobuf:"varint,8,opt,name=description_kind,json=descriptionKind,proto3,enum=tfplugin5.StringKind" json:"description_kind,omitempty"`
+ Deprecated bool `protobuf:"varint,9,opt,name=deprecated,proto3" json:"deprecated,omitempty"`
+ // write_only indicates that the attribute value will be provided via
+ // configuration and must be omitted from state. write_only must be
+ // combined with optional or required, and is only valid for managed
+ // resource schemas.
+ WriteOnly bool `protobuf:"varint,10,opt,name=write_only,json=writeOnly,proto3" json:"write_only,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
- Type []byte `protobuf:"bytes,2,opt,name=type,proto3" json:"type,omitempty"`
- Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"`
- Required bool `protobuf:"varint,4,opt,name=required,proto3" json:"required,omitempty"`
- Optional bool `protobuf:"varint,5,opt,name=optional,proto3" json:"optional,omitempty"`
- Computed bool `protobuf:"varint,6,opt,name=computed,proto3" json:"computed,omitempty"`
- Sensitive bool `protobuf:"varint,7,opt,name=sensitive,proto3" json:"sensitive,omitempty"`
- DescriptionKind StringKind `protobuf:"varint,8,opt,name=description_kind,json=descriptionKind,proto3,enum=tfplugin5.StringKind" json:"description_kind,omitempty"`
- Deprecated bool `protobuf:"varint,9,opt,name=deprecated,proto3" json:"deprecated,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *Schema_Attribute) Reset() {
*x = Schema_Attribute{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[34]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[38]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Schema_Attribute) String() string {
@@ -1911,8 +1995,8 @@ func (x *Schema_Attribute) String() string {
func (*Schema_Attribute) ProtoMessage() {}
func (x *Schema_Attribute) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[34]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[38]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1990,25 +2074,29 @@ func (x *Schema_Attribute) GetDeprecated() bool {
return false
}
+func (x *Schema_Attribute) GetWriteOnly() bool {
+ if x != nil {
+ return x.WriteOnly
+ }
+ return false
+}
+
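
Editorial sketch (not generated code): a hypothetical write-only attribute definition; the attribute name is illustrative, and per the field comment above write_only is paired with optional or required and applies only to managed resource schemas.

func examplePasswordAttribute() *Schema_Attribute {
	return &Schema_Attribute{
		Name:      "admin_password",
		Type:      []byte(`"string"`), // type constraints are JSON-encoded in this protocol
		Optional:  true,               // write_only must be combined with optional or required
		Sensitive: true,
		WriteOnly: true, // provided via configuration, omitted from state
	}
}
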
type Schema_NestedBlock struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Block *Schema_Block `protobuf:"bytes,2,opt,name=block,proto3" json:"block,omitempty"`
+ Nesting Schema_NestedBlock_NestingMode `protobuf:"varint,3,opt,name=nesting,proto3,enum=tfplugin5.Schema_NestedBlock_NestingMode" json:"nesting,omitempty"`
+ MinItems int64 `protobuf:"varint,4,opt,name=min_items,json=minItems,proto3" json:"min_items,omitempty"`
+ MaxItems int64 `protobuf:"varint,5,opt,name=max_items,json=maxItems,proto3" json:"max_items,omitempty"`
unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- Block *Schema_Block `protobuf:"bytes,2,opt,name=block,proto3" json:"block,omitempty"`
- Nesting Schema_NestedBlock_NestingMode `protobuf:"varint,3,opt,name=nesting,proto3,enum=tfplugin5.Schema_NestedBlock_NestingMode" json:"nesting,omitempty"`
- MinItems int64 `protobuf:"varint,4,opt,name=min_items,json=minItems,proto3" json:"min_items,omitempty"`
- MaxItems int64 `protobuf:"varint,5,opt,name=max_items,json=maxItems,proto3" json:"max_items,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *Schema_NestedBlock) Reset() {
*x = Schema_NestedBlock{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[35]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[39]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Schema_NestedBlock) String() string {
@@ -2018,8 +2106,8 @@ func (x *Schema_NestedBlock) String() string {
func (*Schema_NestedBlock) ProtoMessage() {}
func (x *Schema_NestedBlock) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[35]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[39]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2070,10 +2158,7 @@ func (x *Schema_NestedBlock) GetMaxItems() int64 {
}
type Function_Parameter struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// name is the human-readable display name for the parameter.
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// type is the type constraint for the parameter.
@@ -2091,15 +2176,15 @@ type Function_Parameter struct {
Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"`
// description_kind is the formatting of the description.
DescriptionKind StringKind `protobuf:"varint,6,opt,name=description_kind,json=descriptionKind,proto3,enum=tfplugin5.StringKind" json:"description_kind,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *Function_Parameter) Reset() {
*x = Function_Parameter{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[36]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[40]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Function_Parameter) String() string {
@@ -2109,8 +2194,8 @@ func (x *Function_Parameter) String() string {
func (*Function_Parameter) ProtoMessage() {}
func (x *Function_Parameter) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[36]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[40]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2168,21 +2253,18 @@ func (x *Function_Parameter) GetDescriptionKind() StringKind {
}
type Function_Return struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// type is the type constraint for the function result.
- Type []byte `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"`
+ Type []byte `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *Function_Return) Reset() {
*x = Function_Return{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[37]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[41]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Function_Return) String() string {
@@ -2192,8 +2274,8 @@ func (x *Function_Return) String() string {
func (*Function_Return) ProtoMessage() {}
func (x *Function_Return) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[37]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[41]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2216,18 +2298,16 @@ func (x *Function_Return) GetType() []byte {
}
type GetMetadata_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetMetadata_Request) Reset() {
*x = GetMetadata_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[38]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[42]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetMetadata_Request) String() string {
@@ -2237,8 +2317,8 @@ func (x *GetMetadata_Request) String() string {
func (*GetMetadata_Request) ProtoMessage() {}
func (x *GetMetadata_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[38]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[42]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2254,25 +2334,23 @@ func (*GetMetadata_Request) Descriptor() ([]byte, []int) {
}
type GetMetadata_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
ServerCapabilities *ServerCapabilities `protobuf:"bytes,1,opt,name=server_capabilities,json=serverCapabilities,proto3" json:"server_capabilities,omitempty"`
Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
DataSources []*GetMetadata_DataSourceMetadata `protobuf:"bytes,3,rep,name=data_sources,json=dataSources,proto3" json:"data_sources,omitempty"`
Resources []*GetMetadata_ResourceMetadata `protobuf:"bytes,4,rep,name=resources,proto3" json:"resources,omitempty"`
// functions returns metadata for any functions.
- Functions []*GetMetadata_FunctionMetadata `protobuf:"bytes,5,rep,name=functions,proto3" json:"functions,omitempty"`
+ Functions []*GetMetadata_FunctionMetadata `protobuf:"bytes,5,rep,name=functions,proto3" json:"functions,omitempty"`
+ EphemeralResources []*GetMetadata_EphemeralResourceMetadata `protobuf:"bytes,6,rep,name=ephemeral_resources,json=ephemeralResources,proto3" json:"ephemeral_resources,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetMetadata_Response) Reset() {
*x = GetMetadata_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[39]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[43]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetMetadata_Response) String() string {
@@ -2282,8 +2360,8 @@ func (x *GetMetadata_Response) String() string {
func (*GetMetadata_Response) ProtoMessage() {}
func (x *GetMetadata_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[39]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[43]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2333,22 +2411,26 @@ func (x *GetMetadata_Response) GetFunctions() []*GetMetadata_FunctionMetadata {
return nil
}
-type GetMetadata_FunctionMetadata struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
+func (x *GetMetadata_Response) GetEphemeralResources() []*GetMetadata_EphemeralResourceMetadata {
+ if x != nil {
+ return x.EphemeralResources
+ }
+ return nil
+}
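
Editorial sketch (not generated code): a hypothetical helper that collects the advertised ephemeral resource type names from a GetMetadata response via the new getter.

func ephemeralTypeNames(resp *GetMetadata_Response) []string {
	names := make([]string, 0, len(resp.GetEphemeralResources()))
	for _, er := range resp.GetEphemeralResources() {
		names = append(names, er.GetTypeName())
	}
	return names
}
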
+type GetMetadata_FunctionMetadata struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
// name is the function name.
- Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetMetadata_FunctionMetadata) Reset() {
*x = GetMetadata_FunctionMetadata{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[40]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[44]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetMetadata_FunctionMetadata) String() string {
@@ -2358,8 +2440,8 @@ func (x *GetMetadata_FunctionMetadata) String() string {
func (*GetMetadata_FunctionMetadata) ProtoMessage() {}
func (x *GetMetadata_FunctionMetadata) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[40]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[44]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2382,20 +2464,17 @@ func (x *GetMetadata_FunctionMetadata) GetName() string {
}
type GetMetadata_DataSourceMetadata struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *GetMetadata_DataSourceMetadata) Reset() {
*x = GetMetadata_DataSourceMetadata{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[41]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[45]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetMetadata_DataSourceMetadata) String() string {
@@ -2405,8 +2484,8 @@ func (x *GetMetadata_DataSourceMetadata) String() string {
func (*GetMetadata_DataSourceMetadata) ProtoMessage() {}
func (x *GetMetadata_DataSourceMetadata) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[41]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[45]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2429,20 +2508,17 @@ func (x *GetMetadata_DataSourceMetadata) GetTypeName() string {
}
type GetMetadata_ResourceMetadata struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *GetMetadata_ResourceMetadata) Reset() {
*x = GetMetadata_ResourceMetadata{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[42]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[46]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetMetadata_ResourceMetadata) String() string {
@@ -2452,8 +2528,8 @@ func (x *GetMetadata_ResourceMetadata) String() string {
func (*GetMetadata_ResourceMetadata) ProtoMessage() {}
func (x *GetMetadata_ResourceMetadata) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[42]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[46]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2475,30 +2551,29 @@ func (x *GetMetadata_ResourceMetadata) GetTypeName() string {
return ""
}
-type GetProviderSchema_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+type GetMetadata_EphemeralResourceMetadata struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
-func (x *GetProviderSchema_Request) Reset() {
- *x = GetProviderSchema_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[43]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+func (x *GetMetadata_EphemeralResourceMetadata) Reset() {
+ *x = GetMetadata_EphemeralResourceMetadata{}
+ mi := &file_tfplugin5_proto_msgTypes[47]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
-func (x *GetProviderSchema_Request) String() string {
+func (x *GetMetadata_EphemeralResourceMetadata) String() string {
return protoimpl.X.MessageStringOf(x)
}
-func (*GetProviderSchema_Request) ProtoMessage() {}
+func (*GetMetadata_EphemeralResourceMetadata) ProtoMessage() {}
-func (x *GetProviderSchema_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[43]
- if protoimpl.UnsafeEnabled && x != nil {
+func (x *GetMetadata_EphemeralResourceMetadata) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin5_proto_msgTypes[47]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2508,33 +2583,74 @@ func (x *GetProviderSchema_Request) ProtoReflect() protoreflect.Message {
return mi.MessageOf(x)
}
-// Deprecated: Use GetProviderSchema_Request.ProtoReflect.Descriptor instead.
-func (*GetProviderSchema_Request) Descriptor() ([]byte, []int) {
- return file_tfplugin5_proto_rawDescGZIP(), []int{12, 0}
+// Deprecated: Use GetMetadata_EphemeralResourceMetadata.ProtoReflect.Descriptor instead.
+func (*GetMetadata_EphemeralResourceMetadata) Descriptor() ([]byte, []int) {
+ return file_tfplugin5_proto_rawDescGZIP(), []int{11, 5}
}
-type GetProviderSchema_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+func (x *GetMetadata_EphemeralResourceMetadata) GetTypeName() string {
+ if x != nil {
+ return x.TypeName
+ }
+ return ""
+}
+
+type GetProviderSchema_Request struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *GetProviderSchema_Request) Reset() {
+ *x = GetProviderSchema_Request{}
+ mi := &file_tfplugin5_proto_msgTypes[48]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *GetProviderSchema_Request) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*GetProviderSchema_Request) ProtoMessage() {}
+
+func (x *GetProviderSchema_Request) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin5_proto_msgTypes[48]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
- Provider *Schema `protobuf:"bytes,1,opt,name=provider,proto3" json:"provider,omitempty"`
- ResourceSchemas map[string]*Schema `protobuf:"bytes,2,rep,name=resource_schemas,json=resourceSchemas,proto3" json:"resource_schemas,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- DataSourceSchemas map[string]*Schema `protobuf:"bytes,3,rep,name=data_source_schemas,json=dataSourceSchemas,proto3" json:"data_source_schemas,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- Diagnostics []*Diagnostic `protobuf:"bytes,4,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
- ProviderMeta *Schema `protobuf:"bytes,5,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
- ServerCapabilities *ServerCapabilities `protobuf:"bytes,6,opt,name=server_capabilities,json=serverCapabilities,proto3" json:"server_capabilities,omitempty"`
+// Deprecated: Use GetProviderSchema_Request.ProtoReflect.Descriptor instead.
+func (*GetProviderSchema_Request) Descriptor() ([]byte, []int) {
+ return file_tfplugin5_proto_rawDescGZIP(), []int{12, 0}
+}
+
+type GetProviderSchema_Response struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Provider *Schema `protobuf:"bytes,1,opt,name=provider,proto3" json:"provider,omitempty"`
+ ResourceSchemas map[string]*Schema `protobuf:"bytes,2,rep,name=resource_schemas,json=resourceSchemas,proto3" json:"resource_schemas,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+ DataSourceSchemas map[string]*Schema `protobuf:"bytes,3,rep,name=data_source_schemas,json=dataSourceSchemas,proto3" json:"data_source_schemas,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,4,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ ProviderMeta *Schema `protobuf:"bytes,5,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
+ ServerCapabilities *ServerCapabilities `protobuf:"bytes,6,opt,name=server_capabilities,json=serverCapabilities,proto3" json:"server_capabilities,omitempty"`
// functions is a mapping of function names to definitions.
- Functions map[string]*Function `protobuf:"bytes,7,rep,name=functions,proto3" json:"functions,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
+ Functions map[string]*Function `protobuf:"bytes,7,rep,name=functions,proto3" json:"functions,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+ EphemeralResourceSchemas map[string]*Schema `protobuf:"bytes,8,rep,name=ephemeral_resource_schemas,json=ephemeralResourceSchemas,proto3" json:"ephemeral_resource_schemas,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetProviderSchema_Response) Reset() {
*x = GetProviderSchema_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[44]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[49]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetProviderSchema_Response) String() string {
@@ -2544,8 +2660,8 @@ func (x *GetProviderSchema_Response) String() string {
func (*GetProviderSchema_Response) ProtoMessage() {}
func (x *GetProviderSchema_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[44]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[49]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2609,21 +2725,25 @@ func (x *GetProviderSchema_Response) GetFunctions() map[string]*Function {
return nil
}
+func (x *GetProviderSchema_Response) GetEphemeralResourceSchemas() map[string]*Schema {
+ if x != nil {
+ return x.EphemeralResourceSchemas
+ }
+ return nil
+}
+
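The regenerated GetProviderSchema_Response above now carries ephemeral resource schemas in their own map, keyed by type name just like ResourceSchemas, DataSourceSchemas, and the Functions map described in the field comments. As a hedged orientation sketch (illustrative only, not part of the vendored change), a provider server built against this protocol might assemble the response roughly as follows; the import path and the concrete map keys are hypothetical, while the message and field names come from the generated code above.

package main // illustrative sketch only; the package below is vendored generated code

import (
	tfplugin5 "example.com/hypothetical/tfplugin5" // hypothetical import path for this generated package
)

// buildSchemaResponse shows where the new EphemeralResourceSchemas map sits
// relative to the existing schema and function maps.
func buildSchemaResponse(provider, resource, ephemeral *tfplugin5.Schema, fn *tfplugin5.Function) *tfplugin5.GetProviderSchema_Response {
	return &tfplugin5.GetProviderSchema_Response{
		Provider:        provider,
		ResourceSchemas: map[string]*tfplugin5.Schema{"examplecloud_thing": resource},
		// Functions is keyed by function name, per the field comment above.
		Functions: map[string]*tfplugin5.Function{"parse_id": fn},
		// New in this regeneration: ephemeral resources get their own map,
		// keyed by type name like the other schema maps.
		EphemeralResourceSchemas: map[string]*tfplugin5.Schema{"examplecloud_token": ephemeral},
	}
}

func main() { _ = buildSchemaResponse(nil, nil, nil, nil) }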
type PrepareProviderConfig_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Config *DynamicValue `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Config *DynamicValue `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *PrepareProviderConfig_Request) Reset() {
*x = PrepareProviderConfig_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[48]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[54]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *PrepareProviderConfig_Request) String() string {
@@ -2633,8 +2753,8 @@ func (x *PrepareProviderConfig_Request) String() string {
func (*PrepareProviderConfig_Request) ProtoMessage() {}
func (x *PrepareProviderConfig_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[48]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[54]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2657,21 +2777,18 @@ func (x *PrepareProviderConfig_Request) GetConfig() *DynamicValue {
}
type PrepareProviderConfig_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- PreparedConfig *DynamicValue `protobuf:"bytes,1,opt,name=prepared_config,json=preparedConfig,proto3" json:"prepared_config,omitempty"`
- Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ PreparedConfig *DynamicValue `protobuf:"bytes,1,opt,name=prepared_config,json=preparedConfig,proto3" json:"prepared_config,omitempty"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *PrepareProviderConfig_Response) Reset() {
*x = PrepareProviderConfig_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[49]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[55]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *PrepareProviderConfig_Response) String() string {
@@ -2681,8 +2798,8 @@ func (x *PrepareProviderConfig_Response) String() string {
func (*PrepareProviderConfig_Response) ProtoMessage() {}
func (x *PrepareProviderConfig_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[49]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[55]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2721,11 +2838,8 @@ func (x *PrepareProviderConfig_Response) GetDiagnostics() []*Diagnostic {
// known, nor match the given prior state, which could lead to unexpected
// provider behaviors for practitioners.
type UpgradeResourceState_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
// version is the schema_version number recorded in the state file
Version int64 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"`
// raw_state is the raw states as stored for the resource. Core does
@@ -2733,16 +2847,16 @@ type UpgradeResourceState_Request struct {
// provider's responsibility to interpret this value using the
// appropriate older schema. The raw_state will be the json encoded
// state, or a legacy flat-mapped format.
- RawState *RawState `protobuf:"bytes,3,opt,name=raw_state,json=rawState,proto3" json:"raw_state,omitempty"`
+ RawState *RawState `protobuf:"bytes,3,opt,name=raw_state,json=rawState,proto3" json:"raw_state,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *UpgradeResourceState_Request) Reset() {
*x = UpgradeResourceState_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[50]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[56]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *UpgradeResourceState_Request) String() string {
@@ -2752,8 +2866,8 @@ func (x *UpgradeResourceState_Request) String() string {
func (*UpgradeResourceState_Request) ProtoMessage() {}
func (x *UpgradeResourceState_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[50]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[56]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2790,10 +2904,7 @@ func (x *UpgradeResourceState_Request) GetRawState() *RawState {
}
type UpgradeResourceState_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// new_state is a msgpack-encoded data structure that, when interpreted with
// the _current_ schema for this resource type, is functionally equivalent to
// that which was given in prior_state_raw.
@@ -2801,16 +2912,16 @@ type UpgradeResourceState_Response struct {
// diagnostics describes any errors encountered during migration that could not
// be safely resolved, and warnings about any possibly-risky assumptions made
// in the upgrade process.
- Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
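Per the field comments above, raw_state arrives either as JSON or as the legacy flat-mapped format and must be interpreted against the older schema identified by version, while new_state goes back encoded against the current schema. The sketch below is a rough, non-authoritative illustration of that contract: it assumes RawState exposes its JSON form via GetJson(), that the response carries a NewState DynamicValue with a Msgpack payload (neither accessor is shown in this hunk), and the migration helper, schema-version constant, and msgpack encoder are hypothetical stand-ins.

package main // illustrative sketch only

import (
	"encoding/json"

	tfplugin5 "example.com/hypothetical/tfplugin5" // hypothetical import path for this generated package
)

const currentSchemaVersion = 2 // hypothetical: the provider's current schema_version

// upgrade sketches the contract described in the comments above: decode the
// raw prior state with the *older* schema, migrate it step by step, and hand
// back a value encoded against the current schema.
func upgrade(req *tfplugin5.UpgradeResourceState_Request) (*tfplugin5.UpgradeResourceState_Response, error) {
	var state map[string]interface{}
	// Assumption: RawState exposes its JSON form via GetJson(); legacy
	// flat-mapped state would need separate handling.
	if err := json.Unmarshal(req.GetRawState().GetJson(), &state); err != nil {
		return nil, err
	}
	for v := req.GetVersion(); v < currentSchemaVersion; v++ {
		state = migrate(v, state) // hypothetical per-version migration step
	}
	// The comment above calls for a msgpack-encoded DynamicValue; the encoder
	// here is a hypothetical placeholder for whatever the provider uses.
	return &tfplugin5.UpgradeResourceState_Response{
		NewState: &tfplugin5.DynamicValue{Msgpack: encodeMsgpack(state)},
	}, nil
}

func migrate(fromVersion int64, s map[string]interface{}) map[string]interface{} { return s }

func encodeMsgpack(v interface{}) []byte { return nil } // hypothetical placeholder

func main() {}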
func (x *UpgradeResourceState_Response) Reset() {
*x = UpgradeResourceState_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[51]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[57]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *UpgradeResourceState_Response) String() string {
@@ -2820,8 +2931,8 @@ func (x *UpgradeResourceState_Response) String() string {
func (*UpgradeResourceState_Response) ProtoMessage() {}
func (x *UpgradeResourceState_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[51]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[57]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2851,21 +2962,19 @@ func (x *UpgradeResourceState_Response) GetDiagnostics() []*Diagnostic {
}
type ValidateResourceTypeConfig_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
+ ClientCapabilities *ClientCapabilities `protobuf:"bytes,3,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateResourceTypeConfig_Request) Reset() {
*x = ValidateResourceTypeConfig_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[52]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[58]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateResourceTypeConfig_Request) String() string {
@@ -2875,8 +2984,8 @@ func (x *ValidateResourceTypeConfig_Request) String() string {
func (*ValidateResourceTypeConfig_Request) ProtoMessage() {}
func (x *ValidateResourceTypeConfig_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[52]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[58]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2905,21 +3014,25 @@ func (x *ValidateResourceTypeConfig_Request) GetConfig() *DynamicValue {
return nil
}
+func (x *ValidateResourceTypeConfig_Request) GetClientCapabilities() *ClientCapabilities {
+ if x != nil {
+ return x.ClientCapabilities
+ }
+ return nil
+}
+
type ValidateResourceTypeConfig_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateResourceTypeConfig_Response) Reset() {
*x = ValidateResourceTypeConfig_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[53]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[59]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateResourceTypeConfig_Response) String() string {
@@ -2929,8 +3042,8 @@ func (x *ValidateResourceTypeConfig_Response) String() string {
func (*ValidateResourceTypeConfig_Response) ProtoMessage() {}
func (x *ValidateResourceTypeConfig_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[53]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[59]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2953,21 +3066,18 @@ func (x *ValidateResourceTypeConfig_Response) GetDiagnostics() []*Diagnostic {
}
type ValidateDataSourceConfig_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateDataSourceConfig_Request) Reset() {
*x = ValidateDataSourceConfig_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[54]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[60]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateDataSourceConfig_Request) String() string {
@@ -2977,8 +3087,8 @@ func (x *ValidateDataSourceConfig_Request) String() string {
func (*ValidateDataSourceConfig_Request) ProtoMessage() {}
func (x *ValidateDataSourceConfig_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[54]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[60]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3008,20 +3118,17 @@ func (x *ValidateDataSourceConfig_Request) GetConfig() *DynamicValue {
}
type ValidateDataSourceConfig_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateDataSourceConfig_Response) Reset() {
*x = ValidateDataSourceConfig_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[55]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[61]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateDataSourceConfig_Response) String() string {
@@ -3031,8 +3138,8 @@ func (x *ValidateDataSourceConfig_Response) String() string {
func (*ValidateDataSourceConfig_Response) ProtoMessage() {}
func (x *ValidateDataSourceConfig_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[55]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[61]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3055,22 +3162,19 @@ func (x *ValidateDataSourceConfig_Response) GetDiagnostics() []*Diagnostic {
}
type Configure_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- TerraformVersion string `protobuf:"bytes,1,opt,name=terraform_version,json=terraformVersion,proto3" json:"terraform_version,omitempty"`
- Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
- ClientCapabilities *ClientCapabilities `protobuf:"bytes,3,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TerraformVersion string `protobuf:"bytes,1,opt,name=terraform_version,json=terraformVersion,proto3" json:"terraform_version,omitempty"`
+ Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
+ ClientCapabilities *ClientCapabilities `protobuf:"bytes,3,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *Configure_Request) Reset() {
*x = Configure_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[56]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[62]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Configure_Request) String() string {
@@ -3080,8 +3184,8 @@ func (x *Configure_Request) String() string {
func (*Configure_Request) ProtoMessage() {}
func (x *Configure_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[56]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[62]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3118,20 +3222,17 @@ func (x *Configure_Request) GetClientCapabilities() *ClientCapabilities {
}
type Configure_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *Configure_Response) Reset() {
*x = Configure_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[57]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[63]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Configure_Response) String() string {
@@ -3141,8 +3242,8 @@ func (x *Configure_Response) String() string {
func (*Configure_Response) ProtoMessage() {}
func (x *Configure_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[57]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[63]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3173,24 +3274,21 @@ func (x *Configure_Response) GetDiagnostics() []*Diagnostic {
// not guaranteed to be wholly known nor match the given prior state, which
// could lead to unexpected provider behaviors for practitioners.
type ReadResource_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- CurrentState *DynamicValue `protobuf:"bytes,2,opt,name=current_state,json=currentState,proto3" json:"current_state,omitempty"`
- Private []byte `protobuf:"bytes,3,opt,name=private,proto3" json:"private,omitempty"`
- ProviderMeta *DynamicValue `protobuf:"bytes,4,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
- ClientCapabilities *ClientCapabilities `protobuf:"bytes,5,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ CurrentState *DynamicValue `protobuf:"bytes,2,opt,name=current_state,json=currentState,proto3" json:"current_state,omitempty"`
+ Private []byte `protobuf:"bytes,3,opt,name=private,proto3" json:"private,omitempty"`
+ ProviderMeta *DynamicValue `protobuf:"bytes,4,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
+ ClientCapabilities *ClientCapabilities `protobuf:"bytes,5,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ReadResource_Request) Reset() {
*x = ReadResource_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[58]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[64]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ReadResource_Request) String() string {
@@ -3200,8 +3298,8 @@ func (x *ReadResource_Request) String() string {
func (*ReadResource_Request) ProtoMessage() {}
func (x *ReadResource_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[58]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[64]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3252,25 +3350,22 @@ func (x *ReadResource_Request) GetClientCapabilities() *ClientCapabilities {
}
type ReadResource_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- NewState *DynamicValue `protobuf:"bytes,1,opt,name=new_state,json=newState,proto3" json:"new_state,omitempty"`
- Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
- Private []byte `protobuf:"bytes,3,opt,name=private,proto3" json:"private,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ NewState *DynamicValue `protobuf:"bytes,1,opt,name=new_state,json=newState,proto3" json:"new_state,omitempty"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ Private []byte `protobuf:"bytes,3,opt,name=private,proto3" json:"private,omitempty"`
// deferred is set if the provider is deferring the change. If set the caller
// needs to handle the deferral.
- Deferred *Deferred `protobuf:"bytes,4,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ Deferred *Deferred `protobuf:"bytes,4,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
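The deferred field on ReadResource_Response (and, with the same comment, on the plan, import, and read-data-source responses later in this file) shifts work to the caller: when it is set, the caller must treat the operation as deferred rather than consuming new_state as usual. A minimal caller-side sketch of that check follows; the import path is hypothetical, the accessors follow the standard generated getter pattern used throughout this file, and what "handling the deferral" means in practice is the calling SDK's concern.

package main // illustrative sketch only

import (
	"fmt"

	tfplugin5 "example.com/hypothetical/tfplugin5" // hypothetical import path for this generated package
)

// handleReadResponse shows the caller obligation described in the deferred
// field comment: a set Deferred means the result is not a normal refresh.
func handleReadResponse(resp *tfplugin5.ReadResource_Response) error {
	if d := resp.GetDeferred(); d != nil {
		// The provider is deferring the change; do not treat NewState as a
		// completed read. How the deferral is surfaced is up to the caller.
		return fmt.Errorf("resource read deferred: %v", d)
	}
	// Normal path: NewState and Private describe the refreshed resource.
	_ = resp.GetNewState()
	_ = resp.GetPrivate()
	return nil
}

func main() {}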
func (x *ReadResource_Response) Reset() {
*x = ReadResource_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[59]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[65]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ReadResource_Response) String() string {
@@ -3280,8 +3375,8 @@ func (x *ReadResource_Response) String() string {
func (*ReadResource_Response) ProtoMessage() {}
func (x *ReadResource_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[59]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[65]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3325,26 +3420,23 @@ func (x *ReadResource_Response) GetDeferred() *Deferred {
}
type PlanResourceChange_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- PriorState *DynamicValue `protobuf:"bytes,2,opt,name=prior_state,json=priorState,proto3" json:"prior_state,omitempty"`
- ProposedNewState *DynamicValue `protobuf:"bytes,3,opt,name=proposed_new_state,json=proposedNewState,proto3" json:"proposed_new_state,omitempty"`
- Config *DynamicValue `protobuf:"bytes,4,opt,name=config,proto3" json:"config,omitempty"`
- PriorPrivate []byte `protobuf:"bytes,5,opt,name=prior_private,json=priorPrivate,proto3" json:"prior_private,omitempty"`
- ProviderMeta *DynamicValue `protobuf:"bytes,6,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
- ClientCapabilities *ClientCapabilities `protobuf:"bytes,7,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ PriorState *DynamicValue `protobuf:"bytes,2,opt,name=prior_state,json=priorState,proto3" json:"prior_state,omitempty"`
+ ProposedNewState *DynamicValue `protobuf:"bytes,3,opt,name=proposed_new_state,json=proposedNewState,proto3" json:"proposed_new_state,omitempty"`
+ Config *DynamicValue `protobuf:"bytes,4,opt,name=config,proto3" json:"config,omitempty"`
+ PriorPrivate []byte `protobuf:"bytes,5,opt,name=prior_private,json=priorPrivate,proto3" json:"prior_private,omitempty"`
+ ProviderMeta *DynamicValue `protobuf:"bytes,6,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
+ ClientCapabilities *ClientCapabilities `protobuf:"bytes,7,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *PlanResourceChange_Request) Reset() {
*x = PlanResourceChange_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[60]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[66]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *PlanResourceChange_Request) String() string {
@@ -3354,8 +3446,8 @@ func (x *PlanResourceChange_Request) String() string {
func (*PlanResourceChange_Request) ProtoMessage() {}
func (x *PlanResourceChange_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[60]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[66]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3420,14 +3512,11 @@ func (x *PlanResourceChange_Request) GetClientCapabilities() *ClientCapabilities
}
type PlanResourceChange_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- PlannedState *DynamicValue `protobuf:"bytes,1,opt,name=planned_state,json=plannedState,proto3" json:"planned_state,omitempty"`
- RequiresReplace []*AttributePath `protobuf:"bytes,2,rep,name=requires_replace,json=requiresReplace,proto3" json:"requires_replace,omitempty"`
- PlannedPrivate []byte `protobuf:"bytes,3,opt,name=planned_private,json=plannedPrivate,proto3" json:"planned_private,omitempty"`
- Diagnostics []*Diagnostic `protobuf:"bytes,4,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ PlannedState *DynamicValue `protobuf:"bytes,1,opt,name=planned_state,json=plannedState,proto3" json:"planned_state,omitempty"`
+ RequiresReplace []*AttributePath `protobuf:"bytes,2,rep,name=requires_replace,json=requiresReplace,proto3" json:"requires_replace,omitempty"`
+ PlannedPrivate []byte `protobuf:"bytes,3,opt,name=planned_private,json=plannedPrivate,proto3" json:"planned_private,omitempty"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,4,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
// This may be set only by the helper/schema "SDK" in the main Terraform
// repository, to request that Terraform Core >=0.12 permit additional
// inconsistencies that can result from the legacy SDK type system
@@ -3442,16 +3531,16 @@ type PlanResourceChange_Response struct {
LegacyTypeSystem bool `protobuf:"varint,5,opt,name=legacy_type_system,json=legacyTypeSystem,proto3" json:"legacy_type_system,omitempty"`
// deferred is set if the provider is deferring the change. If set the caller
// needs to handle the deferral.
- Deferred *Deferred `protobuf:"bytes,6,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ Deferred *Deferred `protobuf:"bytes,6,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *PlanResourceChange_Response) Reset() {
*x = PlanResourceChange_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[61]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[67]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *PlanResourceChange_Response) String() string {
@@ -3461,8 +3550,8 @@ func (x *PlanResourceChange_Response) String() string {
func (*PlanResourceChange_Response) ProtoMessage() {}
func (x *PlanResourceChange_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[61]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[67]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3520,25 +3609,22 @@ func (x *PlanResourceChange_Response) GetDeferred() *Deferred {
}
type ApplyResourceChange_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- PriorState *DynamicValue `protobuf:"bytes,2,opt,name=prior_state,json=priorState,proto3" json:"prior_state,omitempty"`
- PlannedState *DynamicValue `protobuf:"bytes,3,opt,name=planned_state,json=plannedState,proto3" json:"planned_state,omitempty"`
- Config *DynamicValue `protobuf:"bytes,4,opt,name=config,proto3" json:"config,omitempty"`
- PlannedPrivate []byte `protobuf:"bytes,5,opt,name=planned_private,json=plannedPrivate,proto3" json:"planned_private,omitempty"`
- ProviderMeta *DynamicValue `protobuf:"bytes,6,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ PriorState *DynamicValue `protobuf:"bytes,2,opt,name=prior_state,json=priorState,proto3" json:"prior_state,omitempty"`
+ PlannedState *DynamicValue `protobuf:"bytes,3,opt,name=planned_state,json=plannedState,proto3" json:"planned_state,omitempty"`
+ Config *DynamicValue `protobuf:"bytes,4,opt,name=config,proto3" json:"config,omitempty"`
+ PlannedPrivate []byte `protobuf:"bytes,5,opt,name=planned_private,json=plannedPrivate,proto3" json:"planned_private,omitempty"`
+ ProviderMeta *DynamicValue `protobuf:"bytes,6,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ApplyResourceChange_Request) Reset() {
*x = ApplyResourceChange_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[62]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[68]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ApplyResourceChange_Request) String() string {
@@ -3548,8 +3634,8 @@ func (x *ApplyResourceChange_Request) String() string {
func (*ApplyResourceChange_Request) ProtoMessage() {}
func (x *ApplyResourceChange_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[62]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[68]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3607,13 +3693,10 @@ func (x *ApplyResourceChange_Request) GetProviderMeta() *DynamicValue {
}
type ApplyResourceChange_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- NewState *DynamicValue `protobuf:"bytes,1,opt,name=new_state,json=newState,proto3" json:"new_state,omitempty"`
- Private []byte `protobuf:"bytes,2,opt,name=private,proto3" json:"private,omitempty"`
- Diagnostics []*Diagnostic `protobuf:"bytes,3,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ NewState *DynamicValue `protobuf:"bytes,1,opt,name=new_state,json=newState,proto3" json:"new_state,omitempty"`
+ Private []byte `protobuf:"bytes,2,opt,name=private,proto3" json:"private,omitempty"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,3,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
// This may be set only by the helper/schema "SDK" in the main Terraform
// repository, to request that Terraform Core >=0.12 permit additional
// inconsistencies that can result from the legacy SDK type system
@@ -3626,15 +3709,15 @@ type ApplyResourceChange_Response struct {
// ==== THIS MUST BE LEFT UNSET IN ALL OTHER SDKS ====
// ==== DO NOT USE THIS ====
LegacyTypeSystem bool `protobuf:"varint,4,opt,name=legacy_type_system,json=legacyTypeSystem,proto3" json:"legacy_type_system,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ApplyResourceChange_Response) Reset() {
*x = ApplyResourceChange_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[63]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[69]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ApplyResourceChange_Response) String() string {
@@ -3644,8 +3727,8 @@ func (x *ApplyResourceChange_Response) String() string {
func (*ApplyResourceChange_Response) ProtoMessage() {}
func (x *ApplyResourceChange_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[63]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[69]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3689,22 +3772,19 @@ func (x *ApplyResourceChange_Response) GetLegacyTypeSystem() bool {
}
type ImportResourceState_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"`
- ClientCapabilities *ClientCapabilities `protobuf:"bytes,3,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"`
+ ClientCapabilities *ClientCapabilities `protobuf:"bytes,3,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ImportResourceState_Request) Reset() {
*x = ImportResourceState_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[64]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[70]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ImportResourceState_Request) String() string {
@@ -3714,8 +3794,8 @@ func (x *ImportResourceState_Request) String() string {
func (*ImportResourceState_Request) ProtoMessage() {}
func (x *ImportResourceState_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[64]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[70]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3752,22 +3832,19 @@ func (x *ImportResourceState_Request) GetClientCapabilities() *ClientCapabilitie
}
type ImportResourceState_ImportedResource struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ State *DynamicValue `protobuf:"bytes,2,opt,name=state,proto3" json:"state,omitempty"`
+ Private []byte `protobuf:"bytes,3,opt,name=private,proto3" json:"private,omitempty"`
unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- State *DynamicValue `protobuf:"bytes,2,opt,name=state,proto3" json:"state,omitempty"`
- Private []byte `protobuf:"bytes,3,opt,name=private,proto3" json:"private,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *ImportResourceState_ImportedResource) Reset() {
*x = ImportResourceState_ImportedResource{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[65]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[71]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ImportResourceState_ImportedResource) String() string {
@@ -3777,8 +3854,8 @@ func (x *ImportResourceState_ImportedResource) String() string {
func (*ImportResourceState_ImportedResource) ProtoMessage() {}
func (x *ImportResourceState_ImportedResource) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[65]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[71]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3815,24 +3892,21 @@ func (x *ImportResourceState_ImportedResource) GetPrivate() []byte {
}
type ImportResourceState_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
ImportedResources []*ImportResourceState_ImportedResource `protobuf:"bytes,1,rep,name=imported_resources,json=importedResources,proto3" json:"imported_resources,omitempty"`
Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
// deferred is set if the provider is deferring the change. If set the caller
// needs to handle the deferral.
- Deferred *Deferred `protobuf:"bytes,3,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ Deferred *Deferred `protobuf:"bytes,3,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
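ImportResourceState_Response bundles one ImportedResource per object the provider recognises for the requested id, each carrying its own type_name, state, and private bytes, plus the same optional deferred escape hatch. A hedged sketch of a provider assembling it is below; the import path is hypothetical, and in a real provider the state value would be an encoded DynamicValue produced by the import logic rather than a passed-in placeholder.

package main // illustrative sketch only

import (
	tfplugin5 "example.com/hypothetical/tfplugin5" // hypothetical import path for this generated package
)

// importResponse sketches how the pieces above fit together: one
// ImportedResource per discovered object, each with its own state and
// private payload.
func importResponse(typeName string, state *tfplugin5.DynamicValue, private []byte) *tfplugin5.ImportResourceState_Response {
	return &tfplugin5.ImportResourceState_Response{
		ImportedResources: []*tfplugin5.ImportResourceState_ImportedResource{
			{
				TypeName: typeName,
				State:    state,
				Private:  private,
			},
		},
		// Deferred stays nil unless the provider cannot complete the import
		// yet and the caller should handle a deferral instead.
	}
}

func main() { _ = importResponse("examplecloud_thing", nil, nil) }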
func (x *ImportResourceState_Response) Reset() {
*x = ImportResourceState_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[66]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[72]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ImportResourceState_Response) String() string {
@@ -3842,8 +3916,8 @@ func (x *ImportResourceState_Response) String() string {
func (*ImportResourceState_Response) ProtoMessage() {}
func (x *ImportResourceState_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[66]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[72]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3880,10 +3954,7 @@ func (x *ImportResourceState_Response) GetDeferred() *Deferred {
}
type MoveResourceState_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// The address of the provider the resource is being moved from.
SourceProviderAddress string `protobuf:"bytes,1,opt,name=source_provider_address,json=sourceProviderAddress,proto3" json:"source_provider_address,omitempty"`
// The resource type that the resource is being moved from.
@@ -3899,15 +3970,15 @@ type MoveResourceState_Request struct {
TargetTypeName string `protobuf:"bytes,5,opt,name=target_type_name,json=targetTypeName,proto3" json:"target_type_name,omitempty"`
// The private state of the resource being moved.
SourcePrivate []byte `protobuf:"bytes,6,opt,name=source_private,json=sourcePrivate,proto3" json:"source_private,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *MoveResourceState_Request) Reset() {
*x = MoveResourceState_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[67]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[73]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *MoveResourceState_Request) String() string {
@@ -3917,8 +3988,8 @@ func (x *MoveResourceState_Request) String() string {
func (*MoveResourceState_Request) ProtoMessage() {}
func (x *MoveResourceState_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[67]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[73]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3976,25 +4047,22 @@ func (x *MoveResourceState_Request) GetSourcePrivate() []byte {
}
type MoveResourceState_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// The state of the resource after it has been moved.
TargetState *DynamicValue `protobuf:"bytes,1,opt,name=target_state,json=targetState,proto3" json:"target_state,omitempty"`
// Any diagnostics that occurred during the move.
Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
// The private state of the resource after it has been moved.
TargetPrivate []byte `protobuf:"bytes,3,opt,name=target_private,json=targetPrivate,proto3" json:"target_private,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
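MoveResourceState lets a resource change provider and/or type: per the field comments, the request identifies the source provider address and source type, the target type name, and the source private data (the remaining source fields are elided from this hunk), and the response returns the state and private data re-expressed for the target type along with diagnostics for anything that could not be translated. A hedged server-side sketch using only fields visible in this hunk; the import path and the translation step are hypothetical.

package main // illustrative sketch only

import (
	tfplugin5 "example.com/hypothetical/tfplugin5" // hypothetical import path for this generated package
)

// moveState sketches the shape of the exchange: the request says where the
// resource is coming from and which type it should become; the response
// carries the re-expressed state, private data, and any diagnostics.
func moveState(req *tfplugin5.MoveResourceState_Request) *tfplugin5.MoveResourceState_Response {
	// translateState is a hypothetical stand-in for the provider's actual
	// source-type to target-type conversion logic.
	target, diags := translateState(req.GetSourceProviderAddress(), req.GetTargetTypeName(), req)
	return &tfplugin5.MoveResourceState_Response{
		TargetState:   target,
		TargetPrivate: req.GetSourcePrivate(), // or rewritten, if the move changes it
		Diagnostics:   diags,
	}
}

func translateState(sourceAddr, targetType string, req *tfplugin5.MoveResourceState_Request) (*tfplugin5.DynamicValue, []*tfplugin5.Diagnostic) {
	return nil, nil // hypothetical placeholder
}

func main() {}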
func (x *MoveResourceState_Response) Reset() {
*x = MoveResourceState_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[68]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[74]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *MoveResourceState_Response) String() string {
@@ -4004,8 +4072,8 @@ func (x *MoveResourceState_Response) String() string {
func (*MoveResourceState_Response) ProtoMessage() {}
func (x *MoveResourceState_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[68]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[74]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4042,23 +4110,20 @@ func (x *MoveResourceState_Response) GetTargetPrivate() []byte {
}
type ReadDataSource_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
- ProviderMeta *DynamicValue `protobuf:"bytes,3,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
- ClientCapabilities *ClientCapabilities `protobuf:"bytes,4,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
+ ProviderMeta *DynamicValue `protobuf:"bytes,3,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
+ ClientCapabilities *ClientCapabilities `protobuf:"bytes,4,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ReadDataSource_Request) Reset() {
*x = ReadDataSource_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[69]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[75]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ReadDataSource_Request) String() string {
@@ -4068,8 +4133,8 @@ func (x *ReadDataSource_Request) String() string {
func (*ReadDataSource_Request) ProtoMessage() {}
func (x *ReadDataSource_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[69]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[75]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4113,24 +4178,21 @@ func (x *ReadDataSource_Request) GetClientCapabilities() *ClientCapabilities {
}
type ReadDataSource_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- State *DynamicValue `protobuf:"bytes,1,opt,name=state,proto3" json:"state,omitempty"`
- Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ State *DynamicValue `protobuf:"bytes,1,opt,name=state,proto3" json:"state,omitempty"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
// deferred is set if the provider is deferring the change. If set the caller
// needs to handle the deferral.
- Deferred *Deferred `protobuf:"bytes,3,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ Deferred *Deferred `protobuf:"bytes,3,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ReadDataSource_Response) Reset() {
*x = ReadDataSource_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[70]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[76]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ReadDataSource_Response) String() string {
@@ -4140,8 +4202,8 @@ func (x *ReadDataSource_Response) String() string {
func (*ReadDataSource_Response) ProtoMessage() {}
func (x *ReadDataSource_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[70]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[76]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4178,18 +4240,16 @@ func (x *ReadDataSource_Response) GetDeferred() *Deferred {
}
type GetProvisionerSchema_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetProvisionerSchema_Request) Reset() {
*x = GetProvisionerSchema_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[71]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[77]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetProvisionerSchema_Request) String() string {
@@ -4199,8 +4259,8 @@ func (x *GetProvisionerSchema_Request) String() string {
func (*GetProvisionerSchema_Request) ProtoMessage() {}
func (x *GetProvisionerSchema_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[71]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[77]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4216,21 +4276,18 @@ func (*GetProvisionerSchema_Request) Descriptor() ([]byte, []int) {
}
type GetProvisionerSchema_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Provisioner *Schema `protobuf:"bytes,1,opt,name=provisioner,proto3" json:"provisioner,omitempty"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Provisioner *Schema `protobuf:"bytes,1,opt,name=provisioner,proto3" json:"provisioner,omitempty"`
- Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *GetProvisionerSchema_Response) Reset() {
*x = GetProvisionerSchema_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[72]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[78]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetProvisionerSchema_Response) String() string {
@@ -4240,8 +4297,8 @@ func (x *GetProvisionerSchema_Response) String() string {
func (*GetProvisionerSchema_Response) ProtoMessage() {}
func (x *GetProvisionerSchema_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[72]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[78]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4271,20 +4328,17 @@ func (x *GetProvisionerSchema_Response) GetDiagnostics() []*Diagnostic {
}
type ValidateProvisionerConfig_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Config *DynamicValue `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Config *DynamicValue `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateProvisionerConfig_Request) Reset() {
*x = ValidateProvisionerConfig_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[73]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[79]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateProvisionerConfig_Request) String() string {
@@ -4294,8 +4348,8 @@ func (x *ValidateProvisionerConfig_Request) String() string {
func (*ValidateProvisionerConfig_Request) ProtoMessage() {}
func (x *ValidateProvisionerConfig_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[73]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[79]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4318,20 +4372,17 @@ func (x *ValidateProvisionerConfig_Request) GetConfig() *DynamicValue {
}
type ValidateProvisionerConfig_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateProvisionerConfig_Response) Reset() {
*x = ValidateProvisionerConfig_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[74]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[80]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateProvisionerConfig_Response) String() string {
@@ -4341,8 +4392,8 @@ func (x *ValidateProvisionerConfig_Response) String() string {
func (*ValidateProvisionerConfig_Response) ProtoMessage() {}
func (x *ValidateProvisionerConfig_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[74]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[80]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4365,21 +4416,18 @@ func (x *ValidateProvisionerConfig_Response) GetDiagnostics() []*Diagnostic {
}
type ProvisionResource_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Config *DynamicValue `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"`
+ Connection *DynamicValue `protobuf:"bytes,2,opt,name=connection,proto3" json:"connection,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Config *DynamicValue `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"`
- Connection *DynamicValue `protobuf:"bytes,2,opt,name=connection,proto3" json:"connection,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *ProvisionResource_Request) Reset() {
*x = ProvisionResource_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[75]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[81]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ProvisionResource_Request) String() string {
@@ -4389,8 +4437,8 @@ func (x *ProvisionResource_Request) String() string {
func (*ProvisionResource_Request) ProtoMessage() {}
func (x *ProvisionResource_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[75]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[81]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4420,21 +4468,18 @@ func (x *ProvisionResource_Request) GetConnection() *DynamicValue {
}
type ProvisionResource_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Output string `protobuf:"bytes,1,opt,name=output,proto3" json:"output,omitempty"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Output string `protobuf:"bytes,1,opt,name=output,proto3" json:"output,omitempty"`
- Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *ProvisionResource_Response) Reset() {
*x = ProvisionResource_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[76]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[82]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ProvisionResource_Response) String() string {
@@ -4444,8 +4489,8 @@ func (x *ProvisionResource_Response) String() string {
func (*ProvisionResource_Response) ProtoMessage() {}
func (x *ProvisionResource_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[76]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[82]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4475,18 +4520,16 @@ func (x *ProvisionResource_Response) GetDiagnostics() []*Diagnostic {
}
type GetFunctions_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetFunctions_Request) Reset() {
*x = GetFunctions_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[77]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[83]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetFunctions_Request) String() string {
@@ -4496,8 +4539,8 @@ func (x *GetFunctions_Request) String() string {
func (*GetFunctions_Request) ProtoMessage() {}
func (x *GetFunctions_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[77]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[83]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4513,23 +4556,20 @@ func (*GetFunctions_Request) Descriptor() ([]byte, []int) {
}
type GetFunctions_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// functions is a mapping of function names to definitions.
- Functions map[string]*Function `protobuf:"bytes,1,rep,name=functions,proto3" json:"functions,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
+ Functions map[string]*Function `protobuf:"bytes,1,rep,name=functions,proto3" json:"functions,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
// diagnostics is any warnings or errors.
- Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetFunctions_Response) Reset() {
*x = GetFunctions_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[78]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[84]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetFunctions_Response) String() string {
@@ -4539,8 +4579,8 @@ func (x *GetFunctions_Response) String() string {
func (*GetFunctions_Response) ProtoMessage() {}
func (x *GetFunctions_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[78]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[84]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4570,23 +4610,20 @@ func (x *GetFunctions_Response) GetDiagnostics() []*Diagnostic {
}
type CallFunction_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// name is the name of the function being called.
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// arguments is the data of each function argument value.
- Arguments []*DynamicValue `protobuf:"bytes,2,rep,name=arguments,proto3" json:"arguments,omitempty"`
+ Arguments []*DynamicValue `protobuf:"bytes,2,rep,name=arguments,proto3" json:"arguments,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *CallFunction_Request) Reset() {
*x = CallFunction_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[80]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[86]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *CallFunction_Request) String() string {
@@ -4596,8 +4633,8 @@ func (x *CallFunction_Request) String() string {
func (*CallFunction_Request) ProtoMessage() {}
func (x *CallFunction_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[80]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[86]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4627,23 +4664,20 @@ func (x *CallFunction_Request) GetArguments() []*DynamicValue {
}
type CallFunction_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// result is result value after running the function logic.
Result *DynamicValue `protobuf:"bytes,1,opt,name=result,proto3" json:"result,omitempty"`
// error is any error from the function logic.
- Error *FunctionError `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"`
+ Error *FunctionError `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *CallFunction_Response) Reset() {
*x = CallFunction_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin5_proto_msgTypes[81]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin5_proto_msgTypes[87]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *CallFunction_Response) String() string {
@@ -4653,8 +4687,8 @@ func (x *CallFunction_Response) String() string {
func (*CallFunction_Response) ProtoMessage() {}
func (x *CallFunction_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin5_proto_msgTypes[81]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin5_proto_msgTypes[87]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4683,715 +4717,1296 @@ func (x *CallFunction_Response) GetError() *FunctionError {
return nil
}
-var File_tfplugin5_proto protoreflect.FileDescriptor
+type ValidateEphemeralResourceConfig_Request struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
-var file_tfplugin5_proto_rawDesc = []byte{
- 0x0a, 0x0f, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x70, 0x72, 0x6f, 0x74,
- 0x6f, 0x12, 0x09, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x22, 0x3c, 0x0a, 0x0c,
- 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x18, 0x0a, 0x07,
- 0x6d, 0x73, 0x67, 0x70, 0x61, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x6d,
- 0x73, 0x67, 0x70, 0x61, 0x63, 0x6b, 0x12, 0x12, 0x0a, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x18, 0x02,
- 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x22, 0xe3, 0x01, 0x0a, 0x0a, 0x44,
- 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x12, 0x3a, 0x0a, 0x08, 0x73, 0x65, 0x76,
- 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x74, 0x66,
- 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
- 0x69, 0x63, 0x2e, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x52, 0x08, 0x73, 0x65, 0x76,
- 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79,
- 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12,
- 0x16, 0x0a, 0x06, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52,
- 0x06, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x12, 0x36, 0x0a, 0x09, 0x61, 0x74, 0x74, 0x72, 0x69,
- 0x62, 0x75, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x74, 0x66, 0x70,
- 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65,
- 0x50, 0x61, 0x74, 0x68, 0x52, 0x09, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x22,
- 0x2f, 0x0a, 0x08, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x0b, 0x0a, 0x07, 0x49,
- 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f,
- 0x52, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x57, 0x41, 0x52, 0x4e, 0x49, 0x4e, 0x47, 0x10, 0x02,
- 0x22, 0x6b, 0x0a, 0x0d, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f,
- 0x72, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
- 0x04, 0x74, 0x65, 0x78, 0x74, 0x12, 0x30, 0x0a, 0x11, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f,
- 0x6e, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03,
- 0x48, 0x00, 0x52, 0x10, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x72, 0x67, 0x75,
- 0x6d, 0x65, 0x6e, 0x74, 0x88, 0x01, 0x01, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x66, 0x75, 0x6e, 0x63,
- 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0xdc, 0x01,
- 0x0a, 0x0d, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12,
- 0x33, 0x0a, 0x05, 0x73, 0x74, 0x65, 0x70, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69,
- 0x62, 0x75, 0x74, 0x65, 0x50, 0x61, 0x74, 0x68, 0x2e, 0x53, 0x74, 0x65, 0x70, 0x52, 0x05, 0x73,
- 0x74, 0x65, 0x70, 0x73, 0x1a, 0x95, 0x01, 0x0a, 0x04, 0x53, 0x74, 0x65, 0x70, 0x12, 0x27, 0x0a,
- 0x0e, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18,
- 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0d, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75,
- 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2e, 0x0a, 0x12, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e,
- 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, 0x01,
- 0x28, 0x09, 0x48, 0x00, 0x52, 0x10, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x4b, 0x65, 0x79,
- 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x28, 0x0a, 0x0f, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e,
- 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x69, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x48,
- 0x00, 0x52, 0x0d, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x4b, 0x65, 0x79, 0x49, 0x6e, 0x74,
- 0x42, 0x0a, 0x0a, 0x08, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x33, 0x0a, 0x04,
- 0x53, 0x74, 0x6f, 0x70, 0x1a, 0x09, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
- 0x20, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x45,
- 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x45, 0x72, 0x72, 0x6f,
- 0x72, 0x22, 0x96, 0x01, 0x0a, 0x08, 0x52, 0x61, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x12,
- 0x0a, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x6a, 0x73,
- 0x6f, 0x6e, 0x12, 0x3a, 0x0a, 0x07, 0x66, 0x6c, 0x61, 0x74, 0x6d, 0x61, 0x70, 0x18, 0x02, 0x20,
- 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e,
- 0x52, 0x61, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x46, 0x6c, 0x61, 0x74, 0x6d, 0x61, 0x70,
- 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x66, 0x6c, 0x61, 0x74, 0x6d, 0x61, 0x70, 0x1a, 0x3a,
- 0x0a, 0x0c, 0x46, 0x6c, 0x61, 0x74, 0x6d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10,
- 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79,
- 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52,
- 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xcc, 0x07, 0x0a, 0x06, 0x53,
- 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e,
- 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12,
- 0x2d, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d,
- 0x61, 0x2e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x1a, 0xa2,
- 0x02, 0x0a, 0x05, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73,
- 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69,
- 0x6f, 0x6e, 0x12, 0x3b, 0x0a, 0x0a, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73,
- 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62,
- 0x75, 0x74, 0x65, 0x52, 0x0a, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x12,
- 0x3e, 0x0a, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x18, 0x03,
- 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
- 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x42, 0x6c,
- 0x6f, 0x63, 0x6b, 0x52, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x73, 0x12,
- 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04,
- 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f,
- 0x6e, 0x12, 0x40, 0x0a, 0x10, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e,
- 0x5f, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x74, 0x66,
- 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4b, 0x69,
- 0x6e, 0x64, 0x52, 0x0f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x4b,
- 0x69, 0x6e, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65,
- 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61,
- 0x74, 0x65, 0x64, 0x1a, 0xa9, 0x02, 0x0a, 0x09, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74,
- 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
- 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20,
- 0x01, 0x28, 0x0c, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73,
- 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b,
- 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1a, 0x0a, 0x08, 0x72,
- 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x72,
- 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x6f, 0x70, 0x74, 0x69, 0x6f,
- 0x6e, 0x61, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x6f, 0x70, 0x74, 0x69, 0x6f,
- 0x6e, 0x61, 0x6c, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x18,
- 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64, 0x12,
- 0x1c, 0x0a, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x18, 0x07, 0x20, 0x01,
- 0x28, 0x08, 0x52, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x12, 0x40, 0x0a,
- 0x10, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6b, 0x69, 0x6e,
- 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
- 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4b, 0x69, 0x6e, 0x64, 0x52, 0x0f,
- 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x4b, 0x69, 0x6e, 0x64, 0x12,
- 0x1e, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x18, 0x09, 0x20,
- 0x01, 0x28, 0x08, 0x52, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x1a,
- 0xa7, 0x02, 0x0a, 0x0b, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12,
- 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01,
- 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2d, 0x0a, 0x05,
- 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66,
- 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x42,
- 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x43, 0x0a, 0x07, 0x6e,
- 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x29, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e,
- 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x2e, 0x4e, 0x65, 0x73, 0x74,
- 0x69, 0x6e, 0x67, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x07, 0x6e, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67,
- 0x12, 0x1b, 0x0a, 0x09, 0x6d, 0x69, 0x6e, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x04, 0x20,
- 0x01, 0x28, 0x03, 0x52, 0x08, 0x6d, 0x69, 0x6e, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x1b, 0x0a,
- 0x09, 0x6d, 0x61, 0x78, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03,
- 0x52, 0x08, 0x6d, 0x61, 0x78, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x22, 0x4d, 0x0a, 0x0b, 0x4e, 0x65,
- 0x73, 0x74, 0x69, 0x6e, 0x67, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x49, 0x4e, 0x56,
- 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x49, 0x4e, 0x47, 0x4c, 0x45,
- 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x4c, 0x49, 0x53, 0x54, 0x10, 0x02, 0x12, 0x07, 0x0a, 0x03,
- 0x53, 0x45, 0x54, 0x10, 0x03, 0x12, 0x07, 0x0a, 0x03, 0x4d, 0x41, 0x50, 0x10, 0x04, 0x12, 0x09,
- 0x0a, 0x05, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x10, 0x05, 0x22, 0xa8, 0x01, 0x0a, 0x12, 0x53, 0x65,
- 0x72, 0x76, 0x65, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73,
- 0x12, 0x21, 0x0a, 0x0c, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x64, 0x65, 0x73, 0x74, 0x72, 0x6f, 0x79,
- 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x70, 0x6c, 0x61, 0x6e, 0x44, 0x65, 0x73, 0x74,
- 0x72, 0x6f, 0x79, 0x12, 0x3f, 0x0a, 0x1c, 0x67, 0x65, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69,
- 0x64, 0x65, 0x72, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f,
- 0x6e, 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x19, 0x67, 0x65, 0x74, 0x50, 0x72,
- 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x4f, 0x70, 0x74, 0x69,
- 0x6f, 0x6e, 0x61, 0x6c, 0x12, 0x2e, 0x0a, 0x13, 0x6d, 0x6f, 0x76, 0x65, 0x5f, 0x72, 0x65, 0x73,
- 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28,
- 0x08, 0x52, 0x11, 0x6d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53,
- 0x74, 0x61, 0x74, 0x65, 0x22, 0x3f, 0x0a, 0x12, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61,
- 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x29, 0x0a, 0x10, 0x64, 0x65,
- 0x66, 0x65, 0x72, 0x72, 0x61, 0x6c, 0x5f, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x18, 0x01,
- 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x61, 0x6c, 0x41, 0x6c,
- 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x22, 0x8e, 0x05, 0x0a, 0x08, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69,
- 0x6f, 0x6e, 0x12, 0x3d, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73,
- 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x35, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x50, 0x61, 0x72, 0x61,
- 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72,
- 0x73, 0x12, 0x4c, 0x0a, 0x12, 0x76, 0x61, 0x72, 0x69, 0x61, 0x64, 0x69, 0x63, 0x5f, 0x70, 0x61,
- 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e,
- 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69,
- 0x6f, 0x6e, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x11, 0x76, 0x61,
- 0x72, 0x69, 0x61, 0x64, 0x69, 0x63, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12,
- 0x32, 0x0a, 0x06, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32,
- 0x1a, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x46, 0x75, 0x6e, 0x63,
- 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x52, 0x06, 0x72, 0x65, 0x74,
- 0x75, 0x72, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x18, 0x04,
- 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x20, 0x0a,
- 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01,
- 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12,
- 0x40, 0x0a, 0x10, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6b,
- 0x69, 0x6e, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c,
- 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4b, 0x69, 0x6e, 0x64,
- 0x52, 0x0f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x4b, 0x69, 0x6e,
- 0x64, 0x12, 0x2f, 0x0a, 0x13, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e,
- 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12,
- 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x73, 0x73, 0x61,
- 0x67, 0x65, 0x1a, 0xf3, 0x01, 0x0a, 0x09, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72,
- 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04,
- 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01,
- 0x28, 0x0c, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x28, 0x0a, 0x10, 0x61, 0x6c, 0x6c, 0x6f,
- 0x77, 0x5f, 0x6e, 0x75, 0x6c, 0x6c, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01,
- 0x28, 0x08, 0x52, 0x0e, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x4e, 0x75, 0x6c, 0x6c, 0x56, 0x61, 0x6c,
- 0x75, 0x65, 0x12, 0x30, 0x0a, 0x14, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x75, 0x6e, 0x6b, 0x6e,
- 0x6f, 0x77, 0x6e, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08,
- 0x52, 0x12, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x55, 0x6e, 0x6b, 0x6e, 0x6f, 0x77, 0x6e, 0x56, 0x61,
- 0x6c, 0x75, 0x65, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74,
- 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72,
- 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x40, 0x0a, 0x10, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69,
- 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e,
- 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x74, 0x72,
- 0x69, 0x6e, 0x67, 0x4b, 0x69, 0x6e, 0x64, 0x52, 0x0f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70,
- 0x74, 0x69, 0x6f, 0x6e, 0x4b, 0x69, 0x6e, 0x64, 0x1a, 0x1c, 0x0a, 0x06, 0x52, 0x65, 0x74, 0x75,
- 0x72, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c,
- 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xa2, 0x01, 0x0a, 0x08, 0x44, 0x65, 0x66, 0x65, 0x72,
- 0x72, 0x65, 0x64, 0x12, 0x32, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x18, 0x01, 0x20,
- 0x01, 0x28, 0x0e, 0x32, 0x1a, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e,
- 0x44, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x2e, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x52,
- 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x22, 0x62, 0x0a, 0x06, 0x52, 0x65, 0x61, 0x73, 0x6f,
- 0x6e, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x1b,
- 0x0a, 0x17, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x43, 0x4f, 0x4e, 0x46, 0x49,
- 0x47, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x01, 0x12, 0x1b, 0x0a, 0x17, 0x50,
- 0x52, 0x4f, 0x56, 0x49, 0x44, 0x45, 0x52, 0x5f, 0x43, 0x4f, 0x4e, 0x46, 0x49, 0x47, 0x5f, 0x55,
- 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x02, 0x12, 0x11, 0x0a, 0x0d, 0x41, 0x42, 0x53, 0x45,
- 0x4e, 0x54, 0x5f, 0x50, 0x52, 0x45, 0x52, 0x45, 0x51, 0x10, 0x03, 0x22, 0x96, 0x04, 0x0a, 0x0b,
- 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x1a, 0x09, 0x0a, 0x07, 0x52,
- 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0xef, 0x02, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f,
- 0x6e, 0x73, 0x65, 0x12, 0x4e, 0x0a, 0x13, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x63, 0x61,
- 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b,
- 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x65, 0x72,
- 0x76, 0x65, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52,
- 0x12, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74,
- 0x69, 0x65, 0x73, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69,
- 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
- 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52,
- 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x4c, 0x0a, 0x0c,
- 0x64, 0x61, 0x74, 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03,
- 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47,
- 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x44, 0x61, 0x74, 0x61, 0x53,
- 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x0b, 0x64,
- 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x45, 0x0a, 0x09, 0x72, 0x65,
- 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e,
- 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74,
- 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65,
- 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
- 0x73, 0x12, 0x45, 0x0a, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x05,
- 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
- 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x46, 0x75, 0x6e,
- 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x09, 0x66,
- 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x26, 0x0a, 0x10, 0x46, 0x75, 0x6e, 0x63,
- 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x12, 0x0a, 0x04,
- 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65,
- 0x1a, 0x31, 0x0a, 0x12, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65,
- 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e,
- 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e,
- 0x61, 0x6d, 0x65, 0x1a, 0x2f, 0x0a, 0x10, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d,
- 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f,
- 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65,
- 0x4e, 0x61, 0x6d, 0x65, 0x22, 0xc7, 0x06, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76,
- 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x1a, 0x09, 0x0a, 0x07, 0x52, 0x65,
- 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0xa6, 0x06, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
- 0x73, 0x65, 0x12, 0x2d, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x18, 0x01,
- 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
- 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65,
- 0x72, 0x12, 0x65, 0x0a, 0x10, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x63,
- 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x74, 0x66,
- 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69,
- 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
- 0x73, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d,
- 0x61, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
- 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x12, 0x6c, 0x0a, 0x13, 0x64, 0x61, 0x74, 0x61,
- 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18,
- 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3c, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x35, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68,
- 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x44, 0x61, 0x74,
- 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x45, 0x6e,
- 0x74, 0x72, 0x79, 0x52, 0x11, 0x64, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53,
- 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f,
- 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66,
- 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
- 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12,
- 0x36, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x65, 0x74, 0x61,
- 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x76, 0x69,
- 0x64, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x4e, 0x0a, 0x13, 0x73, 0x65, 0x72, 0x76, 0x65,
- 0x72, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x06,
- 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
- 0x2e, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74,
- 0x69, 0x65, 0x73, 0x52, 0x12, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62,
- 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x52, 0x0a, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74,
- 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x74, 0x66, 0x70,
- 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64,
- 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
- 0x65, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79,
- 0x52, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x55, 0x0a, 0x14, 0x52,
- 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x45, 0x6e,
- 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
- 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02,
- 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
- 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02,
- 0x38, 0x01, 0x1a, 0x57, 0x0a, 0x16, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65,
- 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03,
- 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x27,
- 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e,
- 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61,
- 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x51, 0x0a, 0x0e, 0x46,
- 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a,
- 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12,
- 0x29, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74,
- 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xdb,
- 0x01, 0x0a, 0x15, 0x50, 0x72, 0x65, 0x70, 0x61, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64,
- 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x3a, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75,
- 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20,
- 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e,
- 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f,
- 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x85, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
- 0x65, 0x12, 0x40, 0x0a, 0x0f, 0x70, 0x72, 0x65, 0x70, 0x61, 0x72, 0x65, 0x64, 0x5f, 0x63, 0x6f,
- 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70,
- 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61,
- 0x6c, 0x75, 0x65, 0x52, 0x0e, 0x70, 0x72, 0x65, 0x70, 0x61, 0x72, 0x65, 0x64, 0x43, 0x6f, 0x6e,
- 0x66, 0x69, 0x67, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69,
- 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
- 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52,
- 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0x90, 0x02, 0x0a,
- 0x14, 0x55, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
- 0x53, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x72, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
- 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20,
- 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a,
- 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07,
- 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x30, 0x0a, 0x09, 0x72, 0x61, 0x77, 0x5f, 0x73,
- 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70,
- 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x52, 0x61, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52,
- 0x08, 0x72, 0x61, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x83, 0x01, 0x0a, 0x08, 0x52, 0x65,
- 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3e, 0x0a, 0x0e, 0x75, 0x70, 0x67, 0x72, 0x61, 0x64,
- 0x65, 0x64, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d,
- 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0d, 0x75, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65,
- 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f,
- 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66,
- 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
- 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22,
- 0xba, 0x01, 0x0a, 0x1a, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x6f,
- 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x57,
- 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70,
- 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79,
- 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67,
- 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52,
- 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x43, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f,
- 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69,
- 0x63, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
- 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52,
- 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0xb8, 0x01, 0x0a,
- 0x18, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75,
- 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x57, 0x0a, 0x07, 0x52, 0x65, 0x71,
- 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d,
- 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d,
- 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28,
- 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79,
- 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66,
- 0x69, 0x67, 0x1a, 0x43, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37,
- 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x01, 0x20,
- 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e,
- 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67,
- 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0x8a, 0x02, 0x0a, 0x09, 0x43, 0x6f, 0x6e, 0x66,
- 0x69, 0x67, 0x75, 0x72, 0x65, 0x1a, 0xb7, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
- 0x74, 0x12, 0x2b, 0x0a, 0x11, 0x74, 0x65, 0x72, 0x72, 0x61, 0x66, 0x6f, 0x72, 0x6d, 0x5f, 0x76,
- 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x74, 0x65,
- 0x72, 0x72, 0x61, 0x66, 0x6f, 0x72, 0x6d, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2f,
- 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d,
- 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12,
- 0x4e, 0x0a, 0x13, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69,
- 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43,
- 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69,
- 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a,
- 0x43, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64,
- 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b,
- 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61,
- 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73,
- 0x74, 0x69, 0x63, 0x73, 0x22, 0xe4, 0x03, 0x0a, 0x0c, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73,
- 0x6f, 0x75, 0x72, 0x63, 0x65, 0x1a, 0x8c, 0x02, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
- 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01,
- 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x3c,
- 0x0a, 0x0d, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18,
- 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c,
- 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x18, 0x0a, 0x07,
- 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70,
- 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64,
- 0x65, 0x72, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e,
- 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69,
- 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72,
- 0x4d, 0x65, 0x74, 0x61, 0x12, 0x4e, 0x0a, 0x13, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x63,
- 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28,
- 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x6c,
- 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73,
- 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69,
- 0x74, 0x69, 0x65, 0x73, 0x1a, 0xc4, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
- 0x65, 0x12, 0x34, 0x0a, 0x09, 0x6e, 0x65, 0x77, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01,
- 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
- 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x08, 0x6e,
- 0x65, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e,
- 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73,
- 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73,
- 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28,
- 0x0c, 0x52, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x12, 0x2f, 0x0a, 0x08, 0x64, 0x65,
- 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65,
- 0x64, 0x52, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x22, 0xf3, 0x05, 0x0a, 0x12,
- 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e,
- 0x67, 0x65, 0x1a, 0x8b, 0x03, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b,
- 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
- 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x0b, 0x70,
- 0x72, 0x69, 0x6f, 0x72, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b,
- 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e,
- 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0a, 0x70, 0x72, 0x69, 0x6f, 0x72,
- 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x45, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65,
- 0x64, 0x5f, 0x6e, 0x65, 0x77, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28,
- 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79,
- 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x10, 0x70, 0x72, 0x6f, 0x70,
- 0x6f, 0x73, 0x65, 0x64, 0x4e, 0x65, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x2f, 0x0a, 0x06,
- 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63,
- 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x23, 0x0a,
- 0x0d, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x05,
- 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x50, 0x72, 0x69, 0x76, 0x61,
- 0x74, 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x5f, 0x6d,
- 0x65, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c,
- 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c,
- 0x75, 0x65, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61,
- 0x12, 0x4e, 0x0a, 0x13, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62,
- 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e,
- 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74,
- 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x63, 0x6c,
- 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73,
- 0x1a, 0xce, 0x02, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3c, 0x0a,
- 0x0d, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01,
- 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
- 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70,
- 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x43, 0x0a, 0x10, 0x72,
- 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x73, 0x5f, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x18,
- 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x35, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x50, 0x61, 0x74, 0x68, 0x52,
- 0x0f, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65,
- 0x12, 0x27, 0x0a, 0x0f, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x69, 0x76,
- 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0e, 0x70, 0x6c, 0x61, 0x6e, 0x6e,
- 0x65, 0x64, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61,
- 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e,
- 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69,
- 0x63, 0x73, 0x12, 0x2c, 0x0a, 0x12, 0x6c, 0x65, 0x67, 0x61, 0x63, 0x79, 0x5f, 0x74, 0x79, 0x70,
- 0x65, 0x5f, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10,
- 0x6c, 0x65, 0x67, 0x61, 0x63, 0x79, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d,
- 0x12, 0x2f, 0x0a, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01,
- 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44,
- 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x52, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65,
- 0x64, 0x22, 0x92, 0x04, 0x0a, 0x13, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75,
- 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x1a, 0xb6, 0x02, 0x0a, 0x07, 0x52, 0x65,
- 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61,
- 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61,
- 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x0b, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x5f, 0x73, 0x74, 0x61, 0x74,
- 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
- 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65,
- 0x52, 0x0a, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x3c, 0x0a, 0x0d,
- 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20,
- 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e,
- 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70, 0x6c,
- 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f,
- 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70,
- 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61,
- 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x27, 0x0a, 0x0f, 0x70,
- 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x05,
- 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0e, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x50, 0x72, 0x69,
- 0x76, 0x61, 0x74, 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72,
- 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66,
- 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56,
- 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x4d, 0x65,
- 0x74, 0x61, 0x1a, 0xc1, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12,
- 0x34, 0x0a, 0x09, 0x6e, 0x65, 0x77, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01,
- 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44,
- 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x08, 0x6e, 0x65, 0x77,
- 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65,
- 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x12,
- 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x03,
- 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
- 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61,
- 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x2c, 0x0a, 0x12, 0x6c, 0x65, 0x67, 0x61,
- 0x63, 0x79, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x18, 0x04,
- 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x6c, 0x65, 0x67, 0x61, 0x63, 0x79, 0x54, 0x79, 0x70, 0x65,
- 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x22, 0xef, 0x03, 0x0a, 0x13, 0x49, 0x6d, 0x70, 0x6f, 0x72,
- 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x86,
- 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79,
- 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74,
- 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x02, 0x20,
- 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x4e, 0x0a, 0x13, 0x63, 0x6c, 0x69, 0x65, 0x6e,
- 0x74, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x03,
- 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
- 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74,
- 0x69, 0x65, 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62,
- 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a, 0x78, 0x0a, 0x10, 0x49, 0x6d, 0x70, 0x6f, 0x72,
- 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x74,
- 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08,
- 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2d, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74,
- 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
- 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65,
- 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61,
- 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74,
- 0x65, 0x1a, 0xd4, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x5e,
- 0x0a, 0x12, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75,
- 0x72, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x74, 0x66, 0x70,
- 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73,
- 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72,
- 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x11, 0x69, 0x6d, 0x70,
- 0x6f, 0x72, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x37,
- 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20,
- 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e,
- 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67,
- 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x2f, 0x0a, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72,
- 0x72, 0x65, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c,
- 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x52, 0x08,
- 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x22, 0xe7, 0x03, 0x0a, 0x11, 0x4d, 0x6f, 0x76,
- 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x1a, 0xa8,
- 0x02, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x36, 0x0a, 0x17, 0x73, 0x6f,
- 0x75, 0x72, 0x63, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x5f, 0x61, 0x64,
- 0x64, 0x72, 0x65, 0x73, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x15, 0x73, 0x6f, 0x75,
- 0x72, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x41, 0x64, 0x64, 0x72, 0x65,
- 0x73, 0x73, 0x12, 0x28, 0x0a, 0x10, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x74, 0x79, 0x70,
- 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x73, 0x6f,
- 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x32, 0x0a, 0x15,
- 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5f, 0x76, 0x65,
- 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x13, 0x73, 0x6f, 0x75,
- 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e,
- 0x12, 0x36, 0x0a, 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65,
- 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x35, 0x2e, 0x52, 0x61, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x0b, 0x73, 0x6f, 0x75,
- 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x28, 0x0a, 0x10, 0x74, 0x61, 0x72, 0x67,
- 0x65, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01,
- 0x28, 0x09, 0x52, 0x0e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61,
- 0x6d, 0x65, 0x12, 0x25, 0x0a, 0x0e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x70, 0x72, 0x69,
- 0x76, 0x61, 0x74, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0d, 0x73, 0x6f, 0x75, 0x72,
- 0x63, 0x65, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x1a, 0xa6, 0x01, 0x0a, 0x08, 0x52, 0x65,
- 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3a, 0x0a, 0x0c, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74,
- 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63,
- 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0b, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x53, 0x74, 0x61,
- 0x74, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63,
- 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
- 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b,
- 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x25, 0x0a, 0x0e, 0x74,
- 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20,
- 0x01, 0x28, 0x0c, 0x52, 0x0d, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x50, 0x72, 0x69, 0x76, 0x61,
- 0x74, 0x65, 0x22, 0x9e, 0x03, 0x0a, 0x0e, 0x52, 0x65, 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53,
- 0x6f, 0x75, 0x72, 0x63, 0x65, 0x1a, 0xe5, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
- 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01,
- 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2f,
- 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d,
- 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12,
- 0x3c, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x65, 0x74, 0x61,
- 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52,
- 0x0c, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x4e, 0x0a,
- 0x13, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69,
- 0x74, 0x69, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70,
- 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70,
- 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e,
- 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a, 0xa3, 0x01,
- 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2d, 0x0a, 0x05, 0x73, 0x74,
- 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c,
- 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c,
- 0x75, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61,
- 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e,
- 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69,
- 0x63, 0x73, 0x12, 0x2f, 0x0a, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x18, 0x03,
- 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
- 0x2e, 0x44, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x52, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72,
- 0x72, 0x65, 0x64, 0x22, 0x9b, 0x01, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69,
- 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x1a, 0x09, 0x0a, 0x07,
- 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x78, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f,
- 0x6e, 0x73, 0x65, 0x12, 0x33, 0x0a, 0x0b, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e,
- 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
- 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x0b, 0x70, 0x72, 0x6f,
- 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67,
- 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e,
- 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f,
- 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63,
- 0x73, 0x22, 0x9c, 0x01, 0x0a, 0x19, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x50, 0x72,
- 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a,
- 0x3a, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f,
- 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70,
- 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61,
- 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x43, 0x0a, 0x08, 0x52,
- 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e,
- 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73,
- 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73,
- 0x22, 0xe5, 0x01, 0x0a, 0x11, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65,
- 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x1a, 0x73, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
- 0x74, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28,
- 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79,
- 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66,
- 0x69, 0x67, 0x12, 0x37, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e,
- 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52,
- 0x0a, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0x5b, 0x0a, 0x08, 0x52,
- 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75,
- 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12,
- 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02,
- 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
- 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61,
- 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0x81, 0x02, 0x0a, 0x0c, 0x47, 0x65, 0x74,
- 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x09, 0x0a, 0x07, 0x52, 0x65, 0x71,
- 0x75, 0x65, 0x73, 0x74, 0x1a, 0xe5, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
- 0x65, 0x12, 0x4d, 0x0a, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01,
- 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
- 0x2e, 0x47, 0x65, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x52, 0x65,
- 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73,
- 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73,
- 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18,
- 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69,
- 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x1a, 0x51, 0x0a, 0x0e, 0x46, 0x75, 0x6e,
- 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b,
- 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x29, 0x0a,
- 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f,
- 0x6e, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xd1, 0x01, 0x0a,
- 0x0c, 0x43, 0x61, 0x6c, 0x6c, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0x54, 0x0a,
- 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65,
- 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x09,
- 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32,
- 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61,
- 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65,
- 0x6e, 0x74, 0x73, 0x1a, 0x6b, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12,
- 0x2f, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32,
- 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61,
- 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74,
- 0x12, 0x2e, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32,
- 0x18, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x46, 0x75, 0x6e, 0x63,
- 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72,
- 0x2a, 0x25, 0x0a, 0x0a, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x09,
- 0x0a, 0x05, 0x50, 0x4c, 0x41, 0x49, 0x4e, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x4d, 0x41, 0x52,
- 0x4b, 0x44, 0x4f, 0x57, 0x4e, 0x10, 0x01, 0x32, 0xef, 0x0b, 0x0a, 0x08, 0x50, 0x72, 0x6f, 0x76,
- 0x69, 0x64, 0x65, 0x72, 0x12, 0x4e, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64,
- 0x61, 0x74, 0x61, 0x12, 0x1e, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e,
- 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x52, 0x65, 0x71, 0x75,
- 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e,
- 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70,
- 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x58, 0x0a, 0x09, 0x47, 0x65, 0x74, 0x53, 0x63, 0x68, 0x65, 0x6d,
- 0x61, 0x12, 0x24, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65,
- 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e,
- 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
- 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53,
- 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x6c,
- 0x0a, 0x15, 0x50, 0x72, 0x65, 0x70, 0x61, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65,
- 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x28, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
- 0x69, 0x6e, 0x35, 0x2e, 0x50, 0x72, 0x65, 0x70, 0x61, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69,
- 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
- 0x74, 0x1a, 0x29, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x50, 0x72,
- 0x65, 0x70, 0x61, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e,
- 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x7b, 0x0a, 0x1a,
- 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
- 0x54, 0x79, 0x70, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x2d, 0x2e, 0x74, 0x66, 0x70,
- 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x52,
- 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69,
- 0x67, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2e, 0x2e, 0x74, 0x66, 0x70, 0x6c,
- 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65,
- 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67,
- 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x75, 0x0a, 0x18, 0x56, 0x61, 0x6c,
- 0x69, 0x64, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43,
- 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x2b, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x35, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f,
- 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65,
- 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x56,
- 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63,
- 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
- 0x12, 0x69, 0x0a, 0x14, 0x55, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75,
- 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x27, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
- 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x55, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x73, 0x6f,
- 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
- 0x74, 0x1a, 0x28, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x55, 0x70,
- 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61,
- 0x74, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x48, 0x0a, 0x09, 0x43,
- 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x65, 0x12, 0x1c, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
- 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x65, 0x2e, 0x52,
- 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x35, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x65, 0x2e, 0x52, 0x65, 0x73,
- 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x51, 0x0a, 0x0c, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73,
- 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1f, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x35, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52,
- 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x35, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e,
- 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x63, 0x0a, 0x12, 0x50, 0x6c, 0x61, 0x6e,
- 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x25,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52,
- 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x2e, 0x52, 0x65,
- 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x26, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x35, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68,
- 0x61, 0x6e, 0x67, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a,
- 0x13, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68,
- 0x61, 0x6e, 0x67, 0x65, 0x12, 0x26, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
- 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68,
- 0x61, 0x6e, 0x67, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65,
- 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x2e, 0x52, 0x65, 0x73,
- 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x13, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52,
- 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x26, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52,
- 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65, 0x71,
- 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
- 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53,
- 0x74, 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x60, 0x0a,
- 0x11, 0x4d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61,
- 0x74, 0x65, 0x12, 0x24, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x4d,
- 0x6f, 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65,
- 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
- 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x4d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
- 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12,
- 0x57, 0x0a, 0x0e, 0x52, 0x65, 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63,
- 0x65, 0x12, 0x21, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x52, 0x65,
- 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x71,
- 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
- 0x2e, 0x52, 0x65, 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e,
- 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x51, 0x0a, 0x0c, 0x47, 0x65, 0x74, 0x46,
- 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1f, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
- 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e,
- 0x73, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x74, 0x66, 0x70, 0x6c,
- 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f,
- 0x6e, 0x73, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x51, 0x0a, 0x0c, 0x43,
- 0x61, 0x6c, 0x6c, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1f, 0x2e, 0x74, 0x66,
- 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x61, 0x6c, 0x6c, 0x46, 0x75, 0x6e, 0x63,
- 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x61, 0x6c, 0x6c, 0x46, 0x75, 0x6e,
- 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x39,
- 0x0a, 0x04, 0x53, 0x74, 0x6f, 0x70, 0x12, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x35, 0x2e, 0x53, 0x74, 0x6f, 0x70, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
- 0x18, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x74, 0x6f, 0x70,
- 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0x86, 0x03, 0x0a, 0x0b, 0x50, 0x72,
- 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x5e, 0x0a, 0x09, 0x47, 0x65, 0x74,
- 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x27, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65,
- 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
- 0x28, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x50,
- 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61,
- 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x78, 0x0a, 0x19, 0x56, 0x61, 0x6c,
+func (x *ValidateEphemeralResourceConfig_Request) Reset() {
+ *x = ValidateEphemeralResourceConfig_Request{}
+ mi := &file_tfplugin5_proto_msgTypes[88]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *ValidateEphemeralResourceConfig_Request) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*ValidateEphemeralResourceConfig_Request) ProtoMessage() {}
+
+func (x *ValidateEphemeralResourceConfig_Request) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin5_proto_msgTypes[88]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use ValidateEphemeralResourceConfig_Request.ProtoReflect.Descriptor instead.
+func (*ValidateEphemeralResourceConfig_Request) Descriptor() ([]byte, []int) {
+ return file_tfplugin5_proto_rawDescGZIP(), []int{29, 0}
+}
+
+func (x *ValidateEphemeralResourceConfig_Request) GetTypeName() string {
+ if x != nil {
+ return x.TypeName
+ }
+ return ""
+}
+
+func (x *ValidateEphemeralResourceConfig_Request) GetConfig() *DynamicValue {
+ if x != nil {
+ return x.Config
+ }
+ return nil
+}
+
+type ValidateEphemeralResourceConfig_Response struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *ValidateEphemeralResourceConfig_Response) Reset() {
+ *x = ValidateEphemeralResourceConfig_Response{}
+ mi := &file_tfplugin5_proto_msgTypes[89]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *ValidateEphemeralResourceConfig_Response) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*ValidateEphemeralResourceConfig_Response) ProtoMessage() {}
+
+func (x *ValidateEphemeralResourceConfig_Response) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin5_proto_msgTypes[89]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use ValidateEphemeralResourceConfig_Response.ProtoReflect.Descriptor instead.
+func (*ValidateEphemeralResourceConfig_Response) Descriptor() ([]byte, []int) {
+ return file_tfplugin5_proto_rawDescGZIP(), []int{29, 1}
+}
+
+func (x *ValidateEphemeralResourceConfig_Response) GetDiagnostics() []*Diagnostic {
+ if x != nil {
+ return x.Diagnostics
+ }
+ return nil
+}
+
+type OpenEphemeralResource_Request struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
+ ClientCapabilities *ClientCapabilities `protobuf:"bytes,3,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *OpenEphemeralResource_Request) Reset() {
+ *x = OpenEphemeralResource_Request{}
+ mi := &file_tfplugin5_proto_msgTypes[90]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *OpenEphemeralResource_Request) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*OpenEphemeralResource_Request) ProtoMessage() {}
+
+func (x *OpenEphemeralResource_Request) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin5_proto_msgTypes[90]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use OpenEphemeralResource_Request.ProtoReflect.Descriptor instead.
+func (*OpenEphemeralResource_Request) Descriptor() ([]byte, []int) {
+ return file_tfplugin5_proto_rawDescGZIP(), []int{30, 0}
+}
+
+func (x *OpenEphemeralResource_Request) GetTypeName() string {
+ if x != nil {
+ return x.TypeName
+ }
+ return ""
+}
+
+func (x *OpenEphemeralResource_Request) GetConfig() *DynamicValue {
+ if x != nil {
+ return x.Config
+ }
+ return nil
+}
+
+func (x *OpenEphemeralResource_Request) GetClientCapabilities() *ClientCapabilities {
+ if x != nil {
+ return x.ClientCapabilities
+ }
+ return nil
+}
+
+type OpenEphemeralResource_Response struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ RenewAt *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=renew_at,json=renewAt,proto3,oneof" json:"renew_at,omitempty"`
+ Result *DynamicValue `protobuf:"bytes,3,opt,name=result,proto3" json:"result,omitempty"`
+ Private []byte `protobuf:"bytes,4,opt,name=private,proto3,oneof" json:"private,omitempty"`
+ // deferred is set if the provider is deferring the change. If set the caller
+ // needs to handle the deferral.
+ Deferred *Deferred `protobuf:"bytes,5,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *OpenEphemeralResource_Response) Reset() {
+ *x = OpenEphemeralResource_Response{}
+ mi := &file_tfplugin5_proto_msgTypes[91]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *OpenEphemeralResource_Response) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*OpenEphemeralResource_Response) ProtoMessage() {}
+
+func (x *OpenEphemeralResource_Response) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin5_proto_msgTypes[91]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use OpenEphemeralResource_Response.ProtoReflect.Descriptor instead.
+func (*OpenEphemeralResource_Response) Descriptor() ([]byte, []int) {
+ return file_tfplugin5_proto_rawDescGZIP(), []int{30, 1}
+}
+
+func (x *OpenEphemeralResource_Response) GetDiagnostics() []*Diagnostic {
+ if x != nil {
+ return x.Diagnostics
+ }
+ return nil
+}
+
+func (x *OpenEphemeralResource_Response) GetRenewAt() *timestamppb.Timestamp {
+ if x != nil {
+ return x.RenewAt
+ }
+ return nil
+}
+
+func (x *OpenEphemeralResource_Response) GetResult() *DynamicValue {
+ if x != nil {
+ return x.Result
+ }
+ return nil
+}
+
+func (x *OpenEphemeralResource_Response) GetPrivate() []byte {
+ if x != nil {
+ return x.Private
+ }
+ return nil
+}
+
+func (x *OpenEphemeralResource_Response) GetDeferred() *Deferred {
+ if x != nil {
+ return x.Deferred
+ }
+ return nil
+}
+
+type RenewEphemeralResource_Request struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Private []byte `protobuf:"bytes,2,opt,name=private,proto3,oneof" json:"private,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *RenewEphemeralResource_Request) Reset() {
+ *x = RenewEphemeralResource_Request{}
+ mi := &file_tfplugin5_proto_msgTypes[92]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *RenewEphemeralResource_Request) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*RenewEphemeralResource_Request) ProtoMessage() {}
+
+func (x *RenewEphemeralResource_Request) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin5_proto_msgTypes[92]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use RenewEphemeralResource_Request.ProtoReflect.Descriptor instead.
+func (*RenewEphemeralResource_Request) Descriptor() ([]byte, []int) {
+ return file_tfplugin5_proto_rawDescGZIP(), []int{31, 0}
+}
+
+func (x *RenewEphemeralResource_Request) GetTypeName() string {
+ if x != nil {
+ return x.TypeName
+ }
+ return ""
+}
+
+func (x *RenewEphemeralResource_Request) GetPrivate() []byte {
+ if x != nil {
+ return x.Private
+ }
+ return nil
+}
+
+type RenewEphemeralResource_Response struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ RenewAt *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=renew_at,json=renewAt,proto3,oneof" json:"renew_at,omitempty"`
+ Private []byte `protobuf:"bytes,3,opt,name=private,proto3,oneof" json:"private,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *RenewEphemeralResource_Response) Reset() {
+ *x = RenewEphemeralResource_Response{}
+ mi := &file_tfplugin5_proto_msgTypes[93]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *RenewEphemeralResource_Response) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*RenewEphemeralResource_Response) ProtoMessage() {}
+
+func (x *RenewEphemeralResource_Response) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin5_proto_msgTypes[93]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use RenewEphemeralResource_Response.ProtoReflect.Descriptor instead.
+func (*RenewEphemeralResource_Response) Descriptor() ([]byte, []int) {
+ return file_tfplugin5_proto_rawDescGZIP(), []int{31, 1}
+}
+
+func (x *RenewEphemeralResource_Response) GetDiagnostics() []*Diagnostic {
+ if x != nil {
+ return x.Diagnostics
+ }
+ return nil
+}
+
+func (x *RenewEphemeralResource_Response) GetRenewAt() *timestamppb.Timestamp {
+ if x != nil {
+ return x.RenewAt
+ }
+ return nil
+}
+
+func (x *RenewEphemeralResource_Response) GetPrivate() []byte {
+ if x != nil {
+ return x.Private
+ }
+ return nil
+}
+
+type CloseEphemeralResource_Request struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Private []byte `protobuf:"bytes,2,opt,name=private,proto3,oneof" json:"private,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *CloseEphemeralResource_Request) Reset() {
+ *x = CloseEphemeralResource_Request{}
+ mi := &file_tfplugin5_proto_msgTypes[94]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *CloseEphemeralResource_Request) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*CloseEphemeralResource_Request) ProtoMessage() {}
+
+func (x *CloseEphemeralResource_Request) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin5_proto_msgTypes[94]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use CloseEphemeralResource_Request.ProtoReflect.Descriptor instead.
+func (*CloseEphemeralResource_Request) Descriptor() ([]byte, []int) {
+ return file_tfplugin5_proto_rawDescGZIP(), []int{32, 0}
+}
+
+func (x *CloseEphemeralResource_Request) GetTypeName() string {
+ if x != nil {
+ return x.TypeName
+ }
+ return ""
+}
+
+func (x *CloseEphemeralResource_Request) GetPrivate() []byte {
+ if x != nil {
+ return x.Private
+ }
+ return nil
+}
+
+type CloseEphemeralResource_Response struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *CloseEphemeralResource_Response) Reset() {
+ *x = CloseEphemeralResource_Response{}
+ mi := &file_tfplugin5_proto_msgTypes[95]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *CloseEphemeralResource_Response) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*CloseEphemeralResource_Response) ProtoMessage() {}
+
+func (x *CloseEphemeralResource_Response) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin5_proto_msgTypes[95]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use CloseEphemeralResource_Response.ProtoReflect.Descriptor instead.
+func (*CloseEphemeralResource_Response) Descriptor() ([]byte, []int) {
+ return file_tfplugin5_proto_rawDescGZIP(), []int{32, 1}
+}
+
+func (x *CloseEphemeralResource_Response) GetDiagnostics() []*Diagnostic {
+ if x != nil {
+ return x.Diagnostics
+ }
+ return nil
+}
+
+var File_tfplugin5_proto protoreflect.FileDescriptor
+
+var file_tfplugin5_proto_rawDesc = []byte{
+ 0x0a, 0x0f, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x70, 0x72, 0x6f, 0x74,
+ 0x6f, 0x12, 0x09, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x1a, 0x1f, 0x67, 0x6f,
+ 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69,
+ 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x3c, 0x0a,
+ 0x0c, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x18, 0x0a,
+ 0x07, 0x6d, 0x73, 0x67, 0x70, 0x61, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07,
+ 0x6d, 0x73, 0x67, 0x70, 0x61, 0x63, 0x6b, 0x12, 0x12, 0x0a, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x18,
+ 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x22, 0xe3, 0x01, 0x0a, 0x0a,
+ 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x12, 0x3a, 0x0a, 0x08, 0x73, 0x65,
+ 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73,
+ 0x74, 0x69, 0x63, 0x2e, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x52, 0x08, 0x73, 0x65,
+ 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72,
+ 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79,
+ 0x12, 0x16, 0x0a, 0x06, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x06, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x12, 0x36, 0x0a, 0x09, 0x61, 0x74, 0x74, 0x72,
+ 0x69, 0x62, 0x75, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74,
+ 0x65, 0x50, 0x61, 0x74, 0x68, 0x52, 0x09, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65,
+ 0x22, 0x2f, 0x0a, 0x08, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x0b, 0x0a, 0x07,
+ 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52,
+ 0x4f, 0x52, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x57, 0x41, 0x52, 0x4e, 0x49, 0x4e, 0x47, 0x10,
+ 0x02, 0x22, 0x6b, 0x0a, 0x0d, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72,
+ 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x12, 0x30, 0x0a, 0x11, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69,
+ 0x6f, 0x6e, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28,
+ 0x03, 0x48, 0x00, 0x52, 0x10, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x72, 0x67,
+ 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x88, 0x01, 0x01, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x66, 0x75, 0x6e,
+ 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0xdc,
+ 0x01, 0x0a, 0x0d, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x50, 0x61, 0x74, 0x68,
+ 0x12, 0x33, 0x0a, 0x05, 0x73, 0x74, 0x65, 0x70, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32,
+ 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x41, 0x74, 0x74, 0x72,
+ 0x69, 0x62, 0x75, 0x74, 0x65, 0x50, 0x61, 0x74, 0x68, 0x2e, 0x53, 0x74, 0x65, 0x70, 0x52, 0x05,
+ 0x73, 0x74, 0x65, 0x70, 0x73, 0x1a, 0x95, 0x01, 0x0a, 0x04, 0x53, 0x74, 0x65, 0x70, 0x12, 0x27,
+ 0x0a, 0x0e, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65,
+ 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0d, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62,
+ 0x75, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2e, 0x0a, 0x12, 0x65, 0x6c, 0x65, 0x6d, 0x65,
+ 0x6e, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20,
+ 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x10, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x4b, 0x65,
+ 0x79, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x28, 0x0a, 0x0f, 0x65, 0x6c, 0x65, 0x6d, 0x65,
+ 0x6e, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x69, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03,
+ 0x48, 0x00, 0x52, 0x0d, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x4b, 0x65, 0x79, 0x49, 0x6e,
+ 0x74, 0x42, 0x0a, 0x0a, 0x08, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x33, 0x0a,
+ 0x04, 0x53, 0x74, 0x6f, 0x70, 0x1a, 0x09, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
+ 0x1a, 0x20, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05,
+ 0x45, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x45, 0x72, 0x72,
+ 0x6f, 0x72, 0x22, 0x96, 0x01, 0x0a, 0x08, 0x52, 0x61, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12,
+ 0x12, 0x0a, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x6a,
+ 0x73, 0x6f, 0x6e, 0x12, 0x3a, 0x0a, 0x07, 0x66, 0x6c, 0x61, 0x74, 0x6d, 0x61, 0x70, 0x18, 0x02,
+ 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
+ 0x2e, 0x52, 0x61, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x46, 0x6c, 0x61, 0x74, 0x6d, 0x61,
+ 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x66, 0x6c, 0x61, 0x74, 0x6d, 0x61, 0x70, 0x1a,
+ 0x3a, 0x0a, 0x0c, 0x46, 0x6c, 0x61, 0x74, 0x6d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12,
+ 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65,
+ 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xeb, 0x07, 0x0a, 0x06,
+ 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f,
+ 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e,
+ 0x12, 0x2d, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32,
+ 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65,
+ 0x6d, 0x61, 0x2e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x1a,
+ 0xa2, 0x02, 0x0a, 0x05, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72,
+ 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73,
+ 0x69, 0x6f, 0x6e, 0x12, 0x3b, 0x0a, 0x0a, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65,
+ 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69,
+ 0x62, 0x75, 0x74, 0x65, 0x52, 0x0a, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73,
+ 0x12, 0x3e, 0x0a, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x18,
+ 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x42,
+ 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x0a, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x73,
+ 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18,
+ 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69,
+ 0x6f, 0x6e, 0x12, 0x40, 0x0a, 0x10, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f,
+ 0x6e, 0x5f, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4b,
+ 0x69, 0x6e, 0x64, 0x52, 0x0f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e,
+ 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74,
+ 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63,
+ 0x61, 0x74, 0x65, 0x64, 0x1a, 0xc8, 0x02, 0x0a, 0x09, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75,
+ 0x74, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02,
+ 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65,
+ 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52,
+ 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1a, 0x0a, 0x08,
+ 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08,
+ 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x6f, 0x70, 0x74, 0x69,
+ 0x6f, 0x6e, 0x61, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x6f, 0x70, 0x74, 0x69,
+ 0x6f, 0x6e, 0x61, 0x6c, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64,
+ 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74, 0x65, 0x64,
+ 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x18, 0x07, 0x20,
+ 0x01, 0x28, 0x08, 0x52, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x12, 0x40,
+ 0x0a, 0x10, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6b, 0x69,
+ 0x6e, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4b, 0x69, 0x6e, 0x64, 0x52,
+ 0x0f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x4b, 0x69, 0x6e, 0x64,
+ 0x12, 0x1e, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x18, 0x09,
+ 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64,
+ 0x12, 0x1d, 0x0a, 0x0a, 0x77, 0x72, 0x69, 0x74, 0x65, 0x5f, 0x6f, 0x6e, 0x6c, 0x79, 0x18, 0x0a,
+ 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x77, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x6e, 0x6c, 0x79, 0x1a,
+ 0xa7, 0x02, 0x0a, 0x0b, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x12,
+ 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01,
+ 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2d, 0x0a, 0x05,
+ 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x42,
+ 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x12, 0x43, 0x0a, 0x07, 0x6e,
+ 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x29, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e,
+ 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x2e, 0x4e, 0x65, 0x73, 0x74,
+ 0x69, 0x6e, 0x67, 0x4d, 0x6f, 0x64, 0x65, 0x52, 0x07, 0x6e, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x67,
+ 0x12, 0x1b, 0x0a, 0x09, 0x6d, 0x69, 0x6e, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x04, 0x20,
+ 0x01, 0x28, 0x03, 0x52, 0x08, 0x6d, 0x69, 0x6e, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x12, 0x1b, 0x0a,
+ 0x09, 0x6d, 0x61, 0x78, 0x5f, 0x69, 0x74, 0x65, 0x6d, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x03,
+ 0x52, 0x08, 0x6d, 0x61, 0x78, 0x49, 0x74, 0x65, 0x6d, 0x73, 0x22, 0x4d, 0x0a, 0x0b, 0x4e, 0x65,
+ 0x73, 0x74, 0x69, 0x6e, 0x67, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x49, 0x4e, 0x56,
+ 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x49, 0x4e, 0x47, 0x4c, 0x45,
+ 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x4c, 0x49, 0x53, 0x54, 0x10, 0x02, 0x12, 0x07, 0x0a, 0x03,
+ 0x53, 0x45, 0x54, 0x10, 0x03, 0x12, 0x07, 0x0a, 0x03, 0x4d, 0x41, 0x50, 0x10, 0x04, 0x12, 0x09,
+ 0x0a, 0x05, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x10, 0x05, 0x22, 0xa8, 0x01, 0x0a, 0x12, 0x53, 0x65,
+ 0x72, 0x76, 0x65, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73,
+ 0x12, 0x21, 0x0a, 0x0c, 0x70, 0x6c, 0x61, 0x6e, 0x5f, 0x64, 0x65, 0x73, 0x74, 0x72, 0x6f, 0x79,
+ 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0b, 0x70, 0x6c, 0x61, 0x6e, 0x44, 0x65, 0x73, 0x74,
+ 0x72, 0x6f, 0x79, 0x12, 0x3f, 0x0a, 0x1c, 0x67, 0x65, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69,
+ 0x64, 0x65, 0x72, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f,
+ 0x6e, 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x19, 0x67, 0x65, 0x74, 0x50, 0x72,
+ 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x4f, 0x70, 0x74, 0x69,
+ 0x6f, 0x6e, 0x61, 0x6c, 0x12, 0x2e, 0x0a, 0x13, 0x6d, 0x6f, 0x76, 0x65, 0x5f, 0x72, 0x65, 0x73,
+ 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28,
+ 0x08, 0x52, 0x11, 0x6d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53,
+ 0x74, 0x61, 0x74, 0x65, 0x22, 0x82, 0x01, 0x0a, 0x12, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43,
+ 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x29, 0x0a, 0x10, 0x64,
+ 0x65, 0x66, 0x65, 0x72, 0x72, 0x61, 0x6c, 0x5f, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x18,
+ 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x61, 0x6c, 0x41,
+ 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x12, 0x41, 0x0a, 0x1d, 0x77, 0x72, 0x69, 0x74, 0x65, 0x5f,
+ 0x6f, 0x6e, 0x6c, 0x79, 0x5f, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x5f,
+ 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x1a, 0x77,
+ 0x72, 0x69, 0x74, 0x65, 0x4f, 0x6e, 0x6c, 0x79, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74,
+ 0x65, 0x73, 0x41, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x22, 0x8e, 0x05, 0x0a, 0x08, 0x46, 0x75,
+ 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3d, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65,
+ 0x74, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70,
+ 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x2e,
+ 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d,
+ 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x4c, 0x0a, 0x12, 0x76, 0x61, 0x72, 0x69, 0x61, 0x64, 0x69,
+ 0x63, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28,
+ 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x46, 0x75,
+ 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72,
+ 0x52, 0x11, 0x76, 0x61, 0x72, 0x69, 0x61, 0x64, 0x69, 0x63, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65,
+ 0x74, 0x65, 0x72, 0x12, 0x32, 0x0a, 0x06, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x18, 0x03, 0x20,
+ 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e,
+ 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x52,
+ 0x06, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61,
+ 0x72, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72,
+ 0x79, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e,
+ 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74,
+ 0x69, 0x6f, 0x6e, 0x12, 0x40, 0x0a, 0x10, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69,
+ 0x6f, 0x6e, 0x5f, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e,
+ 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67,
+ 0x4b, 0x69, 0x6e, 0x64, 0x52, 0x0f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f,
+ 0x6e, 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x2f, 0x0a, 0x13, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61,
+ 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x07, 0x20, 0x01,
+ 0x28, 0x09, 0x52, 0x12, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d,
+ 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x1a, 0xf3, 0x01, 0x0a, 0x09, 0x50, 0x61, 0x72, 0x61, 0x6d,
+ 0x65, 0x74, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01,
+ 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65,
+ 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x28, 0x0a, 0x10,
+ 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x6e, 0x75, 0x6c, 0x6c, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65,
+ 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x4e, 0x75, 0x6c,
+ 0x6c, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x30, 0x0a, 0x14, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f,
+ 0x75, 0x6e, 0x6b, 0x6e, 0x6f, 0x77, 0x6e, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x04,
+ 0x20, 0x01, 0x28, 0x08, 0x52, 0x12, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x55, 0x6e, 0x6b, 0x6e, 0x6f,
+ 0x77, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63,
+ 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64,
+ 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x40, 0x0a, 0x10, 0x64, 0x65,
+ 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x06,
+ 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
+ 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4b, 0x69, 0x6e, 0x64, 0x52, 0x0f, 0x64, 0x65, 0x73,
+ 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x4b, 0x69, 0x6e, 0x64, 0x1a, 0x1c, 0x0a, 0x06,
+ 0x52, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01,
+ 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xa2, 0x01, 0x0a, 0x08, 0x44,
+ 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x12, 0x32, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f,
+ 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1a, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x2e, 0x52, 0x65, 0x61,
+ 0x73, 0x6f, 0x6e, 0x52, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x22, 0x62, 0x0a, 0x06, 0x52,
+ 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e,
+ 0x10, 0x00, 0x12, 0x1b, 0x0a, 0x17, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x43,
+ 0x4f, 0x4e, 0x46, 0x49, 0x47, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x01, 0x12,
+ 0x1b, 0x0a, 0x17, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x44, 0x45, 0x52, 0x5f, 0x43, 0x4f, 0x4e, 0x46,
+ 0x49, 0x47, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x02, 0x12, 0x11, 0x0a, 0x0d,
+ 0x41, 0x42, 0x53, 0x45, 0x4e, 0x54, 0x5f, 0x50, 0x52, 0x45, 0x52, 0x45, 0x51, 0x10, 0x03, 0x22,
+ 0xb3, 0x05, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x1a,
+ 0x09, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0xd2, 0x03, 0x0a, 0x08, 0x52,
+ 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4e, 0x0a, 0x13, 0x73, 0x65, 0x72, 0x76, 0x65,
+ 0x72, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x01,
+ 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
+ 0x2e, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74,
+ 0x69, 0x65, 0x73, 0x52, 0x12, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62,
+ 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e,
+ 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73,
+ 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73,
+ 0x12, 0x4c, 0x0a, 0x0c, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73,
+ 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
+ 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x44,
+ 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
+ 0x61, 0x52, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x45,
+ 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28,
+ 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65,
+ 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x45, 0x0a, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f,
+ 0x6e, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61,
+ 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
+ 0x61, 0x52, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x61, 0x0a, 0x13,
+ 0x65, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
+ 0x61, 0x2e, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75,
+ 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x12, 0x65, 0x70, 0x68,
+ 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x1a,
+ 0x26, 0x0a, 0x10, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64,
+ 0x61, 0x74, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
+ 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x1a, 0x31, 0x0a, 0x12, 0x44, 0x61, 0x74, 0x61, 0x53,
+ 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b, 0x0a,
+ 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x1a, 0x2f, 0x0a, 0x10, 0x52, 0x65,
+ 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b,
+ 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
+ 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x1a, 0x38, 0x0a, 0x19, 0x45,
+ 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
+ 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65,
+ 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70,
+ 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0xab, 0x08, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f,
+ 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x1a, 0x09, 0x0a, 0x07, 0x52,
+ 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x8a, 0x08, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f,
+ 0x6e, 0x73, 0x65, 0x12, 0x2d, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x18,
+ 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64,
+ 0x65, 0x72, 0x12, 0x65, 0x0a, 0x10, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73,
+ 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76,
+ 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f,
+ 0x6e, 0x73, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65,
+ 0x6d, 0x61, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x12, 0x6c, 0x0a, 0x13, 0x64, 0x61, 0x74,
+ 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73,
+ 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3c, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
+ 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63,
+ 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x44, 0x61,
+ 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x45,
+ 0x6e, 0x74, 0x72, 0x79, 0x52, 0x11, 0x64, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65,
+ 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e,
+ 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73,
+ 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73,
+ 0x12, 0x36, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x65, 0x74,
+ 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x76,
+ 0x69, 0x64, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x4e, 0x0a, 0x13, 0x73, 0x65, 0x72, 0x76,
+ 0x65, 0x72, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18,
+ 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x35, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69,
+ 0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x61, 0x70, 0x61,
+ 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x52, 0x0a, 0x09, 0x66, 0x75, 0x6e, 0x63,
+ 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69,
+ 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
+ 0x73, 0x65, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72,
+ 0x79, 0x52, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x81, 0x01, 0x0a,
+ 0x1a, 0x65, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75,
+ 0x72, 0x63, 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28,
+ 0x0b, 0x32, 0x43, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65,
+ 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e,
+ 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72,
+ 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61,
+ 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x18, 0x65, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61,
+ 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73,
+ 0x1a, 0x55, 0x0a, 0x14, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65,
+ 0x6d, 0x61, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18,
+ 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x05, 0x76, 0x61,
+ 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x05, 0x76, 0x61,
+ 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x57, 0x0a, 0x16, 0x44, 0x61, 0x74, 0x61, 0x53,
+ 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x45, 0x6e, 0x74, 0x72,
+ 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03,
+ 0x6b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01,
+ 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53,
+ 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01,
+ 0x1a, 0x51, 0x0a, 0x0e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74,
+ 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
+ 0x03, 0x6b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20,
+ 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e,
+ 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a,
+ 0x02, 0x38, 0x01, 0x1a, 0x5e, 0x0a, 0x1d, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c,
+ 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x45,
+ 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28,
+ 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18,
+ 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a,
+ 0x02, 0x38, 0x01, 0x22, 0xdb, 0x01, 0x0a, 0x15, 0x50, 0x72, 0x65, 0x70, 0x61, 0x72, 0x65, 0x50,
+ 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x3a, 0x0a,
+ 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66,
+ 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75,
+ 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x85, 0x01, 0x0a, 0x08, 0x52, 0x65,
+ 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x40, 0x0a, 0x0f, 0x70, 0x72, 0x65, 0x70, 0x61, 0x72,
+ 0x65, 0x64, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32,
+ 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61,
+ 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0e, 0x70, 0x72, 0x65, 0x70, 0x61, 0x72,
+ 0x65, 0x64, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67,
+ 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e,
+ 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f,
+ 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63,
+ 0x73, 0x22, 0x90, 0x02, 0x0a, 0x14, 0x55, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x73,
+ 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x72, 0x0a, 0x07, 0x52, 0x65,
+ 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61,
+ 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61,
+ 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20,
+ 0x01, 0x28, 0x03, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x30, 0x0a, 0x09,
+ 0x72, 0x61, 0x77, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32,
+ 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x52, 0x61, 0x77, 0x53,
+ 0x74, 0x61, 0x74, 0x65, 0x52, 0x08, 0x72, 0x61, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x83,
+ 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3e, 0x0a, 0x0e, 0x75,
+ 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x64, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20,
+ 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e,
+ 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0d, 0x75, 0x70,
+ 0x67, 0x72, 0x61, 0x64, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64,
+ 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b,
+ 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61,
+ 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73,
+ 0x74, 0x69, 0x63, 0x73, 0x22, 0x8b, 0x02, 0x0a, 0x1a, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74,
+ 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x43, 0x6f, 0x6e,
+ 0x66, 0x69, 0x67, 0x1a, 0xa7, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
+ 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01,
+ 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2f, 0x0a, 0x06,
+ 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63,
+ 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x4e, 0x0a,
+ 0x13, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69,
+ 0x74, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70,
+ 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70,
+ 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e,
+ 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a, 0x43, 0x0a,
+ 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61,
+ 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15,
+ 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e,
+ 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69,
+ 0x63, 0x73, 0x22, 0xb8, 0x01, 0x0a, 0x18, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x44,
+ 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a,
+ 0x57, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79,
+ 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74,
+ 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69,
+ 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65,
+ 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x43, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70,
+ 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
+ 0x69, 0x63, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63,
+ 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0x8a, 0x02,
+ 0x0a, 0x09, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x65, 0x1a, 0xb7, 0x01, 0x0a, 0x07,
+ 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2b, 0x0a, 0x11, 0x74, 0x65, 0x72, 0x72, 0x61,
+ 0x66, 0x6f, 0x72, 0x6d, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01,
+ 0x28, 0x09, 0x52, 0x10, 0x74, 0x65, 0x72, 0x72, 0x61, 0x66, 0x6f, 0x72, 0x6d, 0x56, 0x65, 0x72,
+ 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02,
+ 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
+ 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63,
+ 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x4e, 0x0a, 0x13, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f,
+ 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01,
+ 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43,
+ 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65,
+ 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c,
+ 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a, 0x43, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
+ 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73,
+ 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
+ 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64,
+ 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0xe4, 0x03, 0x0a, 0x0c, 0x52,
+ 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x1a, 0x8c, 0x02, 0x0a, 0x07,
+ 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f,
+ 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65,
+ 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f,
+ 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56,
+ 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x53, 0x74, 0x61,
+ 0x74, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20,
+ 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x12, 0x3c, 0x0a, 0x0d,
+ 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x04, 0x20,
+ 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e,
+ 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70, 0x72,
+ 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x4e, 0x0a, 0x13, 0x63, 0x6c,
+ 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65,
+ 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69,
+ 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61,
+ 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a, 0xc4, 0x01, 0x0a, 0x08, 0x52,
+ 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x34, 0x0a, 0x09, 0x6e, 0x65, 0x77, 0x5f, 0x73,
+ 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70,
+ 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61,
+ 0x6c, 0x75, 0x65, 0x52, 0x08, 0x6e, 0x65, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a,
+ 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03,
+ 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44,
+ 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e,
+ 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74,
+ 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65,
+ 0x12, 0x2f, 0x0a, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x18, 0x04, 0x20, 0x01,
+ 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44,
+ 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x52, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65,
+ 0x64, 0x22, 0xf3, 0x05, 0x0a, 0x12, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x1a, 0x8b, 0x03, 0x0a, 0x07, 0x52, 0x65, 0x71,
+ 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d,
+ 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d,
+ 0x65, 0x12, 0x38, 0x0a, 0x0b, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65,
+ 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
+ 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52,
+ 0x0a, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x45, 0x0a, 0x12, 0x70,
+ 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x64, 0x5f, 0x6e, 0x65, 0x77, 0x5f, 0x73, 0x74, 0x61, 0x74,
+ 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65,
+ 0x52, 0x10, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x64, 0x4e, 0x65, 0x77, 0x53, 0x74, 0x61,
+ 0x74, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01,
+ 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44,
+ 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e,
+ 0x66, 0x69, 0x67, 0x12, 0x23, 0x0a, 0x0d, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x5f, 0x70, 0x72, 0x69,
+ 0x76, 0x61, 0x74, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x70, 0x72, 0x69, 0x6f,
+ 0x72, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x76,
+ 0x69, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32,
+ 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61,
+ 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64,
+ 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x4e, 0x0a, 0x13, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74,
+ 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x07, 0x20,
+ 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e,
+ 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69,
+ 0x65, 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69,
+ 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a, 0xce, 0x02, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f,
+ 0x6e, 0x73, 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x5f, 0x73,
+ 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70,
+ 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61,
+ 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74,
+ 0x65, 0x12, 0x43, 0x0a, 0x10, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x73, 0x5f, 0x72, 0x65,
+ 0x70, 0x6c, 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74,
+ 0x65, 0x50, 0x61, 0x74, 0x68, 0x52, 0x0f, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x73, 0x52,
+ 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65,
+ 0x64, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52,
+ 0x0e, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x12,
+ 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x04,
+ 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
+ 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61,
+ 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x2c, 0x0a, 0x12, 0x6c, 0x65, 0x67, 0x61,
+ 0x63, 0x79, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x18, 0x05,
+ 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x6c, 0x65, 0x67, 0x61, 0x63, 0x79, 0x54, 0x79, 0x70, 0x65,
+ 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x12, 0x2f, 0x0a, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72,
+ 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x52, 0x08, 0x64,
+ 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x22, 0x92, 0x04, 0x0a, 0x13, 0x41, 0x70, 0x70, 0x6c,
+ 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x1a,
+ 0xb6, 0x02, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74,
+ 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08,
+ 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x0b, 0x70, 0x72, 0x69, 0x6f,
+ 0x72, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e,
+ 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69,
+ 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0a, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x53, 0x74, 0x61,
+ 0x74, 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x5f, 0x73, 0x74,
+ 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c,
+ 0x75, 0x65, 0x52, 0x0c, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65,
+ 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b,
+ 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e,
+ 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69,
+ 0x67, 0x12, 0x27, 0x0a, 0x0f, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x69,
+ 0x76, 0x61, 0x74, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0e, 0x70, 0x6c, 0x61, 0x6e,
+ 0x6e, 0x65, 0x64, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x72,
+ 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28,
+ 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79,
+ 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x76,
+ 0x69, 0x64, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x1a, 0xc1, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73,
+ 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x34, 0x0a, 0x09, 0x6e, 0x65, 0x77, 0x5f, 0x73, 0x74, 0x61,
+ 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75,
+ 0x65, 0x52, 0x08, 0x6e, 0x65, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x70,
+ 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x72,
+ 0x69, 0x76, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73,
+ 0x74, 0x69, 0x63, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70,
+ 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69,
+ 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x2c,
+ 0x0a, 0x12, 0x6c, 0x65, 0x67, 0x61, 0x63, 0x79, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x73, 0x79,
+ 0x73, 0x74, 0x65, 0x6d, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x6c, 0x65, 0x67, 0x61,
+ 0x63, 0x79, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x22, 0xef, 0x03, 0x0a,
+ 0x13, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53,
+ 0x74, 0x61, 0x74, 0x65, 0x1a, 0x86, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
+ 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20,
+ 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x0e, 0x0a,
+ 0x02, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x4e, 0x0a,
+ 0x13, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69,
+ 0x74, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70,
+ 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70,
+ 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e,
+ 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a, 0x78, 0x0a,
+ 0x10, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01,
+ 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2d,
+ 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e,
+ 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69,
+ 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x18, 0x0a,
+ 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07,
+ 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x1a, 0xd4, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70,
+ 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x5e, 0x0a, 0x12, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64,
+ 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b,
+ 0x32, 0x2f, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x49, 0x6d, 0x70,
+ 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65,
+ 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x52, 0x11, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75,
+ 0x72, 0x63, 0x65, 0x73, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
+ 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63,
+ 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x2f, 0x0a,
+ 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32,
+ 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x65, 0x66, 0x65,
+ 0x72, 0x72, 0x65, 0x64, 0x52, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x22, 0xe7,
+ 0x03, 0x0a, 0x11, 0x4d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53,
+ 0x74, 0x61, 0x74, 0x65, 0x1a, 0xa8, 0x02, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
+ 0x12, 0x36, 0x0a, 0x17, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69,
+ 0x64, 0x65, 0x72, 0x5f, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28,
+ 0x09, 0x52, 0x15, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65,
+ 0x72, 0x41, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x12, 0x28, 0x0a, 0x10, 0x73, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01,
+ 0x28, 0x09, 0x52, 0x0e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61,
+ 0x6d, 0x65, 0x12, 0x32, 0x0a, 0x15, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x63, 0x68,
+ 0x65, 0x6d, 0x61, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28,
+ 0x03, 0x52, 0x13, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x56,
+ 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x36, 0x0a, 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
+ 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x52, 0x61, 0x77, 0x53, 0x74, 0x61, 0x74,
+ 0x65, 0x52, 0x0b, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x28,
+ 0x0a, 0x10, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61,
+ 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74,
+ 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x25, 0x0a, 0x0e, 0x73, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0c,
+ 0x52, 0x0d, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x1a,
+ 0xa6, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3a, 0x0a, 0x0c,
+ 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01,
+ 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44,
+ 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0b, 0x74, 0x61, 0x72,
+ 0x67, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67,
+ 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e,
+ 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f,
+ 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63,
+ 0x73, 0x12, 0x25, 0x0a, 0x0e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x70, 0x72, 0x69, 0x76,
+ 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0d, 0x74, 0x61, 0x72, 0x67, 0x65,
+ 0x74, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x22, 0x9e, 0x03, 0x0a, 0x0e, 0x52, 0x65, 0x61,
+ 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x1a, 0xe5, 0x01, 0x0a, 0x07,
+ 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f,
+ 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65,
+ 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02,
+ 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
+ 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63,
+ 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65,
+ 0x72, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63,
+ 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x4d,
+ 0x65, 0x74, 0x61, 0x12, 0x4e, 0x0a, 0x13, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61,
+ 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b,
+ 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x6c, 0x69,
+ 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52,
+ 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74,
+ 0x69, 0x65, 0x73, 0x1a, 0xa3, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
+ 0x12, 0x2d, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32,
+ 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61,
+ 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12,
+ 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02,
+ 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
+ 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61,
+ 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x2f, 0x0a, 0x08, 0x64, 0x65, 0x66, 0x65,
+ 0x72, 0x72, 0x65, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70,
+ 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x52,
+ 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x22, 0x9b, 0x01, 0x0a, 0x14, 0x47, 0x65,
+ 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65,
+ 0x6d, 0x61, 0x1a, 0x09, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x78, 0x0a,
+ 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x33, 0x0a, 0x0b, 0x70, 0x72, 0x6f,
+ 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11,
+ 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d,
+ 0x61, 0x52, 0x0b, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x37,
+ 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20,
+ 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e,
+ 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67,
+ 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0x9c, 0x01, 0x0a, 0x19, 0x56, 0x61, 0x6c, 0x69,
+ 0x64, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x43,
+ 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x3a, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
+ 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b,
+ 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e,
+ 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69,
+ 0x67, 0x1a, 0x43, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a,
+ 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x01, 0x20, 0x03,
+ 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44,
+ 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e,
+ 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0xe5, 0x01, 0x0a, 0x11, 0x50, 0x72, 0x6f, 0x76, 0x69,
+ 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x1a, 0x73, 0x0a, 0x07,
+ 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69,
+ 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65,
+ 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x37, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x6e,
+ 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63,
+ 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f,
+ 0x6e, 0x1a, 0x5b, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a,
+ 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6f,
+ 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73,
+ 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70,
+ 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69,
+ 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0x81,
+ 0x02, 0x0a, 0x0c, 0x47, 0x65, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a,
+ 0x09, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0xe5, 0x01, 0x0a, 0x08, 0x52,
+ 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4d, 0x0a, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74,
+ 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x74, 0x66, 0x70,
+ 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69,
+ 0x6f, 0x6e, 0x73, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x46, 0x75, 0x6e,
+ 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x66, 0x75, 0x6e,
+ 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f,
+ 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
+ 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x1a,
+ 0x51, 0x0a, 0x0e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72,
+ 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03,
+ 0x6b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01,
+ 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x46,
+ 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02,
+ 0x38, 0x01, 0x22, 0xd1, 0x01, 0x0a, 0x0c, 0x43, 0x61, 0x6c, 0x6c, 0x46, 0x75, 0x6e, 0x63, 0x74,
+ 0x69, 0x6f, 0x6e, 0x1a, 0x54, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12,
+ 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61,
+ 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18,
+ 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x09,
+ 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0x6b, 0x0a, 0x08, 0x52, 0x65, 0x73,
+ 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18,
+ 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06,
+ 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x2e, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18,
+ 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x35, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52,
+ 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0xbf, 0x01, 0x0a, 0x1f, 0x56, 0x61, 0x6c, 0x69, 0x64,
+ 0x61, 0x74, 0x65, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x57, 0x0a, 0x07, 0x52, 0x65,
+ 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61,
+ 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61,
+ 0x6d, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01,
+ 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44,
+ 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e,
+ 0x66, 0x69, 0x67, 0x1a, 0x43, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12,
+ 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x01,
+ 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
+ 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61,
+ 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0xdd, 0x03, 0x0a, 0x15, 0x4f, 0x70, 0x65,
+ 0x6e, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x1a, 0xa7, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b,
+ 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
+ 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63,
+ 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56,
+ 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x4e, 0x0a, 0x13,
+ 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74,
+ 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61,
+ 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74,
+ 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a, 0x99, 0x02, 0x0a,
+ 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61,
+ 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15,
+ 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e,
+ 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69,
+ 0x63, 0x73, 0x12, 0x3a, 0x0a, 0x08, 0x72, 0x65, 0x6e, 0x65, 0x77, 0x5f, 0x61, 0x74, 0x18, 0x02,
+ 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72,
+ 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70,
+ 0x48, 0x00, 0x52, 0x07, 0x72, 0x65, 0x6e, 0x65, 0x77, 0x41, 0x74, 0x88, 0x01, 0x01, 0x12, 0x2f,
+ 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17,
+ 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d,
+ 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12,
+ 0x1d, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c,
+ 0x48, 0x01, 0x52, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x88, 0x01, 0x01, 0x12, 0x2f,
+ 0x0a, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b,
+ 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x65, 0x66,
+ 0x65, 0x72, 0x72, 0x65, 0x64, 0x52, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x42,
+ 0x0b, 0x0a, 0x09, 0x5f, 0x72, 0x65, 0x6e, 0x65, 0x77, 0x5f, 0x61, 0x74, 0x42, 0x0a, 0x0a, 0x08,
+ 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x22, 0xa5, 0x02, 0x0a, 0x16, 0x52, 0x65, 0x6e,
+ 0x65, 0x77, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75,
+ 0x72, 0x63, 0x65, 0x1a, 0x51, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b,
+ 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
+ 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x07, 0x70,
+ 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x07,
+ 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x70,
+ 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x1a, 0xb7, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f,
+ 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69,
+ 0x63, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52,
+ 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x3a, 0x0a, 0x08,
+ 0x72, 0x65, 0x6e, 0x65, 0x77, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a,
+ 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66,
+ 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x48, 0x00, 0x52, 0x07, 0x72, 0x65,
+ 0x6e, 0x65, 0x77, 0x41, 0x74, 0x88, 0x01, 0x01, 0x12, 0x1d, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76,
+ 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x01, 0x52, 0x07, 0x70, 0x72, 0x69,
+ 0x76, 0x61, 0x74, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x72, 0x65, 0x6e, 0x65,
+ 0x77, 0x5f, 0x61, 0x74, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65,
+ 0x22, 0xb0, 0x01, 0x0a, 0x16, 0x43, 0x6c, 0x6f, 0x73, 0x65, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65,
+ 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x1a, 0x51, 0x0a, 0x07, 0x52,
+ 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e,
+ 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e,
+ 0x61, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x02,
+ 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x88,
+ 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x1a, 0x43,
+ 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69,
+ 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32,
+ 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x44, 0x69, 0x61, 0x67,
+ 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
+ 0x69, 0x63, 0x73, 0x2a, 0x25, 0x0a, 0x0a, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4b, 0x69, 0x6e,
+ 0x64, 0x12, 0x09, 0x0a, 0x05, 0x50, 0x4c, 0x41, 0x49, 0x4e, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08,
+ 0x4d, 0x41, 0x52, 0x4b, 0x44, 0x4f, 0x57, 0x4e, 0x10, 0x01, 0x32, 0xcc, 0x0f, 0x0a, 0x08, 0x50,
+ 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x4e, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x4d, 0x65,
+ 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1e, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
+ 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x52,
+ 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
+ 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x52,
+ 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x58, 0x0a, 0x09, 0x47, 0x65, 0x74, 0x53, 0x63,
+ 0x68, 0x65, 0x6d, 0x61, 0x12, 0x24, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
+ 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65,
+ 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x74, 0x66, 0x70,
+ 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64,
+ 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
+ 0x65, 0x12, 0x6c, 0x0a, 0x15, 0x50, 0x72, 0x65, 0x70, 0x61, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x76,
+ 0x69, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x28, 0x2e, 0x74, 0x66, 0x70,
+ 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x50, 0x72, 0x65, 0x70, 0x61, 0x72, 0x65, 0x50, 0x72,
+ 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x71,
+ 0x75, 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
+ 0x2e, 0x50, 0x72, 0x65, 0x70, 0x61, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72,
+ 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12,
+ 0x7b, 0x0a, 0x1a, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75,
+ 0x72, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x2d, 0x2e,
+ 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61,
+ 0x74, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x43, 0x6f,
+ 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2e, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74,
+ 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x43, 0x6f, 0x6e,
+ 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x75, 0x0a, 0x18,
+ 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x2b, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74,
+ 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65,
+ 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x35, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f,
+ 0x6e, 0x73, 0x65, 0x12, 0x69, 0x0a, 0x14, 0x55, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65,
+ 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x27, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x55, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52,
+ 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65, 0x71,
+ 0x75, 0x65, 0x73, 0x74, 0x1a, 0x28, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
+ 0x2e, 0x55, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
+ 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x48,
+ 0x0a, 0x09, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x65, 0x12, 0x1c, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72,
+ 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x65, 0x2e,
+ 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x51, 0x0a, 0x0c, 0x52, 0x65, 0x61, 0x64,
+ 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1f, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x63, 0x0a, 0x12, 0x50,
+ 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67,
+ 0x65, 0x12, 0x25, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x50, 0x6c,
+ 0x61, 0x6e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65,
+ 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x26, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
+ 0x12, 0x66, 0x0a, 0x13, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x26, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x35, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
+ 0x27, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x41, 0x70, 0x70, 0x6c,
+ 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x2e,
+ 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x13, 0x49, 0x6d, 0x70, 0x6f,
+ 0x72, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12,
+ 0x26, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x49, 0x6d, 0x70, 0x6f,
+ 0x72, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e,
+ 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x35, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
+ 0x12, 0x60, 0x0a, 0x11, 0x4d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
+ 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x24, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x35, 0x2e, 0x4d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74,
+ 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x4d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
+ 0x73, 0x65, 0x12, 0x57, 0x0a, 0x0e, 0x52, 0x65, 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x12, 0x21, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35,
+ 0x2e, 0x52, 0x65, 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e,
+ 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x35, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x8a, 0x01, 0x0a, 0x1f,
+ 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61,
+ 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12,
+ 0x32, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x56, 0x61, 0x6c, 0x69,
+ 0x64, 0x61, 0x74, 0x65, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73,
+ 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x71, 0x75,
+ 0x65, 0x73, 0x74, 0x1a, 0x33, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e,
+ 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61,
+ 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e,
+ 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x6c, 0x0a, 0x15, 0x4f, 0x70, 0x65, 0x6e,
+ 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x12, 0x28, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x4f, 0x70,
+ 0x65, 0x6e, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75,
+ 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x4f, 0x70, 0x65, 0x6e, 0x45, 0x70, 0x68, 0x65,
+ 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65,
+ 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x6f, 0x0a, 0x16, 0x52, 0x65, 0x6e, 0x65, 0x77, 0x45,
+ 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
+ 0x12, 0x29, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x52, 0x65, 0x6e,
+ 0x65, 0x77, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75,
+ 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x52, 0x65, 0x6e, 0x65, 0x77, 0x45, 0x70, 0x68,
+ 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52,
+ 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x6f, 0x0a, 0x16, 0x43, 0x6c, 0x6f, 0x73, 0x65,
+ 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x12, 0x29, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x6c,
+ 0x6f, 0x73, 0x65, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x6c, 0x6f, 0x73, 0x65, 0x45, 0x70,
+ 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e,
+ 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x51, 0x0a, 0x0c, 0x47, 0x65, 0x74, 0x46,
+ 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1f, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e,
+ 0x73, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f,
+ 0x6e, 0x73, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x51, 0x0a, 0x0c, 0x43,
+ 0x61, 0x6c, 0x6c, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1f, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x61, 0x6c, 0x6c, 0x46, 0x75, 0x6e, 0x63,
+ 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x43, 0x61, 0x6c, 0x6c, 0x46, 0x75, 0x6e,
+ 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x39,
+ 0x0a, 0x04, 0x53, 0x74, 0x6f, 0x70, 0x12, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
+ 0x6e, 0x35, 0x2e, 0x53, 0x74, 0x6f, 0x70, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
+ 0x18, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x53, 0x74, 0x6f, 0x70,
+ 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0x86, 0x03, 0x0a, 0x0b, 0x50, 0x72,
+ 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x5e, 0x0a, 0x09, 0x47, 0x65, 0x74,
+ 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x27, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
+ 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65,
+ 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
+ 0x28, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x35, 0x2e, 0x47, 0x65, 0x74, 0x50,
+ 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61,
+ 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x78, 0x0a, 0x19, 0x56, 0x61, 0x6c,
0x69, 0x64, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72,
0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x2c, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
0x6e, 0x35, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69,
@@ -5430,234 +6045,272 @@ func file_tfplugin5_proto_rawDescGZIP() []byte {
}
var file_tfplugin5_proto_enumTypes = make([]protoimpl.EnumInfo, 4)
-var file_tfplugin5_proto_msgTypes = make([]protoimpl.MessageInfo, 82)
-var file_tfplugin5_proto_goTypes = []interface{}{
- (StringKind)(0), // 0: tfplugin5.StringKind
- (Diagnostic_Severity)(0), // 1: tfplugin5.Diagnostic.Severity
- (Schema_NestedBlock_NestingMode)(0), // 2: tfplugin5.Schema.NestedBlock.NestingMode
- (Deferred_Reason)(0), // 3: tfplugin5.Deferred.Reason
- (*DynamicValue)(nil), // 4: tfplugin5.DynamicValue
- (*Diagnostic)(nil), // 5: tfplugin5.Diagnostic
- (*FunctionError)(nil), // 6: tfplugin5.FunctionError
- (*AttributePath)(nil), // 7: tfplugin5.AttributePath
- (*Stop)(nil), // 8: tfplugin5.Stop
- (*RawState)(nil), // 9: tfplugin5.RawState
- (*Schema)(nil), // 10: tfplugin5.Schema
- (*ServerCapabilities)(nil), // 11: tfplugin5.ServerCapabilities
- (*ClientCapabilities)(nil), // 12: tfplugin5.ClientCapabilities
- (*Function)(nil), // 13: tfplugin5.Function
- (*Deferred)(nil), // 14: tfplugin5.Deferred
- (*GetMetadata)(nil), // 15: tfplugin5.GetMetadata
- (*GetProviderSchema)(nil), // 16: tfplugin5.GetProviderSchema
- (*PrepareProviderConfig)(nil), // 17: tfplugin5.PrepareProviderConfig
- (*UpgradeResourceState)(nil), // 18: tfplugin5.UpgradeResourceState
- (*ValidateResourceTypeConfig)(nil), // 19: tfplugin5.ValidateResourceTypeConfig
- (*ValidateDataSourceConfig)(nil), // 20: tfplugin5.ValidateDataSourceConfig
- (*Configure)(nil), // 21: tfplugin5.Configure
- (*ReadResource)(nil), // 22: tfplugin5.ReadResource
- (*PlanResourceChange)(nil), // 23: tfplugin5.PlanResourceChange
- (*ApplyResourceChange)(nil), // 24: tfplugin5.ApplyResourceChange
- (*ImportResourceState)(nil), // 25: tfplugin5.ImportResourceState
- (*MoveResourceState)(nil), // 26: tfplugin5.MoveResourceState
- (*ReadDataSource)(nil), // 27: tfplugin5.ReadDataSource
- (*GetProvisionerSchema)(nil), // 28: tfplugin5.GetProvisionerSchema
- (*ValidateProvisionerConfig)(nil), // 29: tfplugin5.ValidateProvisionerConfig
- (*ProvisionResource)(nil), // 30: tfplugin5.ProvisionResource
- (*GetFunctions)(nil), // 31: tfplugin5.GetFunctions
- (*CallFunction)(nil), // 32: tfplugin5.CallFunction
- (*AttributePath_Step)(nil), // 33: tfplugin5.AttributePath.Step
- (*Stop_Request)(nil), // 34: tfplugin5.Stop.Request
- (*Stop_Response)(nil), // 35: tfplugin5.Stop.Response
- nil, // 36: tfplugin5.RawState.FlatmapEntry
- (*Schema_Block)(nil), // 37: tfplugin5.Schema.Block
- (*Schema_Attribute)(nil), // 38: tfplugin5.Schema.Attribute
- (*Schema_NestedBlock)(nil), // 39: tfplugin5.Schema.NestedBlock
- (*Function_Parameter)(nil), // 40: tfplugin5.Function.Parameter
- (*Function_Return)(nil), // 41: tfplugin5.Function.Return
- (*GetMetadata_Request)(nil), // 42: tfplugin5.GetMetadata.Request
- (*GetMetadata_Response)(nil), // 43: tfplugin5.GetMetadata.Response
- (*GetMetadata_FunctionMetadata)(nil), // 44: tfplugin5.GetMetadata.FunctionMetadata
- (*GetMetadata_DataSourceMetadata)(nil), // 45: tfplugin5.GetMetadata.DataSourceMetadata
- (*GetMetadata_ResourceMetadata)(nil), // 46: tfplugin5.GetMetadata.ResourceMetadata
- (*GetProviderSchema_Request)(nil), // 47: tfplugin5.GetProviderSchema.Request
- (*GetProviderSchema_Response)(nil), // 48: tfplugin5.GetProviderSchema.Response
- nil, // 49: tfplugin5.GetProviderSchema.Response.ResourceSchemasEntry
- nil, // 50: tfplugin5.GetProviderSchema.Response.DataSourceSchemasEntry
- nil, // 51: tfplugin5.GetProviderSchema.Response.FunctionsEntry
- (*PrepareProviderConfig_Request)(nil), // 52: tfplugin5.PrepareProviderConfig.Request
- (*PrepareProviderConfig_Response)(nil), // 53: tfplugin5.PrepareProviderConfig.Response
- (*UpgradeResourceState_Request)(nil), // 54: tfplugin5.UpgradeResourceState.Request
- (*UpgradeResourceState_Response)(nil), // 55: tfplugin5.UpgradeResourceState.Response
- (*ValidateResourceTypeConfig_Request)(nil), // 56: tfplugin5.ValidateResourceTypeConfig.Request
- (*ValidateResourceTypeConfig_Response)(nil), // 57: tfplugin5.ValidateResourceTypeConfig.Response
- (*ValidateDataSourceConfig_Request)(nil), // 58: tfplugin5.ValidateDataSourceConfig.Request
- (*ValidateDataSourceConfig_Response)(nil), // 59: tfplugin5.ValidateDataSourceConfig.Response
- (*Configure_Request)(nil), // 60: tfplugin5.Configure.Request
- (*Configure_Response)(nil), // 61: tfplugin5.Configure.Response
- (*ReadResource_Request)(nil), // 62: tfplugin5.ReadResource.Request
- (*ReadResource_Response)(nil), // 63: tfplugin5.ReadResource.Response
- (*PlanResourceChange_Request)(nil), // 64: tfplugin5.PlanResourceChange.Request
- (*PlanResourceChange_Response)(nil), // 65: tfplugin5.PlanResourceChange.Response
- (*ApplyResourceChange_Request)(nil), // 66: tfplugin5.ApplyResourceChange.Request
- (*ApplyResourceChange_Response)(nil), // 67: tfplugin5.ApplyResourceChange.Response
- (*ImportResourceState_Request)(nil), // 68: tfplugin5.ImportResourceState.Request
- (*ImportResourceState_ImportedResource)(nil), // 69: tfplugin5.ImportResourceState.ImportedResource
- (*ImportResourceState_Response)(nil), // 70: tfplugin5.ImportResourceState.Response
- (*MoveResourceState_Request)(nil), // 71: tfplugin5.MoveResourceState.Request
- (*MoveResourceState_Response)(nil), // 72: tfplugin5.MoveResourceState.Response
- (*ReadDataSource_Request)(nil), // 73: tfplugin5.ReadDataSource.Request
- (*ReadDataSource_Response)(nil), // 74: tfplugin5.ReadDataSource.Response
- (*GetProvisionerSchema_Request)(nil), // 75: tfplugin5.GetProvisionerSchema.Request
- (*GetProvisionerSchema_Response)(nil), // 76: tfplugin5.GetProvisionerSchema.Response
- (*ValidateProvisionerConfig_Request)(nil), // 77: tfplugin5.ValidateProvisionerConfig.Request
- (*ValidateProvisionerConfig_Response)(nil), // 78: tfplugin5.ValidateProvisionerConfig.Response
- (*ProvisionResource_Request)(nil), // 79: tfplugin5.ProvisionResource.Request
- (*ProvisionResource_Response)(nil), // 80: tfplugin5.ProvisionResource.Response
- (*GetFunctions_Request)(nil), // 81: tfplugin5.GetFunctions.Request
- (*GetFunctions_Response)(nil), // 82: tfplugin5.GetFunctions.Response
- nil, // 83: tfplugin5.GetFunctions.Response.FunctionsEntry
- (*CallFunction_Request)(nil), // 84: tfplugin5.CallFunction.Request
- (*CallFunction_Response)(nil), // 85: tfplugin5.CallFunction.Response
+var file_tfplugin5_proto_msgTypes = make([]protoimpl.MessageInfo, 96)
+var file_tfplugin5_proto_goTypes = []any{
+ (StringKind)(0), // 0: tfplugin5.StringKind
+ (Diagnostic_Severity)(0), // 1: tfplugin5.Diagnostic.Severity
+ (Schema_NestedBlock_NestingMode)(0), // 2: tfplugin5.Schema.NestedBlock.NestingMode
+ (Deferred_Reason)(0), // 3: tfplugin5.Deferred.Reason
+ (*DynamicValue)(nil), // 4: tfplugin5.DynamicValue
+ (*Diagnostic)(nil), // 5: tfplugin5.Diagnostic
+ (*FunctionError)(nil), // 6: tfplugin5.FunctionError
+ (*AttributePath)(nil), // 7: tfplugin5.AttributePath
+ (*Stop)(nil), // 8: tfplugin5.Stop
+ (*RawState)(nil), // 9: tfplugin5.RawState
+ (*Schema)(nil), // 10: tfplugin5.Schema
+ (*ServerCapabilities)(nil), // 11: tfplugin5.ServerCapabilities
+ (*ClientCapabilities)(nil), // 12: tfplugin5.ClientCapabilities
+ (*Function)(nil), // 13: tfplugin5.Function
+ (*Deferred)(nil), // 14: tfplugin5.Deferred
+ (*GetMetadata)(nil), // 15: tfplugin5.GetMetadata
+ (*GetProviderSchema)(nil), // 16: tfplugin5.GetProviderSchema
+ (*PrepareProviderConfig)(nil), // 17: tfplugin5.PrepareProviderConfig
+ (*UpgradeResourceState)(nil), // 18: tfplugin5.UpgradeResourceState
+ (*ValidateResourceTypeConfig)(nil), // 19: tfplugin5.ValidateResourceTypeConfig
+ (*ValidateDataSourceConfig)(nil), // 20: tfplugin5.ValidateDataSourceConfig
+ (*Configure)(nil), // 21: tfplugin5.Configure
+ (*ReadResource)(nil), // 22: tfplugin5.ReadResource
+ (*PlanResourceChange)(nil), // 23: tfplugin5.PlanResourceChange
+ (*ApplyResourceChange)(nil), // 24: tfplugin5.ApplyResourceChange
+ (*ImportResourceState)(nil), // 25: tfplugin5.ImportResourceState
+ (*MoveResourceState)(nil), // 26: tfplugin5.MoveResourceState
+ (*ReadDataSource)(nil), // 27: tfplugin5.ReadDataSource
+ (*GetProvisionerSchema)(nil), // 28: tfplugin5.GetProvisionerSchema
+ (*ValidateProvisionerConfig)(nil), // 29: tfplugin5.ValidateProvisionerConfig
+ (*ProvisionResource)(nil), // 30: tfplugin5.ProvisionResource
+ (*GetFunctions)(nil), // 31: tfplugin5.GetFunctions
+ (*CallFunction)(nil), // 32: tfplugin5.CallFunction
+ (*ValidateEphemeralResourceConfig)(nil), // 33: tfplugin5.ValidateEphemeralResourceConfig
+ (*OpenEphemeralResource)(nil), // 34: tfplugin5.OpenEphemeralResource
+ (*RenewEphemeralResource)(nil), // 35: tfplugin5.RenewEphemeralResource
+ (*CloseEphemeralResource)(nil), // 36: tfplugin5.CloseEphemeralResource
+ (*AttributePath_Step)(nil), // 37: tfplugin5.AttributePath.Step
+ (*Stop_Request)(nil), // 38: tfplugin5.Stop.Request
+ (*Stop_Response)(nil), // 39: tfplugin5.Stop.Response
+ nil, // 40: tfplugin5.RawState.FlatmapEntry
+ (*Schema_Block)(nil), // 41: tfplugin5.Schema.Block
+ (*Schema_Attribute)(nil), // 42: tfplugin5.Schema.Attribute
+ (*Schema_NestedBlock)(nil), // 43: tfplugin5.Schema.NestedBlock
+ (*Function_Parameter)(nil), // 44: tfplugin5.Function.Parameter
+ (*Function_Return)(nil), // 45: tfplugin5.Function.Return
+ (*GetMetadata_Request)(nil), // 46: tfplugin5.GetMetadata.Request
+ (*GetMetadata_Response)(nil), // 47: tfplugin5.GetMetadata.Response
+ (*GetMetadata_FunctionMetadata)(nil), // 48: tfplugin5.GetMetadata.FunctionMetadata
+ (*GetMetadata_DataSourceMetadata)(nil), // 49: tfplugin5.GetMetadata.DataSourceMetadata
+ (*GetMetadata_ResourceMetadata)(nil), // 50: tfplugin5.GetMetadata.ResourceMetadata
+ (*GetMetadata_EphemeralResourceMetadata)(nil), // 51: tfplugin5.GetMetadata.EphemeralResourceMetadata
+ (*GetProviderSchema_Request)(nil), // 52: tfplugin5.GetProviderSchema.Request
+ (*GetProviderSchema_Response)(nil), // 53: tfplugin5.GetProviderSchema.Response
+ nil, // 54: tfplugin5.GetProviderSchema.Response.ResourceSchemasEntry
+ nil, // 55: tfplugin5.GetProviderSchema.Response.DataSourceSchemasEntry
+ nil, // 56: tfplugin5.GetProviderSchema.Response.FunctionsEntry
+ nil, // 57: tfplugin5.GetProviderSchema.Response.EphemeralResourceSchemasEntry
+ (*PrepareProviderConfig_Request)(nil), // 58: tfplugin5.PrepareProviderConfig.Request
+ (*PrepareProviderConfig_Response)(nil), // 59: tfplugin5.PrepareProviderConfig.Response
+ (*UpgradeResourceState_Request)(nil), // 60: tfplugin5.UpgradeResourceState.Request
+ (*UpgradeResourceState_Response)(nil), // 61: tfplugin5.UpgradeResourceState.Response
+ (*ValidateResourceTypeConfig_Request)(nil), // 62: tfplugin5.ValidateResourceTypeConfig.Request
+ (*ValidateResourceTypeConfig_Response)(nil), // 63: tfplugin5.ValidateResourceTypeConfig.Response
+ (*ValidateDataSourceConfig_Request)(nil), // 64: tfplugin5.ValidateDataSourceConfig.Request
+ (*ValidateDataSourceConfig_Response)(nil), // 65: tfplugin5.ValidateDataSourceConfig.Response
+ (*Configure_Request)(nil), // 66: tfplugin5.Configure.Request
+ (*Configure_Response)(nil), // 67: tfplugin5.Configure.Response
+ (*ReadResource_Request)(nil), // 68: tfplugin5.ReadResource.Request
+ (*ReadResource_Response)(nil), // 69: tfplugin5.ReadResource.Response
+ (*PlanResourceChange_Request)(nil), // 70: tfplugin5.PlanResourceChange.Request
+ (*PlanResourceChange_Response)(nil), // 71: tfplugin5.PlanResourceChange.Response
+ (*ApplyResourceChange_Request)(nil), // 72: tfplugin5.ApplyResourceChange.Request
+ (*ApplyResourceChange_Response)(nil), // 73: tfplugin5.ApplyResourceChange.Response
+ (*ImportResourceState_Request)(nil), // 74: tfplugin5.ImportResourceState.Request
+ (*ImportResourceState_ImportedResource)(nil), // 75: tfplugin5.ImportResourceState.ImportedResource
+ (*ImportResourceState_Response)(nil), // 76: tfplugin5.ImportResourceState.Response
+ (*MoveResourceState_Request)(nil), // 77: tfplugin5.MoveResourceState.Request
+ (*MoveResourceState_Response)(nil), // 78: tfplugin5.MoveResourceState.Response
+ (*ReadDataSource_Request)(nil), // 79: tfplugin5.ReadDataSource.Request
+ (*ReadDataSource_Response)(nil), // 80: tfplugin5.ReadDataSource.Response
+ (*GetProvisionerSchema_Request)(nil), // 81: tfplugin5.GetProvisionerSchema.Request
+ (*GetProvisionerSchema_Response)(nil), // 82: tfplugin5.GetProvisionerSchema.Response
+ (*ValidateProvisionerConfig_Request)(nil), // 83: tfplugin5.ValidateProvisionerConfig.Request
+ (*ValidateProvisionerConfig_Response)(nil), // 84: tfplugin5.ValidateProvisionerConfig.Response
+ (*ProvisionResource_Request)(nil), // 85: tfplugin5.ProvisionResource.Request
+ (*ProvisionResource_Response)(nil), // 86: tfplugin5.ProvisionResource.Response
+ (*GetFunctions_Request)(nil), // 87: tfplugin5.GetFunctions.Request
+ (*GetFunctions_Response)(nil), // 88: tfplugin5.GetFunctions.Response
+ nil, // 89: tfplugin5.GetFunctions.Response.FunctionsEntry
+ (*CallFunction_Request)(nil), // 90: tfplugin5.CallFunction.Request
+ (*CallFunction_Response)(nil), // 91: tfplugin5.CallFunction.Response
+ (*ValidateEphemeralResourceConfig_Request)(nil), // 92: tfplugin5.ValidateEphemeralResourceConfig.Request
+ (*ValidateEphemeralResourceConfig_Response)(nil), // 93: tfplugin5.ValidateEphemeralResourceConfig.Response
+ (*OpenEphemeralResource_Request)(nil), // 94: tfplugin5.OpenEphemeralResource.Request
+ (*OpenEphemeralResource_Response)(nil), // 95: tfplugin5.OpenEphemeralResource.Response
+ (*RenewEphemeralResource_Request)(nil), // 96: tfplugin5.RenewEphemeralResource.Request
+ (*RenewEphemeralResource_Response)(nil), // 97: tfplugin5.RenewEphemeralResource.Response
+ (*CloseEphemeralResource_Request)(nil), // 98: tfplugin5.CloseEphemeralResource.Request
+ (*CloseEphemeralResource_Response)(nil), // 99: tfplugin5.CloseEphemeralResource.Response
+ (*timestamppb.Timestamp)(nil), // 100: google.protobuf.Timestamp
}
var file_tfplugin5_proto_depIdxs = []int32{
1, // 0: tfplugin5.Diagnostic.severity:type_name -> tfplugin5.Diagnostic.Severity
7, // 1: tfplugin5.Diagnostic.attribute:type_name -> tfplugin5.AttributePath
- 33, // 2: tfplugin5.AttributePath.steps:type_name -> tfplugin5.AttributePath.Step
- 36, // 3: tfplugin5.RawState.flatmap:type_name -> tfplugin5.RawState.FlatmapEntry
- 37, // 4: tfplugin5.Schema.block:type_name -> tfplugin5.Schema.Block
- 40, // 5: tfplugin5.Function.parameters:type_name -> tfplugin5.Function.Parameter
- 40, // 6: tfplugin5.Function.variadic_parameter:type_name -> tfplugin5.Function.Parameter
- 41, // 7: tfplugin5.Function.return:type_name -> tfplugin5.Function.Return
+ 37, // 2: tfplugin5.AttributePath.steps:type_name -> tfplugin5.AttributePath.Step
+ 40, // 3: tfplugin5.RawState.flatmap:type_name -> tfplugin5.RawState.FlatmapEntry
+ 41, // 4: tfplugin5.Schema.block:type_name -> tfplugin5.Schema.Block
+ 44, // 5: tfplugin5.Function.parameters:type_name -> tfplugin5.Function.Parameter
+ 44, // 6: tfplugin5.Function.variadic_parameter:type_name -> tfplugin5.Function.Parameter
+ 45, // 7: tfplugin5.Function.return:type_name -> tfplugin5.Function.Return
0, // 8: tfplugin5.Function.description_kind:type_name -> tfplugin5.StringKind
3, // 9: tfplugin5.Deferred.reason:type_name -> tfplugin5.Deferred.Reason
- 38, // 10: tfplugin5.Schema.Block.attributes:type_name -> tfplugin5.Schema.Attribute
- 39, // 11: tfplugin5.Schema.Block.block_types:type_name -> tfplugin5.Schema.NestedBlock
+ 42, // 10: tfplugin5.Schema.Block.attributes:type_name -> tfplugin5.Schema.Attribute
+ 43, // 11: tfplugin5.Schema.Block.block_types:type_name -> tfplugin5.Schema.NestedBlock
0, // 12: tfplugin5.Schema.Block.description_kind:type_name -> tfplugin5.StringKind
0, // 13: tfplugin5.Schema.Attribute.description_kind:type_name -> tfplugin5.StringKind
- 37, // 14: tfplugin5.Schema.NestedBlock.block:type_name -> tfplugin5.Schema.Block
+ 41, // 14: tfplugin5.Schema.NestedBlock.block:type_name -> tfplugin5.Schema.Block
2, // 15: tfplugin5.Schema.NestedBlock.nesting:type_name -> tfplugin5.Schema.NestedBlock.NestingMode
0, // 16: tfplugin5.Function.Parameter.description_kind:type_name -> tfplugin5.StringKind
11, // 17: tfplugin5.GetMetadata.Response.server_capabilities:type_name -> tfplugin5.ServerCapabilities
5, // 18: tfplugin5.GetMetadata.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 45, // 19: tfplugin5.GetMetadata.Response.data_sources:type_name -> tfplugin5.GetMetadata.DataSourceMetadata
- 46, // 20: tfplugin5.GetMetadata.Response.resources:type_name -> tfplugin5.GetMetadata.ResourceMetadata
- 44, // 21: tfplugin5.GetMetadata.Response.functions:type_name -> tfplugin5.GetMetadata.FunctionMetadata
- 10, // 22: tfplugin5.GetProviderSchema.Response.provider:type_name -> tfplugin5.Schema
- 49, // 23: tfplugin5.GetProviderSchema.Response.resource_schemas:type_name -> tfplugin5.GetProviderSchema.Response.ResourceSchemasEntry
- 50, // 24: tfplugin5.GetProviderSchema.Response.data_source_schemas:type_name -> tfplugin5.GetProviderSchema.Response.DataSourceSchemasEntry
- 5, // 25: tfplugin5.GetProviderSchema.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 10, // 26: tfplugin5.GetProviderSchema.Response.provider_meta:type_name -> tfplugin5.Schema
- 11, // 27: tfplugin5.GetProviderSchema.Response.server_capabilities:type_name -> tfplugin5.ServerCapabilities
- 51, // 28: tfplugin5.GetProviderSchema.Response.functions:type_name -> tfplugin5.GetProviderSchema.Response.FunctionsEntry
- 10, // 29: tfplugin5.GetProviderSchema.Response.ResourceSchemasEntry.value:type_name -> tfplugin5.Schema
- 10, // 30: tfplugin5.GetProviderSchema.Response.DataSourceSchemasEntry.value:type_name -> tfplugin5.Schema
- 13, // 31: tfplugin5.GetProviderSchema.Response.FunctionsEntry.value:type_name -> tfplugin5.Function
- 4, // 32: tfplugin5.PrepareProviderConfig.Request.config:type_name -> tfplugin5.DynamicValue
- 4, // 33: tfplugin5.PrepareProviderConfig.Response.prepared_config:type_name -> tfplugin5.DynamicValue
- 5, // 34: tfplugin5.PrepareProviderConfig.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 9, // 35: tfplugin5.UpgradeResourceState.Request.raw_state:type_name -> tfplugin5.RawState
- 4, // 36: tfplugin5.UpgradeResourceState.Response.upgraded_state:type_name -> tfplugin5.DynamicValue
- 5, // 37: tfplugin5.UpgradeResourceState.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 4, // 38: tfplugin5.ValidateResourceTypeConfig.Request.config:type_name -> tfplugin5.DynamicValue
- 5, // 39: tfplugin5.ValidateResourceTypeConfig.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 4, // 40: tfplugin5.ValidateDataSourceConfig.Request.config:type_name -> tfplugin5.DynamicValue
- 5, // 41: tfplugin5.ValidateDataSourceConfig.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 4, // 42: tfplugin5.Configure.Request.config:type_name -> tfplugin5.DynamicValue
- 12, // 43: tfplugin5.Configure.Request.client_capabilities:type_name -> tfplugin5.ClientCapabilities
- 5, // 44: tfplugin5.Configure.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 4, // 45: tfplugin5.ReadResource.Request.current_state:type_name -> tfplugin5.DynamicValue
- 4, // 46: tfplugin5.ReadResource.Request.provider_meta:type_name -> tfplugin5.DynamicValue
- 12, // 47: tfplugin5.ReadResource.Request.client_capabilities:type_name -> tfplugin5.ClientCapabilities
- 4, // 48: tfplugin5.ReadResource.Response.new_state:type_name -> tfplugin5.DynamicValue
- 5, // 49: tfplugin5.ReadResource.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 14, // 50: tfplugin5.ReadResource.Response.deferred:type_name -> tfplugin5.Deferred
- 4, // 51: tfplugin5.PlanResourceChange.Request.prior_state:type_name -> tfplugin5.DynamicValue
- 4, // 52: tfplugin5.PlanResourceChange.Request.proposed_new_state:type_name -> tfplugin5.DynamicValue
- 4, // 53: tfplugin5.PlanResourceChange.Request.config:type_name -> tfplugin5.DynamicValue
- 4, // 54: tfplugin5.PlanResourceChange.Request.provider_meta:type_name -> tfplugin5.DynamicValue
- 12, // 55: tfplugin5.PlanResourceChange.Request.client_capabilities:type_name -> tfplugin5.ClientCapabilities
- 4, // 56: tfplugin5.PlanResourceChange.Response.planned_state:type_name -> tfplugin5.DynamicValue
- 7, // 57: tfplugin5.PlanResourceChange.Response.requires_replace:type_name -> tfplugin5.AttributePath
- 5, // 58: tfplugin5.PlanResourceChange.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 14, // 59: tfplugin5.PlanResourceChange.Response.deferred:type_name -> tfplugin5.Deferred
- 4, // 60: tfplugin5.ApplyResourceChange.Request.prior_state:type_name -> tfplugin5.DynamicValue
- 4, // 61: tfplugin5.ApplyResourceChange.Request.planned_state:type_name -> tfplugin5.DynamicValue
- 4, // 62: tfplugin5.ApplyResourceChange.Request.config:type_name -> tfplugin5.DynamicValue
- 4, // 63: tfplugin5.ApplyResourceChange.Request.provider_meta:type_name -> tfplugin5.DynamicValue
- 4, // 64: tfplugin5.ApplyResourceChange.Response.new_state:type_name -> tfplugin5.DynamicValue
- 5, // 65: tfplugin5.ApplyResourceChange.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 12, // 66: tfplugin5.ImportResourceState.Request.client_capabilities:type_name -> tfplugin5.ClientCapabilities
- 4, // 67: tfplugin5.ImportResourceState.ImportedResource.state:type_name -> tfplugin5.DynamicValue
- 69, // 68: tfplugin5.ImportResourceState.Response.imported_resources:type_name -> tfplugin5.ImportResourceState.ImportedResource
- 5, // 69: tfplugin5.ImportResourceState.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 14, // 70: tfplugin5.ImportResourceState.Response.deferred:type_name -> tfplugin5.Deferred
- 9, // 71: tfplugin5.MoveResourceState.Request.source_state:type_name -> tfplugin5.RawState
- 4, // 72: tfplugin5.MoveResourceState.Response.target_state:type_name -> tfplugin5.DynamicValue
- 5, // 73: tfplugin5.MoveResourceState.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 4, // 74: tfplugin5.ReadDataSource.Request.config:type_name -> tfplugin5.DynamicValue
- 4, // 75: tfplugin5.ReadDataSource.Request.provider_meta:type_name -> tfplugin5.DynamicValue
- 12, // 76: tfplugin5.ReadDataSource.Request.client_capabilities:type_name -> tfplugin5.ClientCapabilities
- 4, // 77: tfplugin5.ReadDataSource.Response.state:type_name -> tfplugin5.DynamicValue
- 5, // 78: tfplugin5.ReadDataSource.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 14, // 79: tfplugin5.ReadDataSource.Response.deferred:type_name -> tfplugin5.Deferred
- 10, // 80: tfplugin5.GetProvisionerSchema.Response.provisioner:type_name -> tfplugin5.Schema
- 5, // 81: tfplugin5.GetProvisionerSchema.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 4, // 82: tfplugin5.ValidateProvisionerConfig.Request.config:type_name -> tfplugin5.DynamicValue
- 5, // 83: tfplugin5.ValidateProvisionerConfig.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 4, // 84: tfplugin5.ProvisionResource.Request.config:type_name -> tfplugin5.DynamicValue
- 4, // 85: tfplugin5.ProvisionResource.Request.connection:type_name -> tfplugin5.DynamicValue
- 5, // 86: tfplugin5.ProvisionResource.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 83, // 87: tfplugin5.GetFunctions.Response.functions:type_name -> tfplugin5.GetFunctions.Response.FunctionsEntry
- 5, // 88: tfplugin5.GetFunctions.Response.diagnostics:type_name -> tfplugin5.Diagnostic
- 13, // 89: tfplugin5.GetFunctions.Response.FunctionsEntry.value:type_name -> tfplugin5.Function
- 4, // 90: tfplugin5.CallFunction.Request.arguments:type_name -> tfplugin5.DynamicValue
- 4, // 91: tfplugin5.CallFunction.Response.result:type_name -> tfplugin5.DynamicValue
- 6, // 92: tfplugin5.CallFunction.Response.error:type_name -> tfplugin5.FunctionError
- 42, // 93: tfplugin5.Provider.GetMetadata:input_type -> tfplugin5.GetMetadata.Request
- 47, // 94: tfplugin5.Provider.GetSchema:input_type -> tfplugin5.GetProviderSchema.Request
- 52, // 95: tfplugin5.Provider.PrepareProviderConfig:input_type -> tfplugin5.PrepareProviderConfig.Request
- 56, // 96: tfplugin5.Provider.ValidateResourceTypeConfig:input_type -> tfplugin5.ValidateResourceTypeConfig.Request
- 58, // 97: tfplugin5.Provider.ValidateDataSourceConfig:input_type -> tfplugin5.ValidateDataSourceConfig.Request
- 54, // 98: tfplugin5.Provider.UpgradeResourceState:input_type -> tfplugin5.UpgradeResourceState.Request
- 60, // 99: tfplugin5.Provider.Configure:input_type -> tfplugin5.Configure.Request
- 62, // 100: tfplugin5.Provider.ReadResource:input_type -> tfplugin5.ReadResource.Request
- 64, // 101: tfplugin5.Provider.PlanResourceChange:input_type -> tfplugin5.PlanResourceChange.Request
- 66, // 102: tfplugin5.Provider.ApplyResourceChange:input_type -> tfplugin5.ApplyResourceChange.Request
- 68, // 103: tfplugin5.Provider.ImportResourceState:input_type -> tfplugin5.ImportResourceState.Request
- 71, // 104: tfplugin5.Provider.MoveResourceState:input_type -> tfplugin5.MoveResourceState.Request
- 73, // 105: tfplugin5.Provider.ReadDataSource:input_type -> tfplugin5.ReadDataSource.Request
- 81, // 106: tfplugin5.Provider.GetFunctions:input_type -> tfplugin5.GetFunctions.Request
- 84, // 107: tfplugin5.Provider.CallFunction:input_type -> tfplugin5.CallFunction.Request
- 34, // 108: tfplugin5.Provider.Stop:input_type -> tfplugin5.Stop.Request
- 75, // 109: tfplugin5.Provisioner.GetSchema:input_type -> tfplugin5.GetProvisionerSchema.Request
- 77, // 110: tfplugin5.Provisioner.ValidateProvisionerConfig:input_type -> tfplugin5.ValidateProvisionerConfig.Request
- 79, // 111: tfplugin5.Provisioner.ProvisionResource:input_type -> tfplugin5.ProvisionResource.Request
- 34, // 112: tfplugin5.Provisioner.Stop:input_type -> tfplugin5.Stop.Request
- 43, // 113: tfplugin5.Provider.GetMetadata:output_type -> tfplugin5.GetMetadata.Response
- 48, // 114: tfplugin5.Provider.GetSchema:output_type -> tfplugin5.GetProviderSchema.Response
- 53, // 115: tfplugin5.Provider.PrepareProviderConfig:output_type -> tfplugin5.PrepareProviderConfig.Response
- 57, // 116: tfplugin5.Provider.ValidateResourceTypeConfig:output_type -> tfplugin5.ValidateResourceTypeConfig.Response
- 59, // 117: tfplugin5.Provider.ValidateDataSourceConfig:output_type -> tfplugin5.ValidateDataSourceConfig.Response
- 55, // 118: tfplugin5.Provider.UpgradeResourceState:output_type -> tfplugin5.UpgradeResourceState.Response
- 61, // 119: tfplugin5.Provider.Configure:output_type -> tfplugin5.Configure.Response
- 63, // 120: tfplugin5.Provider.ReadResource:output_type -> tfplugin5.ReadResource.Response
- 65, // 121: tfplugin5.Provider.PlanResourceChange:output_type -> tfplugin5.PlanResourceChange.Response
- 67, // 122: tfplugin5.Provider.ApplyResourceChange:output_type -> tfplugin5.ApplyResourceChange.Response
- 70, // 123: tfplugin5.Provider.ImportResourceState:output_type -> tfplugin5.ImportResourceState.Response
- 72, // 124: tfplugin5.Provider.MoveResourceState:output_type -> tfplugin5.MoveResourceState.Response
- 74, // 125: tfplugin5.Provider.ReadDataSource:output_type -> tfplugin5.ReadDataSource.Response
- 82, // 126: tfplugin5.Provider.GetFunctions:output_type -> tfplugin5.GetFunctions.Response
- 85, // 127: tfplugin5.Provider.CallFunction:output_type -> tfplugin5.CallFunction.Response
- 35, // 128: tfplugin5.Provider.Stop:output_type -> tfplugin5.Stop.Response
- 76, // 129: tfplugin5.Provisioner.GetSchema:output_type -> tfplugin5.GetProvisionerSchema.Response
- 78, // 130: tfplugin5.Provisioner.ValidateProvisionerConfig:output_type -> tfplugin5.ValidateProvisionerConfig.Response
- 80, // 131: tfplugin5.Provisioner.ProvisionResource:output_type -> tfplugin5.ProvisionResource.Response
- 35, // 132: tfplugin5.Provisioner.Stop:output_type -> tfplugin5.Stop.Response
- 113, // [113:133] is the sub-list for method output_type
- 93, // [93:113] is the sub-list for method input_type
- 93, // [93:93] is the sub-list for extension type_name
- 93, // [93:93] is the sub-list for extension extendee
- 0, // [0:93] is the sub-list for field type_name
+ 49, // 19: tfplugin5.GetMetadata.Response.data_sources:type_name -> tfplugin5.GetMetadata.DataSourceMetadata
+ 50, // 20: tfplugin5.GetMetadata.Response.resources:type_name -> tfplugin5.GetMetadata.ResourceMetadata
+ 48, // 21: tfplugin5.GetMetadata.Response.functions:type_name -> tfplugin5.GetMetadata.FunctionMetadata
+ 51, // 22: tfplugin5.GetMetadata.Response.ephemeral_resources:type_name -> tfplugin5.GetMetadata.EphemeralResourceMetadata
+ 10, // 23: tfplugin5.GetProviderSchema.Response.provider:type_name -> tfplugin5.Schema
+ 54, // 24: tfplugin5.GetProviderSchema.Response.resource_schemas:type_name -> tfplugin5.GetProviderSchema.Response.ResourceSchemasEntry
+ 55, // 25: tfplugin5.GetProviderSchema.Response.data_source_schemas:type_name -> tfplugin5.GetProviderSchema.Response.DataSourceSchemasEntry
+ 5, // 26: tfplugin5.GetProviderSchema.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 10, // 27: tfplugin5.GetProviderSchema.Response.provider_meta:type_name -> tfplugin5.Schema
+ 11, // 28: tfplugin5.GetProviderSchema.Response.server_capabilities:type_name -> tfplugin5.ServerCapabilities
+ 56, // 29: tfplugin5.GetProviderSchema.Response.functions:type_name -> tfplugin5.GetProviderSchema.Response.FunctionsEntry
+ 57, // 30: tfplugin5.GetProviderSchema.Response.ephemeral_resource_schemas:type_name -> tfplugin5.GetProviderSchema.Response.EphemeralResourceSchemasEntry
+ 10, // 31: tfplugin5.GetProviderSchema.Response.ResourceSchemasEntry.value:type_name -> tfplugin5.Schema
+ 10, // 32: tfplugin5.GetProviderSchema.Response.DataSourceSchemasEntry.value:type_name -> tfplugin5.Schema
+ 13, // 33: tfplugin5.GetProviderSchema.Response.FunctionsEntry.value:type_name -> tfplugin5.Function
+ 10, // 34: tfplugin5.GetProviderSchema.Response.EphemeralResourceSchemasEntry.value:type_name -> tfplugin5.Schema
+ 4, // 35: tfplugin5.PrepareProviderConfig.Request.config:type_name -> tfplugin5.DynamicValue
+ 4, // 36: tfplugin5.PrepareProviderConfig.Response.prepared_config:type_name -> tfplugin5.DynamicValue
+ 5, // 37: tfplugin5.PrepareProviderConfig.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 9, // 38: tfplugin5.UpgradeResourceState.Request.raw_state:type_name -> tfplugin5.RawState
+ 4, // 39: tfplugin5.UpgradeResourceState.Response.upgraded_state:type_name -> tfplugin5.DynamicValue
+ 5, // 40: tfplugin5.UpgradeResourceState.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 4, // 41: tfplugin5.ValidateResourceTypeConfig.Request.config:type_name -> tfplugin5.DynamicValue
+ 12, // 42: tfplugin5.ValidateResourceTypeConfig.Request.client_capabilities:type_name -> tfplugin5.ClientCapabilities
+ 5, // 43: tfplugin5.ValidateResourceTypeConfig.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 4, // 44: tfplugin5.ValidateDataSourceConfig.Request.config:type_name -> tfplugin5.DynamicValue
+ 5, // 45: tfplugin5.ValidateDataSourceConfig.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 4, // 46: tfplugin5.Configure.Request.config:type_name -> tfplugin5.DynamicValue
+ 12, // 47: tfplugin5.Configure.Request.client_capabilities:type_name -> tfplugin5.ClientCapabilities
+ 5, // 48: tfplugin5.Configure.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 4, // 49: tfplugin5.ReadResource.Request.current_state:type_name -> tfplugin5.DynamicValue
+ 4, // 50: tfplugin5.ReadResource.Request.provider_meta:type_name -> tfplugin5.DynamicValue
+ 12, // 51: tfplugin5.ReadResource.Request.client_capabilities:type_name -> tfplugin5.ClientCapabilities
+ 4, // 52: tfplugin5.ReadResource.Response.new_state:type_name -> tfplugin5.DynamicValue
+ 5, // 53: tfplugin5.ReadResource.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 14, // 54: tfplugin5.ReadResource.Response.deferred:type_name -> tfplugin5.Deferred
+ 4, // 55: tfplugin5.PlanResourceChange.Request.prior_state:type_name -> tfplugin5.DynamicValue
+ 4, // 56: tfplugin5.PlanResourceChange.Request.proposed_new_state:type_name -> tfplugin5.DynamicValue
+ 4, // 57: tfplugin5.PlanResourceChange.Request.config:type_name -> tfplugin5.DynamicValue
+ 4, // 58: tfplugin5.PlanResourceChange.Request.provider_meta:type_name -> tfplugin5.DynamicValue
+ 12, // 59: tfplugin5.PlanResourceChange.Request.client_capabilities:type_name -> tfplugin5.ClientCapabilities
+ 4, // 60: tfplugin5.PlanResourceChange.Response.planned_state:type_name -> tfplugin5.DynamicValue
+ 7, // 61: tfplugin5.PlanResourceChange.Response.requires_replace:type_name -> tfplugin5.AttributePath
+ 5, // 62: tfplugin5.PlanResourceChange.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 14, // 63: tfplugin5.PlanResourceChange.Response.deferred:type_name -> tfplugin5.Deferred
+ 4, // 64: tfplugin5.ApplyResourceChange.Request.prior_state:type_name -> tfplugin5.DynamicValue
+ 4, // 65: tfplugin5.ApplyResourceChange.Request.planned_state:type_name -> tfplugin5.DynamicValue
+ 4, // 66: tfplugin5.ApplyResourceChange.Request.config:type_name -> tfplugin5.DynamicValue
+ 4, // 67: tfplugin5.ApplyResourceChange.Request.provider_meta:type_name -> tfplugin5.DynamicValue
+ 4, // 68: tfplugin5.ApplyResourceChange.Response.new_state:type_name -> tfplugin5.DynamicValue
+ 5, // 69: tfplugin5.ApplyResourceChange.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 12, // 70: tfplugin5.ImportResourceState.Request.client_capabilities:type_name -> tfplugin5.ClientCapabilities
+ 4, // 71: tfplugin5.ImportResourceState.ImportedResource.state:type_name -> tfplugin5.DynamicValue
+ 75, // 72: tfplugin5.ImportResourceState.Response.imported_resources:type_name -> tfplugin5.ImportResourceState.ImportedResource
+ 5, // 73: tfplugin5.ImportResourceState.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 14, // 74: tfplugin5.ImportResourceState.Response.deferred:type_name -> tfplugin5.Deferred
+ 9, // 75: tfplugin5.MoveResourceState.Request.source_state:type_name -> tfplugin5.RawState
+ 4, // 76: tfplugin5.MoveResourceState.Response.target_state:type_name -> tfplugin5.DynamicValue
+ 5, // 77: tfplugin5.MoveResourceState.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 4, // 78: tfplugin5.ReadDataSource.Request.config:type_name -> tfplugin5.DynamicValue
+ 4, // 79: tfplugin5.ReadDataSource.Request.provider_meta:type_name -> tfplugin5.DynamicValue
+ 12, // 80: tfplugin5.ReadDataSource.Request.client_capabilities:type_name -> tfplugin5.ClientCapabilities
+ 4, // 81: tfplugin5.ReadDataSource.Response.state:type_name -> tfplugin5.DynamicValue
+ 5, // 82: tfplugin5.ReadDataSource.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 14, // 83: tfplugin5.ReadDataSource.Response.deferred:type_name -> tfplugin5.Deferred
+ 10, // 84: tfplugin5.GetProvisionerSchema.Response.provisioner:type_name -> tfplugin5.Schema
+ 5, // 85: tfplugin5.GetProvisionerSchema.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 4, // 86: tfplugin5.ValidateProvisionerConfig.Request.config:type_name -> tfplugin5.DynamicValue
+ 5, // 87: tfplugin5.ValidateProvisionerConfig.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 4, // 88: tfplugin5.ProvisionResource.Request.config:type_name -> tfplugin5.DynamicValue
+ 4, // 89: tfplugin5.ProvisionResource.Request.connection:type_name -> tfplugin5.DynamicValue
+ 5, // 90: tfplugin5.ProvisionResource.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 89, // 91: tfplugin5.GetFunctions.Response.functions:type_name -> tfplugin5.GetFunctions.Response.FunctionsEntry
+ 5, // 92: tfplugin5.GetFunctions.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 13, // 93: tfplugin5.GetFunctions.Response.FunctionsEntry.value:type_name -> tfplugin5.Function
+ 4, // 94: tfplugin5.CallFunction.Request.arguments:type_name -> tfplugin5.DynamicValue
+ 4, // 95: tfplugin5.CallFunction.Response.result:type_name -> tfplugin5.DynamicValue
+ 6, // 96: tfplugin5.CallFunction.Response.error:type_name -> tfplugin5.FunctionError
+ 4, // 97: tfplugin5.ValidateEphemeralResourceConfig.Request.config:type_name -> tfplugin5.DynamicValue
+ 5, // 98: tfplugin5.ValidateEphemeralResourceConfig.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 4, // 99: tfplugin5.OpenEphemeralResource.Request.config:type_name -> tfplugin5.DynamicValue
+ 12, // 100: tfplugin5.OpenEphemeralResource.Request.client_capabilities:type_name -> tfplugin5.ClientCapabilities
+ 5, // 101: tfplugin5.OpenEphemeralResource.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 100, // 102: tfplugin5.OpenEphemeralResource.Response.renew_at:type_name -> google.protobuf.Timestamp
+ 4, // 103: tfplugin5.OpenEphemeralResource.Response.result:type_name -> tfplugin5.DynamicValue
+ 14, // 104: tfplugin5.OpenEphemeralResource.Response.deferred:type_name -> tfplugin5.Deferred
+ 5, // 105: tfplugin5.RenewEphemeralResource.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 100, // 106: tfplugin5.RenewEphemeralResource.Response.renew_at:type_name -> google.protobuf.Timestamp
+ 5, // 107: tfplugin5.CloseEphemeralResource.Response.diagnostics:type_name -> tfplugin5.Diagnostic
+ 46, // 108: tfplugin5.Provider.GetMetadata:input_type -> tfplugin5.GetMetadata.Request
+ 52, // 109: tfplugin5.Provider.GetSchema:input_type -> tfplugin5.GetProviderSchema.Request
+ 58, // 110: tfplugin5.Provider.PrepareProviderConfig:input_type -> tfplugin5.PrepareProviderConfig.Request
+ 62, // 111: tfplugin5.Provider.ValidateResourceTypeConfig:input_type -> tfplugin5.ValidateResourceTypeConfig.Request
+ 64, // 112: tfplugin5.Provider.ValidateDataSourceConfig:input_type -> tfplugin5.ValidateDataSourceConfig.Request
+ 60, // 113: tfplugin5.Provider.UpgradeResourceState:input_type -> tfplugin5.UpgradeResourceState.Request
+ 66, // 114: tfplugin5.Provider.Configure:input_type -> tfplugin5.Configure.Request
+ 68, // 115: tfplugin5.Provider.ReadResource:input_type -> tfplugin5.ReadResource.Request
+ 70, // 116: tfplugin5.Provider.PlanResourceChange:input_type -> tfplugin5.PlanResourceChange.Request
+ 72, // 117: tfplugin5.Provider.ApplyResourceChange:input_type -> tfplugin5.ApplyResourceChange.Request
+ 74, // 118: tfplugin5.Provider.ImportResourceState:input_type -> tfplugin5.ImportResourceState.Request
+ 77, // 119: tfplugin5.Provider.MoveResourceState:input_type -> tfplugin5.MoveResourceState.Request
+ 79, // 120: tfplugin5.Provider.ReadDataSource:input_type -> tfplugin5.ReadDataSource.Request
+ 92, // 121: tfplugin5.Provider.ValidateEphemeralResourceConfig:input_type -> tfplugin5.ValidateEphemeralResourceConfig.Request
+ 94, // 122: tfplugin5.Provider.OpenEphemeralResource:input_type -> tfplugin5.OpenEphemeralResource.Request
+ 96, // 123: tfplugin5.Provider.RenewEphemeralResource:input_type -> tfplugin5.RenewEphemeralResource.Request
+ 98, // 124: tfplugin5.Provider.CloseEphemeralResource:input_type -> tfplugin5.CloseEphemeralResource.Request
+ 87, // 125: tfplugin5.Provider.GetFunctions:input_type -> tfplugin5.GetFunctions.Request
+ 90, // 126: tfplugin5.Provider.CallFunction:input_type -> tfplugin5.CallFunction.Request
+ 38, // 127: tfplugin5.Provider.Stop:input_type -> tfplugin5.Stop.Request
+ 81, // 128: tfplugin5.Provisioner.GetSchema:input_type -> tfplugin5.GetProvisionerSchema.Request
+ 83, // 129: tfplugin5.Provisioner.ValidateProvisionerConfig:input_type -> tfplugin5.ValidateProvisionerConfig.Request
+ 85, // 130: tfplugin5.Provisioner.ProvisionResource:input_type -> tfplugin5.ProvisionResource.Request
+ 38, // 131: tfplugin5.Provisioner.Stop:input_type -> tfplugin5.Stop.Request
+ 47, // 132: tfplugin5.Provider.GetMetadata:output_type -> tfplugin5.GetMetadata.Response
+ 53, // 133: tfplugin5.Provider.GetSchema:output_type -> tfplugin5.GetProviderSchema.Response
+ 59, // 134: tfplugin5.Provider.PrepareProviderConfig:output_type -> tfplugin5.PrepareProviderConfig.Response
+ 63, // 135: tfplugin5.Provider.ValidateResourceTypeConfig:output_type -> tfplugin5.ValidateResourceTypeConfig.Response
+ 65, // 136: tfplugin5.Provider.ValidateDataSourceConfig:output_type -> tfplugin5.ValidateDataSourceConfig.Response
+ 61, // 137: tfplugin5.Provider.UpgradeResourceState:output_type -> tfplugin5.UpgradeResourceState.Response
+ 67, // 138: tfplugin5.Provider.Configure:output_type -> tfplugin5.Configure.Response
+ 69, // 139: tfplugin5.Provider.ReadResource:output_type -> tfplugin5.ReadResource.Response
+ 71, // 140: tfplugin5.Provider.PlanResourceChange:output_type -> tfplugin5.PlanResourceChange.Response
+ 73, // 141: tfplugin5.Provider.ApplyResourceChange:output_type -> tfplugin5.ApplyResourceChange.Response
+ 76, // 142: tfplugin5.Provider.ImportResourceState:output_type -> tfplugin5.ImportResourceState.Response
+ 78, // 143: tfplugin5.Provider.MoveResourceState:output_type -> tfplugin5.MoveResourceState.Response
+ 80, // 144: tfplugin5.Provider.ReadDataSource:output_type -> tfplugin5.ReadDataSource.Response
+ 93, // 145: tfplugin5.Provider.ValidateEphemeralResourceConfig:output_type -> tfplugin5.ValidateEphemeralResourceConfig.Response
+ 95, // 146: tfplugin5.Provider.OpenEphemeralResource:output_type -> tfplugin5.OpenEphemeralResource.Response
+ 97, // 147: tfplugin5.Provider.RenewEphemeralResource:output_type -> tfplugin5.RenewEphemeralResource.Response
+ 99, // 148: tfplugin5.Provider.CloseEphemeralResource:output_type -> tfplugin5.CloseEphemeralResource.Response
+ 88, // 149: tfplugin5.Provider.GetFunctions:output_type -> tfplugin5.GetFunctions.Response
+ 91, // 150: tfplugin5.Provider.CallFunction:output_type -> tfplugin5.CallFunction.Response
+ 39, // 151: tfplugin5.Provider.Stop:output_type -> tfplugin5.Stop.Response
+ 82, // 152: tfplugin5.Provisioner.GetSchema:output_type -> tfplugin5.GetProvisionerSchema.Response
+ 84, // 153: tfplugin5.Provisioner.ValidateProvisionerConfig:output_type -> tfplugin5.ValidateProvisionerConfig.Response
+ 86, // 154: tfplugin5.Provisioner.ProvisionResource:output_type -> tfplugin5.ProvisionResource.Response
+ 39, // 155: tfplugin5.Provisioner.Stop:output_type -> tfplugin5.Stop.Response
+ 132, // [132:156] is the sub-list for method output_type
+ 108, // [108:132] is the sub-list for method input_type
+ 108, // [108:108] is the sub-list for extension type_name
+ 108, // [108:108] is the sub-list for extension extendee
+ 0, // [0:108] is the sub-list for field type_name
}
func init() { file_tfplugin5_proto_init() }
@@ -5665,945 +6318,23 @@ func file_tfplugin5_proto_init() {
if File_tfplugin5_proto != nil {
return
}
- if !protoimpl.UnsafeEnabled {
- file_tfplugin5_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*DynamicValue); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Diagnostic); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*FunctionError); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*AttributePath); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Stop); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*RawState); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Schema); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ServerCapabilities); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ClientCapabilities); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Function); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Deferred); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetMetadata); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetProviderSchema); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*PrepareProviderConfig); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*UpgradeResourceState); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateResourceTypeConfig); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateDataSourceConfig); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Configure); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ReadResource); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*PlanResourceChange); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ApplyResourceChange); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ImportResourceState); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*MoveResourceState); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ReadDataSource); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetProvisionerSchema); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateProvisionerConfig); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ProvisionResource); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetFunctions); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*CallFunction); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*AttributePath_Step); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Stop_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Stop_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Schema_Block); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Schema_Attribute); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[35].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Schema_NestedBlock); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[36].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Function_Parameter); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Function_Return); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[38].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetMetadata_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[39].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetMetadata_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[40].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetMetadata_FunctionMetadata); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[41].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetMetadata_DataSourceMetadata); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[42].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetMetadata_ResourceMetadata); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[43].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetProviderSchema_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[44].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetProviderSchema_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[48].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*PrepareProviderConfig_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[49].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*PrepareProviderConfig_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[50].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*UpgradeResourceState_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[51].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*UpgradeResourceState_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[52].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateResourceTypeConfig_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[53].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateResourceTypeConfig_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[54].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateDataSourceConfig_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[55].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateDataSourceConfig_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[56].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Configure_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[57].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Configure_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[58].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ReadResource_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[59].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ReadResource_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[60].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*PlanResourceChange_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[61].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*PlanResourceChange_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[62].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ApplyResourceChange_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[63].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ApplyResourceChange_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[64].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ImportResourceState_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[65].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ImportResourceState_ImportedResource); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[66].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ImportResourceState_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[67].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*MoveResourceState_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[68].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*MoveResourceState_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[69].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ReadDataSource_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[70].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ReadDataSource_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[71].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetProvisionerSchema_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[72].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetProvisionerSchema_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[73].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateProvisionerConfig_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[74].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateProvisionerConfig_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[75].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ProvisionResource_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[76].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ProvisionResource_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[77].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetFunctions_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[78].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetFunctions_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[80].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*CallFunction_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin5_proto_msgTypes[81].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*CallFunction_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- }
- file_tfplugin5_proto_msgTypes[2].OneofWrappers = []interface{}{}
- file_tfplugin5_proto_msgTypes[29].OneofWrappers = []interface{}{
+ file_tfplugin5_proto_msgTypes[2].OneofWrappers = []any{}
+ file_tfplugin5_proto_msgTypes[33].OneofWrappers = []any{
(*AttributePath_Step_AttributeName)(nil),
(*AttributePath_Step_ElementKeyString)(nil),
(*AttributePath_Step_ElementKeyInt)(nil),
}
+ file_tfplugin5_proto_msgTypes[91].OneofWrappers = []any{}
+ file_tfplugin5_proto_msgTypes[92].OneofWrappers = []any{}
+ file_tfplugin5_proto_msgTypes[93].OneofWrappers = []any{}
+ file_tfplugin5_proto_msgTypes[94].OneofWrappers = []any{}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_tfplugin5_proto_rawDesc,
NumEnums: 4,
- NumMessages: 82,
+ NumMessages: 96,
NumExtensions: 0,
NumServices: 2,
},
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5/tfplugin5.proto b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5/tfplugin5.proto
index 3c2fa84aca..947bbd98ee 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5/tfplugin5.proto
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5/tfplugin5.proto
@@ -1,9 +1,9 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0
-// Terraform Plugin RPC protocol version 5.6
+// Terraform Plugin RPC protocol version 5.8
//
-// This file defines version 5.6 of the RPC protocol. To implement a plugin
+// This file defines version 5.8 of the RPC protocol. To implement a plugin
// against this protocol, copy this definition into your own codebase and
// use protoc to generate stubs for your target language.
//
@@ -22,6 +22,8 @@
syntax = "proto3";
option go_package = "github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5";
+import "google/protobuf/timestamp.proto";
+
package tfplugin5;
// DynamicValue is an opaque encoding of terraform data, with the field name
@@ -107,6 +109,11 @@ message Schema {
bool sensitive = 7;
StringKind description_kind = 8;
bool deprecated = 9;
+ // write_only indicates that the attribute value will be provided via
+ // configuration and must be omitted from state. write_only must be
+ // combined with optional or required, and is only valid for managed
+ // resource schemas.
+ bool write_only = 10;
}
message NestedBlock {
@@ -163,6 +170,9 @@ message ClientCapabilities {
// The deferral_allowed capability signals that the client is able to
// handle deferred responses from the provider.
bool deferral_allowed = 1;
+ // The write_only_attributes_allowed capability signals that the client
+ // is able to handle write_only attributes for managed resources.
+ bool write_only_attributes_allowed = 2;
}
message Function {
@@ -269,6 +279,12 @@ service Provider {
rpc MoveResourceState(MoveResourceState.Request) returns (MoveResourceState.Response);
rpc ReadDataSource(ReadDataSource.Request) returns (ReadDataSource.Response);
+ //////// Ephemeral Resource Lifecycle
+ rpc ValidateEphemeralResourceConfig(ValidateEphemeralResourceConfig.Request) returns (ValidateEphemeralResourceConfig.Response);
+ rpc OpenEphemeralResource(OpenEphemeralResource.Request) returns (OpenEphemeralResource.Response);
+ rpc RenewEphemeralResource(RenewEphemeralResource.Request) returns (RenewEphemeralResource.Response);
+ rpc CloseEphemeralResource(CloseEphemeralResource.Request) returns (CloseEphemeralResource.Response);
+
// Functions
// GetFunctions returns the definitions of all functions.
@@ -294,6 +310,7 @@ message GetMetadata {
// functions returns metadata for any functions.
repeated FunctionMetadata functions = 5;
+ repeated EphemeralResourceMetadata ephemeral_resources = 6;
}
message FunctionMetadata {
@@ -308,6 +325,10 @@ message GetMetadata {
message ResourceMetadata {
string type_name = 1;
}
+
+ message EphemeralResourceMetadata {
+ string type_name = 1;
+ }
}
message GetProviderSchema {
@@ -323,6 +344,7 @@ message GetProviderSchema {
// functions is a mapping of function names to definitions.
  map<string, Function> functions = 7;
+ map<string, Schema> ephemeral_resource_schemas = 8;
}
}
@@ -376,6 +398,7 @@ message ValidateResourceTypeConfig {
message Request {
string type_name = 1;
DynamicValue config = 2;
+ ClientCapabilities client_capabilities = 3;
}
message Response {
repeated Diagnostic diagnostics = 1;
@@ -633,3 +656,52 @@ message CallFunction {
FunctionError error = 2;
}
}
+
+message ValidateEphemeralResourceConfig {
+ message Request {
+ string type_name = 1;
+ DynamicValue config = 2;
+ }
+ message Response {
+ repeated Diagnostic diagnostics = 1;
+ }
+}
+
+message OpenEphemeralResource {
+ message Request {
+ string type_name = 1;
+ DynamicValue config = 2;
+ ClientCapabilities client_capabilities = 3;
+ }
+ message Response {
+ repeated Diagnostic diagnostics = 1;
+ optional google.protobuf.Timestamp renew_at = 2;
+ DynamicValue result = 3;
+ optional bytes private = 4;
+ // deferred is set if the provider is deferring the change. If set the caller
+ // needs to handle the deferral.
+ Deferred deferred = 5;
+ }
+}
+
+message RenewEphemeralResource {
+ message Request {
+ string type_name = 1;
+ optional bytes private = 2;
+ }
+ message Response {
+ repeated Diagnostic diagnostics = 1;
+ optional google.protobuf.Timestamp renew_at = 2;
+ optional bytes private = 3;
+ }
+}
+
+message CloseEphemeralResource {
+ message Request {
+ string type_name = 1;
+ optional bytes private = 2;
+ }
+ message Response {
+ repeated Diagnostic diagnostics = 1;
+ }
+}
\ No newline at end of file
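
The four messages above complete the ephemeral resource lifecycle added in protocol 5.8: validate the config, open it to obtain a result plus optional renew_at/private data, renew for as long as the provider keeps returning renew_at, and close once the value is no longer needed. A minimal caller-side sketch against the generated client below (tfplugin5_grpc.pb.go) could look like the following; the examplecloud_token type name and the connection wiring are hypothetical, and since tfplugin5 is an internal package of terraform-plugin-go this illustrates the message flow rather than code an external module could import.

```go
package ephemerallifecycle

import (
	"context"
	"time"

	"google.golang.org/grpc"

	"github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5"
)

// useEphemeralResource walks one ephemeral resource through its full lifecycle.
func useEphemeralResource(ctx context.Context, conn grpc.ClientConnInterface, config *tfplugin5.DynamicValue) error {
	client := tfplugin5.NewProviderClient(conn)

	// 1. Validate the configuration before opening the resource.
	if _, err := client.ValidateEphemeralResourceConfig(ctx, &tfplugin5.ValidateEphemeralResourceConfig_Request{
		TypeName: "examplecloud_token", // hypothetical ephemeral resource type
		Config:   config,
	}); err != nil {
		return err
	}

	// 2. Open returns the result value plus optional renew_at / private data.
	open, err := client.OpenEphemeralResource(ctx, &tfplugin5.OpenEphemeralResource_Request{
		TypeName: "examplecloud_token",
		Config:   config,
	})
	if err != nil {
		return err
	}

	private := open.GetPrivate()

	// 3. Renew for as long as the provider keeps returning renew_at,
	//    handing the latest private data back each time.
	for renewAt := open.GetRenewAt(); renewAt != nil; {
		time.Sleep(time.Until(renewAt.AsTime()))

		renew, err := client.RenewEphemeralResource(ctx, &tfplugin5.RenewEphemeralResource_Request{
			TypeName: "examplecloud_token",
			Private:  private,
		})
		if err != nil {
			return err
		}

		private = renew.GetPrivate()
		renewAt = renew.GetRenewAt()
	}

	// 4. Close releases the remote object once the ephemeral value is done.
	_, err = client.CloseEphemeralResource(ctx, &tfplugin5.CloseEphemeralResource_Request{
		TypeName: "examplecloud_token",
		Private:  private,
	})
	return err
}
```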
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5/tfplugin5_grpc.pb.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5/tfplugin5_grpc.pb.go
index 8a8c8a5a01..7e3f2f8bc3 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5/tfplugin5_grpc.pb.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5/tfplugin5_grpc.pb.go
@@ -1,9 +1,9 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0
-// Terraform Plugin RPC protocol version 5.6
+// Terraform Plugin RPC protocol version 5.8
//
-// This file defines version 5.6 of the RPC protocol. To implement a plugin
+// This file defines version 5.8 of the RPC protocol. To implement a plugin
// against this protocol, copy this definition into your own codebase and
// use protoc to generate stubs for your target language.
//
@@ -22,8 +22,8 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
-// - protoc-gen-go-grpc v1.3.0
-// - protoc v5.26.1
+// - protoc-gen-go-grpc v1.5.1
+// - protoc v5.29.3
// source: tfplugin5.proto
package tfplugin5
@@ -37,26 +37,30 @@ import (
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
-// Requires gRPC-Go v1.32.0 or later.
-const _ = grpc.SupportPackageIsVersion7
+// Requires gRPC-Go v1.64.0 or later.
+const _ = grpc.SupportPackageIsVersion9
const (
- Provider_GetMetadata_FullMethodName = "/tfplugin5.Provider/GetMetadata"
- Provider_GetSchema_FullMethodName = "/tfplugin5.Provider/GetSchema"
- Provider_PrepareProviderConfig_FullMethodName = "/tfplugin5.Provider/PrepareProviderConfig"
- Provider_ValidateResourceTypeConfig_FullMethodName = "/tfplugin5.Provider/ValidateResourceTypeConfig"
- Provider_ValidateDataSourceConfig_FullMethodName = "/tfplugin5.Provider/ValidateDataSourceConfig"
- Provider_UpgradeResourceState_FullMethodName = "/tfplugin5.Provider/UpgradeResourceState"
- Provider_Configure_FullMethodName = "/tfplugin5.Provider/Configure"
- Provider_ReadResource_FullMethodName = "/tfplugin5.Provider/ReadResource"
- Provider_PlanResourceChange_FullMethodName = "/tfplugin5.Provider/PlanResourceChange"
- Provider_ApplyResourceChange_FullMethodName = "/tfplugin5.Provider/ApplyResourceChange"
- Provider_ImportResourceState_FullMethodName = "/tfplugin5.Provider/ImportResourceState"
- Provider_MoveResourceState_FullMethodName = "/tfplugin5.Provider/MoveResourceState"
- Provider_ReadDataSource_FullMethodName = "/tfplugin5.Provider/ReadDataSource"
- Provider_GetFunctions_FullMethodName = "/tfplugin5.Provider/GetFunctions"
- Provider_CallFunction_FullMethodName = "/tfplugin5.Provider/CallFunction"
- Provider_Stop_FullMethodName = "/tfplugin5.Provider/Stop"
+ Provider_GetMetadata_FullMethodName = "/tfplugin5.Provider/GetMetadata"
+ Provider_GetSchema_FullMethodName = "/tfplugin5.Provider/GetSchema"
+ Provider_PrepareProviderConfig_FullMethodName = "/tfplugin5.Provider/PrepareProviderConfig"
+ Provider_ValidateResourceTypeConfig_FullMethodName = "/tfplugin5.Provider/ValidateResourceTypeConfig"
+ Provider_ValidateDataSourceConfig_FullMethodName = "/tfplugin5.Provider/ValidateDataSourceConfig"
+ Provider_UpgradeResourceState_FullMethodName = "/tfplugin5.Provider/UpgradeResourceState"
+ Provider_Configure_FullMethodName = "/tfplugin5.Provider/Configure"
+ Provider_ReadResource_FullMethodName = "/tfplugin5.Provider/ReadResource"
+ Provider_PlanResourceChange_FullMethodName = "/tfplugin5.Provider/PlanResourceChange"
+ Provider_ApplyResourceChange_FullMethodName = "/tfplugin5.Provider/ApplyResourceChange"
+ Provider_ImportResourceState_FullMethodName = "/tfplugin5.Provider/ImportResourceState"
+ Provider_MoveResourceState_FullMethodName = "/tfplugin5.Provider/MoveResourceState"
+ Provider_ReadDataSource_FullMethodName = "/tfplugin5.Provider/ReadDataSource"
+ Provider_ValidateEphemeralResourceConfig_FullMethodName = "/tfplugin5.Provider/ValidateEphemeralResourceConfig"
+ Provider_OpenEphemeralResource_FullMethodName = "/tfplugin5.Provider/OpenEphemeralResource"
+ Provider_RenewEphemeralResource_FullMethodName = "/tfplugin5.Provider/RenewEphemeralResource"
+ Provider_CloseEphemeralResource_FullMethodName = "/tfplugin5.Provider/CloseEphemeralResource"
+ Provider_GetFunctions_FullMethodName = "/tfplugin5.Provider/GetFunctions"
+ Provider_CallFunction_FullMethodName = "/tfplugin5.Provider/CallFunction"
+ Provider_Stop_FullMethodName = "/tfplugin5.Provider/Stop"
)
// ProviderClient is the client API for Provider service.
@@ -85,6 +89,11 @@ type ProviderClient interface {
ImportResourceState(ctx context.Context, in *ImportResourceState_Request, opts ...grpc.CallOption) (*ImportResourceState_Response, error)
MoveResourceState(ctx context.Context, in *MoveResourceState_Request, opts ...grpc.CallOption) (*MoveResourceState_Response, error)
ReadDataSource(ctx context.Context, in *ReadDataSource_Request, opts ...grpc.CallOption) (*ReadDataSource_Response, error)
+ // ////// Ephemeral Resource Lifecycle
+ ValidateEphemeralResourceConfig(ctx context.Context, in *ValidateEphemeralResourceConfig_Request, opts ...grpc.CallOption) (*ValidateEphemeralResourceConfig_Response, error)
+ OpenEphemeralResource(ctx context.Context, in *OpenEphemeralResource_Request, opts ...grpc.CallOption) (*OpenEphemeralResource_Response, error)
+ RenewEphemeralResource(ctx context.Context, in *RenewEphemeralResource_Request, opts ...grpc.CallOption) (*RenewEphemeralResource_Response, error)
+ CloseEphemeralResource(ctx context.Context, in *CloseEphemeralResource_Request, opts ...grpc.CallOption) (*CloseEphemeralResource_Response, error)
// GetFunctions returns the definitions of all functions.
GetFunctions(ctx context.Context, in *GetFunctions_Request, opts ...grpc.CallOption) (*GetFunctions_Response, error)
// CallFunction runs the provider-defined function logic and returns
@@ -103,8 +112,9 @@ func NewProviderClient(cc grpc.ClientConnInterface) ProviderClient {
}
func (c *providerClient) GetMetadata(ctx context.Context, in *GetMetadata_Request, opts ...grpc.CallOption) (*GetMetadata_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(GetMetadata_Response)
- err := c.cc.Invoke(ctx, Provider_GetMetadata_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_GetMetadata_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -112,8 +122,9 @@ func (c *providerClient) GetMetadata(ctx context.Context, in *GetMetadata_Reques
}
func (c *providerClient) GetSchema(ctx context.Context, in *GetProviderSchema_Request, opts ...grpc.CallOption) (*GetProviderSchema_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(GetProviderSchema_Response)
- err := c.cc.Invoke(ctx, Provider_GetSchema_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_GetSchema_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -121,8 +132,9 @@ func (c *providerClient) GetSchema(ctx context.Context, in *GetProviderSchema_Re
}
func (c *providerClient) PrepareProviderConfig(ctx context.Context, in *PrepareProviderConfig_Request, opts ...grpc.CallOption) (*PrepareProviderConfig_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(PrepareProviderConfig_Response)
- err := c.cc.Invoke(ctx, Provider_PrepareProviderConfig_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_PrepareProviderConfig_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -130,8 +142,9 @@ func (c *providerClient) PrepareProviderConfig(ctx context.Context, in *PrepareP
}
func (c *providerClient) ValidateResourceTypeConfig(ctx context.Context, in *ValidateResourceTypeConfig_Request, opts ...grpc.CallOption) (*ValidateResourceTypeConfig_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ValidateResourceTypeConfig_Response)
- err := c.cc.Invoke(ctx, Provider_ValidateResourceTypeConfig_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_ValidateResourceTypeConfig_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -139,8 +152,9 @@ func (c *providerClient) ValidateResourceTypeConfig(ctx context.Context, in *Val
}
func (c *providerClient) ValidateDataSourceConfig(ctx context.Context, in *ValidateDataSourceConfig_Request, opts ...grpc.CallOption) (*ValidateDataSourceConfig_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ValidateDataSourceConfig_Response)
- err := c.cc.Invoke(ctx, Provider_ValidateDataSourceConfig_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_ValidateDataSourceConfig_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -148,8 +162,9 @@ func (c *providerClient) ValidateDataSourceConfig(ctx context.Context, in *Valid
}
func (c *providerClient) UpgradeResourceState(ctx context.Context, in *UpgradeResourceState_Request, opts ...grpc.CallOption) (*UpgradeResourceState_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(UpgradeResourceState_Response)
- err := c.cc.Invoke(ctx, Provider_UpgradeResourceState_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_UpgradeResourceState_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -157,8 +172,9 @@ func (c *providerClient) UpgradeResourceState(ctx context.Context, in *UpgradeRe
}
func (c *providerClient) Configure(ctx context.Context, in *Configure_Request, opts ...grpc.CallOption) (*Configure_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(Configure_Response)
- err := c.cc.Invoke(ctx, Provider_Configure_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_Configure_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -166,8 +182,9 @@ func (c *providerClient) Configure(ctx context.Context, in *Configure_Request, o
}
func (c *providerClient) ReadResource(ctx context.Context, in *ReadResource_Request, opts ...grpc.CallOption) (*ReadResource_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ReadResource_Response)
- err := c.cc.Invoke(ctx, Provider_ReadResource_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_ReadResource_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -175,8 +192,9 @@ func (c *providerClient) ReadResource(ctx context.Context, in *ReadResource_Requ
}
func (c *providerClient) PlanResourceChange(ctx context.Context, in *PlanResourceChange_Request, opts ...grpc.CallOption) (*PlanResourceChange_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(PlanResourceChange_Response)
- err := c.cc.Invoke(ctx, Provider_PlanResourceChange_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_PlanResourceChange_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -184,8 +202,9 @@ func (c *providerClient) PlanResourceChange(ctx context.Context, in *PlanResourc
}
func (c *providerClient) ApplyResourceChange(ctx context.Context, in *ApplyResourceChange_Request, opts ...grpc.CallOption) (*ApplyResourceChange_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ApplyResourceChange_Response)
- err := c.cc.Invoke(ctx, Provider_ApplyResourceChange_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_ApplyResourceChange_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -193,8 +212,9 @@ func (c *providerClient) ApplyResourceChange(ctx context.Context, in *ApplyResou
}
func (c *providerClient) ImportResourceState(ctx context.Context, in *ImportResourceState_Request, opts ...grpc.CallOption) (*ImportResourceState_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ImportResourceState_Response)
- err := c.cc.Invoke(ctx, Provider_ImportResourceState_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_ImportResourceState_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -202,8 +222,9 @@ func (c *providerClient) ImportResourceState(ctx context.Context, in *ImportReso
}
func (c *providerClient) MoveResourceState(ctx context.Context, in *MoveResourceState_Request, opts ...grpc.CallOption) (*MoveResourceState_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(MoveResourceState_Response)
- err := c.cc.Invoke(ctx, Provider_MoveResourceState_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_MoveResourceState_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -211,8 +232,49 @@ func (c *providerClient) MoveResourceState(ctx context.Context, in *MoveResource
}
func (c *providerClient) ReadDataSource(ctx context.Context, in *ReadDataSource_Request, opts ...grpc.CallOption) (*ReadDataSource_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ReadDataSource_Response)
- err := c.cc.Invoke(ctx, Provider_ReadDataSource_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_ReadDataSource_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *providerClient) ValidateEphemeralResourceConfig(ctx context.Context, in *ValidateEphemeralResourceConfig_Request, opts ...grpc.CallOption) (*ValidateEphemeralResourceConfig_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(ValidateEphemeralResourceConfig_Response)
+ err := c.cc.Invoke(ctx, Provider_ValidateEphemeralResourceConfig_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *providerClient) OpenEphemeralResource(ctx context.Context, in *OpenEphemeralResource_Request, opts ...grpc.CallOption) (*OpenEphemeralResource_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(OpenEphemeralResource_Response)
+ err := c.cc.Invoke(ctx, Provider_OpenEphemeralResource_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *providerClient) RenewEphemeralResource(ctx context.Context, in *RenewEphemeralResource_Request, opts ...grpc.CallOption) (*RenewEphemeralResource_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(RenewEphemeralResource_Response)
+ err := c.cc.Invoke(ctx, Provider_RenewEphemeralResource_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *providerClient) CloseEphemeralResource(ctx context.Context, in *CloseEphemeralResource_Request, opts ...grpc.CallOption) (*CloseEphemeralResource_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(CloseEphemeralResource_Response)
+ err := c.cc.Invoke(ctx, Provider_CloseEphemeralResource_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -220,8 +282,9 @@ func (c *providerClient) ReadDataSource(ctx context.Context, in *ReadDataSource_
}
func (c *providerClient) GetFunctions(ctx context.Context, in *GetFunctions_Request, opts ...grpc.CallOption) (*GetFunctions_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(GetFunctions_Response)
- err := c.cc.Invoke(ctx, Provider_GetFunctions_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_GetFunctions_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -229,8 +292,9 @@ func (c *providerClient) GetFunctions(ctx context.Context, in *GetFunctions_Requ
}
func (c *providerClient) CallFunction(ctx context.Context, in *CallFunction_Request, opts ...grpc.CallOption) (*CallFunction_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(CallFunction_Response)
- err := c.cc.Invoke(ctx, Provider_CallFunction_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_CallFunction_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -238,8 +302,9 @@ func (c *providerClient) CallFunction(ctx context.Context, in *CallFunction_Requ
}
func (c *providerClient) Stop(ctx context.Context, in *Stop_Request, opts ...grpc.CallOption) (*Stop_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(Stop_Response)
- err := c.cc.Invoke(ctx, Provider_Stop_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_Stop_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -248,7 +313,7 @@ func (c *providerClient) Stop(ctx context.Context, in *Stop_Request, opts ...grp
// ProviderServer is the server API for Provider service.
// All implementations must embed UnimplementedProviderServer
-// for forward compatibility
+// for forward compatibility.
type ProviderServer interface {
// GetMetadata returns upfront information about server capabilities and
// supported resource types without requiring the server to instantiate all
@@ -272,6 +337,11 @@ type ProviderServer interface {
ImportResourceState(context.Context, *ImportResourceState_Request) (*ImportResourceState_Response, error)
MoveResourceState(context.Context, *MoveResourceState_Request) (*MoveResourceState_Response, error)
ReadDataSource(context.Context, *ReadDataSource_Request) (*ReadDataSource_Response, error)
+ // ////// Ephemeral Resource Lifecycle
+ ValidateEphemeralResourceConfig(context.Context, *ValidateEphemeralResourceConfig_Request) (*ValidateEphemeralResourceConfig_Response, error)
+ OpenEphemeralResource(context.Context, *OpenEphemeralResource_Request) (*OpenEphemeralResource_Response, error)
+ RenewEphemeralResource(context.Context, *RenewEphemeralResource_Request) (*RenewEphemeralResource_Response, error)
+ CloseEphemeralResource(context.Context, *CloseEphemeralResource_Request) (*CloseEphemeralResource_Response, error)
// GetFunctions returns the definitions of all functions.
GetFunctions(context.Context, *GetFunctions_Request) (*GetFunctions_Response, error)
// CallFunction runs the provider-defined function logic and returns
@@ -282,9 +352,12 @@ type ProviderServer interface {
mustEmbedUnimplementedProviderServer()
}
-// UnimplementedProviderServer must be embedded to have forward compatible implementations.
-type UnimplementedProviderServer struct {
-}
+// UnimplementedProviderServer must be embedded to have
+// forward compatible implementations.
+//
+// NOTE: this should be embedded by value instead of pointer to avoid a nil
+// pointer dereference when methods are called.
+type UnimplementedProviderServer struct{}
func (UnimplementedProviderServer) GetMetadata(context.Context, *GetMetadata_Request) (*GetMetadata_Response, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetMetadata not implemented")
@@ -325,6 +398,18 @@ func (UnimplementedProviderServer) MoveResourceState(context.Context, *MoveResou
func (UnimplementedProviderServer) ReadDataSource(context.Context, *ReadDataSource_Request) (*ReadDataSource_Response, error) {
return nil, status.Errorf(codes.Unimplemented, "method ReadDataSource not implemented")
}
+func (UnimplementedProviderServer) ValidateEphemeralResourceConfig(context.Context, *ValidateEphemeralResourceConfig_Request) (*ValidateEphemeralResourceConfig_Response, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method ValidateEphemeralResourceConfig not implemented")
+}
+func (UnimplementedProviderServer) OpenEphemeralResource(context.Context, *OpenEphemeralResource_Request) (*OpenEphemeralResource_Response, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method OpenEphemeralResource not implemented")
+}
+func (UnimplementedProviderServer) RenewEphemeralResource(context.Context, *RenewEphemeralResource_Request) (*RenewEphemeralResource_Response, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method RenewEphemeralResource not implemented")
+}
+func (UnimplementedProviderServer) CloseEphemeralResource(context.Context, *CloseEphemeralResource_Request) (*CloseEphemeralResource_Response, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method CloseEphemeralResource not implemented")
+}
func (UnimplementedProviderServer) GetFunctions(context.Context, *GetFunctions_Request) (*GetFunctions_Response, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetFunctions not implemented")
}
@@ -335,6 +420,7 @@ func (UnimplementedProviderServer) Stop(context.Context, *Stop_Request) (*Stop_R
return nil, status.Errorf(codes.Unimplemented, "method Stop not implemented")
}
func (UnimplementedProviderServer) mustEmbedUnimplementedProviderServer() {}
+func (UnimplementedProviderServer) testEmbeddedByValue() {}
// UnsafeProviderServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to ProviderServer will
@@ -344,6 +430,13 @@ type UnsafeProviderServer interface {
}
func RegisterProviderServer(s grpc.ServiceRegistrar, srv ProviderServer) {
+ // If the following call panics, it indicates UnimplementedProviderServer was
+ // embedded by pointer and is nil. This will cause panics if an
+ // unimplemented method is ever invoked, so we test this at initialization
+ // time to prevent it from happening at runtime later due to I/O.
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
+ t.testEmbeddedByValue()
+ }
s.RegisterService(&Provider_ServiceDesc, srv)
}
@@ -581,6 +674,78 @@ func _Provider_ReadDataSource_Handler(srv interface{}, ctx context.Context, dec
return interceptor(ctx, in, info, handler)
}
+func _Provider_ValidateEphemeralResourceConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(ValidateEphemeralResourceConfig_Request)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(ProviderServer).ValidateEphemeralResourceConfig(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: Provider_ValidateEphemeralResourceConfig_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(ProviderServer).ValidateEphemeralResourceConfig(ctx, req.(*ValidateEphemeralResourceConfig_Request))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _Provider_OpenEphemeralResource_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(OpenEphemeralResource_Request)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(ProviderServer).OpenEphemeralResource(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: Provider_OpenEphemeralResource_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(ProviderServer).OpenEphemeralResource(ctx, req.(*OpenEphemeralResource_Request))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _Provider_RenewEphemeralResource_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(RenewEphemeralResource_Request)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(ProviderServer).RenewEphemeralResource(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: Provider_RenewEphemeralResource_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(ProviderServer).RenewEphemeralResource(ctx, req.(*RenewEphemeralResource_Request))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _Provider_CloseEphemeralResource_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(CloseEphemeralResource_Request)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(ProviderServer).CloseEphemeralResource(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: Provider_CloseEphemeralResource_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(ProviderServer).CloseEphemeralResource(ctx, req.(*CloseEphemeralResource_Request))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
func _Provider_GetFunctions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetFunctions_Request)
if err := dec(in); err != nil {
@@ -694,6 +859,22 @@ var Provider_ServiceDesc = grpc.ServiceDesc{
MethodName: "ReadDataSource",
Handler: _Provider_ReadDataSource_Handler,
},
+ {
+ MethodName: "ValidateEphemeralResourceConfig",
+ Handler: _Provider_ValidateEphemeralResourceConfig_Handler,
+ },
+ {
+ MethodName: "OpenEphemeralResource",
+ Handler: _Provider_OpenEphemeralResource_Handler,
+ },
+ {
+ MethodName: "RenewEphemeralResource",
+ Handler: _Provider_RenewEphemeralResource_Handler,
+ },
+ {
+ MethodName: "CloseEphemeralResource",
+ Handler: _Provider_CloseEphemeralResource_Handler,
+ },
{
MethodName: "GetFunctions",
Handler: _Provider_GetFunctions_Handler,
@@ -724,7 +905,7 @@ const (
type ProvisionerClient interface {
GetSchema(ctx context.Context, in *GetProvisionerSchema_Request, opts ...grpc.CallOption) (*GetProvisionerSchema_Response, error)
ValidateProvisionerConfig(ctx context.Context, in *ValidateProvisionerConfig_Request, opts ...grpc.CallOption) (*ValidateProvisionerConfig_Response, error)
- ProvisionResource(ctx context.Context, in *ProvisionResource_Request, opts ...grpc.CallOption) (Provisioner_ProvisionResourceClient, error)
+ ProvisionResource(ctx context.Context, in *ProvisionResource_Request, opts ...grpc.CallOption) (grpc.ServerStreamingClient[ProvisionResource_Response], error)
Stop(ctx context.Context, in *Stop_Request, opts ...grpc.CallOption) (*Stop_Response, error)
}
@@ -737,8 +918,9 @@ func NewProvisionerClient(cc grpc.ClientConnInterface) ProvisionerClient {
}
func (c *provisionerClient) GetSchema(ctx context.Context, in *GetProvisionerSchema_Request, opts ...grpc.CallOption) (*GetProvisionerSchema_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(GetProvisionerSchema_Response)
- err := c.cc.Invoke(ctx, Provisioner_GetSchema_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provisioner_GetSchema_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -746,20 +928,22 @@ func (c *provisionerClient) GetSchema(ctx context.Context, in *GetProvisionerSch
}
func (c *provisionerClient) ValidateProvisionerConfig(ctx context.Context, in *ValidateProvisionerConfig_Request, opts ...grpc.CallOption) (*ValidateProvisionerConfig_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ValidateProvisionerConfig_Response)
- err := c.cc.Invoke(ctx, Provisioner_ValidateProvisionerConfig_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provisioner_ValidateProvisionerConfig_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
return out, nil
}
-func (c *provisionerClient) ProvisionResource(ctx context.Context, in *ProvisionResource_Request, opts ...grpc.CallOption) (Provisioner_ProvisionResourceClient, error) {
- stream, err := c.cc.NewStream(ctx, &Provisioner_ServiceDesc.Streams[0], Provisioner_ProvisionResource_FullMethodName, opts...)
+func (c *provisionerClient) ProvisionResource(ctx context.Context, in *ProvisionResource_Request, opts ...grpc.CallOption) (grpc.ServerStreamingClient[ProvisionResource_Response], error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ stream, err := c.cc.NewStream(ctx, &Provisioner_ServiceDesc.Streams[0], Provisioner_ProvisionResource_FullMethodName, cOpts...)
if err != nil {
return nil, err
}
- x := &provisionerProvisionResourceClient{stream}
+ x := &grpc.GenericClientStream[ProvisionResource_Request, ProvisionResource_Response]{ClientStream: stream}
if err := x.ClientStream.SendMsg(in); err != nil {
return nil, err
}
@@ -769,26 +953,13 @@ func (c *provisionerClient) ProvisionResource(ctx context.Context, in *Provision
return x, nil
}
-type Provisioner_ProvisionResourceClient interface {
- Recv() (*ProvisionResource_Response, error)
- grpc.ClientStream
-}
-
-type provisionerProvisionResourceClient struct {
- grpc.ClientStream
-}
-
-func (x *provisionerProvisionResourceClient) Recv() (*ProvisionResource_Response, error) {
- m := new(ProvisionResource_Response)
- if err := x.ClientStream.RecvMsg(m); err != nil {
- return nil, err
- }
- return m, nil
-}
+// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name.
+type Provisioner_ProvisionResourceClient = grpc.ServerStreamingClient[ProvisionResource_Response]
func (c *provisionerClient) Stop(ctx context.Context, in *Stop_Request, opts ...grpc.CallOption) (*Stop_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(Stop_Response)
- err := c.cc.Invoke(ctx, Provisioner_Stop_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provisioner_Stop_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -797,18 +968,21 @@ func (c *provisionerClient) Stop(ctx context.Context, in *Stop_Request, opts ...
// ProvisionerServer is the server API for Provisioner service.
// All implementations must embed UnimplementedProvisionerServer
-// for forward compatibility
+// for forward compatibility.
type ProvisionerServer interface {
GetSchema(context.Context, *GetProvisionerSchema_Request) (*GetProvisionerSchema_Response, error)
ValidateProvisionerConfig(context.Context, *ValidateProvisionerConfig_Request) (*ValidateProvisionerConfig_Response, error)
- ProvisionResource(*ProvisionResource_Request, Provisioner_ProvisionResourceServer) error
+ ProvisionResource(*ProvisionResource_Request, grpc.ServerStreamingServer[ProvisionResource_Response]) error
Stop(context.Context, *Stop_Request) (*Stop_Response, error)
mustEmbedUnimplementedProvisionerServer()
}
-// UnimplementedProvisionerServer must be embedded to have forward compatible implementations.
-type UnimplementedProvisionerServer struct {
-}
+// UnimplementedProvisionerServer must be embedded to have
+// forward compatible implementations.
+//
+// NOTE: this should be embedded by value instead of pointer to avoid a nil
+// pointer dereference when methods are called.
+type UnimplementedProvisionerServer struct{}
func (UnimplementedProvisionerServer) GetSchema(context.Context, *GetProvisionerSchema_Request) (*GetProvisionerSchema_Response, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetSchema not implemented")
@@ -816,13 +990,14 @@ func (UnimplementedProvisionerServer) GetSchema(context.Context, *GetProvisioner
func (UnimplementedProvisionerServer) ValidateProvisionerConfig(context.Context, *ValidateProvisionerConfig_Request) (*ValidateProvisionerConfig_Response, error) {
return nil, status.Errorf(codes.Unimplemented, "method ValidateProvisionerConfig not implemented")
}
-func (UnimplementedProvisionerServer) ProvisionResource(*ProvisionResource_Request, Provisioner_ProvisionResourceServer) error {
+func (UnimplementedProvisionerServer) ProvisionResource(*ProvisionResource_Request, grpc.ServerStreamingServer[ProvisionResource_Response]) error {
return status.Errorf(codes.Unimplemented, "method ProvisionResource not implemented")
}
func (UnimplementedProvisionerServer) Stop(context.Context, *Stop_Request) (*Stop_Response, error) {
return nil, status.Errorf(codes.Unimplemented, "method Stop not implemented")
}
func (UnimplementedProvisionerServer) mustEmbedUnimplementedProvisionerServer() {}
+func (UnimplementedProvisionerServer) testEmbeddedByValue() {}
// UnsafeProvisionerServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to ProvisionerServer will
@@ -832,6 +1007,13 @@ type UnsafeProvisionerServer interface {
}
func RegisterProvisionerServer(s grpc.ServiceRegistrar, srv ProvisionerServer) {
+ // If the following call panics, it indicates UnimplementedProvisionerServer was
+ // embedded by pointer and is nil. This will cause panics if an
+ // unimplemented method is ever invoked, so we test this at initialization
+ // time to prevent it from happening at runtime later due to I/O.
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
+ t.testEmbeddedByValue()
+ }
s.RegisterService(&Provisioner_ServiceDesc, srv)
}
@@ -876,21 +1058,11 @@ func _Provisioner_ProvisionResource_Handler(srv interface{}, stream grpc.ServerS
if err := stream.RecvMsg(m); err != nil {
return err
}
- return srv.(ProvisionerServer).ProvisionResource(m, &provisionerProvisionResourceServer{stream})
-}
-
-type Provisioner_ProvisionResourceServer interface {
- Send(*ProvisionResource_Response) error
- grpc.ServerStream
+ return srv.(ProvisionerServer).ProvisionResource(m, &grpc.GenericServerStream[ProvisionResource_Request, ProvisionResource_Response]{ServerStream: stream})
}
-type provisionerProvisionResourceServer struct {
- grpc.ServerStream
-}
-
-func (x *provisionerProvisionResourceServer) Send(m *ProvisionResource_Response) error {
- return x.ServerStream.SendMsg(m)
-}
+// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name.
+type Provisioner_ProvisionResourceServer = grpc.ServerStreamingServer[ProvisionResource_Response]
func _Provisioner_Stop_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(Stop_Request)
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/toproto/ephemeral_resource.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/toproto/ephemeral_resource.go
new file mode 100644
index 0000000000..952f5d9673
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/toproto/ephemeral_resource.go
@@ -0,0 +1,65 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package toproto
+
+import (
+ "github.com/hashicorp/terraform-plugin-go/tfprotov5"
+ "github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5"
+)
+
+func GetMetadata_EphemeralResourceMetadata(in *tfprotov5.EphemeralResourceMetadata) *tfplugin5.GetMetadata_EphemeralResourceMetadata {
+ if in == nil {
+ return nil
+ }
+
+ return &tfplugin5.GetMetadata_EphemeralResourceMetadata{
+ TypeName: in.TypeName,
+ }
+}
+
+func ValidateEphemeralResourceConfig_Response(in *tfprotov5.ValidateEphemeralResourceConfigResponse) *tfplugin5.ValidateEphemeralResourceConfig_Response {
+ if in == nil {
+ return nil
+ }
+
+ return &tfplugin5.ValidateEphemeralResourceConfig_Response{
+ Diagnostics: Diagnostics(in.Diagnostics),
+ }
+}
+
+func OpenEphemeralResource_Response(in *tfprotov5.OpenEphemeralResourceResponse) *tfplugin5.OpenEphemeralResource_Response {
+ if in == nil {
+ return nil
+ }
+
+ return &tfplugin5.OpenEphemeralResource_Response{
+ Result: DynamicValue(in.Result),
+ Diagnostics: Diagnostics(in.Diagnostics),
+ Private: in.Private,
+ RenewAt: Timestamp(in.RenewAt),
+ Deferred: Deferred(in.Deferred),
+ }
+}
+
+func RenewEphemeralResource_Response(in *tfprotov5.RenewEphemeralResourceResponse) *tfplugin5.RenewEphemeralResource_Response {
+ if in == nil {
+ return nil
+ }
+
+ return &tfplugin5.RenewEphemeralResource_Response{
+ Diagnostics: Diagnostics(in.Diagnostics),
+ Private: in.Private,
+ RenewAt: Timestamp(in.RenewAt),
+ }
+}
+
+func CloseEphemeralResource_Response(in *tfprotov5.CloseEphemeralResourceResponse) *tfplugin5.CloseEphemeralResource_Response {
+ if in == nil {
+ return nil
+ }
+
+ return &tfplugin5.CloseEphemeralResource_Response{
+ Diagnostics: Diagnostics(in.Diagnostics),
+ }
+}
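
These converters pass the tfprotov5 response fields straight through and return nil for nil input. On the provider side, handlers that produce those responses might look roughly like the sketch below; the request type names (for example tfprotov5.OpenEphemeralResourceRequest) are assumed to follow the same naming pattern and are not shown in this diff, and the one-hour renewal window is invented for illustration.

```go
package exampleprovider

import (
	"context"
	"time"

	"github.com/hashicorp/terraform-plugin-go/tfprotov5"
)

// tokenResource is a hypothetical ephemeral resource that mints a short-lived
// credential on Open and refreshes it on Renew.
type tokenResource struct{}

func (r tokenResource) OpenEphemeralResource(ctx context.Context, req *tfprotov5.OpenEphemeralResourceRequest) (*tfprotov5.OpenEphemeralResourceResponse, error) {
	// result would normally be built from the decoded req.Config; omitted here.
	var result *tfprotov5.DynamicValue

	return &tfprotov5.OpenEphemeralResourceResponse{
		Result:  result,
		Private: []byte(`{"session":"abc123"}`), // opaque provider-internal data
		RenewAt: time.Now().Add(1 * time.Hour),  // zero time would mean "no renewal needed"
	}, nil
}

func (r tokenResource) RenewEphemeralResource(ctx context.Context, req *tfprotov5.RenewEphemeralResourceRequest) (*tfprotov5.RenewEphemeralResourceResponse, error) {
	// req.Private carries back whatever Open (or the previous Renew) returned.
	return &tfprotov5.RenewEphemeralResourceResponse{
		Private: req.Private,
		RenewAt: time.Now().Add(1 * time.Hour),
	}, nil
}

func (r tokenResource) CloseEphemeralResource(ctx context.Context, req *tfprotov5.CloseEphemeralResourceRequest) (*tfprotov5.CloseEphemeralResourceResponse, error) {
	// Revoke the credential identified by req.Private, then report success.
	return &tfprotov5.CloseEphemeralResourceResponse{}, nil
}
```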
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/toproto/provider.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/toproto/provider.go
index 4891c53874..84d579a54f 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/toproto/provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/toproto/provider.go
@@ -16,6 +16,7 @@ func GetMetadata_Response(in *tfprotov5.GetMetadataResponse) *tfplugin5.GetMetad
resp := &tfplugin5.GetMetadata_Response{
DataSources: make([]*tfplugin5.GetMetadata_DataSourceMetadata, 0, len(in.DataSources)),
Diagnostics: Diagnostics(in.Diagnostics),
+ EphemeralResources: make([]*tfplugin5.GetMetadata_EphemeralResourceMetadata, 0, len(in.EphemeralResources)),
Functions: make([]*tfplugin5.GetMetadata_FunctionMetadata, 0, len(in.Functions)),
Resources: make([]*tfplugin5.GetMetadata_ResourceMetadata, 0, len(in.Resources)),
ServerCapabilities: ServerCapabilities(in.ServerCapabilities),
@@ -25,6 +26,10 @@ func GetMetadata_Response(in *tfprotov5.GetMetadataResponse) *tfplugin5.GetMetad
resp.DataSources = append(resp.DataSources, GetMetadata_DataSourceMetadata(&datasource))
}
+ for _, ephemeralResource := range in.EphemeralResources {
+ resp.EphemeralResources = append(resp.EphemeralResources, GetMetadata_EphemeralResourceMetadata(&ephemeralResource))
+ }
+
for _, function := range in.Functions {
resp.Functions = append(resp.Functions, GetMetadata_FunctionMetadata(&function))
}
@@ -42,13 +47,18 @@ func GetProviderSchema_Response(in *tfprotov5.GetProviderSchemaResponse) *tfplug
}
resp := &tfplugin5.GetProviderSchema_Response{
- DataSourceSchemas: make(map[string]*tfplugin5.Schema, len(in.DataSourceSchemas)),
- Diagnostics: Diagnostics(in.Diagnostics),
- Functions: make(map[string]*tfplugin5.Function, len(in.Functions)),
- Provider: Schema(in.Provider),
- ProviderMeta: Schema(in.ProviderMeta),
- ResourceSchemas: make(map[string]*tfplugin5.Schema, len(in.ResourceSchemas)),
- ServerCapabilities: ServerCapabilities(in.ServerCapabilities),
+ DataSourceSchemas: make(map[string]*tfplugin5.Schema, len(in.DataSourceSchemas)),
+ Diagnostics: Diagnostics(in.Diagnostics),
+ EphemeralResourceSchemas: make(map[string]*tfplugin5.Schema, len(in.EphemeralResourceSchemas)),
+ Functions: make(map[string]*tfplugin5.Function, len(in.Functions)),
+ Provider: Schema(in.Provider),
+ ProviderMeta: Schema(in.ProviderMeta),
+ ResourceSchemas: make(map[string]*tfplugin5.Schema, len(in.ResourceSchemas)),
+ ServerCapabilities: ServerCapabilities(in.ServerCapabilities),
+ }
+
+ for name, schema := range in.EphemeralResourceSchemas {
+ resp.EphemeralResourceSchemas[name] = Schema(schema)
}
for name, schema := range in.ResourceSchemas {
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/toproto/schema.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/toproto/schema.go
index 69d47af1a2..30cabc1e29 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/toproto/schema.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/toproto/schema.go
@@ -53,6 +53,7 @@ func Schema_Attribute(in *tfprotov5.SchemaAttribute) *tfplugin5.Schema_Attribute
Required: in.Required,
Sensitive: in.Sensitive,
Type: CtyType(in.Type),
+ WriteOnly: in.WriteOnly,
}
return resp
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/toproto/timestamp.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/toproto/timestamp.go
new file mode 100644
index 0000000000..3ee28365e9
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/toproto/timestamp.go
@@ -0,0 +1,18 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package toproto
+
+import (
+ "time"
+
+ "google.golang.org/protobuf/types/known/timestamppb"
+)
+
+func Timestamp(in time.Time) *timestamppb.Timestamp {
+ if in.IsZero() {
+ return nil
+ }
+
+ return timestamppb.New(in)
+}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/provider.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/provider.go
index 799f90238a..19fea3b8f8 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/provider.go
@@ -54,6 +54,13 @@ type ProviderServer interface {
// terraform-plugin-go, so they are their own interface that is composed
// into ProviderServer.
FunctionServer
+
+ // EphemeralResourceServer is an interface encapsulating all the ephemeral
+ // resource-related RPC requests. ProviderServer implementations must
+ // implement them, but they are a handy interface for defining what an
+ // ephemeral resource is to terraform-plugin-go, so they're their own
+ // interface that is composed into ProviderServer.
+ EphemeralResourceServer
}
// GetMetadataRequest represents a GetMetadata RPC request.
@@ -78,6 +85,9 @@ type GetMetadataResponse struct {
// Resources returns metadata for all managed resources.
Resources []ResourceMetadata
+
+ // EphemeralResources returns metadata for all ephemeral resources.
+ EphemeralResources []EphemeralResourceMetadata
}
// GetProviderSchemaRequest represents a Terraform RPC request for the
@@ -124,6 +134,13 @@ type GetProviderSchemaResponse struct {
// includes the provider name.
Functions map[string]*Function
+ // EphemeralResourceSchemas is a map of ephemeral resource names to the schema for
+ // the configuration specified in the ephemeral resource. The name should be an
+ // ephemeral resource name, and should be prefixed with your provider's
+ // shortname and an underscore. It should match the first label after
+ // `ephemeral` in a user's configuration.
+ EphemeralResourceSchemas map[string]*Schema
+
// Diagnostics report errors or warnings related to returning the
// provider's schemas. Returning an empty slice indicates success, with
// no errors or warnings generated.
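
The naming rule in the comment above (provider shortname prefix, matching the first label after `ephemeral` in configuration) is easiest to see with a concrete response. A hedged sketch, assuming a provider shortname of examplecloud and a single ephemeral resource schema:

```go
package exampleprovider

import (
	"github.com/hashicorp/terraform-plugin-go/tfprotov5"
	"github.com/hashicorp/terraform-plugin-go/tftypes"
)

// getProviderSchema returns a response whose EphemeralResourceSchemas key
// ("examplecloud_token") is what users reference as
//
//	ephemeral "examplecloud_token" "current" { ... }
//
// in their configuration.
func getProviderSchema() *tfprotov5.GetProviderSchemaResponse {
	return &tfprotov5.GetProviderSchemaResponse{
		EphemeralResourceSchemas: map[string]*tfprotov5.Schema{
			"examplecloud_token": {
				Block: &tfprotov5.SchemaBlock{
					Attributes: []*tfprotov5.SchemaAttribute{
						{
							Name:     "name",
							Type:     tftypes.String,
							Required: true,
						},
						{
							Name:      "token",
							Type:      tftypes.String,
							Computed:  true,
							Sensitive: true,
						},
					},
				},
			},
		},
	}
}
```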
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/resource.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/resource.go
index 9e50a0ce6c..df28e0a5de 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/resource.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/resource.go
@@ -65,26 +65,6 @@ type ResourceServer interface {
MoveResourceState(context.Context, *MoveResourceStateRequest) (*MoveResourceStateResponse, error)
}
-// ResourceServerWithMoveResourceState is a temporary interface for servers
-// to implement MoveResourceState RPC handling.
-//
-// Deprecated: This interface will be removed in a future version. Use
-// ResourceServer instead.
-type ResourceServerWithMoveResourceState interface {
- ResourceServer
-
- // MoveResourceState is called when Terraform is asked to change a resource
- // type for an existing resource. The provider must accept the change as
- // valid by ensuring the source resource type, schema version, and provider
- // address are compatible to convert the source state into the target
- // resource type and latest state version.
- //
- // This functionality is only supported in Terraform 1.8 and later. The
- // provider must have enabled the MoveResourceState server capability to
- // enable these requests.
- MoveResourceState(context.Context, *MoveResourceStateRequest) (*MoveResourceStateResponse, error)
-}
-
// ValidateResourceTypeConfigRequest is the request Terraform sends when it
// wants to validate a resource's configuration.
type ValidateResourceTypeConfigRequest struct {
@@ -103,6 +83,10 @@ type ValidateResourceTypeConfigRequest struct {
// from knowing the value at request time. Any attributes not directly
// set in the configuration will be null.
Config *DynamicValue
+
+ // ClientCapabilities defines optionally supported protocol features for the
+ // ValidateResourceTypeConfig RPC, such as forward-compatible Terraform behavior changes.
+ ClientCapabilities *ValidateResourceTypeConfigClientCapabilities
}
// ValidateResourceTypeConfigResponse is the response from the provider about
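Assuming the protocol 5 ValidateResourceTypeConfigClientCapabilities type mirrors the protocol 6 ValidateResourceConfigClientCapabilities added later in this change (a single WriteOnlyAttributesAllowed flag), a provider would typically guard write-only handling with a nil-safe check, sketched below with a hypothetical helper name:

package example

import (
	"github.com/hashicorp/terraform-plugin-go/tfprotov5"
)

// writeOnlyAllowed is a hypothetical guard for a ValidateResourceTypeConfig
// implementation: a nil capabilities struct comes from Terraform versions that
// predate the feature and is treated as "write-only attributes not supported".
func writeOnlyAllowed(req *tfprotov5.ValidateResourceTypeConfigRequest) bool {
	return req.ClientCapabilities != nil && req.ClientCapabilities.WriteOnlyAttributesAllowed
}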
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/schema.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/schema.go
index 9b860275f1..a91dbd7cad 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/schema.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/schema.go
@@ -221,6 +221,12 @@ type SchemaAttribute struct {
// experiences. Providers should set it when deprecating attributes in
// preparation for these tools.
Deprecated bool
+
+ // WriteOnly, when set to true, indicates that the attribute value will
+ // be provided via configuration and must be omitted from plan and state response objects. WriteOnly
+ // must be combined with Optional or Required, and is only valid for managed
+ // resource schemas.
+ WriteOnly bool
}
// ValueType returns the tftypes.Type for a SchemaAttribute.
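A write-only attribute is declared like any other attribute, with the new flag set alongside Required or Optional. A brief sketch, using a hypothetical attribute name:

package example

import (
	"github.com/hashicorp/terraform-plugin-go/tfprotov5"
	"github.com/hashicorp/terraform-plugin-go/tftypes"
)

// adminPasswordAttribute sketches a write-only attribute: the value is read
// from configuration but omitted from plan and state. WriteOnly must be paired
// with Required or Optional and is only valid in managed resource schemas.
func adminPasswordAttribute() *tfprotov5.SchemaAttribute {
	return &tfprotov5.SchemaAttribute{
		Name:      "admin_password", // hypothetical name
		Type:      tftypes.String,
		Required:  true,
		Sensitive: true,
		WriteOnly: true,
	}
}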
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/tf5server/server.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/tf5server/server.go
index 17c5c147ae..e1542f0934 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/tf5server/server.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov5/tf5server/server.go
@@ -18,18 +18,18 @@ import (
"google.golang.org/grpc"
+ "github.com/hashicorp/go-hclog"
+ "github.com/hashicorp/go-plugin"
+ "github.com/hashicorp/terraform-plugin-log/tflog"
+ "github.com/hashicorp/terraform-plugin-log/tfsdklog"
+ "github.com/mitchellh/go-testing-interface"
+
"github.com/hashicorp/terraform-plugin-go/internal/logging"
"github.com/hashicorp/terraform-plugin-go/tfprotov5"
"github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/fromproto"
"github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tf5serverlogging"
"github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/tfplugin5"
"github.com/hashicorp/terraform-plugin-go/tfprotov5/internal/toproto"
-
- "github.com/hashicorp/go-hclog"
- "github.com/hashicorp/go-plugin"
- "github.com/hashicorp/terraform-plugin-log/tflog"
- "github.com/hashicorp/terraform-plugin-log/tfsdklog"
- "github.com/mitchellh/go-testing-interface"
)
const (
@@ -49,7 +49,7 @@ const (
//
// In the future, it may be possible to include this information directly
// in the protocol buffers rather than recreating a constant here.
- protocolVersionMinor uint = 6
+ protocolVersionMinor uint = 8
)
// protocolVersion represents the combined major and minor version numbers of
@@ -716,6 +716,7 @@ func (s *server) ValidateResourceTypeConfig(ctx context.Context, protoReq *tfplu
req := fromproto.ValidateResourceTypeConfigRequest(protoReq)
+ tf5serverlogging.ValidateResourceTypeConfigClientCapabilities(ctx, req.ClientCapabilities)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "Config", req.Config)
ctx = tf5serverlogging.DownstreamRequest(ctx)
@@ -1003,6 +1004,120 @@ func (s *server) GetFunctions(ctx context.Context, protoReq *tfplugin5.GetFuncti
return protoResp, nil
}
+func (s *server) ValidateEphemeralResourceConfig(ctx context.Context, protoReq *tfplugin5.ValidateEphemeralResourceConfig_Request) (*tfplugin5.ValidateEphemeralResourceConfig_Response, error) {
+ rpc := "ValidateEphemeralResourceConfig"
+ ctx = s.loggingContext(ctx)
+ ctx = logging.RpcContext(ctx, rpc)
+ ctx = logging.EphemeralResourceContext(ctx, protoReq.TypeName)
+ ctx = s.stoppableContext(ctx)
+ logging.ProtocolTrace(ctx, "Received request")
+ defer logging.ProtocolTrace(ctx, "Served request")
+
+ req := fromproto.ValidateEphemeralResourceConfigRequest(protoReq)
+
+ logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "Config", req.Config)
+
+ ctx = tf5serverlogging.DownstreamRequest(ctx)
+
+ resp, err := s.downstream.ValidateEphemeralResourceConfig(ctx, req)
+ if err != nil {
+ logging.ProtocolError(ctx, "Error from downstream", map[string]any{logging.KeyError: err})
+ return nil, err
+ }
+
+ tf5serverlogging.DownstreamResponse(ctx, resp.Diagnostics)
+
+ protoResp := toproto.ValidateEphemeralResourceConfig_Response(resp)
+
+ return protoResp, nil
+}
+
+func (s *server) OpenEphemeralResource(ctx context.Context, protoReq *tfplugin5.OpenEphemeralResource_Request) (*tfplugin5.OpenEphemeralResource_Response, error) {
+ rpc := "OpenEphemeralResource"
+ ctx = s.loggingContext(ctx)
+ ctx = logging.RpcContext(ctx, rpc)
+ ctx = logging.EphemeralResourceContext(ctx, protoReq.TypeName)
+ ctx = s.stoppableContext(ctx)
+ logging.ProtocolTrace(ctx, "Received request")
+ defer logging.ProtocolTrace(ctx, "Served request")
+
+ req := fromproto.OpenEphemeralResourceRequest(protoReq)
+
+ tf5serverlogging.OpenEphemeralResourceClientCapabilities(ctx, req.ClientCapabilities)
+ logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "Config", req.Config)
+ ctx = tf5serverlogging.DownstreamRequest(ctx)
+
+ resp, err := s.downstream.OpenEphemeralResource(ctx, req)
+ if err != nil {
+ logging.ProtocolError(ctx, "Error from downstream", map[string]any{logging.KeyError: err})
+ return nil, err
+ }
+
+ tf5serverlogging.DownstreamResponse(ctx, resp.Diagnostics)
+ logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response", "Result", resp.Result)
+ tf5serverlogging.Deferred(ctx, resp.Deferred)
+
+ if resp.Deferred != nil && (req.ClientCapabilities == nil || !req.ClientCapabilities.DeferralAllowed) {
+ resp.Diagnostics = append(resp.Diagnostics, invalidDeferredResponseDiag(resp.Deferred.Reason))
+ }
+
+ protoResp := toproto.OpenEphemeralResource_Response(resp)
+
+ return protoResp, nil
+}
+
+func (s *server) RenewEphemeralResource(ctx context.Context, protoReq *tfplugin5.RenewEphemeralResource_Request) (*tfplugin5.RenewEphemeralResource_Response, error) {
+ rpc := "RenewEphemeralResource"
+ ctx = s.loggingContext(ctx)
+ ctx = logging.RpcContext(ctx, rpc)
+ ctx = logging.EphemeralResourceContext(ctx, protoReq.TypeName)
+ ctx = s.stoppableContext(ctx)
+ logging.ProtocolTrace(ctx, "Received request")
+ defer logging.ProtocolTrace(ctx, "Served request")
+
+ req := fromproto.RenewEphemeralResourceRequest(protoReq)
+
+ ctx = tf5serverlogging.DownstreamRequest(ctx)
+
+ resp, err := s.downstream.RenewEphemeralResource(ctx, req)
+ if err != nil {
+ logging.ProtocolError(ctx, "Error from downstream", map[string]any{logging.KeyError: err})
+ return nil, err
+ }
+
+ tf5serverlogging.DownstreamResponse(ctx, resp.Diagnostics)
+
+ protoResp := toproto.RenewEphemeralResource_Response(resp)
+
+ return protoResp, nil
+}
+
+func (s *server) CloseEphemeralResource(ctx context.Context, protoReq *tfplugin5.CloseEphemeralResource_Request) (*tfplugin5.CloseEphemeralResource_Response, error) {
+ rpc := "CloseEphemeralResource"
+ ctx = s.loggingContext(ctx)
+ ctx = logging.RpcContext(ctx, rpc)
+ ctx = logging.EphemeralResourceContext(ctx, protoReq.TypeName)
+ ctx = s.stoppableContext(ctx)
+ logging.ProtocolTrace(ctx, "Received request")
+ defer logging.ProtocolTrace(ctx, "Served request")
+
+ req := fromproto.CloseEphemeralResourceRequest(protoReq)
+
+ ctx = tf5serverlogging.DownstreamRequest(ctx)
+
+ resp, err := s.downstream.CloseEphemeralResource(ctx, req)
+ if err != nil {
+ logging.ProtocolError(ctx, "Error from downstream", map[string]any{logging.KeyError: err})
+ return nil, err
+ }
+
+ tf5serverlogging.DownstreamResponse(ctx, resp.Diagnostics)
+
+ protoResp := toproto.CloseEphemeralResource_Response(resp)
+
+ return protoResp, nil
+}
+
func invalidDeferredResponseDiag(reason tfprotov5.DeferredReason) *tfprotov5.Diagnostic {
return &tfprotov5.Diagnostic{
Severity: tfprotov5.DiagnosticSeverityError,
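Because EphemeralResourceServer is now composed into tfprotov5.ProviderServer, existing protocol 5 providers must supply these four methods even when they define no ephemeral resources. A minimal sketch of such a stub (all names hypothetical), which simply reports the requested type as unsupported:

package example

import (
	"context"

	"github.com/hashicorp/terraform-plugin-go/tfprotov5"
)

// noEphemeralResources is a hypothetical stub that can be embedded by providers
// which define no ephemeral resources but must still satisfy the interface.
type noEphemeralResources struct{}

func unsupportedEphemeralDiag(typeName string) []*tfprotov5.Diagnostic {
	return []*tfprotov5.Diagnostic{{
		Severity: tfprotov5.DiagnosticSeverityError,
		Summary:  "Unknown Ephemeral Resource",
		Detail:   "This provider does not define an ephemeral resource type named " + typeName + ".",
	}}
}

func (noEphemeralResources) ValidateEphemeralResourceConfig(_ context.Context, req *tfprotov5.ValidateEphemeralResourceConfigRequest) (*tfprotov5.ValidateEphemeralResourceConfigResponse, error) {
	return &tfprotov5.ValidateEphemeralResourceConfigResponse{Diagnostics: unsupportedEphemeralDiag(req.TypeName)}, nil
}

func (noEphemeralResources) OpenEphemeralResource(_ context.Context, req *tfprotov5.OpenEphemeralResourceRequest) (*tfprotov5.OpenEphemeralResourceResponse, error) {
	return &tfprotov5.OpenEphemeralResourceResponse{Diagnostics: unsupportedEphemeralDiag(req.TypeName)}, nil
}

func (noEphemeralResources) RenewEphemeralResource(_ context.Context, req *tfprotov5.RenewEphemeralResourceRequest) (*tfprotov5.RenewEphemeralResourceResponse, error) {
	return &tfprotov5.RenewEphemeralResourceResponse{Diagnostics: unsupportedEphemeralDiag(req.TypeName)}, nil
}

func (noEphemeralResources) CloseEphemeralResource(_ context.Context, req *tfprotov5.CloseEphemeralResourceRequest) (*tfprotov5.CloseEphemeralResourceResponse, error) {
	return &tfprotov5.CloseEphemeralResourceResponse{Diagnostics: unsupportedEphemeralDiag(req.TypeName)}, nil
}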
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/client_capabilities.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/client_capabilities.go
index b528c123ab..b74f2ec7c8 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/client_capabilities.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/client_capabilities.go
@@ -3,6 +3,15 @@
package tfprotov6
+// ValidateResourceConfigClientCapabilities allows Terraform to publish information
+// regarding optionally supported protocol features for the ValidateResourceConfig RPC,
+// such as forward-compatible Terraform behavior changes.
+type ValidateResourceConfigClientCapabilities struct {
+ // WriteOnlyAttributesAllowed signals that the client is able to
+ // handle write_only attributes for managed resources.
+ WriteOnlyAttributesAllowed bool
+}
+
// ConfigureProviderClientCapabilities allows Terraform to publish information
// regarding optionally supported protocol features for the ConfigureProvider RPC,
// such as forward-compatible Terraform behavior changes.
@@ -47,3 +56,12 @@ type ImportResourceStateClientCapabilities struct {
// handle deferred responses from the provider.
DeferralAllowed bool
}
+
+// OpenEphemeralResourceClientCapabilities allows Terraform to publish information
+// regarding optionally supported protocol features for the OpenEphemeralResource RPC,
+// such as forward-compatible Terraform behavior changes.
+type OpenEphemeralResourceClientCapabilities struct {
+ // DeferralAllowed signals that the request from Terraform is able to
+ // handle deferred responses from the provider.
+ DeferralAllowed bool
+}
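The deferral capability enables the same nil-safe check the protocol 5 server applies above before accepting a Deferred response; a provider-side sketch with a hypothetical helper name:

package example

import (
	"github.com/hashicorp/terraform-plugin-go/tfprotov6"
)

// deferralAllowed reports whether the calling Terraform announced that it can
// handle a deferred OpenEphemeralResource response; a nil capabilities struct
// comes from clients that predate the feature.
func deferralAllowed(req *tfprotov6.OpenEphemeralResourceRequest) bool {
	return req.ClientCapabilities != nil && req.ClientCapabilities.DeferralAllowed
}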
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/ephemeral_resource.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/ephemeral_resource.go
new file mode 100644
index 0000000000..038b21f7e1
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/ephemeral_resource.go
@@ -0,0 +1,185 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package tfprotov6
+
+import (
+ "context"
+ "time"
+)
+
+// EphemeralResourceMetadata describes metadata for an ephemeral resource in the GetMetadata
+// RPC.
+type EphemeralResourceMetadata struct {
+ // TypeName is the name of the ephemeral resource.
+ TypeName string
+}
+
+// EphemeralResourceServer is an interface containing the methods an ephemeral resource
+// implementation needs to fill.
+type EphemeralResourceServer interface {
+ // ValidateEphemeralResourceConfig is called when Terraform is checking that an
+ // ephemeral resource configuration is valid. It is guaranteed to have types
+ // conforming to your schema, but it is not guaranteed that all values
+ // will be known. This is your opportunity to do custom or advanced
+ // validation prior to an ephemeral resource being opened.
+ ValidateEphemeralResourceConfig(context.Context, *ValidateEphemeralResourceConfigRequest) (*ValidateEphemeralResourceConfigResponse, error)
+
+ // OpenEphemeralResource is called when Terraform wants to open the ephemeral resource,
+ // usually during planning. If the config for the ephemeral resource contains unknown
+ // values, Terraform will defer the OpenEphemeralResource call until apply.
+ OpenEphemeralResource(context.Context, *OpenEphemeralResourceRequest) (*OpenEphemeralResourceResponse, error)
+
+ // RenewEphemeralResource is called when Terraform detects that the previously specified
+ // RenewAt timestamp has passed. The RenewAt timestamp is supplied either from the
+ // OpenEphemeralResource call or a previous RenewEphemeralResource call.
+ RenewEphemeralResource(context.Context, *RenewEphemeralResourceRequest) (*RenewEphemeralResourceResponse, error)
+
+ // CloseEphemeralResource is called when Terraform is closing the ephemeral resource.
+ CloseEphemeralResource(context.Context, *CloseEphemeralResourceRequest) (*CloseEphemeralResourceResponse, error)
+}
+
+// ValidateEphemeralResourceConfigRequest is the request Terraform sends when it
+// wants to validate an ephemeral resource's configuration.
+type ValidateEphemeralResourceConfigRequest struct {
+ // TypeName is the type of resource Terraform is validating.
+ TypeName string
+
+ // Config is the configuration the user supplied for that ephemeral resource. See
+ // the documentation on `DynamicValue` for more information about
+ // safely accessing the configuration.
+ //
+ // The configuration is represented as a tftypes.Object, with each
+ // attribute and nested block getting its own key and value.
+ //
+ // This configuration may contain unknown values if a user uses
+ // interpolation or other functionality that would prevent Terraform
+ // from knowing the value at request time. Any attributes not directly
+ // set in the configuration will be null.
+ Config *DynamicValue
+}
+
+// ValidateEphemeralResourceConfigResponse is the response from the provider about
+// the validity of an ephemeral resource's configuration.
+type ValidateEphemeralResourceConfigResponse struct {
+ // Diagnostics report errors or warnings related to the given
+ // configuration. Returning an empty slice indicates a successful
+ // validation with no warnings or errors generated.
+ Diagnostics []*Diagnostic
+}
+
+// OpenEphemeralResourceRequest is the request Terraform sends when it
+// wants to open an ephemeral resource.
+type OpenEphemeralResourceRequest struct {
+ // TypeName is the type of resource Terraform is opening.
+ TypeName string
+
+ // Config is the configuration the user supplied for that ephemeral resource. See
+ // the documentation on `DynamicValue` for more information about
+ // safely accessing the configuration.
+ //
+ // The configuration is represented as a tftypes.Object, with each
+ // attribute and nested block getting its own key and value.
+ //
+ // This configuration will always be fully known. If Config contains unknown values,
+ // Terraform will defer the OpenEphemeralResource RPC until apply.
+ Config *DynamicValue
+
+ // ClientCapabilities defines optionally supported protocol features for the
+ // OpenEphemeralResource RPC, such as forward-compatible Terraform behavior changes.
+ ClientCapabilities *OpenEphemeralResourceClientCapabilities
+}
+
+// OpenEphemeralResourceResponse is the response from the provider about the current
+// state of the opened ephemeral resource.
+type OpenEphemeralResourceResponse struct {
+ // Result is the provider's understanding of what the ephemeral resource's
+ // data is after it has been opened, represented as a `DynamicValue`.
+ // See the documentation for `DynamicValue` for information about
+ // safely creating the `DynamicValue`.
+ //
+ // Any attribute, whether computed or not, that has a known value in
+ // the Config in the OpenEphemeralResourceRequest must be preserved
+ // exactly as it was in Result.
+ //
+ // Any attribute in the Config in the OpenEphemeralResourceRequest
+ // that is unknown must take on a known value at this time. No unknown
+ // values are allowed in the Result.
+ //
+ // The result should be represented as a tftypes.Object, with each
+ // attribute and nested block getting its own key and value.
+ Result *DynamicValue
+
+ // Diagnostics report errors or warnings related to opening the
+ // requested ephemeral resource. Returning an empty slice
+ // indicates a successful open with no warnings or errors
+ // generated.
+ Diagnostics []*Diagnostic
+
+ // Private should be set to any private data that the provider would like to be
+ // sent to the next Renew or Close call.
+ Private []byte
+
+ // RenewAt indicates to Terraform that the ephemeral resource
+ // needs to be renewed at the specified time. Terraform will
+ // call the RenewEphemeralResource RPC when the specified time has passed.
+ RenewAt time.Time
+
+ // Deferred is used to indicate to Terraform that the OpenEphemeralResource operation
+ // needs to be deferred for a reason.
+ Deferred *Deferred
+}
+
+// RenewEphemeralResourceRequest is the request Terraform sends when it
+// wants to renew an ephemeral resource.
+type RenewEphemeralResourceRequest struct {
+ // TypeName is the type of resource Terraform is renewing.
+ TypeName string
+
+ // Private is any provider-defined private data stored with the
+ // ephemeral resource from the most recent Open or Renew call.
+ //
+ // To ensure private data is preserved, copy any necessary data to
+ // the RenewEphemeralResourceResponse type Private field.
+ Private []byte
+}
+
+// RenewEphemeralResourceResponse is the response from the provider after an ephemeral resource
+// has been renewed.
+type RenewEphemeralResourceResponse struct {
+ // Diagnostics report errors or warnings related to renewing the
+ // requested ephemeral resource. Returning an empty slice
+ // indicates a successful renewal with no warnings or errors
+ // generated.
+ Diagnostics []*Diagnostic
+
+ // Private should be set to any private data that the provider would like to be
+ // sent to the next Renew or Close call.
+ Private []byte
+
+ // RenewAt indicates to Terraform that the ephemeral resource
+ // needs to be renewed at the specified time. Terraform will
+ // call the RenewEphemeralResource RPC when the specified time has passed.
+ RenewAt time.Time
+}
+
+// CloseEphemeralResourceRequest is the request Terraform sends when it
+// wants to close an ephemeral resource.
+type CloseEphemeralResourceRequest struct {
+ // TypeName is the type of resource Terraform is closing.
+ TypeName string
+
+ // Private is any provider-defined private data stored with the
+ // ephemeral resource from the most recent Open or Renew call.
+ Private []byte
+}
+
+// CloseEphemeralResourceResponse is the response from the provider about
+// the closed ephemeral resource.
+type CloseEphemeralResourceResponse struct {
+ // Diagnostics report errors or warnings related to closing the
+ // requested ephemeral resource. Returning an empty slice
+ // indicates a successful close with no warnings or errors
+ // generated.
+ Diagnostics []*Diagnostic
+}
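Taken together, these types define the Open/Renew/Close lifecycle. A minimal sketch of an implementation for a hypothetical short-lived token, assuming invented attribute names and a fixed five-minute renewal window:

package example

import (
	"context"
	"time"

	"github.com/hashicorp/terraform-plugin-go/tfprotov6"
	"github.com/hashicorp/terraform-plugin-go/tftypes"
)

// tokenEphemeralServer is a hypothetical tfprotov6.EphemeralResourceServer for
// a single "token" attribute.
type tokenEphemeralServer struct{}

var tokenObjectType = tftypes.Object{
	AttributeTypes: map[string]tftypes.Type{"token": tftypes.String},
}

func (tokenEphemeralServer) ValidateEphemeralResourceConfig(_ context.Context, _ *tfprotov6.ValidateEphemeralResourceConfigRequest) (*tfprotov6.ValidateEphemeralResourceConfigResponse, error) {
	// No custom validation; an empty Diagnostics slice means success.
	return &tfprotov6.ValidateEphemeralResourceConfigResponse{}, nil
}

func (tokenEphemeralServer) OpenEphemeralResource(_ context.Context, _ *tfprotov6.OpenEphemeralResourceRequest) (*tfprotov6.OpenEphemeralResourceResponse, error) {
	result, err := tfprotov6.NewDynamicValue(tokenObjectType, tftypes.NewValue(tokenObjectType, map[string]tftypes.Value{
		"token": tftypes.NewValue(tftypes.String, "example-token"), // placeholder value
	}))
	if err != nil {
		return nil, err
	}
	return &tfprotov6.OpenEphemeralResourceResponse{
		Result:  &result,
		RenewAt: time.Now().Add(5 * time.Minute), // ask Terraform to call Renew before expiry
		Private: []byte(`{"lease":"example"}`),   // round-tripped to Renew and Close
	}, nil
}

func (tokenEphemeralServer) RenewEphemeralResource(_ context.Context, req *tfprotov6.RenewEphemeralResourceRequest) (*tfprotov6.RenewEphemeralResourceResponse, error) {
	return &tfprotov6.RenewEphemeralResourceResponse{
		RenewAt: time.Now().Add(5 * time.Minute),
		Private: req.Private, // preserve private data for the next call
	}, nil
}

func (tokenEphemeralServer) CloseEphemeralResource(_ context.Context, _ *tfprotov6.CloseEphemeralResourceRequest) (*tfprotov6.CloseEphemeralResourceResponse, error) {
	return &tfprotov6.CloseEphemeralResourceResponse{}, nil
}

With methods like these in place, the protocol server handlers added in this change route the four ephemeral resource RPCs straight through to the provider implementation.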
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/fromproto/client_capabilities.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/fromproto/client_capabilities.go
index 06238eac04..70527905d5 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/fromproto/client_capabilities.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/fromproto/client_capabilities.go
@@ -8,6 +8,18 @@ import (
"github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tfplugin6"
)
+func ValidateResourceConfigClientCapabilities(in *tfplugin6.ClientCapabilities) *tfprotov6.ValidateResourceConfigClientCapabilities {
+ if in == nil {
+ return nil
+ }
+
+ resp := &tfprotov6.ValidateResourceConfigClientCapabilities{
+ WriteOnlyAttributesAllowed: in.WriteOnlyAttributesAllowed,
+ }
+
+ return resp
+}
+
func ConfigureProviderClientCapabilities(in *tfplugin6.ClientCapabilities) *tfprotov6.ConfigureProviderClientCapabilities {
if in == nil {
return nil
@@ -67,3 +79,15 @@ func ImportResourceStateClientCapabilities(in *tfplugin6.ClientCapabilities) *tf
return resp
}
+
+func OpenEphemeralResourceClientCapabilities(in *tfplugin6.ClientCapabilities) *tfprotov6.OpenEphemeralResourceClientCapabilities {
+ if in == nil {
+ return nil
+ }
+
+ resp := &tfprotov6.OpenEphemeralResourceClientCapabilities{
+ DeferralAllowed: in.DeferralAllowed,
+ }
+
+ return resp
+}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/fromproto/ephemeral_resource.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/fromproto/ephemeral_resource.go
new file mode 100644
index 0000000000..103aabd833
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/fromproto/ephemeral_resource.go
@@ -0,0 +1,54 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package fromproto
+
+import (
+ "github.com/hashicorp/terraform-plugin-go/tfprotov6"
+ "github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tfplugin6"
+)
+
+func ValidateEphemeralResourceConfigRequest(in *tfplugin6.ValidateEphemeralResourceConfig_Request) *tfprotov6.ValidateEphemeralResourceConfigRequest {
+ if in == nil {
+ return nil
+ }
+
+ return &tfprotov6.ValidateEphemeralResourceConfigRequest{
+ TypeName: in.TypeName,
+ Config: DynamicValue(in.Config),
+ }
+}
+
+func OpenEphemeralResourceRequest(in *tfplugin6.OpenEphemeralResource_Request) *tfprotov6.OpenEphemeralResourceRequest {
+ if in == nil {
+ return nil
+ }
+
+ return &tfprotov6.OpenEphemeralResourceRequest{
+ TypeName: in.TypeName,
+ Config: DynamicValue(in.Config),
+ ClientCapabilities: OpenEphemeralResourceClientCapabilities(in.ClientCapabilities),
+ }
+}
+
+func RenewEphemeralResourceRequest(in *tfplugin6.RenewEphemeralResource_Request) *tfprotov6.RenewEphemeralResourceRequest {
+ if in == nil {
+ return nil
+ }
+
+ return &tfprotov6.RenewEphemeralResourceRequest{
+ TypeName: in.TypeName,
+ Private: in.Private,
+ }
+}
+
+func CloseEphemeralResourceRequest(in *tfplugin6.CloseEphemeralResource_Request) *tfprotov6.CloseEphemeralResourceRequest {
+ if in == nil {
+ return nil
+ }
+
+ return &tfprotov6.CloseEphemeralResourceRequest{
+ TypeName: in.TypeName,
+ Private: in.Private,
+ }
+}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/fromproto/resource.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/fromproto/resource.go
index 24e336953f..406158f672 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/fromproto/resource.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/fromproto/resource.go
@@ -14,8 +14,9 @@ func ValidateResourceConfigRequest(in *tfplugin6.ValidateResourceConfig_Request)
}
resp := &tfprotov6.ValidateResourceConfigRequest{
- Config: DynamicValue(in.Config),
- TypeName: in.TypeName,
+ ClientCapabilities: ValidateResourceConfigClientCapabilities(in.ClientCapabilities),
+ Config: DynamicValue(in.Config),
+ TypeName: in.TypeName,
}
return resp
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tf6serverlogging/client_capabilities.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tf6serverlogging/client_capabilities.go
index d8d5859f43..e53996e7f8 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tf6serverlogging/client_capabilities.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tf6serverlogging/client_capabilities.go
@@ -10,6 +10,20 @@ import (
"github.com/hashicorp/terraform-plugin-go/tfprotov6"
)
+// ValidateResourceConfigClientCapabilities generates a TRACE "Announced client capabilities" log.
+func ValidateResourceConfigClientCapabilities(ctx context.Context, capabilities *tfprotov6.ValidateResourceConfigClientCapabilities) {
+ if capabilities == nil {
+ logging.ProtocolTrace(ctx, "No announced client capabilities", map[string]interface{}{})
+ return
+ }
+
+ responseFields := map[string]interface{}{
+ logging.KeyClientCapabilityWriteOnlyAttributesAllowed: capabilities.WriteOnlyAttributesAllowed,
+ }
+
+ logging.ProtocolTrace(ctx, "Announced client capabilities", responseFields)
+}
+
// ConfigureProviderClientCapabilities generates a TRACE "Announced client capabilities" log.
func ConfigureProviderClientCapabilities(ctx context.Context, capabilities *tfprotov6.ConfigureProviderClientCapabilities) {
if capabilities == nil {
@@ -79,3 +93,17 @@ func ImportResourceStateClientCapabilities(ctx context.Context, capabilities *tf
logging.ProtocolTrace(ctx, "Announced client capabilities", responseFields)
}
+
+// OpenEphemeralResourceClientCapabilities generates a TRACE "Announced client capabilities" log.
+func OpenEphemeralResourceClientCapabilities(ctx context.Context, capabilities *tfprotov6.OpenEphemeralResourceClientCapabilities) {
+ if capabilities == nil {
+ logging.ProtocolTrace(ctx, "No announced client capabilities", map[string]interface{}{})
+ return
+ }
+
+ responseFields := map[string]interface{}{
+ logging.KeyClientCapabilityDeferralAllowed: capabilities.DeferralAllowed,
+ }
+
+ logging.ProtocolTrace(ctx, "Announced client capabilities", responseFields)
+}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tf6serverlogging/server_capabilities.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tf6serverlogging/server_capabilities.go
index f6aaf953d9..74f9931aca 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tf6serverlogging/server_capabilities.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tf6serverlogging/server_capabilities.go
@@ -14,11 +14,13 @@ import (
func ServerCapabilities(ctx context.Context, capabilities *tfprotov6.ServerCapabilities) {
responseFields := map[string]interface{}{
logging.KeyServerCapabilityGetProviderSchemaOptional: false,
+ logging.KeyServerCapabilityMoveResourceState: false,
logging.KeyServerCapabilityPlanDestroy: false,
}
if capabilities != nil {
responseFields[logging.KeyServerCapabilityGetProviderSchemaOptional] = capabilities.GetProviderSchemaOptional
+ responseFields[logging.KeyServerCapabilityMoveResourceState] = capabilities.MoveResourceState
responseFields[logging.KeyServerCapabilityPlanDestroy] = capabilities.PlanDestroy
}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tfplugin6/tfplugin6.pb.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tfplugin6/tfplugin6.pb.go
index e0f55a1f54..123bf01a82 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tfplugin6/tfplugin6.pb.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tfplugin6/tfplugin6.pb.go
@@ -1,9 +1,9 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0
-// Terraform Plugin RPC protocol version 6.6
+// Terraform Plugin RPC protocol version 6.8
//
-// This file defines version 6.6 of the RPC protocol. To implement a plugin
+// This file defines version 6.8 of the RPC protocol. To implement a plugin
// against this protocol, copy this definition into your own codebase and
// use protoc to generate stubs for your target language.
//
@@ -22,8 +22,8 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.34.0
-// protoc v5.26.1
+// protoc-gen-go v1.36.3
+// protoc v5.29.3
// source: tfplugin6.proto
package tfplugin6
@@ -31,6 +31,7 @@ package tfplugin6
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
+ timestamppb "google.golang.org/protobuf/types/known/timestamppb"
reflect "reflect"
sync "sync"
)
@@ -312,21 +313,18 @@ func (Deferred_Reason) EnumDescriptor() ([]byte, []int) {
// DynamicValue is an opaque encoding of terraform data, with the field name
// indicating the encoding scheme used.
type DynamicValue struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Msgpack []byte `protobuf:"bytes,1,opt,name=msgpack,proto3" json:"msgpack,omitempty"`
+ Json []byte `protobuf:"bytes,2,opt,name=json,proto3" json:"json,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Msgpack []byte `protobuf:"bytes,1,opt,name=msgpack,proto3" json:"msgpack,omitempty"`
- Json []byte `protobuf:"bytes,2,opt,name=json,proto3" json:"json,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *DynamicValue) Reset() {
*x = DynamicValue{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *DynamicValue) String() string {
@@ -337,7 +335,7 @@ func (*DynamicValue) ProtoMessage() {}
func (x *DynamicValue) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[0]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -367,23 +365,20 @@ func (x *DynamicValue) GetJson() []byte {
}
type Diagnostic struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Severity Diagnostic_Severity `protobuf:"varint,1,opt,name=severity,proto3,enum=tfplugin6.Diagnostic_Severity" json:"severity,omitempty"`
+ Summary string `protobuf:"bytes,2,opt,name=summary,proto3" json:"summary,omitempty"`
+ Detail string `protobuf:"bytes,3,opt,name=detail,proto3" json:"detail,omitempty"`
+ Attribute *AttributePath `protobuf:"bytes,4,opt,name=attribute,proto3" json:"attribute,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Severity Diagnostic_Severity `protobuf:"varint,1,opt,name=severity,proto3,enum=tfplugin6.Diagnostic_Severity" json:"severity,omitempty"`
- Summary string `protobuf:"bytes,2,opt,name=summary,proto3" json:"summary,omitempty"`
- Detail string `protobuf:"bytes,3,opt,name=detail,proto3" json:"detail,omitempty"`
- Attribute *AttributePath `protobuf:"bytes,4,opt,name=attribute,proto3" json:"attribute,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *Diagnostic) Reset() {
*x = Diagnostic{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Diagnostic) String() string {
@@ -394,7 +389,7 @@ func (*Diagnostic) ProtoMessage() {}
func (x *Diagnostic) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[1]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -438,23 +433,20 @@ func (x *Diagnostic) GetAttribute() *AttributePath {
}
type FunctionError struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"`
// The optional function_argument records the index position of the
// argument which caused the error.
FunctionArgument *int64 `protobuf:"varint,2,opt,name=function_argument,json=functionArgument,proto3,oneof" json:"function_argument,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *FunctionError) Reset() {
*x = FunctionError{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[2]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *FunctionError) String() string {
@@ -465,7 +457,7 @@ func (*FunctionError) ProtoMessage() {}
func (x *FunctionError) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[2]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -495,20 +487,17 @@ func (x *FunctionError) GetFunctionArgument() int64 {
}
type AttributePath struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Steps []*AttributePath_Step `protobuf:"bytes,1,rep,name=steps,proto3" json:"steps,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Steps []*AttributePath_Step `protobuf:"bytes,1,rep,name=steps,proto3" json:"steps,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *AttributePath) Reset() {
*x = AttributePath{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[3]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[3]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *AttributePath) String() string {
@@ -519,7 +508,7 @@ func (*AttributePath) ProtoMessage() {}
func (x *AttributePath) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[3]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -542,18 +531,16 @@ func (x *AttributePath) GetSteps() []*AttributePath_Step {
}
type StopProvider struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *StopProvider) Reset() {
*x = StopProvider{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[4]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[4]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *StopProvider) String() string {
@@ -564,7 +551,7 @@ func (*StopProvider) ProtoMessage() {}
func (x *StopProvider) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[4]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -583,21 +570,18 @@ func (*StopProvider) Descriptor() ([]byte, []int) {
// provider. It can be in one of two formats, the current json encoded format
// in bytes, or the legacy flatmap format as a map of strings.
type RawState struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Json []byte `protobuf:"bytes,1,opt,name=json,proto3" json:"json,omitempty"`
+ Flatmap map[string]string `protobuf:"bytes,2,rep,name=flatmap,proto3" json:"flatmap,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
unknownFields protoimpl.UnknownFields
-
- Json []byte `protobuf:"bytes,1,opt,name=json,proto3" json:"json,omitempty"`
- Flatmap map[string]string `protobuf:"bytes,2,rep,name=flatmap,proto3" json:"flatmap,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
+ sizeCache protoimpl.SizeCache
}
func (x *RawState) Reset() {
*x = RawState{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[5]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[5]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *RawState) String() string {
@@ -608,7 +592,7 @@ func (*RawState) ProtoMessage() {}
func (x *RawState) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[5]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -639,25 +623,22 @@ func (x *RawState) GetFlatmap() map[string]string {
// Schema is the configuration schema for a Resource or Provider.
type Schema struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// The version of the schema.
// Schemas are versioned, so that providers can upgrade a saved resource
// state when the schema is changed.
Version int64 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"`
// Block is the top level configuration block for this schema.
- Block *Schema_Block `protobuf:"bytes,2,opt,name=block,proto3" json:"block,omitempty"`
+ Block *Schema_Block `protobuf:"bytes,2,opt,name=block,proto3" json:"block,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *Schema) Reset() {
*x = Schema{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[6]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[6]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Schema) String() string {
@@ -668,7 +649,7 @@ func (*Schema) ProtoMessage() {}
func (x *Schema) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[6]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -698,10 +679,7 @@ func (x *Schema) GetBlock() *Schema_Block {
}
type Function struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// parameters is the ordered list of positional function parameters.
Parameters []*Function_Parameter `protobuf:"bytes,1,rep,name=parameters,proto3" json:"parameters,omitempty"`
// variadic_parameter is an optional final parameter which accepts
@@ -719,15 +697,15 @@ type Function struct {
// deprecation_message is human-readable documentation if the
// function is deprecated.
DeprecationMessage string `protobuf:"bytes,7,opt,name=deprecation_message,json=deprecationMessage,proto3" json:"deprecation_message,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *Function) Reset() {
*x = Function{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[7]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[7]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Function) String() string {
@@ -738,7 +716,7 @@ func (*Function) ProtoMessage() {}
func (x *Function) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[7]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -807,10 +785,7 @@ func (x *Function) GetDeprecationMessage() string {
// availability of certain forward-compatible changes which may be optional
// in a major protocol version, but cannot be tested for directly.
type ServerCapabilities struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// The plan_destroy capability signals that a provider expects a call
// to PlanResourceChange when a resource is going to be destroyed.
PlanDestroy bool `protobuf:"varint,1,opt,name=plan_destroy,json=planDestroy,proto3" json:"plan_destroy,omitempty"`
@@ -822,15 +797,15 @@ type ServerCapabilities struct {
// The move_resource_state capability signals that a provider supports the
// MoveResourceState RPC.
MoveResourceState bool `protobuf:"varint,3,opt,name=move_resource_state,json=moveResourceState,proto3" json:"move_resource_state,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ServerCapabilities) Reset() {
*x = ServerCapabilities{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[8]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[8]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ServerCapabilities) String() string {
@@ -841,7 +816,7 @@ func (*ServerCapabilities) ProtoMessage() {}
func (x *ServerCapabilities) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[8]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -882,22 +857,22 @@ func (x *ServerCapabilities) GetMoveResourceState() bool {
// certain forward-compatible changes which may be optional in a major
// protocol version, but cannot be tested for directly.
type ClientCapabilities struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// The deferral_allowed capability signals that the client is able to
// handle deferred responses from the provider.
DeferralAllowed bool `protobuf:"varint,1,opt,name=deferral_allowed,json=deferralAllowed,proto3" json:"deferral_allowed,omitempty"`
+ // The write_only_attributes_allowed capability signals that the client
+ // is able to handle write_only attributes for managed resources.
+ WriteOnlyAttributesAllowed bool `protobuf:"varint,2,opt,name=write_only_attributes_allowed,json=writeOnlyAttributesAllowed,proto3" json:"write_only_attributes_allowed,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ClientCapabilities) Reset() {
*x = ClientCapabilities{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[9]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[9]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ClientCapabilities) String() string {
@@ -908,7 +883,7 @@ func (*ClientCapabilities) ProtoMessage() {}
func (x *ClientCapabilities) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[9]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -930,23 +905,27 @@ func (x *ClientCapabilities) GetDeferralAllowed() bool {
return false
}
+func (x *ClientCapabilities) GetWriteOnlyAttributesAllowed() bool {
+ if x != nil {
+ return x.WriteOnlyAttributesAllowed
+ }
+ return false
+}
+
// Deferred is a message that indicates that change is deferred for a reason.
type Deferred struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// reason is the reason for deferring the change.
- Reason Deferred_Reason `protobuf:"varint,1,opt,name=reason,proto3,enum=tfplugin6.Deferred_Reason" json:"reason,omitempty"`
+ Reason Deferred_Reason `protobuf:"varint,1,opt,name=reason,proto3,enum=tfplugin6.Deferred_Reason" json:"reason,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *Deferred) Reset() {
*x = Deferred{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[10]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[10]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Deferred) String() string {
@@ -957,7 +936,7 @@ func (*Deferred) ProtoMessage() {}
func (x *Deferred) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[10]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -980,18 +959,16 @@ func (x *Deferred) GetReason() Deferred_Reason {
}
type GetMetadata struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetMetadata) Reset() {
*x = GetMetadata{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[11]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[11]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetMetadata) String() string {
@@ -1002,7 +979,7 @@ func (*GetMetadata) ProtoMessage() {}
func (x *GetMetadata) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[11]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1018,18 +995,16 @@ func (*GetMetadata) Descriptor() ([]byte, []int) {
}
type GetProviderSchema struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetProviderSchema) Reset() {
*x = GetProviderSchema{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[12]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[12]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetProviderSchema) String() string {
@@ -1040,7 +1015,7 @@ func (*GetProviderSchema) ProtoMessage() {}
func (x *GetProviderSchema) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[12]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1056,18 +1031,16 @@ func (*GetProviderSchema) Descriptor() ([]byte, []int) {
}
type ValidateProviderConfig struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateProviderConfig) Reset() {
*x = ValidateProviderConfig{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[13]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[13]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateProviderConfig) String() string {
@@ -1078,7 +1051,7 @@ func (*ValidateProviderConfig) ProtoMessage() {}
func (x *ValidateProviderConfig) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[13]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1094,18 +1067,16 @@ func (*ValidateProviderConfig) Descriptor() ([]byte, []int) {
}
type UpgradeResourceState struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *UpgradeResourceState) Reset() {
*x = UpgradeResourceState{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[14]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[14]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *UpgradeResourceState) String() string {
@@ -1116,7 +1087,7 @@ func (*UpgradeResourceState) ProtoMessage() {}
func (x *UpgradeResourceState) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[14]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1132,18 +1103,16 @@ func (*UpgradeResourceState) Descriptor() ([]byte, []int) {
}
type ValidateResourceConfig struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateResourceConfig) Reset() {
*x = ValidateResourceConfig{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[15]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[15]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateResourceConfig) String() string {
@@ -1154,7 +1123,7 @@ func (*ValidateResourceConfig) ProtoMessage() {}
func (x *ValidateResourceConfig) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[15]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1170,18 +1139,16 @@ func (*ValidateResourceConfig) Descriptor() ([]byte, []int) {
}
type ValidateDataResourceConfig struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateDataResourceConfig) Reset() {
*x = ValidateDataResourceConfig{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[16]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[16]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateDataResourceConfig) String() string {
@@ -1192,7 +1159,7 @@ func (*ValidateDataResourceConfig) ProtoMessage() {}
func (x *ValidateDataResourceConfig) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[16]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1208,18 +1175,16 @@ func (*ValidateDataResourceConfig) Descriptor() ([]byte, []int) {
}
type ConfigureProvider struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ConfigureProvider) Reset() {
*x = ConfigureProvider{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[17]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[17]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ConfigureProvider) String() string {
@@ -1230,7 +1195,7 @@ func (*ConfigureProvider) ProtoMessage() {}
func (x *ConfigureProvider) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[17]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1246,18 +1211,16 @@ func (*ConfigureProvider) Descriptor() ([]byte, []int) {
}
type ReadResource struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ReadResource) Reset() {
*x = ReadResource{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[18]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[18]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ReadResource) String() string {
@@ -1268,7 +1231,7 @@ func (*ReadResource) ProtoMessage() {}
func (x *ReadResource) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[18]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1284,18 +1247,16 @@ func (*ReadResource) Descriptor() ([]byte, []int) {
}
type PlanResourceChange struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *PlanResourceChange) Reset() {
*x = PlanResourceChange{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[19]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[19]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *PlanResourceChange) String() string {
@@ -1306,7 +1267,7 @@ func (*PlanResourceChange) ProtoMessage() {}
func (x *PlanResourceChange) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[19]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1322,18 +1283,16 @@ func (*PlanResourceChange) Descriptor() ([]byte, []int) {
}
type ApplyResourceChange struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ApplyResourceChange) Reset() {
*x = ApplyResourceChange{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[20]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[20]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ApplyResourceChange) String() string {
@@ -1344,7 +1303,7 @@ func (*ApplyResourceChange) ProtoMessage() {}
func (x *ApplyResourceChange) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[20]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1360,18 +1319,16 @@ func (*ApplyResourceChange) Descriptor() ([]byte, []int) {
}
type ImportResourceState struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ImportResourceState) Reset() {
*x = ImportResourceState{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[21]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[21]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ImportResourceState) String() string {
@@ -1382,7 +1339,7 @@ func (*ImportResourceState) ProtoMessage() {}
func (x *ImportResourceState) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[21]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1398,18 +1355,16 @@ func (*ImportResourceState) Descriptor() ([]byte, []int) {
}
type MoveResourceState struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *MoveResourceState) Reset() {
*x = MoveResourceState{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[22]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[22]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *MoveResourceState) String() string {
@@ -1420,7 +1375,7 @@ func (*MoveResourceState) ProtoMessage() {}
func (x *MoveResourceState) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[22]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1436,18 +1391,16 @@ func (*MoveResourceState) Descriptor() ([]byte, []int) {
}
type ReadDataSource struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ReadDataSource) Reset() {
*x = ReadDataSource{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[23]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[23]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ReadDataSource) String() string {
@@ -1458,7 +1411,7 @@ func (*ReadDataSource) ProtoMessage() {}
func (x *ReadDataSource) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[23]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1474,18 +1427,16 @@ func (*ReadDataSource) Descriptor() ([]byte, []int) {
}
type GetFunctions struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetFunctions) Reset() {
*x = GetFunctions{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[24]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[24]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetFunctions) String() string {
@@ -1496,7 +1447,7 @@ func (*GetFunctions) ProtoMessage() {}
func (x *GetFunctions) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[24]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1512,18 +1463,16 @@ func (*GetFunctions) Descriptor() ([]byte, []int) {
}
type CallFunction struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *CallFunction) Reset() {
*x = CallFunction{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[25]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[25]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *CallFunction) String() string {
@@ -1534,7 +1483,7 @@ func (*CallFunction) ProtoMessage() {}
func (x *CallFunction) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[25]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1549,37 +1498,28 @@ func (*CallFunction) Descriptor() ([]byte, []int) {
return file_tfplugin6_proto_rawDescGZIP(), []int{25}
}
-type AttributePath_Step struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+type ValidateEphemeralResourceConfig struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
-
- // Types that are assignable to Selector:
- //
- // *AttributePath_Step_AttributeName
- // *AttributePath_Step_ElementKeyString
- // *AttributePath_Step_ElementKeyInt
- Selector isAttributePath_Step_Selector `protobuf_oneof:"selector"`
+ sizeCache protoimpl.SizeCache
}
-func (x *AttributePath_Step) Reset() {
- *x = AttributePath_Step{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[26]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+func (x *ValidateEphemeralResourceConfig) Reset() {
+ *x = ValidateEphemeralResourceConfig{}
+ mi := &file_tfplugin6_proto_msgTypes[26]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
-func (x *AttributePath_Step) String() string {
+func (x *ValidateEphemeralResourceConfig) String() string {
return protoimpl.X.MessageStringOf(x)
}
-func (*AttributePath_Step) ProtoMessage() {}
+func (*ValidateEphemeralResourceConfig) ProtoMessage() {}
-func (x *AttributePath_Step) ProtoReflect() protoreflect.Message {
+func (x *ValidateEphemeralResourceConfig) ProtoReflect() protoreflect.Message {
mi := &file_tfplugin6_proto_msgTypes[26]
- if protoimpl.UnsafeEnabled && x != nil {
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1589,89 +1529,243 @@ func (x *AttributePath_Step) ProtoReflect() protoreflect.Message {
return mi.MessageOf(x)
}
-// Deprecated: Use AttributePath_Step.ProtoReflect.Descriptor instead.
-func (*AttributePath_Step) Descriptor() ([]byte, []int) {
- return file_tfplugin6_proto_rawDescGZIP(), []int{3, 0}
-}
-
-func (m *AttributePath_Step) GetSelector() isAttributePath_Step_Selector {
- if m != nil {
- return m.Selector
- }
- return nil
-}
-
-func (x *AttributePath_Step) GetAttributeName() string {
- if x, ok := x.GetSelector().(*AttributePath_Step_AttributeName); ok {
- return x.AttributeName
- }
- return ""
+// Deprecated: Use ValidateEphemeralResourceConfig.ProtoReflect.Descriptor instead.
+func (*ValidateEphemeralResourceConfig) Descriptor() ([]byte, []int) {
+ return file_tfplugin6_proto_rawDescGZIP(), []int{26}
}
-func (x *AttributePath_Step) GetElementKeyString() string {
- if x, ok := x.GetSelector().(*AttributePath_Step_ElementKeyString); ok {
- return x.ElementKeyString
- }
- return ""
+type OpenEphemeralResource struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
-func (x *AttributePath_Step) GetElementKeyInt() int64 {
- if x, ok := x.GetSelector().(*AttributePath_Step_ElementKeyInt); ok {
- return x.ElementKeyInt
- }
- return 0
+func (x *OpenEphemeralResource) Reset() {
+ *x = OpenEphemeralResource{}
+ mi := &file_tfplugin6_proto_msgTypes[27]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
-type isAttributePath_Step_Selector interface {
- isAttributePath_Step_Selector()
+func (x *OpenEphemeralResource) String() string {
+ return protoimpl.X.MessageStringOf(x)
}
-type AttributePath_Step_AttributeName struct {
- // Set "attribute_name" to represent looking up an attribute
- // in the current object value.
- AttributeName string `protobuf:"bytes,1,opt,name=attribute_name,json=attributeName,proto3,oneof"`
-}
+func (*OpenEphemeralResource) ProtoMessage() {}
-type AttributePath_Step_ElementKeyString struct {
- // Set "element_key_*" to represent looking up an element in
- // an indexable collection type.
- ElementKeyString string `protobuf:"bytes,2,opt,name=element_key_string,json=elementKeyString,proto3,oneof"`
+func (x *OpenEphemeralResource) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin6_proto_msgTypes[27]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
}
-type AttributePath_Step_ElementKeyInt struct {
- ElementKeyInt int64 `protobuf:"varint,3,opt,name=element_key_int,json=elementKeyInt,proto3,oneof"`
+// Deprecated: Use OpenEphemeralResource.ProtoReflect.Descriptor instead.
+func (*OpenEphemeralResource) Descriptor() ([]byte, []int) {
+ return file_tfplugin6_proto_rawDescGZIP(), []int{27}
}
-func (*AttributePath_Step_AttributeName) isAttributePath_Step_Selector() {}
-
-func (*AttributePath_Step_ElementKeyString) isAttributePath_Step_Selector() {}
-
-func (*AttributePath_Step_ElementKeyInt) isAttributePath_Step_Selector() {}
-
-type StopProvider_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+type RenewEphemeralResource struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
-func (x *StopProvider_Request) Reset() {
- *x = StopProvider_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[27]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+func (x *RenewEphemeralResource) Reset() {
+ *x = RenewEphemeralResource{}
+ mi := &file_tfplugin6_proto_msgTypes[28]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
-func (x *StopProvider_Request) String() string {
+func (x *RenewEphemeralResource) String() string {
return protoimpl.X.MessageStringOf(x)
}
-func (*StopProvider_Request) ProtoMessage() {}
+func (*RenewEphemeralResource) ProtoMessage() {}
-func (x *StopProvider_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[27]
- if protoimpl.UnsafeEnabled && x != nil {
+func (x *RenewEphemeralResource) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin6_proto_msgTypes[28]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use RenewEphemeralResource.ProtoReflect.Descriptor instead.
+func (*RenewEphemeralResource) Descriptor() ([]byte, []int) {
+ return file_tfplugin6_proto_rawDescGZIP(), []int{28}
+}
+
+type CloseEphemeralResource struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *CloseEphemeralResource) Reset() {
+ *x = CloseEphemeralResource{}
+ mi := &file_tfplugin6_proto_msgTypes[29]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *CloseEphemeralResource) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*CloseEphemeralResource) ProtoMessage() {}
+
+func (x *CloseEphemeralResource) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin6_proto_msgTypes[29]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use CloseEphemeralResource.ProtoReflect.Descriptor instead.
+func (*CloseEphemeralResource) Descriptor() ([]byte, []int) {
+ return file_tfplugin6_proto_rawDescGZIP(), []int{29}
+}
+
+type AttributePath_Step struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ // Types that are valid to be assigned to Selector:
+ //
+ // *AttributePath_Step_AttributeName
+ // *AttributePath_Step_ElementKeyString
+ // *AttributePath_Step_ElementKeyInt
+ Selector isAttributePath_Step_Selector `protobuf_oneof:"selector"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *AttributePath_Step) Reset() {
+ *x = AttributePath_Step{}
+ mi := &file_tfplugin6_proto_msgTypes[30]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *AttributePath_Step) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*AttributePath_Step) ProtoMessage() {}
+
+func (x *AttributePath_Step) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin6_proto_msgTypes[30]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use AttributePath_Step.ProtoReflect.Descriptor instead.
+func (*AttributePath_Step) Descriptor() ([]byte, []int) {
+ return file_tfplugin6_proto_rawDescGZIP(), []int{3, 0}
+}
+
+func (x *AttributePath_Step) GetSelector() isAttributePath_Step_Selector {
+ if x != nil {
+ return x.Selector
+ }
+ return nil
+}
+
+func (x *AttributePath_Step) GetAttributeName() string {
+ if x != nil {
+ if x, ok := x.Selector.(*AttributePath_Step_AttributeName); ok {
+ return x.AttributeName
+ }
+ }
+ return ""
+}
+
+func (x *AttributePath_Step) GetElementKeyString() string {
+ if x != nil {
+ if x, ok := x.Selector.(*AttributePath_Step_ElementKeyString); ok {
+ return x.ElementKeyString
+ }
+ }
+ return ""
+}
+
+func (x *AttributePath_Step) GetElementKeyInt() int64 {
+ if x != nil {
+ if x, ok := x.Selector.(*AttributePath_Step_ElementKeyInt); ok {
+ return x.ElementKeyInt
+ }
+ }
+ return 0
+}
+
+type isAttributePath_Step_Selector interface {
+ isAttributePath_Step_Selector()
+}
+
+type AttributePath_Step_AttributeName struct {
+ // Set "attribute_name" to represent looking up an attribute
+ // in the current object value.
+ AttributeName string `protobuf:"bytes,1,opt,name=attribute_name,json=attributeName,proto3,oneof"`
+}
+
+type AttributePath_Step_ElementKeyString struct {
+ // Set "element_key_*" to represent looking up an element in
+ // an indexable collection type.
+ ElementKeyString string `protobuf:"bytes,2,opt,name=element_key_string,json=elementKeyString,proto3,oneof"`
+}
+
+type AttributePath_Step_ElementKeyInt struct {
+ ElementKeyInt int64 `protobuf:"varint,3,opt,name=element_key_int,json=elementKeyInt,proto3,oneof"`
+}
+
+func (*AttributePath_Step_AttributeName) isAttributePath_Step_Selector() {}
+
+func (*AttributePath_Step_ElementKeyString) isAttributePath_Step_Selector() {}
+
+func (*AttributePath_Step_ElementKeyInt) isAttributePath_Step_Selector() {}
+
+type StopProvider_Request struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *StopProvider_Request) Reset() {
+ *x = StopProvider_Request{}
+ mi := &file_tfplugin6_proto_msgTypes[31]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *StopProvider_Request) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*StopProvider_Request) ProtoMessage() {}
+
+func (x *StopProvider_Request) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin6_proto_msgTypes[31]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1687,20 +1781,17 @@ func (*StopProvider_Request) Descriptor() ([]byte, []int) {
}
type StopProvider_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Error string `protobuf:"bytes,1,opt,name=Error,proto3" json:"Error,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Error string `protobuf:"bytes,1,opt,name=Error,proto3" json:"Error,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *StopProvider_Response) Reset() {
*x = StopProvider_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[28]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[32]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *StopProvider_Response) String() string {
@@ -1710,8 +1801,8 @@ func (x *StopProvider_Response) String() string {
func (*StopProvider_Response) ProtoMessage() {}
func (x *StopProvider_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[28]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[32]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1734,25 +1825,22 @@ func (x *StopProvider_Response) GetError() string {
}
type Schema_Block struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- Version int64 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"`
- Attributes []*Schema_Attribute `protobuf:"bytes,2,rep,name=attributes,proto3" json:"attributes,omitempty"`
- BlockTypes []*Schema_NestedBlock `protobuf:"bytes,3,rep,name=block_types,json=blockTypes,proto3" json:"block_types,omitempty"`
- Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"`
- DescriptionKind StringKind `protobuf:"varint,5,opt,name=description_kind,json=descriptionKind,proto3,enum=tfplugin6.StringKind" json:"description_kind,omitempty"`
- Deprecated bool `protobuf:"varint,6,opt,name=deprecated,proto3" json:"deprecated,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Version int64 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"`
+ Attributes []*Schema_Attribute `protobuf:"bytes,2,rep,name=attributes,proto3" json:"attributes,omitempty"`
+ BlockTypes []*Schema_NestedBlock `protobuf:"bytes,3,rep,name=block_types,json=blockTypes,proto3" json:"block_types,omitempty"`
+ Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"`
+ DescriptionKind StringKind `protobuf:"varint,5,opt,name=description_kind,json=descriptionKind,proto3,enum=tfplugin6.StringKind" json:"description_kind,omitempty"`
+ Deprecated bool `protobuf:"varint,6,opt,name=deprecated,proto3" json:"deprecated,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *Schema_Block) Reset() {
*x = Schema_Block{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[30]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[34]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Schema_Block) String() string {
@@ -1762,8 +1850,8 @@ func (x *Schema_Block) String() string {
func (*Schema_Block) ProtoMessage() {}
func (x *Schema_Block) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[30]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[34]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1821,29 +1909,31 @@ func (x *Schema_Block) GetDeprecated() bool {
}
type Schema_Attribute struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+ Type []byte `protobuf:"bytes,2,opt,name=type,proto3" json:"type,omitempty"`
+ NestedType *Schema_Object `protobuf:"bytes,10,opt,name=nested_type,json=nestedType,proto3" json:"nested_type,omitempty"`
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"`
+ Required bool `protobuf:"varint,4,opt,name=required,proto3" json:"required,omitempty"`
+ Optional bool `protobuf:"varint,5,opt,name=optional,proto3" json:"optional,omitempty"`
+ Computed bool `protobuf:"varint,6,opt,name=computed,proto3" json:"computed,omitempty"`
+ Sensitive bool `protobuf:"varint,7,opt,name=sensitive,proto3" json:"sensitive,omitempty"`
+ DescriptionKind StringKind `protobuf:"varint,8,opt,name=description_kind,json=descriptionKind,proto3,enum=tfplugin6.StringKind" json:"description_kind,omitempty"`
+ Deprecated bool `protobuf:"varint,9,opt,name=deprecated,proto3" json:"deprecated,omitempty"`
+ // write_only indicates that the attribute value will be provided via
+ // configuration and must be omitted from state. write_only must be
+ // combined with optional or required, and is only valid for managed
+ // resource schemas.
+ WriteOnly bool `protobuf:"varint,11,opt,name=write_only,json=writeOnly,proto3" json:"write_only,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
- Type []byte `protobuf:"bytes,2,opt,name=type,proto3" json:"type,omitempty"`
- NestedType *Schema_Object `protobuf:"bytes,10,opt,name=nested_type,json=nestedType,proto3" json:"nested_type,omitempty"`
- Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"`
- Required bool `protobuf:"varint,4,opt,name=required,proto3" json:"required,omitempty"`
- Optional bool `protobuf:"varint,5,opt,name=optional,proto3" json:"optional,omitempty"`
- Computed bool `protobuf:"varint,6,opt,name=computed,proto3" json:"computed,omitempty"`
- Sensitive bool `protobuf:"varint,7,opt,name=sensitive,proto3" json:"sensitive,omitempty"`
- DescriptionKind StringKind `protobuf:"varint,8,opt,name=description_kind,json=descriptionKind,proto3,enum=tfplugin6.StringKind" json:"description_kind,omitempty"`
- Deprecated bool `protobuf:"varint,9,opt,name=deprecated,proto3" json:"deprecated,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *Schema_Attribute) Reset() {
*x = Schema_Attribute{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[31]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[35]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Schema_Attribute) String() string {
@@ -1853,8 +1943,8 @@ func (x *Schema_Attribute) String() string {
func (*Schema_Attribute) ProtoMessage() {}
func (x *Schema_Attribute) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[31]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[35]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1939,25 +2029,29 @@ func (x *Schema_Attribute) GetDeprecated() bool {
return false
}
+func (x *Schema_Attribute) GetWriteOnly() bool {
+ if x != nil {
+ return x.WriteOnly
+ }
+ return false
+}
+
type Schema_NestedBlock struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Block *Schema_Block `protobuf:"bytes,2,opt,name=block,proto3" json:"block,omitempty"`
+ Nesting Schema_NestedBlock_NestingMode `protobuf:"varint,3,opt,name=nesting,proto3,enum=tfplugin6.Schema_NestedBlock_NestingMode" json:"nesting,omitempty"`
+ MinItems int64 `protobuf:"varint,4,opt,name=min_items,json=minItems,proto3" json:"min_items,omitempty"`
+ MaxItems int64 `protobuf:"varint,5,opt,name=max_items,json=maxItems,proto3" json:"max_items,omitempty"`
unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- Block *Schema_Block `protobuf:"bytes,2,opt,name=block,proto3" json:"block,omitempty"`
- Nesting Schema_NestedBlock_NestingMode `protobuf:"varint,3,opt,name=nesting,proto3,enum=tfplugin6.Schema_NestedBlock_NestingMode" json:"nesting,omitempty"`
- MinItems int64 `protobuf:"varint,4,opt,name=min_items,json=minItems,proto3" json:"min_items,omitempty"`
- MaxItems int64 `protobuf:"varint,5,opt,name=max_items,json=maxItems,proto3" json:"max_items,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *Schema_NestedBlock) Reset() {
*x = Schema_NestedBlock{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[32]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[36]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Schema_NestedBlock) String() string {
@@ -1967,8 +2061,8 @@ func (x *Schema_NestedBlock) String() string {
func (*Schema_NestedBlock) ProtoMessage() {}
func (x *Schema_NestedBlock) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[32]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[36]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2019,10 +2113,7 @@ func (x *Schema_NestedBlock) GetMaxItems() int64 {
}
type Schema_Object struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
Attributes []*Schema_Attribute `protobuf:"bytes,1,rep,name=attributes,proto3" json:"attributes,omitempty"`
Nesting Schema_Object_NestingMode `protobuf:"varint,3,opt,name=nesting,proto3,enum=tfplugin6.Schema_Object_NestingMode" json:"nesting,omitempty"`
// MinItems and MaxItems were never used in the protocol, and have no
@@ -2031,16 +2122,16 @@ type Schema_Object struct {
// Deprecated: Marked as deprecated in tfplugin6.proto.
MinItems int64 `protobuf:"varint,4,opt,name=min_items,json=minItems,proto3" json:"min_items,omitempty"`
// Deprecated: Marked as deprecated in tfplugin6.proto.
- MaxItems int64 `protobuf:"varint,5,opt,name=max_items,json=maxItems,proto3" json:"max_items,omitempty"`
+ MaxItems int64 `protobuf:"varint,5,opt,name=max_items,json=maxItems,proto3" json:"max_items,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *Schema_Object) Reset() {
*x = Schema_Object{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[33]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[37]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Schema_Object) String() string {
@@ -2050,8 +2141,8 @@ func (x *Schema_Object) String() string {
func (*Schema_Object) ProtoMessage() {}
func (x *Schema_Object) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[33]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[37]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2097,10 +2188,7 @@ func (x *Schema_Object) GetMaxItems() int64 {
}
type Function_Parameter struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// name is the human-readable display name for the parameter.
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// type is the type constraint for the parameter.
@@ -2118,15 +2206,15 @@ type Function_Parameter struct {
Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"`
// description_kind is the formatting of the description.
DescriptionKind StringKind `protobuf:"varint,6,opt,name=description_kind,json=descriptionKind,proto3,enum=tfplugin6.StringKind" json:"description_kind,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *Function_Parameter) Reset() {
*x = Function_Parameter{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[34]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[38]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Function_Parameter) String() string {
@@ -2136,8 +2224,8 @@ func (x *Function_Parameter) String() string {
func (*Function_Parameter) ProtoMessage() {}
func (x *Function_Parameter) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[34]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[38]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2195,21 +2283,18 @@ func (x *Function_Parameter) GetDescriptionKind() StringKind {
}
type Function_Return struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// type is the type constraint for the function result.
- Type []byte `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"`
+ Type []byte `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *Function_Return) Reset() {
*x = Function_Return{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[35]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[39]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *Function_Return) String() string {
@@ -2219,8 +2304,8 @@ func (x *Function_Return) String() string {
func (*Function_Return) ProtoMessage() {}
func (x *Function_Return) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[35]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[39]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2243,18 +2328,16 @@ func (x *Function_Return) GetType() []byte {
}
type GetMetadata_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetMetadata_Request) Reset() {
*x = GetMetadata_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[36]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[40]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetMetadata_Request) String() string {
@@ -2264,8 +2347,8 @@ func (x *GetMetadata_Request) String() string {
func (*GetMetadata_Request) ProtoMessage() {}
func (x *GetMetadata_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[36]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[40]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2281,25 +2364,23 @@ func (*GetMetadata_Request) Descriptor() ([]byte, []int) {
}
type GetMetadata_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
ServerCapabilities *ServerCapabilities `protobuf:"bytes,1,opt,name=server_capabilities,json=serverCapabilities,proto3" json:"server_capabilities,omitempty"`
Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
DataSources []*GetMetadata_DataSourceMetadata `protobuf:"bytes,3,rep,name=data_sources,json=dataSources,proto3" json:"data_sources,omitempty"`
Resources []*GetMetadata_ResourceMetadata `protobuf:"bytes,4,rep,name=resources,proto3" json:"resources,omitempty"`
// functions returns metadata for any functions.
- Functions []*GetMetadata_FunctionMetadata `protobuf:"bytes,5,rep,name=functions,proto3" json:"functions,omitempty"`
+ Functions []*GetMetadata_FunctionMetadata `protobuf:"bytes,5,rep,name=functions,proto3" json:"functions,omitempty"`
+ EphemeralResources []*GetMetadata_EphemeralResourceMetadata `protobuf:"bytes,6,rep,name=ephemeral_resources,json=ephemeralResources,proto3" json:"ephemeral_resources,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetMetadata_Response) Reset() {
*x = GetMetadata_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[37]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[41]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetMetadata_Response) String() string {
@@ -2309,8 +2390,8 @@ func (x *GetMetadata_Response) String() string {
func (*GetMetadata_Response) ProtoMessage() {}
func (x *GetMetadata_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[37]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[41]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2360,22 +2441,26 @@ func (x *GetMetadata_Response) GetFunctions() []*GetMetadata_FunctionMetadata {
return nil
}
-type GetMetadata_FunctionMetadata struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
+func (x *GetMetadata_Response) GetEphemeralResources() []*GetMetadata_EphemeralResourceMetadata {
+ if x != nil {
+ return x.EphemeralResources
+ }
+ return nil
+}
+type GetMetadata_FunctionMetadata struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
// name is the function name.
- Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetMetadata_FunctionMetadata) Reset() {
*x = GetMetadata_FunctionMetadata{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[38]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[42]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetMetadata_FunctionMetadata) String() string {
@@ -2385,8 +2470,8 @@ func (x *GetMetadata_FunctionMetadata) String() string {
func (*GetMetadata_FunctionMetadata) ProtoMessage() {}
func (x *GetMetadata_FunctionMetadata) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[38]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[42]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2409,20 +2494,17 @@ func (x *GetMetadata_FunctionMetadata) GetName() string {
}
type GetMetadata_DataSourceMetadata struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *GetMetadata_DataSourceMetadata) Reset() {
*x = GetMetadata_DataSourceMetadata{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[39]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[43]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetMetadata_DataSourceMetadata) String() string {
@@ -2432,8 +2514,8 @@ func (x *GetMetadata_DataSourceMetadata) String() string {
func (*GetMetadata_DataSourceMetadata) ProtoMessage() {}
func (x *GetMetadata_DataSourceMetadata) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[39]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[43]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2456,20 +2538,17 @@ func (x *GetMetadata_DataSourceMetadata) GetTypeName() string {
}
type GetMetadata_ResourceMetadata struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *GetMetadata_ResourceMetadata) Reset() {
*x = GetMetadata_ResourceMetadata{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[40]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[44]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetMetadata_ResourceMetadata) String() string {
@@ -2479,8 +2558,8 @@ func (x *GetMetadata_ResourceMetadata) String() string {
func (*GetMetadata_ResourceMetadata) ProtoMessage() {}
func (x *GetMetadata_ResourceMetadata) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[40]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[44]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2502,19 +2581,61 @@ func (x *GetMetadata_ResourceMetadata) GetTypeName() string {
return ""
}
-type GetProviderSchema_Request struct {
- state protoimpl.MessageState
+type GetMetadata_EphemeralResourceMetadata struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
+}
+
+func (x *GetMetadata_EphemeralResourceMetadata) Reset() {
+ *x = GetMetadata_EphemeralResourceMetadata{}
+ mi := &file_tfplugin6_proto_msgTypes[45]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *GetMetadata_EphemeralResourceMetadata) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*GetMetadata_EphemeralResourceMetadata) ProtoMessage() {}
+
+func (x *GetMetadata_EphemeralResourceMetadata) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin6_proto_msgTypes[45]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use GetMetadata_EphemeralResourceMetadata.ProtoReflect.Descriptor instead.
+func (*GetMetadata_EphemeralResourceMetadata) Descriptor() ([]byte, []int) {
+ return file_tfplugin6_proto_rawDescGZIP(), []int{11, 5}
+}
+
+func (x *GetMetadata_EphemeralResourceMetadata) GetTypeName() string {
+ if x != nil {
+ return x.TypeName
+ }
+ return ""
+}
+
+type GetProviderSchema_Request struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetProviderSchema_Request) Reset() {
*x = GetProviderSchema_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[41]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[46]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetProviderSchema_Request) String() string {
@@ -2524,8 +2645,8 @@ func (x *GetProviderSchema_Request) String() string {
func (*GetProviderSchema_Request) ProtoMessage() {}
func (x *GetProviderSchema_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[41]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[46]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2541,27 +2662,25 @@ func (*GetProviderSchema_Request) Descriptor() ([]byte, []int) {
}
type GetProviderSchema_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- Provider *Schema `protobuf:"bytes,1,opt,name=provider,proto3" json:"provider,omitempty"`
- ResourceSchemas map[string]*Schema `protobuf:"bytes,2,rep,name=resource_schemas,json=resourceSchemas,proto3" json:"resource_schemas,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- DataSourceSchemas map[string]*Schema `protobuf:"bytes,3,rep,name=data_source_schemas,json=dataSourceSchemas,proto3" json:"data_source_schemas,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- Diagnostics []*Diagnostic `protobuf:"bytes,4,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
- ProviderMeta *Schema `protobuf:"bytes,5,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
- ServerCapabilities *ServerCapabilities `protobuf:"bytes,6,opt,name=server_capabilities,json=serverCapabilities,proto3" json:"server_capabilities,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Provider *Schema `protobuf:"bytes,1,opt,name=provider,proto3" json:"provider,omitempty"`
+ ResourceSchemas map[string]*Schema `protobuf:"bytes,2,rep,name=resource_schemas,json=resourceSchemas,proto3" json:"resource_schemas,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+ DataSourceSchemas map[string]*Schema `protobuf:"bytes,3,rep,name=data_source_schemas,json=dataSourceSchemas,proto3" json:"data_source_schemas,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,4,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ ProviderMeta *Schema `protobuf:"bytes,5,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
+ ServerCapabilities *ServerCapabilities `protobuf:"bytes,6,opt,name=server_capabilities,json=serverCapabilities,proto3" json:"server_capabilities,omitempty"`
// functions is a mapping of function names to definitions.
- Functions map[string]*Function `protobuf:"bytes,7,rep,name=functions,proto3" json:"functions,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
+ Functions map[string]*Function `protobuf:"bytes,7,rep,name=functions,proto3" json:"functions,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+ EphemeralResourceSchemas map[string]*Schema `protobuf:"bytes,8,rep,name=ephemeral_resource_schemas,json=ephemeralResourceSchemas,proto3" json:"ephemeral_resource_schemas,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetProviderSchema_Response) Reset() {
*x = GetProviderSchema_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[42]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[47]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetProviderSchema_Response) String() string {
@@ -2571,8 +2690,8 @@ func (x *GetProviderSchema_Response) String() string {
func (*GetProviderSchema_Response) ProtoMessage() {}
func (x *GetProviderSchema_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[42]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[47]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2636,21 +2755,25 @@ func (x *GetProviderSchema_Response) GetFunctions() map[string]*Function {
return nil
}
+func (x *GetProviderSchema_Response) GetEphemeralResourceSchemas() map[string]*Schema {
+ if x != nil {
+ return x.EphemeralResourceSchemas
+ }
+ return nil
+}
+
type ValidateProviderConfig_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Config *DynamicValue `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Config *DynamicValue `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateProviderConfig_Request) Reset() {
*x = ValidateProviderConfig_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[46]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[52]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateProviderConfig_Request) String() string {
@@ -2660,8 +2783,8 @@ func (x *ValidateProviderConfig_Request) String() string {
func (*ValidateProviderConfig_Request) ProtoMessage() {}
func (x *ValidateProviderConfig_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[46]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[52]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2684,20 +2807,17 @@ func (x *ValidateProviderConfig_Request) GetConfig() *DynamicValue {
}
type ValidateProviderConfig_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateProviderConfig_Response) Reset() {
*x = ValidateProviderConfig_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[47]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[53]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateProviderConfig_Response) String() string {
@@ -2707,8 +2827,8 @@ func (x *ValidateProviderConfig_Response) String() string {
func (*ValidateProviderConfig_Response) ProtoMessage() {}
func (x *ValidateProviderConfig_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[47]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[53]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2740,11 +2860,8 @@ func (x *ValidateProviderConfig_Response) GetDiagnostics() []*Diagnostic {
// known, nor match the given prior state, which could lead to unexpected
// provider behaviors for practitioners.
type UpgradeResourceState_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
// version is the schema_version number recorded in the state file
Version int64 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"`
// raw_state is the raw states as stored for the resource. Core does
@@ -2752,16 +2869,16 @@ type UpgradeResourceState_Request struct {
// provider's responsibility to interpret this value using the
// appropriate older schema. The raw_state will be the json encoded
// state, or a legacy flat-mapped format.
- RawState *RawState `protobuf:"bytes,3,opt,name=raw_state,json=rawState,proto3" json:"raw_state,omitempty"`
+ RawState *RawState `protobuf:"bytes,3,opt,name=raw_state,json=rawState,proto3" json:"raw_state,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *UpgradeResourceState_Request) Reset() {
*x = UpgradeResourceState_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[48]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[54]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *UpgradeResourceState_Request) String() string {
@@ -2771,8 +2888,8 @@ func (x *UpgradeResourceState_Request) String() string {
func (*UpgradeResourceState_Request) ProtoMessage() {}
func (x *UpgradeResourceState_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[48]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[54]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2809,10 +2926,7 @@ func (x *UpgradeResourceState_Request) GetRawState() *RawState {
}
type UpgradeResourceState_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// new_state is a msgpack-encoded data structure that, when interpreted with
// the _current_ schema for this resource type, is functionally equivalent to
// that which was given in prior_state_raw.
@@ -2820,16 +2934,16 @@ type UpgradeResourceState_Response struct {
// diagnostics describes any errors encountered during migration that could not
// be safely resolved, and warnings about any possibly-risky assumptions made
// in the upgrade process.
- Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *UpgradeResourceState_Response) Reset() {
*x = UpgradeResourceState_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[49]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[55]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *UpgradeResourceState_Response) String() string {
@@ -2839,8 +2953,8 @@ func (x *UpgradeResourceState_Response) String() string {
func (*UpgradeResourceState_Response) ProtoMessage() {}
func (x *UpgradeResourceState_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[49]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[55]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2870,21 +2984,19 @@ func (x *UpgradeResourceState_Response) GetDiagnostics() []*Diagnostic {
}
type ValidateResourceConfig_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
+ ClientCapabilities *ClientCapabilities `protobuf:"bytes,3,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateResourceConfig_Request) Reset() {
*x = ValidateResourceConfig_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[50]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[56]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateResourceConfig_Request) String() string {
@@ -2894,8 +3006,8 @@ func (x *ValidateResourceConfig_Request) String() string {
func (*ValidateResourceConfig_Request) ProtoMessage() {}
func (x *ValidateResourceConfig_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[50]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[56]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2924,21 +3036,25 @@ func (x *ValidateResourceConfig_Request) GetConfig() *DynamicValue {
return nil
}
+func (x *ValidateResourceConfig_Request) GetClientCapabilities() *ClientCapabilities {
+ if x != nil {
+ return x.ClientCapabilities
+ }
+ return nil
+}
+
type ValidateResourceConfig_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateResourceConfig_Response) Reset() {
*x = ValidateResourceConfig_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[51]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[57]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateResourceConfig_Response) String() string {
@@ -2948,8 +3064,8 @@ func (x *ValidateResourceConfig_Response) String() string {
func (*ValidateResourceConfig_Response) ProtoMessage() {}
func (x *ValidateResourceConfig_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[51]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[57]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2972,21 +3088,18 @@ func (x *ValidateResourceConfig_Response) GetDiagnostics() []*Diagnostic {
}
type ValidateDataResourceConfig_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateDataResourceConfig_Request) Reset() {
*x = ValidateDataResourceConfig_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[52]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[58]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateDataResourceConfig_Request) String() string {
@@ -2996,8 +3109,8 @@ func (x *ValidateDataResourceConfig_Request) String() string {
func (*ValidateDataResourceConfig_Request) ProtoMessage() {}
func (x *ValidateDataResourceConfig_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[52]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[58]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3027,20 +3140,17 @@ func (x *ValidateDataResourceConfig_Request) GetConfig() *DynamicValue {
}
type ValidateDataResourceConfig_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *ValidateDataResourceConfig_Response) Reset() {
*x = ValidateDataResourceConfig_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[53]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[59]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ValidateDataResourceConfig_Response) String() string {
@@ -3050,8 +3160,8 @@ func (x *ValidateDataResourceConfig_Response) String() string {
func (*ValidateDataResourceConfig_Response) ProtoMessage() {}
func (x *ValidateDataResourceConfig_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[53]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[59]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3074,22 +3184,19 @@ func (x *ValidateDataResourceConfig_Response) GetDiagnostics() []*Diagnostic {
}
type ConfigureProvider_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- TerraformVersion string `protobuf:"bytes,1,opt,name=terraform_version,json=terraformVersion,proto3" json:"terraform_version,omitempty"`
- Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
- ClientCapabilities *ClientCapabilities `protobuf:"bytes,3,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TerraformVersion string `protobuf:"bytes,1,opt,name=terraform_version,json=terraformVersion,proto3" json:"terraform_version,omitempty"`
+ Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
+ ClientCapabilities *ClientCapabilities `protobuf:"bytes,3,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ConfigureProvider_Request) Reset() {
*x = ConfigureProvider_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[54]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[60]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ConfigureProvider_Request) String() string {
@@ -3099,8 +3206,8 @@ func (x *ConfigureProvider_Request) String() string {
func (*ConfigureProvider_Request) ProtoMessage() {}
func (x *ConfigureProvider_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[54]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[60]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3137,20 +3244,17 @@ func (x *ConfigureProvider_Request) GetClientCapabilities() *ClientCapabilities
}
type ConfigureProvider_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
unknownFields protoimpl.UnknownFields
-
- Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *ConfigureProvider_Response) Reset() {
*x = ConfigureProvider_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[55]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[61]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ConfigureProvider_Response) String() string {
@@ -3160,8 +3264,8 @@ func (x *ConfigureProvider_Response) String() string {
func (*ConfigureProvider_Response) ProtoMessage() {}
func (x *ConfigureProvider_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[55]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[61]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3192,24 +3296,21 @@ func (x *ConfigureProvider_Response) GetDiagnostics() []*Diagnostic {
// not guaranteed to be wholly known nor match the given prior state, which
// could lead to unexpected provider behaviors for practitioners.
type ReadResource_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- CurrentState *DynamicValue `protobuf:"bytes,2,opt,name=current_state,json=currentState,proto3" json:"current_state,omitempty"`
- Private []byte `protobuf:"bytes,3,opt,name=private,proto3" json:"private,omitempty"`
- ProviderMeta *DynamicValue `protobuf:"bytes,4,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
- ClientCapabilities *ClientCapabilities `protobuf:"bytes,5,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ CurrentState *DynamicValue `protobuf:"bytes,2,opt,name=current_state,json=currentState,proto3" json:"current_state,omitempty"`
+ Private []byte `protobuf:"bytes,3,opt,name=private,proto3" json:"private,omitempty"`
+ ProviderMeta *DynamicValue `protobuf:"bytes,4,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
+ ClientCapabilities *ClientCapabilities `protobuf:"bytes,5,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ReadResource_Request) Reset() {
*x = ReadResource_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[56]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[62]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ReadResource_Request) String() string {
@@ -3219,8 +3320,8 @@ func (x *ReadResource_Request) String() string {
func (*ReadResource_Request) ProtoMessage() {}
func (x *ReadResource_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[56]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[62]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3271,25 +3372,22 @@ func (x *ReadResource_Request) GetClientCapabilities() *ClientCapabilities {
}
type ReadResource_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- NewState *DynamicValue `protobuf:"bytes,1,opt,name=new_state,json=newState,proto3" json:"new_state,omitempty"`
- Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
- Private []byte `protobuf:"bytes,3,opt,name=private,proto3" json:"private,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ NewState *DynamicValue `protobuf:"bytes,1,opt,name=new_state,json=newState,proto3" json:"new_state,omitempty"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ Private []byte `protobuf:"bytes,3,opt,name=private,proto3" json:"private,omitempty"`
// deferred is set if the provider is deferring the change. If set the caller
// needs to handle the deferral.
- Deferred *Deferred `protobuf:"bytes,4,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ Deferred *Deferred `protobuf:"bytes,4,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ReadResource_Response) Reset() {
*x = ReadResource_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[57]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[63]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ReadResource_Response) String() string {
@@ -3299,8 +3397,8 @@ func (x *ReadResource_Response) String() string {
func (*ReadResource_Response) ProtoMessage() {}
func (x *ReadResource_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[57]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[63]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3344,26 +3442,23 @@ func (x *ReadResource_Response) GetDeferred() *Deferred {
}
type PlanResourceChange_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- PriorState *DynamicValue `protobuf:"bytes,2,opt,name=prior_state,json=priorState,proto3" json:"prior_state,omitempty"`
- ProposedNewState *DynamicValue `protobuf:"bytes,3,opt,name=proposed_new_state,json=proposedNewState,proto3" json:"proposed_new_state,omitempty"`
- Config *DynamicValue `protobuf:"bytes,4,opt,name=config,proto3" json:"config,omitempty"`
- PriorPrivate []byte `protobuf:"bytes,5,opt,name=prior_private,json=priorPrivate,proto3" json:"prior_private,omitempty"`
- ProviderMeta *DynamicValue `protobuf:"bytes,6,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
- ClientCapabilities *ClientCapabilities `protobuf:"bytes,7,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ PriorState *DynamicValue `protobuf:"bytes,2,opt,name=prior_state,json=priorState,proto3" json:"prior_state,omitempty"`
+ ProposedNewState *DynamicValue `protobuf:"bytes,3,opt,name=proposed_new_state,json=proposedNewState,proto3" json:"proposed_new_state,omitempty"`
+ Config *DynamicValue `protobuf:"bytes,4,opt,name=config,proto3" json:"config,omitempty"`
+ PriorPrivate []byte `protobuf:"bytes,5,opt,name=prior_private,json=priorPrivate,proto3" json:"prior_private,omitempty"`
+ ProviderMeta *DynamicValue `protobuf:"bytes,6,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
+ ClientCapabilities *ClientCapabilities `protobuf:"bytes,7,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *PlanResourceChange_Request) Reset() {
*x = PlanResourceChange_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[58]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[64]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *PlanResourceChange_Request) String() string {
@@ -3373,8 +3468,8 @@ func (x *PlanResourceChange_Request) String() string {
func (*PlanResourceChange_Request) ProtoMessage() {}
func (x *PlanResourceChange_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[58]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[64]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3439,14 +3534,11 @@ func (x *PlanResourceChange_Request) GetClientCapabilities() *ClientCapabilities
}
type PlanResourceChange_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- PlannedState *DynamicValue `protobuf:"bytes,1,opt,name=planned_state,json=plannedState,proto3" json:"planned_state,omitempty"`
- RequiresReplace []*AttributePath `protobuf:"bytes,2,rep,name=requires_replace,json=requiresReplace,proto3" json:"requires_replace,omitempty"`
- PlannedPrivate []byte `protobuf:"bytes,3,opt,name=planned_private,json=plannedPrivate,proto3" json:"planned_private,omitempty"`
- Diagnostics []*Diagnostic `protobuf:"bytes,4,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ PlannedState *DynamicValue `protobuf:"bytes,1,opt,name=planned_state,json=plannedState,proto3" json:"planned_state,omitempty"`
+ RequiresReplace []*AttributePath `protobuf:"bytes,2,rep,name=requires_replace,json=requiresReplace,proto3" json:"requires_replace,omitempty"`
+ PlannedPrivate []byte `protobuf:"bytes,3,opt,name=planned_private,json=plannedPrivate,proto3" json:"planned_private,omitempty"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,4,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
// This may be set only by the helper/schema "SDK" in the main Terraform
// repository, to request that Terraform Core >=0.12 permit additional
// inconsistencies that can result from the legacy SDK type system
@@ -3461,16 +3553,16 @@ type PlanResourceChange_Response struct {
LegacyTypeSystem bool `protobuf:"varint,5,opt,name=legacy_type_system,json=legacyTypeSystem,proto3" json:"legacy_type_system,omitempty"`
// deferred is set if the provider is deferring the change. If set the caller
// needs to handle the deferral.
- Deferred *Deferred `protobuf:"bytes,6,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ Deferred *Deferred `protobuf:"bytes,6,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *PlanResourceChange_Response) Reset() {
*x = PlanResourceChange_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[59]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[65]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *PlanResourceChange_Response) String() string {
@@ -3480,8 +3572,8 @@ func (x *PlanResourceChange_Response) String() string {
func (*PlanResourceChange_Response) ProtoMessage() {}
func (x *PlanResourceChange_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[59]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[65]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3539,25 +3631,22 @@ func (x *PlanResourceChange_Response) GetDeferred() *Deferred {
}
type ApplyResourceChange_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- PriorState *DynamicValue `protobuf:"bytes,2,opt,name=prior_state,json=priorState,proto3" json:"prior_state,omitempty"`
- PlannedState *DynamicValue `protobuf:"bytes,3,opt,name=planned_state,json=plannedState,proto3" json:"planned_state,omitempty"`
- Config *DynamicValue `protobuf:"bytes,4,opt,name=config,proto3" json:"config,omitempty"`
- PlannedPrivate []byte `protobuf:"bytes,5,opt,name=planned_private,json=plannedPrivate,proto3" json:"planned_private,omitempty"`
- ProviderMeta *DynamicValue `protobuf:"bytes,6,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ PriorState *DynamicValue `protobuf:"bytes,2,opt,name=prior_state,json=priorState,proto3" json:"prior_state,omitempty"`
+ PlannedState *DynamicValue `protobuf:"bytes,3,opt,name=planned_state,json=plannedState,proto3" json:"planned_state,omitempty"`
+ Config *DynamicValue `protobuf:"bytes,4,opt,name=config,proto3" json:"config,omitempty"`
+ PlannedPrivate []byte `protobuf:"bytes,5,opt,name=planned_private,json=plannedPrivate,proto3" json:"planned_private,omitempty"`
+ ProviderMeta *DynamicValue `protobuf:"bytes,6,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ApplyResourceChange_Request) Reset() {
*x = ApplyResourceChange_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[60]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[66]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ApplyResourceChange_Request) String() string {
@@ -3567,8 +3656,8 @@ func (x *ApplyResourceChange_Request) String() string {
func (*ApplyResourceChange_Request) ProtoMessage() {}
func (x *ApplyResourceChange_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[60]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[66]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3626,13 +3715,10 @@ func (x *ApplyResourceChange_Request) GetProviderMeta() *DynamicValue {
}
type ApplyResourceChange_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- NewState *DynamicValue `protobuf:"bytes,1,opt,name=new_state,json=newState,proto3" json:"new_state,omitempty"`
- Private []byte `protobuf:"bytes,2,opt,name=private,proto3" json:"private,omitempty"`
- Diagnostics []*Diagnostic `protobuf:"bytes,3,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ NewState *DynamicValue `protobuf:"bytes,1,opt,name=new_state,json=newState,proto3" json:"new_state,omitempty"`
+ Private []byte `protobuf:"bytes,2,opt,name=private,proto3" json:"private,omitempty"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,3,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
// This may be set only by the helper/schema "SDK" in the main Terraform
// repository, to request that Terraform Core >=0.12 permit additional
// inconsistencies that can result from the legacy SDK type system
@@ -3645,15 +3731,15 @@ type ApplyResourceChange_Response struct {
// ==== THIS MUST BE LEFT UNSET IN ALL OTHER SDKS ====
// ==== DO NOT USE THIS ====
LegacyTypeSystem bool `protobuf:"varint,4,opt,name=legacy_type_system,json=legacyTypeSystem,proto3" json:"legacy_type_system,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ApplyResourceChange_Response) Reset() {
*x = ApplyResourceChange_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[61]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[67]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ApplyResourceChange_Response) String() string {
@@ -3663,8 +3749,8 @@ func (x *ApplyResourceChange_Response) String() string {
func (*ApplyResourceChange_Response) ProtoMessage() {}
func (x *ApplyResourceChange_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[61]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[67]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3708,22 +3794,19 @@ func (x *ApplyResourceChange_Response) GetLegacyTypeSystem() bool {
}
type ImportResourceState_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"`
- ClientCapabilities *ClientCapabilities `protobuf:"bytes,3,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"`
+ ClientCapabilities *ClientCapabilities `protobuf:"bytes,3,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ImportResourceState_Request) Reset() {
*x = ImportResourceState_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[62]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[68]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ImportResourceState_Request) String() string {
@@ -3733,8 +3816,8 @@ func (x *ImportResourceState_Request) String() string {
func (*ImportResourceState_Request) ProtoMessage() {}
func (x *ImportResourceState_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[62]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[68]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3771,22 +3854,19 @@ func (x *ImportResourceState_Request) GetClientCapabilities() *ClientCapabilitie
}
type ImportResourceState_ImportedResource struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ State *DynamicValue `protobuf:"bytes,2,opt,name=state,proto3" json:"state,omitempty"`
+ Private []byte `protobuf:"bytes,3,opt,name=private,proto3" json:"private,omitempty"`
unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- State *DynamicValue `protobuf:"bytes,2,opt,name=state,proto3" json:"state,omitempty"`
- Private []byte `protobuf:"bytes,3,opt,name=private,proto3" json:"private,omitempty"`
+ sizeCache protoimpl.SizeCache
}
func (x *ImportResourceState_ImportedResource) Reset() {
*x = ImportResourceState_ImportedResource{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[63]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[69]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ImportResourceState_ImportedResource) String() string {
@@ -3796,8 +3876,8 @@ func (x *ImportResourceState_ImportedResource) String() string {
func (*ImportResourceState_ImportedResource) ProtoMessage() {}
func (x *ImportResourceState_ImportedResource) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[63]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[69]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3834,24 +3914,21 @@ func (x *ImportResourceState_ImportedResource) GetPrivate() []byte {
}
type ImportResourceState_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
ImportedResources []*ImportResourceState_ImportedResource `protobuf:"bytes,1,rep,name=imported_resources,json=importedResources,proto3" json:"imported_resources,omitempty"`
Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
// deferred is set if the provider is deferring the change. If set the caller
// needs to handle the deferral.
- Deferred *Deferred `protobuf:"bytes,3,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ Deferred *Deferred `protobuf:"bytes,3,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ImportResourceState_Response) Reset() {
*x = ImportResourceState_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[64]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[70]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ImportResourceState_Response) String() string {
@@ -3861,8 +3938,8 @@ func (x *ImportResourceState_Response) String() string {
func (*ImportResourceState_Response) ProtoMessage() {}
func (x *ImportResourceState_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[64]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[70]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3899,10 +3976,7 @@ func (x *ImportResourceState_Response) GetDeferred() *Deferred {
}
type MoveResourceState_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// The address of the provider the resource is being moved from.
SourceProviderAddress string `protobuf:"bytes,1,opt,name=source_provider_address,json=sourceProviderAddress,proto3" json:"source_provider_address,omitempty"`
// The resource type that the resource is being moved from.
@@ -3918,15 +3992,15 @@ type MoveResourceState_Request struct {
TargetTypeName string `protobuf:"bytes,5,opt,name=target_type_name,json=targetTypeName,proto3" json:"target_type_name,omitempty"`
// The private state of the resource being moved.
SourcePrivate []byte `protobuf:"bytes,6,opt,name=source_private,json=sourcePrivate,proto3" json:"source_private,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *MoveResourceState_Request) Reset() {
*x = MoveResourceState_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[65]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[71]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *MoveResourceState_Request) String() string {
@@ -3936,8 +4010,8 @@ func (x *MoveResourceState_Request) String() string {
func (*MoveResourceState_Request) ProtoMessage() {}
func (x *MoveResourceState_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[65]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[71]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3995,25 +4069,22 @@ func (x *MoveResourceState_Request) GetSourcePrivate() []byte {
}
type MoveResourceState_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// The state of the resource after it has been moved.
TargetState *DynamicValue `protobuf:"bytes,1,opt,name=target_state,json=targetState,proto3" json:"target_state,omitempty"`
// Any diagnostics that occurred during the move.
Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
// The private state of the resource after it has been moved.
TargetPrivate []byte `protobuf:"bytes,3,opt,name=target_private,json=targetPrivate,proto3" json:"target_private,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *MoveResourceState_Response) Reset() {
*x = MoveResourceState_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[66]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[72]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *MoveResourceState_Response) String() string {
@@ -4023,8 +4094,8 @@ func (x *MoveResourceState_Response) String() string {
func (*MoveResourceState_Response) ProtoMessage() {}
func (x *MoveResourceState_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[66]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[72]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4061,23 +4132,20 @@ func (x *MoveResourceState_Response) GetTargetPrivate() []byte {
}
type ReadDataSource_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
- Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
- ProviderMeta *DynamicValue `protobuf:"bytes,3,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
- ClientCapabilities *ClientCapabilities `protobuf:"bytes,4,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
+ ProviderMeta *DynamicValue `protobuf:"bytes,3,opt,name=provider_meta,json=providerMeta,proto3" json:"provider_meta,omitempty"`
+ ClientCapabilities *ClientCapabilities `protobuf:"bytes,4,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ReadDataSource_Request) Reset() {
*x = ReadDataSource_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[67]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[73]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ReadDataSource_Request) String() string {
@@ -4087,8 +4155,8 @@ func (x *ReadDataSource_Request) String() string {
func (*ReadDataSource_Request) ProtoMessage() {}
func (x *ReadDataSource_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[67]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[73]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4132,24 +4200,21 @@ func (x *ReadDataSource_Request) GetClientCapabilities() *ClientCapabilities {
}
type ReadDataSource_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
- State *DynamicValue `protobuf:"bytes,1,opt,name=state,proto3" json:"state,omitempty"`
- Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ state protoimpl.MessageState `protogen:"open.v1"`
+ State *DynamicValue `protobuf:"bytes,1,opt,name=state,proto3" json:"state,omitempty"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
// deferred is set if the provider is deferring the change. If set the caller
// needs to handle the deferral.
- Deferred *Deferred `protobuf:"bytes,3,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ Deferred *Deferred `protobuf:"bytes,3,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *ReadDataSource_Response) Reset() {
*x = ReadDataSource_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[68]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[74]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *ReadDataSource_Response) String() string {
@@ -4159,8 +4224,8 @@ func (x *ReadDataSource_Response) String() string {
func (*ReadDataSource_Response) ProtoMessage() {}
func (x *ReadDataSource_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[68]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[74]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4197,18 +4262,16 @@ func (x *ReadDataSource_Response) GetDeferred() *Deferred {
}
type GetFunctions_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
+ state protoimpl.MessageState `protogen:"open.v1"`
unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetFunctions_Request) Reset() {
*x = GetFunctions_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[69]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[75]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetFunctions_Request) String() string {
@@ -4218,8 +4281,8 @@ func (x *GetFunctions_Request) String() string {
func (*GetFunctions_Request) ProtoMessage() {}
func (x *GetFunctions_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[69]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[75]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4235,23 +4298,20 @@ func (*GetFunctions_Request) Descriptor() ([]byte, []int) {
}
type GetFunctions_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// functions is a mapping of function names to definitions.
- Functions map[string]*Function `protobuf:"bytes,1,rep,name=functions,proto3" json:"functions,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
+ Functions map[string]*Function `protobuf:"bytes,1,rep,name=functions,proto3" json:"functions,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
// diagnostics is any warnings or errors.
- Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,2,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *GetFunctions_Response) Reset() {
*x = GetFunctions_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[70]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[76]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *GetFunctions_Response) String() string {
@@ -4261,8 +4321,8 @@ func (x *GetFunctions_Response) String() string {
func (*GetFunctions_Response) ProtoMessage() {}
func (x *GetFunctions_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[70]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[76]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4292,23 +4352,20 @@ func (x *GetFunctions_Response) GetDiagnostics() []*Diagnostic {
}
type CallFunction_Request struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// name is the name of the function being called.
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// arguments is the data of each function argument value.
- Arguments []*DynamicValue `protobuf:"bytes,2,rep,name=arguments,proto3" json:"arguments,omitempty"`
+ Arguments []*DynamicValue `protobuf:"bytes,2,rep,name=arguments,proto3" json:"arguments,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *CallFunction_Request) Reset() {
*x = CallFunction_Request{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[72]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[78]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *CallFunction_Request) String() string {
@@ -4318,8 +4375,8 @@ func (x *CallFunction_Request) String() string {
func (*CallFunction_Request) ProtoMessage() {}
func (x *CallFunction_Request) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[72]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[78]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4349,23 +4406,20 @@ func (x *CallFunction_Request) GetArguments() []*DynamicValue {
}
type CallFunction_Response struct {
- state protoimpl.MessageState
- sizeCache protoimpl.SizeCache
- unknownFields protoimpl.UnknownFields
-
+ state protoimpl.MessageState `protogen:"open.v1"`
// result is result value after running the function logic.
Result *DynamicValue `protobuf:"bytes,1,opt,name=result,proto3" json:"result,omitempty"`
- // error is any errors from the function logic.
- Error *FunctionError `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"`
+ // error is any error from the function logic.
+ Error *FunctionError `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
}
func (x *CallFunction_Response) Reset() {
*x = CallFunction_Response{}
- if protoimpl.UnsafeEnabled {
- mi := &file_tfplugin6_proto_msgTypes[73]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
- }
+ mi := &file_tfplugin6_proto_msgTypes[79]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
}
func (x *CallFunction_Response) String() string {
@@ -4375,8 +4429,8 @@ func (x *CallFunction_Response) String() string {
func (*CallFunction_Response) ProtoMessage() {}
func (x *CallFunction_Response) ProtoReflect() protoreflect.Message {
- mi := &file_tfplugin6_proto_msgTypes[73]
- if protoimpl.UnsafeEnabled && x != nil {
+ mi := &file_tfplugin6_proto_msgTypes[79]
+ if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4405,110 +4459,556 @@ func (x *CallFunction_Response) GetError() *FunctionError {
return nil
}
+type ValidateEphemeralResourceConfig_Request struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *ValidateEphemeralResourceConfig_Request) Reset() {
+ *x = ValidateEphemeralResourceConfig_Request{}
+ mi := &file_tfplugin6_proto_msgTypes[80]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *ValidateEphemeralResourceConfig_Request) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*ValidateEphemeralResourceConfig_Request) ProtoMessage() {}
+
+func (x *ValidateEphemeralResourceConfig_Request) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin6_proto_msgTypes[80]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use ValidateEphemeralResourceConfig_Request.ProtoReflect.Descriptor instead.
+func (*ValidateEphemeralResourceConfig_Request) Descriptor() ([]byte, []int) {
+ return file_tfplugin6_proto_rawDescGZIP(), []int{26, 0}
+}
+
+func (x *ValidateEphemeralResourceConfig_Request) GetTypeName() string {
+ if x != nil {
+ return x.TypeName
+ }
+ return ""
+}
+
+func (x *ValidateEphemeralResourceConfig_Request) GetConfig() *DynamicValue {
+ if x != nil {
+ return x.Config
+ }
+ return nil
+}
+
+type ValidateEphemeralResourceConfig_Response struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *ValidateEphemeralResourceConfig_Response) Reset() {
+ *x = ValidateEphemeralResourceConfig_Response{}
+ mi := &file_tfplugin6_proto_msgTypes[81]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *ValidateEphemeralResourceConfig_Response) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*ValidateEphemeralResourceConfig_Response) ProtoMessage() {}
+
+func (x *ValidateEphemeralResourceConfig_Response) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin6_proto_msgTypes[81]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use ValidateEphemeralResourceConfig_Response.ProtoReflect.Descriptor instead.
+func (*ValidateEphemeralResourceConfig_Response) Descriptor() ([]byte, []int) {
+ return file_tfplugin6_proto_rawDescGZIP(), []int{26, 1}
+}
+
+func (x *ValidateEphemeralResourceConfig_Response) GetDiagnostics() []*Diagnostic {
+ if x != nil {
+ return x.Diagnostics
+ }
+ return nil
+}
+
+type OpenEphemeralResource_Request struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Config *DynamicValue `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"`
+ ClientCapabilities *ClientCapabilities `protobuf:"bytes,3,opt,name=client_capabilities,json=clientCapabilities,proto3" json:"client_capabilities,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *OpenEphemeralResource_Request) Reset() {
+ *x = OpenEphemeralResource_Request{}
+ mi := &file_tfplugin6_proto_msgTypes[82]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *OpenEphemeralResource_Request) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*OpenEphemeralResource_Request) ProtoMessage() {}
+
+func (x *OpenEphemeralResource_Request) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin6_proto_msgTypes[82]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use OpenEphemeralResource_Request.ProtoReflect.Descriptor instead.
+func (*OpenEphemeralResource_Request) Descriptor() ([]byte, []int) {
+ return file_tfplugin6_proto_rawDescGZIP(), []int{27, 0}
+}
+
+func (x *OpenEphemeralResource_Request) GetTypeName() string {
+ if x != nil {
+ return x.TypeName
+ }
+ return ""
+}
+
+func (x *OpenEphemeralResource_Request) GetConfig() *DynamicValue {
+ if x != nil {
+ return x.Config
+ }
+ return nil
+}
+
+func (x *OpenEphemeralResource_Request) GetClientCapabilities() *ClientCapabilities {
+ if x != nil {
+ return x.ClientCapabilities
+ }
+ return nil
+}
+
+type OpenEphemeralResource_Response struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ RenewAt *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=renew_at,json=renewAt,proto3,oneof" json:"renew_at,omitempty"`
+ Result *DynamicValue `protobuf:"bytes,3,opt,name=result,proto3" json:"result,omitempty"`
+ Private []byte `protobuf:"bytes,4,opt,name=private,proto3,oneof" json:"private,omitempty"`
+ // deferred is set if the provider is deferring the change. If set the caller
+ // needs to handle the deferral.
+ Deferred *Deferred `protobuf:"bytes,5,opt,name=deferred,proto3" json:"deferred,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *OpenEphemeralResource_Response) Reset() {
+ *x = OpenEphemeralResource_Response{}
+ mi := &file_tfplugin6_proto_msgTypes[83]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *OpenEphemeralResource_Response) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*OpenEphemeralResource_Response) ProtoMessage() {}
+
+func (x *OpenEphemeralResource_Response) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin6_proto_msgTypes[83]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use OpenEphemeralResource_Response.ProtoReflect.Descriptor instead.
+func (*OpenEphemeralResource_Response) Descriptor() ([]byte, []int) {
+ return file_tfplugin6_proto_rawDescGZIP(), []int{27, 1}
+}
+
+func (x *OpenEphemeralResource_Response) GetDiagnostics() []*Diagnostic {
+ if x != nil {
+ return x.Diagnostics
+ }
+ return nil
+}
+
+func (x *OpenEphemeralResource_Response) GetRenewAt() *timestamppb.Timestamp {
+ if x != nil {
+ return x.RenewAt
+ }
+ return nil
+}
+
+func (x *OpenEphemeralResource_Response) GetResult() *DynamicValue {
+ if x != nil {
+ return x.Result
+ }
+ return nil
+}
+
+func (x *OpenEphemeralResource_Response) GetPrivate() []byte {
+ if x != nil {
+ return x.Private
+ }
+ return nil
+}
+
+func (x *OpenEphemeralResource_Response) GetDeferred() *Deferred {
+ if x != nil {
+ return x.Deferred
+ }
+ return nil
+}
+
+type RenewEphemeralResource_Request struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Private []byte `protobuf:"bytes,2,opt,name=private,proto3,oneof" json:"private,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *RenewEphemeralResource_Request) Reset() {
+ *x = RenewEphemeralResource_Request{}
+ mi := &file_tfplugin6_proto_msgTypes[84]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *RenewEphemeralResource_Request) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*RenewEphemeralResource_Request) ProtoMessage() {}
+
+func (x *RenewEphemeralResource_Request) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin6_proto_msgTypes[84]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use RenewEphemeralResource_Request.ProtoReflect.Descriptor instead.
+func (*RenewEphemeralResource_Request) Descriptor() ([]byte, []int) {
+ return file_tfplugin6_proto_rawDescGZIP(), []int{28, 0}
+}
+
+func (x *RenewEphemeralResource_Request) GetTypeName() string {
+ if x != nil {
+ return x.TypeName
+ }
+ return ""
+}
+
+func (x *RenewEphemeralResource_Request) GetPrivate() []byte {
+ if x != nil {
+ return x.Private
+ }
+ return nil
+}
+
+type RenewEphemeralResource_Response struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ RenewAt *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=renew_at,json=renewAt,proto3,oneof" json:"renew_at,omitempty"`
+ Private []byte `protobuf:"bytes,3,opt,name=private,proto3,oneof" json:"private,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *RenewEphemeralResource_Response) Reset() {
+ *x = RenewEphemeralResource_Response{}
+ mi := &file_tfplugin6_proto_msgTypes[85]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *RenewEphemeralResource_Response) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*RenewEphemeralResource_Response) ProtoMessage() {}
+
+func (x *RenewEphemeralResource_Response) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin6_proto_msgTypes[85]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use RenewEphemeralResource_Response.ProtoReflect.Descriptor instead.
+func (*RenewEphemeralResource_Response) Descriptor() ([]byte, []int) {
+ return file_tfplugin6_proto_rawDescGZIP(), []int{28, 1}
+}
+
+func (x *RenewEphemeralResource_Response) GetDiagnostics() []*Diagnostic {
+ if x != nil {
+ return x.Diagnostics
+ }
+ return nil
+}
+
+func (x *RenewEphemeralResource_Response) GetRenewAt() *timestamppb.Timestamp {
+ if x != nil {
+ return x.RenewAt
+ }
+ return nil
+}
+
+func (x *RenewEphemeralResource_Response) GetPrivate() []byte {
+ if x != nil {
+ return x.Private
+ }
+ return nil
+}
+
+type CloseEphemeralResource_Request struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ TypeName string `protobuf:"bytes,1,opt,name=type_name,json=typeName,proto3" json:"type_name,omitempty"`
+ Private []byte `protobuf:"bytes,2,opt,name=private,proto3,oneof" json:"private,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *CloseEphemeralResource_Request) Reset() {
+ *x = CloseEphemeralResource_Request{}
+ mi := &file_tfplugin6_proto_msgTypes[86]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *CloseEphemeralResource_Request) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*CloseEphemeralResource_Request) ProtoMessage() {}
+
+func (x *CloseEphemeralResource_Request) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin6_proto_msgTypes[86]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use CloseEphemeralResource_Request.ProtoReflect.Descriptor instead.
+func (*CloseEphemeralResource_Request) Descriptor() ([]byte, []int) {
+ return file_tfplugin6_proto_rawDescGZIP(), []int{29, 0}
+}
+
+func (x *CloseEphemeralResource_Request) GetTypeName() string {
+ if x != nil {
+ return x.TypeName
+ }
+ return ""
+}
+
+func (x *CloseEphemeralResource_Request) GetPrivate() []byte {
+ if x != nil {
+ return x.Private
+ }
+ return nil
+}
+
+type CloseEphemeralResource_Response struct {
+ state protoimpl.MessageState `protogen:"open.v1"`
+ Diagnostics []*Diagnostic `protobuf:"bytes,1,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"`
+ unknownFields protoimpl.UnknownFields
+ sizeCache protoimpl.SizeCache
+}
+
+func (x *CloseEphemeralResource_Response) Reset() {
+ *x = CloseEphemeralResource_Response{}
+ mi := &file_tfplugin6_proto_msgTypes[87]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+}
+
+func (x *CloseEphemeralResource_Response) String() string {
+ return protoimpl.X.MessageStringOf(x)
+}
+
+func (*CloseEphemeralResource_Response) ProtoMessage() {}
+
+func (x *CloseEphemeralResource_Response) ProtoReflect() protoreflect.Message {
+ mi := &file_tfplugin6_proto_msgTypes[87]
+ if x != nil {
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ if ms.LoadMessageInfo() == nil {
+ ms.StoreMessageInfo(mi)
+ }
+ return ms
+ }
+ return mi.MessageOf(x)
+}
+
+// Deprecated: Use CloseEphemeralResource_Response.ProtoReflect.Descriptor instead.
+func (*CloseEphemeralResource_Response) Descriptor() ([]byte, []int) {
+ return file_tfplugin6_proto_rawDescGZIP(), []int{29, 1}
+}
+
+func (x *CloseEphemeralResource_Response) GetDiagnostics() []*Diagnostic {
+ if x != nil {
+ return x.Diagnostics
+ }
+ return nil
+}
+
var File_tfplugin6_proto protoreflect.FileDescriptor
var file_tfplugin6_proto_rawDesc = []byte{
0x0a, 0x0f, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x70, 0x72, 0x6f, 0x74,
- 0x6f, 0x12, 0x09, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x22, 0x3c, 0x0a, 0x0c,
- 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x18, 0x0a, 0x07,
- 0x6d, 0x73, 0x67, 0x70, 0x61, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x6d,
- 0x73, 0x67, 0x70, 0x61, 0x63, 0x6b, 0x12, 0x12, 0x0a, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x18, 0x02,
- 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x22, 0xe3, 0x01, 0x0a, 0x0a, 0x44,
- 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x12, 0x3a, 0x0a, 0x08, 0x73, 0x65, 0x76,
- 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x74, 0x66,
- 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
- 0x69, 0x63, 0x2e, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x52, 0x08, 0x73, 0x65, 0x76,
- 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79,
- 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12,
- 0x16, 0x0a, 0x06, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52,
- 0x06, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x12, 0x36, 0x0a, 0x09, 0x61, 0x74, 0x74, 0x72, 0x69,
- 0x62, 0x75, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x74, 0x66, 0x70,
- 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65,
- 0x50, 0x61, 0x74, 0x68, 0x52, 0x09, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x22,
- 0x2f, 0x0a, 0x08, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x0b, 0x0a, 0x07, 0x49,
- 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f,
- 0x52, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x57, 0x41, 0x52, 0x4e, 0x49, 0x4e, 0x47, 0x10, 0x02,
- 0x22, 0x6b, 0x0a, 0x0d, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f,
- 0x72, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
- 0x04, 0x74, 0x65, 0x78, 0x74, 0x12, 0x30, 0x0a, 0x11, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f,
- 0x6e, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03,
- 0x48, 0x00, 0x52, 0x10, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x72, 0x67, 0x75,
- 0x6d, 0x65, 0x6e, 0x74, 0x88, 0x01, 0x01, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x66, 0x75, 0x6e, 0x63,
- 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0xdc, 0x01,
- 0x0a, 0x0d, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12,
- 0x33, 0x0a, 0x05, 0x73, 0x74, 0x65, 0x70, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69,
- 0x62, 0x75, 0x74, 0x65, 0x50, 0x61, 0x74, 0x68, 0x2e, 0x53, 0x74, 0x65, 0x70, 0x52, 0x05, 0x73,
- 0x74, 0x65, 0x70, 0x73, 0x1a, 0x95, 0x01, 0x0a, 0x04, 0x53, 0x74, 0x65, 0x70, 0x12, 0x27, 0x0a,
- 0x0e, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18,
- 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0d, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75,
- 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2e, 0x0a, 0x12, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e,
- 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, 0x01,
- 0x28, 0x09, 0x48, 0x00, 0x52, 0x10, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x4b, 0x65, 0x79,
- 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x28, 0x0a, 0x0f, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e,
- 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x69, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x48,
- 0x00, 0x52, 0x0d, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x4b, 0x65, 0x79, 0x49, 0x6e, 0x74,
- 0x42, 0x0a, 0x0a, 0x08, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x3b, 0x0a, 0x0c,
- 0x53, 0x74, 0x6f, 0x70, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x1a, 0x09, 0x0a, 0x07,
- 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f,
- 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01,
- 0x28, 0x09, 0x52, 0x05, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x96, 0x01, 0x0a, 0x08, 0x52, 0x61,
- 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x18, 0x01,
- 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x3a, 0x0a, 0x07, 0x66, 0x6c,
- 0x61, 0x74, 0x6d, 0x61, 0x70, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x74, 0x66,
- 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x52, 0x61, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65,
- 0x2e, 0x46, 0x6c, 0x61, 0x74, 0x6d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x66,
- 0x6c, 0x61, 0x74, 0x6d, 0x61, 0x70, 0x1a, 0x3a, 0x0a, 0x0c, 0x46, 0x6c, 0x61, 0x74, 0x6d, 0x61,
- 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20,
- 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75,
- 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02,
- 0x38, 0x01, 0x22, 0x95, 0x0a, 0x0a, 0x06, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x18, 0x0a,
- 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07,
- 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2d, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b,
- 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x36, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x52,
- 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x1a, 0xa2, 0x02, 0x0a, 0x05, 0x42, 0x6c, 0x6f, 0x63, 0x6b,
- 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28,
- 0x03, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x3b, 0x0a, 0x0a, 0x61, 0x74,
- 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d,
- 0x61, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x52, 0x0a, 0x61, 0x74, 0x74,
- 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x12, 0x3e, 0x0a, 0x0b, 0x62, 0x6c, 0x6f, 0x63, 0x6b,
- 0x5f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e,
- 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x0a, 0x62, 0x6c, 0x6f,
- 0x63, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72,
- 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65,
- 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x40, 0x0a, 0x10, 0x64, 0x65, 0x73,
- 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x05, 0x20,
- 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e,
- 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4b, 0x69, 0x6e, 0x64, 0x52, 0x0f, 0x64, 0x65, 0x73, 0x63,
- 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x64,
- 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52,
- 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x1a, 0xe4, 0x02, 0x0a, 0x09,
- 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d,
- 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a,
- 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x74, 0x79, 0x70,
- 0x65, 0x12, 0x39, 0x0a, 0x0b, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x79, 0x70, 0x65,
- 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x36, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74,
- 0x52, 0x0a, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, 0x0b,
- 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28,
- 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1a,
- 0x0a, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08,
- 0x52, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x6f, 0x70,
- 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x6f, 0x70,
- 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74,
- 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x75, 0x74,
- 0x65, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x18,
- 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65,
- 0x12, 0x40, 0x0a, 0x10, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x5f,
- 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70,
- 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4b, 0x69, 0x6e,
- 0x64, 0x52, 0x0f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x4b, 0x69,
- 0x6e, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64,
- 0x18, 0x09, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74,
- 0x65, 0x64, 0x1a, 0xa7, 0x02, 0x0a, 0x0b, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x42, 0x6c, 0x6f,
+ 0x6f, 0x12, 0x09, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x1a, 0x1f, 0x67, 0x6f,
+ 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69,
+ 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x3c, 0x0a,
+ 0x0c, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x18, 0x0a,
+ 0x07, 0x6d, 0x73, 0x67, 0x70, 0x61, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07,
+ 0x6d, 0x73, 0x67, 0x70, 0x61, 0x63, 0x6b, 0x12, 0x12, 0x0a, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x18,
+ 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x22, 0xe3, 0x01, 0x0a, 0x0a,
+ 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x12, 0x3a, 0x0a, 0x08, 0x73, 0x65,
+ 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73,
+ 0x74, 0x69, 0x63, 0x2e, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x52, 0x08, 0x73, 0x65,
+ 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72,
+ 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79,
+ 0x12, 0x16, 0x0a, 0x06, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x06, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x12, 0x36, 0x0a, 0x09, 0x61, 0x74, 0x74, 0x72,
+ 0x69, 0x62, 0x75, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74,
+ 0x65, 0x50, 0x61, 0x74, 0x68, 0x52, 0x09, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65,
+ 0x22, 0x2f, 0x0a, 0x08, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x0b, 0x0a, 0x07,
+ 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52,
+ 0x4f, 0x52, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x57, 0x41, 0x52, 0x4e, 0x49, 0x4e, 0x47, 0x10,
+ 0x02, 0x22, 0x6b, 0x0a, 0x0d, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72,
+ 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x12, 0x30, 0x0a, 0x11, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69,
+ 0x6f, 0x6e, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28,
+ 0x03, 0x48, 0x00, 0x52, 0x10, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x41, 0x72, 0x67,
+ 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x88, 0x01, 0x01, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x66, 0x75, 0x6e,
+ 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0xdc,
+ 0x01, 0x0a, 0x0d, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x50, 0x61, 0x74, 0x68,
+ 0x12, 0x33, 0x0a, 0x05, 0x73, 0x74, 0x65, 0x70, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32,
+ 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x41, 0x74, 0x74, 0x72,
+ 0x69, 0x62, 0x75, 0x74, 0x65, 0x50, 0x61, 0x74, 0x68, 0x2e, 0x53, 0x74, 0x65, 0x70, 0x52, 0x05,
+ 0x73, 0x74, 0x65, 0x70, 0x73, 0x1a, 0x95, 0x01, 0x0a, 0x04, 0x53, 0x74, 0x65, 0x70, 0x12, 0x27,
+ 0x0a, 0x0e, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65,
+ 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0d, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62,
+ 0x75, 0x74, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2e, 0x0a, 0x12, 0x65, 0x6c, 0x65, 0x6d, 0x65,
+ 0x6e, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20,
+ 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x10, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x4b, 0x65,
+ 0x79, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x28, 0x0a, 0x0f, 0x65, 0x6c, 0x65, 0x6d, 0x65,
+ 0x6e, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x69, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03,
+ 0x48, 0x00, 0x52, 0x0d, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x4b, 0x65, 0x79, 0x49, 0x6e,
+ 0x74, 0x42, 0x0a, 0x0a, 0x08, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x3b, 0x0a,
+ 0x0c, 0x53, 0x74, 0x6f, 0x70, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x1a, 0x09, 0x0a,
+ 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70,
+ 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20,
+ 0x01, 0x28, 0x09, 0x52, 0x05, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x96, 0x01, 0x0a, 0x08, 0x52,
+ 0x61, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x18,
+ 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x3a, 0x0a, 0x07, 0x66,
+ 0x6c, 0x61, 0x74, 0x6d, 0x61, 0x70, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x52, 0x61, 0x77, 0x53, 0x74, 0x61, 0x74,
+ 0x65, 0x2e, 0x46, 0x6c, 0x61, 0x74, 0x6d, 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07,
+ 0x66, 0x6c, 0x61, 0x74, 0x6d, 0x61, 0x70, 0x1a, 0x3a, 0x0a, 0x0c, 0x46, 0x6c, 0x61, 0x74, 0x6d,
+ 0x61, 0x70, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01,
+ 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c,
+ 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a,
+ 0x02, 0x38, 0x01, 0x22, 0xb4, 0x0a, 0x0a, 0x06, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x18,
+ 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52,
+ 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2d, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63,
+ 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x36, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x42, 0x6c, 0x6f, 0x63, 0x6b,
+ 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x1a, 0xa2, 0x02, 0x0a, 0x05, 0x42, 0x6c, 0x6f, 0x63,
+ 0x6b, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01,
+ 0x28, 0x03, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x3b, 0x0a, 0x0a, 0x61,
+ 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32,
+ 0x1b, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x53, 0x63, 0x68, 0x65,
+ 0x6d, 0x61, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x52, 0x0a, 0x61, 0x74,
+ 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x12, 0x3e, 0x0a, 0x0b, 0x62, 0x6c, 0x6f, 0x63,
+ 0x6b, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e,
+ 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61,
+ 0x2e, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x52, 0x0a, 0x62, 0x6c,
+ 0x6f, 0x63, 0x6b, 0x54, 0x79, 0x70, 0x65, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63,
+ 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64,
+ 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x40, 0x0a, 0x10, 0x64, 0x65,
+ 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x05,
+ 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36,
+ 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4b, 0x69, 0x6e, 0x64, 0x52, 0x0f, 0x64, 0x65, 0x73,
+ 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x1e, 0x0a, 0x0a,
+ 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08,
+ 0x52, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x1a, 0x83, 0x03, 0x0a,
+ 0x09, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61,
+ 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12,
+ 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x74, 0x79,
+ 0x70, 0x65, 0x12, 0x39, 0x0a, 0x0b, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x79, 0x70,
+ 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x36, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x4f, 0x62, 0x6a, 0x65, 0x63,
+ 0x74, 0x52, 0x0a, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a,
+ 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01,
+ 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12,
+ 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28,
+ 0x08, 0x52, 0x08, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x6f,
+ 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x6f,
+ 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x75,
+ 0x74, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x75,
+ 0x74, 0x65, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65,
+ 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76,
+ 0x65, 0x12, 0x40, 0x0a, 0x10, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e,
+ 0x5f, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4b, 0x69,
+ 0x6e, 0x64, 0x52, 0x0f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x4b,
+ 0x69, 0x6e, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65,
+ 0x64, 0x18, 0x09, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61,
+ 0x74, 0x65, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x77, 0x72, 0x69, 0x74, 0x65, 0x5f, 0x6f, 0x6e, 0x6c,
+ 0x79, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x77, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x6e,
+ 0x6c, 0x79, 0x1a, 0xa7, 0x02, 0x0a, 0x0b, 0x4e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x42, 0x6c, 0x6f,
0x63, 0x6b, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18,
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12,
0x2d, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17,
@@ -4595,488 +5095,623 @@ var file_tfplugin6_proto_rawDesc = []byte{
0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x12, 0x2e, 0x0a, 0x13, 0x6d, 0x6f, 0x76, 0x65, 0x5f, 0x72,
0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20,
0x01, 0x28, 0x08, 0x52, 0x11, 0x6d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
- 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x22, 0x3f, 0x0a, 0x12, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74,
- 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x29, 0x0a, 0x10,
- 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x61, 0x6c, 0x5f, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64,
- 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x61, 0x6c,
- 0x41, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x22, 0xa2, 0x01, 0x0a, 0x08, 0x44, 0x65, 0x66, 0x65,
- 0x72, 0x72, 0x65, 0x64, 0x12, 0x32, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x18, 0x01,
- 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1a, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36,
- 0x2e, 0x44, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x2e, 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e,
- 0x52, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x22, 0x62, 0x0a, 0x06, 0x52, 0x65, 0x61, 0x73,
- 0x6f, 0x6e, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12,
- 0x1b, 0x0a, 0x17, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x43, 0x4f, 0x4e, 0x46,
- 0x49, 0x47, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x01, 0x12, 0x1b, 0x0a, 0x17,
- 0x50, 0x52, 0x4f, 0x56, 0x49, 0x44, 0x45, 0x52, 0x5f, 0x43, 0x4f, 0x4e, 0x46, 0x49, 0x47, 0x5f,
- 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x02, 0x12, 0x11, 0x0a, 0x0d, 0x41, 0x42, 0x53,
- 0x45, 0x4e, 0x54, 0x5f, 0x50, 0x52, 0x45, 0x52, 0x45, 0x51, 0x10, 0x03, 0x22, 0x96, 0x04, 0x0a,
- 0x0b, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x1a, 0x09, 0x0a, 0x07,
- 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0xef, 0x02, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70,
- 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4e, 0x0a, 0x13, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x63,
- 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28,
- 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x53, 0x65,
- 0x72, 0x76, 0x65, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73,
- 0x52, 0x12, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69,
- 0x74, 0x69, 0x65, 0x73, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
- 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c,
- 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63,
- 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x4c, 0x0a,
- 0x0c, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20,
- 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e,
- 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x44, 0x61, 0x74, 0x61,
- 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x0b,
- 0x64, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x45, 0x0a, 0x09, 0x72,
- 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65,
- 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d,
- 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
- 0x65, 0x73, 0x12, 0x45, 0x0a, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18,
- 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x36, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x46, 0x75,
- 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x09,
- 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x26, 0x0a, 0x10, 0x46, 0x75, 0x6e,
- 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x12, 0x0a,
- 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d,
- 0x65, 0x1a, 0x31, 0x0a, 0x12, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d,
- 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f,
- 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65,
- 0x4e, 0x61, 0x6d, 0x65, 0x1a, 0x2f, 0x0a, 0x10, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
- 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65,
- 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70,
- 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0xc7, 0x06, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f,
- 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x1a, 0x09, 0x0a, 0x07, 0x52,
- 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0xa6, 0x06, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f,
- 0x6e, 0x73, 0x65, 0x12, 0x2d, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x18,
- 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x36, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64,
- 0x65, 0x72, 0x12, 0x65, 0x0a, 0x10, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73,
- 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3a, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76,
- 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f,
- 0x6e, 0x73, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65,
- 0x6d, 0x61, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72,
- 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x12, 0x6c, 0x0a, 0x13, 0x64, 0x61, 0x74,
- 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73,
- 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3c, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x36, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63,
- 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x44, 0x61,
- 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x45,
- 0x6e, 0x74, 0x72, 0x79, 0x52, 0x11, 0x64, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65,
- 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e,
- 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73,
- 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73,
- 0x12, 0x36, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x65, 0x74,
- 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
- 0x69, 0x6e, 0x36, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x76,
- 0x69, 0x64, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x4e, 0x0a, 0x13, 0x73, 0x65, 0x72, 0x76,
+ 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x22, 0x82, 0x01, 0x0a, 0x12, 0x43, 0x6c, 0x69, 0x65, 0x6e,
+ 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x29, 0x0a,
+ 0x10, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x61, 0x6c, 0x5f, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x65,
+ 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x61,
+ 0x6c, 0x41, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x12, 0x41, 0x0a, 0x1d, 0x77, 0x72, 0x69, 0x74,
+ 0x65, 0x5f, 0x6f, 0x6e, 0x6c, 0x79, 0x5f, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65,
+ 0x73, 0x5f, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52,
+ 0x1a, 0x77, 0x72, 0x69, 0x74, 0x65, 0x4f, 0x6e, 0x6c, 0x79, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62,
+ 0x75, 0x74, 0x65, 0x73, 0x41, 0x6c, 0x6c, 0x6f, 0x77, 0x65, 0x64, 0x22, 0xa2, 0x01, 0x0a, 0x08,
+ 0x44, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x12, 0x32, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x73,
+ 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1a, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x2e, 0x52, 0x65,
+ 0x61, 0x73, 0x6f, 0x6e, 0x52, 0x06, 0x72, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x22, 0x62, 0x0a, 0x06,
+ 0x52, 0x65, 0x61, 0x73, 0x6f, 0x6e, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57,
+ 0x4e, 0x10, 0x00, 0x12, 0x1b, 0x0a, 0x17, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f,
+ 0x43, 0x4f, 0x4e, 0x46, 0x49, 0x47, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x01,
+ 0x12, 0x1b, 0x0a, 0x17, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x44, 0x45, 0x52, 0x5f, 0x43, 0x4f, 0x4e,
+ 0x46, 0x49, 0x47, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x02, 0x12, 0x11, 0x0a,
+ 0x0d, 0x41, 0x42, 0x53, 0x45, 0x4e, 0x54, 0x5f, 0x50, 0x52, 0x45, 0x52, 0x45, 0x51, 0x10, 0x03,
+ 0x22, 0xb3, 0x05, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61,
+ 0x1a, 0x09, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0xd2, 0x03, 0x0a, 0x08,
+ 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4e, 0x0a, 0x13, 0x73, 0x65, 0x72, 0x76,
0x65, 0x72, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18,
- 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
0x36, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69,
0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x61, 0x70, 0x61,
- 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x52, 0x0a, 0x09, 0x66, 0x75, 0x6e, 0x63,
- 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x74, 0x66,
- 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69,
- 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
- 0x73, 0x65, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72,
- 0x79, 0x52, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x55, 0x0a, 0x14,
- 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x45,
- 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28,
- 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18,
- 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x36, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a,
- 0x02, 0x38, 0x01, 0x1a, 0x57, 0x0a, 0x16, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63,
- 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a,
- 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12,
- 0x27, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d,
- 0x61, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x51, 0x0a, 0x0e,
- 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10,
- 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79,
- 0x12, 0x29, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32,
- 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x46, 0x75, 0x6e, 0x63,
- 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22,
- 0x99, 0x01, 0x0a, 0x16, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x76,
- 0x69, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x3a, 0x0a, 0x07, 0x52, 0x65,
- 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18,
- 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06,
- 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x43, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
- 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63,
- 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
- 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b,
- 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0x90, 0x02, 0x0a, 0x14,
- 0x55, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53,
- 0x74, 0x61, 0x74, 0x65, 0x1a, 0x72, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
+ 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67,
+ 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e,
+ 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f,
+ 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63,
+ 0x73, 0x12, 0x4c, 0x0a, 0x0c, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
+ 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x36, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e,
+ 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61,
+ 0x74, 0x61, 0x52, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12,
+ 0x45, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03,
+ 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x47,
+ 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75,
+ 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x09, 0x72, 0x65, 0x73,
+ 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x45, 0x0a, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69,
+ 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
+ 0x61, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61,
+ 0x74, 0x61, 0x52, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x61, 0x0a,
+ 0x13, 0x65, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75,
+ 0x72, 0x63, 0x65, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x30, 0x2e, 0x74, 0x66, 0x70,
+ 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61,
+ 0x74, 0x61, 0x2e, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x12, 0x65, 0x70,
+ 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73,
+ 0x1a, 0x26, 0x0a, 0x10, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61,
+ 0x64, 0x61, 0x74, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01,
+ 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x1a, 0x31, 0x0a, 0x12, 0x44, 0x61, 0x74, 0x61,
+ 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b,
+ 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
+ 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x1a, 0x2f, 0x0a, 0x10, 0x52,
+ 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12,
0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01,
- 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07,
- 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, 0x76,
- 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x30, 0x0a, 0x09, 0x72, 0x61, 0x77, 0x5f, 0x73, 0x74,
- 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c,
- 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x52, 0x61, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x08,
- 0x72, 0x61, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x83, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73,
- 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3e, 0x0a, 0x0e, 0x75, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65,
- 0x64, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e,
- 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69,
- 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0d, 0x75, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x64,
- 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73,
- 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70,
- 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69,
- 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0xb6,
- 0x01, 0x0a, 0x16, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75,
- 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x57, 0x0a, 0x07, 0x52, 0x65, 0x71,
+ 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x1a, 0x38, 0x0a, 0x19,
+ 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70,
+ 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79,
+ 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0xab, 0x08, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x50, 0x72,
+ 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x1a, 0x09, 0x0a, 0x07,
+ 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x8a, 0x08, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70,
+ 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2d, 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72,
+ 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
+ 0x6e, 0x36, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x76, 0x69,
+ 0x64, 0x65, 0x72, 0x12, 0x65, 0x0a, 0x10, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f,
+ 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3a, 0x2e,
+ 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f,
+ 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70,
+ 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68,
+ 0x65, 0x6d, 0x61, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0f, 0x72, 0x65, 0x73, 0x6f, 0x75,
+ 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x12, 0x6c, 0x0a, 0x13, 0x64, 0x61,
+ 0x74, 0x61, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61,
+ 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3c, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x36, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53,
+ 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x44,
+ 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73,
+ 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x11, 0x64, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67,
+ 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e,
+ 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f,
+ 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63,
+ 0x73, 0x12, 0x36, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x65,
+ 0x74, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x0c, 0x70, 0x72, 0x6f,
+ 0x76, 0x69, 0x64, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x4e, 0x0a, 0x13, 0x73, 0x65, 0x72,
+ 0x76, 0x65, 0x72, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73,
+ 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
+ 0x6e, 0x36, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c,
+ 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x43, 0x61, 0x70,
+ 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x52, 0x0a, 0x09, 0x66, 0x75, 0x6e,
+ 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x34, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76,
+ 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f,
+ 0x6e, 0x73, 0x65, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74,
+ 0x72, 0x79, 0x52, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x81, 0x01,
+ 0x0a, 0x1a, 0x65, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x5f, 0x72, 0x65, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x08, 0x20, 0x03,
+ 0x28, 0x0b, 0x32, 0x43, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x47,
+ 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61,
+ 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65,
+ 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d,
+ 0x61, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x18, 0x65, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72,
+ 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61,
+ 0x73, 0x1a, 0x55, 0x0a, 0x14, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68,
+ 0x65, 0x6d, 0x61, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79,
+ 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x05, 0x76,
+ 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70,
+ 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x05, 0x76,
+ 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x57, 0x0a, 0x16, 0x44, 0x61, 0x74, 0x61,
+ 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x45, 0x6e, 0x74,
+ 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
+ 0x03, 0x6b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20,
+ 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e,
+ 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38,
+ 0x01, 0x1a, 0x51, 0x0a, 0x0e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e,
+ 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
+ 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02,
+ 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36,
+ 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65,
+ 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x5e, 0x0a, 0x1d, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61,
+ 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73,
+ 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01,
+ 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x27, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65,
+ 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
+ 0x6e, 0x36, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65,
+ 0x3a, 0x02, 0x38, 0x01, 0x22, 0x99, 0x01, 0x0a, 0x16, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74,
+ 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a,
+ 0x3a, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f,
+ 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70,
+ 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61,
+ 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x43, 0x0a, 0x08, 0x52,
+ 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e,
+ 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73,
+ 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73,
+ 0x22, 0x90, 0x02, 0x0a, 0x14, 0x55, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x72, 0x0a, 0x07, 0x52, 0x65, 0x71,
0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d,
0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d,
- 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28,
- 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79,
- 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66,
- 0x69, 0x67, 0x1a, 0x43, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37,
- 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x01, 0x20,
- 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e,
- 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67,
- 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0xba, 0x01, 0x0a, 0x1a, 0x56, 0x61, 0x6c, 0x69,
- 0x64, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
- 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x57, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
- 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01,
- 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2f,
- 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d,
- 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a,
- 0x43, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64,
- 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b,
- 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61,
- 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73,
- 0x74, 0x69, 0x63, 0x73, 0x22, 0x92, 0x02, 0x0a, 0x11, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75,
- 0x72, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x1a, 0xb7, 0x01, 0x0a, 0x07, 0x52,
- 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2b, 0x0a, 0x11, 0x74, 0x65, 0x72, 0x72, 0x61, 0x66,
- 0x6f, 0x72, 0x6d, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28,
- 0x09, 0x52, 0x10, 0x74, 0x65, 0x72, 0x72, 0x61, 0x66, 0x6f, 0x72, 0x6d, 0x56, 0x65, 0x72, 0x73,
- 0x69, 0x6f, 0x6e, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20,
- 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e,
- 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f,
- 0x6e, 0x66, 0x69, 0x67, 0x12, 0x4e, 0x0a, 0x13, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x63,
- 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28,
- 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x43, 0x6c,
- 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73,
- 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69,
- 0x74, 0x69, 0x65, 0x73, 0x1a, 0x43, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
- 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18,
- 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69,
- 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0xe4, 0x03, 0x0a, 0x0c, 0x52, 0x65,
- 0x61, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x1a, 0x8c, 0x02, 0x0a, 0x07, 0x52,
- 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e,
- 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e,
- 0x61, 0x6d, 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x73,
- 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70,
- 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61,
- 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x63, 0x75, 0x72, 0x72, 0x65, 0x6e, 0x74, 0x53, 0x74, 0x61, 0x74,
- 0x65, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01,
- 0x28, 0x0c, 0x52, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x70,
- 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01,
+ 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01,
+ 0x28, 0x03, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x30, 0x0a, 0x09, 0x72,
+ 0x61, 0x77, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13,
+ 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x52, 0x61, 0x77, 0x53, 0x74,
+ 0x61, 0x74, 0x65, 0x52, 0x08, 0x72, 0x61, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x83, 0x01,
+ 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3e, 0x0a, 0x0e, 0x75, 0x70,
+ 0x67, 0x72, 0x61, 0x64, 0x65, 0x64, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44,
- 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70, 0x72, 0x6f,
- 0x76, 0x69, 0x64, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x4e, 0x0a, 0x13, 0x63, 0x6c, 0x69,
- 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73,
- 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x36, 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c,
- 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70,
- 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a, 0xc4, 0x01, 0x0a, 0x08, 0x52, 0x65,
- 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x34, 0x0a, 0x09, 0x6e, 0x65, 0x77, 0x5f, 0x73, 0x74,
- 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c,
- 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c,
- 0x75, 0x65, 0x52, 0x08, 0x6e, 0x65, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a, 0x0b,
- 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28,
- 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69,
- 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f,
- 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65,
- 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x12,
- 0x2f, 0x0a, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28,
- 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x65,
- 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x52, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64,
- 0x22, 0xf3, 0x05, 0x0a, 0x12, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
- 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x1a, 0x8b, 0x03, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75,
- 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65,
- 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65,
- 0x12, 0x38, 0x0a, 0x0b, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18,
- 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0a,
- 0x70, 0x72, 0x69, 0x6f, 0x72, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x45, 0x0a, 0x12, 0x70, 0x72,
- 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x64, 0x5f, 0x6e, 0x65, 0x77, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65,
- 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52,
- 0x10, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x64, 0x4e, 0x65, 0x77, 0x53, 0x74, 0x61, 0x74,
- 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28,
- 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79,
- 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66,
- 0x69, 0x67, 0x12, 0x23, 0x0a, 0x0d, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x5f, 0x70, 0x72, 0x69, 0x76,
- 0x61, 0x74, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x70, 0x72, 0x69, 0x6f, 0x72,
- 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x76, 0x69,
- 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d,
- 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65,
- 0x72, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x4e, 0x0a, 0x13, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f,
- 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x07, 0x20, 0x01,
- 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x43,
- 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65,
- 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c,
- 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a, 0xce, 0x02, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
- 0x73, 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x5f, 0x73, 0x74,
- 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c,
- 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c,
- 0x75, 0x65, 0x52, 0x0c, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65,
- 0x12, 0x43, 0x0a, 0x10, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x73, 0x5f, 0x72, 0x65, 0x70,
- 0x6c, 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x74, 0x66, 0x70,
- 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65,
- 0x50, 0x61, 0x74, 0x68, 0x52, 0x0f, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x73, 0x52, 0x65,
- 0x70, 0x6c, 0x61, 0x63, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64,
- 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0e,
- 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x12, 0x37,
- 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x04, 0x20,
- 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e,
- 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67,
- 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x2c, 0x0a, 0x12, 0x6c, 0x65, 0x67, 0x61, 0x63,
- 0x79, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x18, 0x05, 0x20,
- 0x01, 0x28, 0x08, 0x52, 0x10, 0x6c, 0x65, 0x67, 0x61, 0x63, 0x79, 0x54, 0x79, 0x70, 0x65, 0x53,
- 0x79, 0x73, 0x74, 0x65, 0x6d, 0x12, 0x2f, 0x0a, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65,
- 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
- 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x52, 0x08, 0x64, 0x65,
- 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x22, 0x92, 0x04, 0x0a, 0x13, 0x41, 0x70, 0x70, 0x6c, 0x79,
- 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x1a, 0xb6,
- 0x02, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79,
+ 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0d, 0x75, 0x70, 0x67,
+ 0x72, 0x61, 0x64, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69,
+ 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32,
+ 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67,
+ 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
+ 0x69, 0x63, 0x73, 0x22, 0x87, 0x02, 0x0a, 0x16, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65,
+ 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0xa7,
+ 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79,
0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74,
- 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x0b, 0x70, 0x72, 0x69, 0x6f, 0x72,
- 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63,
- 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0a, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x53, 0x74, 0x61, 0x74,
- 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x5f, 0x73, 0x74, 0x61,
- 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69,
+ 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65,
+ 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x4e, 0x0a, 0x13, 0x63, 0x6c, 0x69, 0x65,
+ 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18,
+ 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x36, 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69,
+ 0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61,
+ 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a, 0x43, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70,
+ 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
+ 0x69, 0x63, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63,
+ 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0xba, 0x01,
+ 0x0a, 0x1a, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65,
+ 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x57, 0x0a, 0x07,
+ 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f,
+ 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65,
+ 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02,
+ 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36,
+ 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63,
+ 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x43, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
+ 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73,
+ 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
+ 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64,
+ 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0x92, 0x02, 0x0a, 0x11, 0x43,
+ 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72,
+ 0x1a, 0xb7, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2b, 0x0a, 0x11,
+ 0x74, 0x65, 0x72, 0x72, 0x61, 0x66, 0x6f, 0x72, 0x6d, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f,
+ 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x74, 0x65, 0x72, 0x72, 0x61, 0x66, 0x6f,
+ 0x72, 0x6d, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e,
+ 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c,
+ 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x4e, 0x0a, 0x13, 0x63, 0x6c,
+ 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65,
+ 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x36, 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69,
+ 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61,
+ 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a, 0x43, 0x0a, 0x08, 0x52, 0x65,
+ 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f,
+ 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
+ 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22,
+ 0xe4, 0x03, 0x0a, 0x0c, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
+ 0x1a, 0x8c, 0x02, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09,
+ 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
+ 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x63, 0x75, 0x72,
+ 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b,
+ 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e,
+ 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x63, 0x75, 0x72, 0x72, 0x65,
+ 0x6e, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61,
+ 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74,
+ 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x65,
+ 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75,
- 0x65, 0x52, 0x0c, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12,
- 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32,
+ 0x65, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x12,
+ 0x4e, 0x0a, 0x13, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69,
+ 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43,
+ 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69,
+ 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a,
+ 0xc4, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x34, 0x0a, 0x09,
+ 0x6e, 0x65, 0x77, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32,
0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61,
- 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67,
- 0x12, 0x27, 0x0a, 0x0f, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x69, 0x76,
- 0x61, 0x74, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0e, 0x70, 0x6c, 0x61, 0x6e, 0x6e,
- 0x65, 0x64, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x72, 0x6f,
- 0x76, 0x69, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b,
- 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e,
- 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x76, 0x69,
- 0x64, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x1a, 0xc1, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70,
- 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x34, 0x0a, 0x09, 0x6e, 0x65, 0x77, 0x5f, 0x73, 0x74, 0x61, 0x74,
- 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x08, 0x6e, 0x65, 0x77, 0x53, 0x74, 0x61,
+ 0x74, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63,
+ 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b,
+ 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x70,
+ 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x72,
+ 0x69, 0x76, 0x61, 0x74, 0x65, 0x12, 0x2f, 0x0a, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65,
+ 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x52, 0x08, 0x64, 0x65,
+ 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x22, 0xf3, 0x05, 0x0a, 0x12, 0x50, 0x6c, 0x61, 0x6e, 0x52,
+ 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x1a, 0x8b, 0x03,
+ 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70,
+ 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79,
+ 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x0b, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x5f,
+ 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56,
+ 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0a, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x53, 0x74, 0x61, 0x74, 0x65,
+ 0x12, 0x45, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x64, 0x5f, 0x6e, 0x65, 0x77,
+ 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63,
+ 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x10, 0x70, 0x72, 0x6f, 0x70, 0x6f, 0x73, 0x65, 0x64, 0x4e,
+ 0x65, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69,
+ 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65,
- 0x52, 0x08, 0x6e, 0x65, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72,
- 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x72, 0x69,
+ 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x23, 0x0a, 0x0d, 0x70, 0x72, 0x69, 0x6f,
+ 0x72, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52,
+ 0x0c, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x12, 0x3c, 0x0a,
+ 0x0d, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x06,
+ 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36,
+ 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70,
+ 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x4e, 0x0a, 0x13, 0x63,
+ 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69,
+ 0x65, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62,
+ 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43,
+ 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a, 0xce, 0x02, 0x0a, 0x08,
+ 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x6c, 0x61, 0x6e,
+ 0x6e, 0x65, 0x64, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32,
+ 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61,
+ 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65,
+ 0x64, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x43, 0x0a, 0x10, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72,
+ 0x65, 0x73, 0x5f, 0x72, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b,
+ 0x32, 0x18, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x41, 0x74, 0x74,
+ 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x50, 0x61, 0x74, 0x68, 0x52, 0x0f, 0x72, 0x65, 0x71, 0x75,
+ 0x69, 0x72, 0x65, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x63, 0x65, 0x12, 0x27, 0x0a, 0x0f, 0x70,
+ 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x03,
+ 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0e, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x50, 0x72, 0x69,
0x76, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
- 0x69, 0x63, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x69, 0x63, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c,
0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63,
0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x2c, 0x0a,
0x12, 0x6c, 0x65, 0x67, 0x61, 0x63, 0x79, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x73, 0x79, 0x73,
- 0x74, 0x65, 0x6d, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x6c, 0x65, 0x67, 0x61, 0x63,
- 0x79, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x22, 0xef, 0x03, 0x0a, 0x13,
- 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74,
- 0x61, 0x74, 0x65, 0x1a, 0x86, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
- 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01,
- 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x0e, 0x0a, 0x02,
- 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x4e, 0x0a, 0x13,
+ 0x74, 0x65, 0x6d, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x6c, 0x65, 0x67, 0x61, 0x63,
+ 0x79, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x12, 0x2f, 0x0a, 0x08, 0x64,
+ 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e,
+ 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x65, 0x66, 0x65, 0x72, 0x72,
+ 0x65, 0x64, 0x52, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x22, 0x92, 0x04, 0x0a,
+ 0x13, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68,
+ 0x61, 0x6e, 0x67, 0x65, 0x1a, 0xb6, 0x02, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
+ 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20,
+ 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a,
+ 0x0b, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01,
+ 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44,
+ 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0a, 0x70, 0x72, 0x69,
+ 0x6f, 0x72, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x6c, 0x61, 0x6e, 0x6e,
+ 0x65, 0x64, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17,
+ 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d,
+ 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64,
+ 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18,
+ 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06,
+ 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x27, 0x0a, 0x0f, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65,
+ 0x64, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52,
+ 0x0e, 0x70, 0x6c, 0x61, 0x6e, 0x6e, 0x65, 0x64, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x12,
+ 0x3c, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x65, 0x74, 0x61,
+ 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
+ 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52,
+ 0x0c, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x1a, 0xc1, 0x01,
+ 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x34, 0x0a, 0x09, 0x6e, 0x65,
+ 0x77, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e,
+ 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69,
+ 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x08, 0x6e, 0x65, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65,
+ 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28,
+ 0x0c, 0x52, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69,
+ 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32,
+ 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67,
+ 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
+ 0x69, 0x63, 0x73, 0x12, 0x2c, 0x0a, 0x12, 0x6c, 0x65, 0x67, 0x61, 0x63, 0x79, 0x5f, 0x74, 0x79,
+ 0x70, 0x65, 0x5f, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52,
+ 0x10, 0x6c, 0x65, 0x67, 0x61, 0x63, 0x79, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65,
+ 0x6d, 0x22, 0xef, 0x03, 0x0a, 0x13, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x86, 0x01, 0x0a, 0x07, 0x52, 0x65,
+ 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61,
+ 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61,
+ 0x6d, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02,
+ 0x69, 0x64, 0x12, 0x4e, 0x0a, 0x13, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x70,
+ 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32,
+ 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x43, 0x6c, 0x69, 0x65,
+ 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x12,
+ 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69,
+ 0x65, 0x73, 0x1a, 0x78, 0x0a, 0x10, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x52, 0x65,
+ 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e,
+ 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e,
+ 0x61, 0x6d, 0x65, 0x12, 0x2d, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01,
+ 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44,
+ 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61,
+ 0x74, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20,
+ 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x1a, 0xd4, 0x01, 0x0a,
+ 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x5e, 0x0a, 0x12, 0x69, 0x6d, 0x70,
+ 0x6f, 0x72, 0x74, 0x65, 0x64, 0x5f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18,
+ 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x36, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
+ 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x52, 0x65,
+ 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x11, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64,
+ 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61,
+ 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15,
+ 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e,
+ 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69,
+ 0x63, 0x73, 0x12, 0x2f, 0x0a, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x18, 0x03,
+ 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36,
+ 0x2e, 0x44, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x52, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72,
+ 0x72, 0x65, 0x64, 0x22, 0xe7, 0x03, 0x0a, 0x11, 0x4d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x1a, 0xa8, 0x02, 0x0a, 0x07, 0x52, 0x65,
+ 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x36, 0x0a, 0x17, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f,
+ 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x5f, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73,
+ 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x15, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72,
+ 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x41, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x12, 0x28, 0x0a,
+ 0x10, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d,
+ 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54,
+ 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x32, 0x0a, 0x15, 0x73, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e,
+ 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x13, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63,
+ 0x68, 0x65, 0x6d, 0x61, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x36, 0x0a, 0x0c, 0x73,
+ 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28,
+ 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x52, 0x61,
+ 0x77, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x0b, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74,
+ 0x61, 0x74, 0x65, 0x12, 0x28, 0x0a, 0x10, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x74, 0x79,
+ 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x74,
+ 0x61, 0x72, 0x67, 0x65, 0x74, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x25, 0x0a,
+ 0x0e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18,
+ 0x06, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0d, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x69,
+ 0x76, 0x61, 0x74, 0x65, 0x1a, 0xa6, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
+ 0x65, 0x12, 0x3a, 0x0a, 0x0c, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x73, 0x74, 0x61, 0x74,
+ 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67,
+ 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65,
+ 0x52, 0x0b, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a,
+ 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03,
+ 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44,
+ 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e,
+ 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x25, 0x0a, 0x0e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74,
+ 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0d,
+ 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x22, 0x9e, 0x03,
+ 0x0a, 0x0e, 0x52, 0x65, 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65,
+ 0x1a, 0xe5, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09,
+ 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
+ 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e,
+ 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c,
+ 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x72,
+ 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28,
+ 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79,
+ 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x76,
+ 0x69, 0x64, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x4e, 0x0a, 0x13, 0x63, 0x6c, 0x69, 0x65,
+ 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18,
+ 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x36, 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69,
+ 0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61,
+ 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a, 0xa3, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73,
+ 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2d, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01,
+ 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36,
+ 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x73,
+ 0x74, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
+ 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63,
+ 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x2f, 0x0a,
+ 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32,
+ 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x65, 0x66, 0x65,
+ 0x72, 0x72, 0x65, 0x64, 0x52, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x22, 0x81,
+ 0x02, 0x0a, 0x0c, 0x47, 0x65, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a,
+ 0x09, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0xe5, 0x01, 0x0a, 0x08, 0x52,
+ 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4d, 0x0a, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74,
+ 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x74, 0x66, 0x70,
+ 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69,
+ 0x6f, 0x6e, 0x73, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x46, 0x75, 0x6e,
+ 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x66, 0x75, 0x6e,
+ 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f,
+ 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
+ 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x1a,
+ 0x51, 0x0a, 0x0e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72,
+ 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03,
+ 0x6b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01,
+ 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x46,
+ 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02,
+ 0x38, 0x01, 0x22, 0xd1, 0x01, 0x0a, 0x0c, 0x43, 0x61, 0x6c, 0x6c, 0x46, 0x75, 0x6e, 0x63, 0x74,
+ 0x69, 0x6f, 0x6e, 0x1a, 0x54, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12,
+ 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61,
+ 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18,
+ 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x09,
+ 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x1a, 0x6b, 0x0a, 0x08, 0x52, 0x65, 0x73,
+ 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18,
+ 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06,
+ 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x2e, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18,
+ 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x36, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52,
+ 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0xbf, 0x01, 0x0a, 0x1f, 0x56, 0x61, 0x6c, 0x69, 0x64,
+ 0x61, 0x74, 0x65, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f,
+ 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x57, 0x0a, 0x07, 0x52, 0x65,
+ 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61,
+ 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61,
+ 0x6d, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01,
+ 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44,
+ 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e,
+ 0x66, 0x69, 0x67, 0x1a, 0x43, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12,
+ 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x01,
+ 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36,
+ 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61,
+ 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x22, 0xdd, 0x03, 0x0a, 0x15, 0x4f, 0x70, 0x65,
+ 0x6e, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x1a, 0xa7, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b,
+ 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
+ 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63,
+ 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66,
+ 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56,
+ 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x4e, 0x0a, 0x13,
0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74,
0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c,
0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61,
0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x12, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74,
- 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a, 0x78, 0x0a, 0x10,
- 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
- 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20,
- 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x2d, 0x0a,
- 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63,
- 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x18, 0x0a, 0x07,
- 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70,
- 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x1a, 0xd4, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f,
- 0x6e, 0x73, 0x65, 0x12, 0x5e, 0x0a, 0x12, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x5f,
- 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32,
- 0x2f, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x49, 0x6d, 0x70, 0x6f,
- 0x72, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e,
- 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
- 0x52, 0x11, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x65, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72,
- 0x63, 0x65, 0x73, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69,
- 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x1a, 0x99, 0x02, 0x0a,
+ 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61,
+ 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15,
+ 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e,
+ 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69,
+ 0x63, 0x73, 0x12, 0x3a, 0x0a, 0x08, 0x72, 0x65, 0x6e, 0x65, 0x77, 0x5f, 0x61, 0x74, 0x18, 0x02,
+ 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72,
+ 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70,
+ 0x48, 0x00, 0x52, 0x07, 0x72, 0x65, 0x6e, 0x65, 0x77, 0x41, 0x74, 0x88, 0x01, 0x01, 0x12, 0x2f,
+ 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17,
+ 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d,
+ 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12,
+ 0x1d, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c,
+ 0x48, 0x01, 0x52, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x88, 0x01, 0x01, 0x12, 0x2f,
+ 0x0a, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b,
+ 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x65, 0x66,
+ 0x65, 0x72, 0x72, 0x65, 0x64, 0x52, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x42,
+ 0x0b, 0x0a, 0x09, 0x5f, 0x72, 0x65, 0x6e, 0x65, 0x77, 0x5f, 0x61, 0x74, 0x42, 0x0a, 0x0a, 0x08,
+ 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x22, 0xa5, 0x02, 0x0a, 0x16, 0x52, 0x65, 0x6e,
+ 0x65, 0x77, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75,
+ 0x72, 0x63, 0x65, 0x1a, 0x51, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b,
+ 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
+ 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x07, 0x70,
+ 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x07,
+ 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x70,
+ 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x1a, 0xb7, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f,
+ 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69,
+ 0x63, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52,
- 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x2f, 0x0a, 0x08,
- 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x65, 0x66, 0x65, 0x72,
- 0x72, 0x65, 0x64, 0x52, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x22, 0xe7, 0x03,
- 0x0a, 0x11, 0x4d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74,
- 0x61, 0x74, 0x65, 0x1a, 0xa8, 0x02, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
- 0x36, 0x0a, 0x17, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64,
- 0x65, 0x72, 0x5f, 0x61, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
- 0x52, 0x15, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72,
- 0x41, 0x64, 0x64, 0x72, 0x65, 0x73, 0x73, 0x12, 0x28, 0x0a, 0x10, 0x73, 0x6f, 0x75, 0x72, 0x63,
- 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28,
- 0x09, 0x52, 0x0e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d,
- 0x65, 0x12, 0x32, 0x0a, 0x15, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x63, 0x68, 0x65,
- 0x6d, 0x61, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03,
- 0x52, 0x13, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x56, 0x65,
- 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x36, 0x0a, 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f,
- 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66,
- 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x52, 0x61, 0x77, 0x53, 0x74, 0x61, 0x74, 0x65,
- 0x52, 0x0b, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x28, 0x0a,
- 0x10, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d,
- 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x54,
- 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x25, 0x0a, 0x0e, 0x73, 0x6f, 0x75, 0x72, 0x63,
- 0x65, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0c, 0x52,
- 0x0d, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x1a, 0xa6,
- 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3a, 0x0a, 0x0c, 0x74,
- 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
- 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79,
- 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0b, 0x74, 0x61, 0x72, 0x67,
- 0x65, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e,
- 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73,
- 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73,
- 0x12, 0x25, 0x0a, 0x0e, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61,
- 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0d, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74,
- 0x50, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x22, 0x9e, 0x03, 0x0a, 0x0e, 0x52, 0x65, 0x61, 0x64,
- 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x1a, 0xe5, 0x01, 0x0a, 0x07, 0x52,
+ 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x3a, 0x0a, 0x08,
+ 0x72, 0x65, 0x6e, 0x65, 0x77, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a,
+ 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66,
+ 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x48, 0x00, 0x52, 0x07, 0x72, 0x65,
+ 0x6e, 0x65, 0x77, 0x41, 0x74, 0x88, 0x01, 0x01, 0x12, 0x1d, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76,
+ 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x01, 0x52, 0x07, 0x70, 0x72, 0x69,
+ 0x76, 0x61, 0x74, 0x65, 0x88, 0x01, 0x01, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x72, 0x65, 0x6e, 0x65,
+ 0x77, 0x5f, 0x61, 0x74, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65,
+ 0x22, 0xb0, 0x01, 0x0a, 0x16, 0x43, 0x6c, 0x6f, 0x73, 0x65, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65,
+ 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x1a, 0x51, 0x0a, 0x07, 0x52,
0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x6e,
0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x79, 0x70, 0x65, 0x4e,
- 0x61, 0x6d, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20,
- 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e,
- 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x63, 0x6f,
- 0x6e, 0x66, 0x69, 0x67, 0x12, 0x3c, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72,
- 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x74, 0x66,
- 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d, 0x69, 0x63, 0x56,
- 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x4d, 0x65,
- 0x74, 0x61, 0x12, 0x4e, 0x0a, 0x13, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x63, 0x61, 0x70,
- 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32,
- 0x1d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x43, 0x6c, 0x69, 0x65,
- 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69, 0x65, 0x73, 0x52, 0x12,
- 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x43, 0x61, 0x70, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x69,
- 0x65, 0x73, 0x1a, 0xa3, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12,
- 0x2d, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61, 0x6d,
- 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x37,
- 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x02, 0x20,
- 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e,
- 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67,
- 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x12, 0x2f, 0x0a, 0x08, 0x64, 0x65, 0x66, 0x65, 0x72,
- 0x72, 0x65, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x66, 0x70, 0x6c,
- 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x52, 0x08,
- 0x64, 0x65, 0x66, 0x65, 0x72, 0x72, 0x65, 0x64, 0x22, 0x81, 0x02, 0x0a, 0x0c, 0x47, 0x65, 0x74,
- 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x09, 0x0a, 0x07, 0x52, 0x65, 0x71,
- 0x75, 0x65, 0x73, 0x74, 0x1a, 0xe5, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
- 0x65, 0x12, 0x4d, 0x0a, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01,
- 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36,
- 0x2e, 0x47, 0x65, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x52, 0x65,
- 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73,
- 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x66, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73,
- 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18,
- 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69,
- 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x1a, 0x51, 0x0a, 0x0e, 0x46, 0x75, 0x6e,
- 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b,
- 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x29, 0x0a,
- 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74,
- 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f,
- 0x6e, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xd1, 0x01, 0x0a,
- 0x0c, 0x43, 0x61, 0x6c, 0x6c, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0x54, 0x0a,
- 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65,
- 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x09,
- 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32,
- 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61,
- 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x09, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65,
- 0x6e, 0x74, 0x73, 0x1a, 0x6b, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12,
- 0x2f, 0x0a, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32,
- 0x17, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x79, 0x6e, 0x61,
- 0x6d, 0x69, 0x63, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74,
- 0x12, 0x2e, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32,
- 0x18, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x46, 0x75, 0x6e, 0x63,
- 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72,
- 0x2a, 0x25, 0x0a, 0x0a, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4b, 0x69, 0x6e, 0x64, 0x12, 0x09,
- 0x0a, 0x05, 0x50, 0x4c, 0x41, 0x49, 0x4e, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x4d, 0x41, 0x52,
- 0x4b, 0x44, 0x4f, 0x57, 0x4e, 0x10, 0x01, 0x32, 0xa4, 0x0c, 0x0a, 0x08, 0x50, 0x72, 0x6f, 0x76,
- 0x69, 0x64, 0x65, 0x72, 0x12, 0x4e, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64,
- 0x61, 0x74, 0x61, 0x12, 0x1e, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e,
- 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x52, 0x65, 0x71, 0x75,
- 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e,
- 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70,
- 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x60, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69,
- 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x24, 0x2e, 0x74, 0x66, 0x70, 0x6c,
- 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65,
- 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
- 0x25, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x47, 0x65, 0x74, 0x50,
- 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65,
- 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x6f, 0x0a, 0x16, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61,
- 0x74, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67,
- 0x12, 0x29, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x56, 0x61, 0x6c,
+ 0x61, 0x6d, 0x65, 0x12, 0x1d, 0x0a, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x18, 0x02,
+ 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x07, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x88,
+ 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x1a, 0x43,
+ 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x0b, 0x64, 0x69,
+ 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32,
+ 0x15, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x44, 0x69, 0x61, 0x67,
+ 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x52, 0x0b, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74,
+ 0x69, 0x63, 0x73, 0x2a, 0x25, 0x0a, 0x0a, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x4b, 0x69, 0x6e,
+ 0x64, 0x12, 0x09, 0x0a, 0x05, 0x50, 0x4c, 0x41, 0x49, 0x4e, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08,
+ 0x4d, 0x41, 0x52, 0x4b, 0x44, 0x4f, 0x57, 0x4e, 0x10, 0x01, 0x32, 0x81, 0x10, 0x0a, 0x08, 0x50,
+ 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x4e, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x4d, 0x65,
+ 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1e, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
+ 0x6e, 0x36, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x52,
+ 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
+ 0x6e, 0x36, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x52,
+ 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x60, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x50, 0x72,
+ 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x24, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76,
+ 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65,
+ 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x47,
+ 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61,
+ 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x6f, 0x0a, 0x16, 0x56, 0x61, 0x6c,
0x69, 0x64, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e,
- 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e, 0x74, 0x66,
- 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65,
- 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52,
- 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x6f, 0x0a, 0x16, 0x56, 0x61, 0x6c, 0x69, 0x64,
- 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69,
- 0x67, 0x12, 0x29, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x56, 0x61,
+ 0x66, 0x69, 0x67, 0x12, 0x29, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e,
+ 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72,
+ 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a,
+ 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64,
+ 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69,
+ 0x67, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x6f, 0x0a, 0x16, 0x56, 0x61,
0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f,
- 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e, 0x74,
+ 0x6e, 0x66, 0x69, 0x67, 0x12, 0x29, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36,
+ 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
+ 0x2a, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x56, 0x61, 0x6c, 0x69,
+ 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66,
+ 0x69, 0x67, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x7b, 0x0a, 0x1a, 0x56,
+ 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x6f, 0x75,
+ 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x2d, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x44, 0x61,
+ 0x74, 0x61, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67,
+ 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2e, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74,
+ 0x61, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e,
+ 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x69, 0x0a, 0x14, 0x55, 0x70, 0x67, 0x72,
+ 0x61, 0x64, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65,
+ 0x12, 0x27, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x55, 0x70, 0x67,
+ 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74,
+ 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x28, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x55, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x73,
+ 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f,
+ 0x6e, 0x73, 0x65, 0x12, 0x60, 0x0a, 0x11, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x65,
+ 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x24, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x65, 0x50, 0x72,
+ 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25,
+ 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69,
+ 0x67, 0x75, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73,
+ 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x51, 0x0a, 0x0c, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73,
+ 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1f, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x36, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52,
+ 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
+ 0x6e, 0x36, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e,
+ 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x63, 0x0a, 0x12, 0x50, 0x6c, 0x61, 0x6e,
+ 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x25,
+ 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52,
+ 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x2e, 0x52, 0x65,
+ 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x26, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
+ 0x36, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68,
+ 0x61, 0x6e, 0x67, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a,
+ 0x13, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68,
+ 0x61, 0x6e, 0x67, 0x65, 0x12, 0x26, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36,
+ 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68,
+ 0x61, 0x6e, 0x67, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65,
+ 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x2e, 0x52, 0x65, 0x73,
+ 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x13, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52,
+ 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x26, 0x2e, 0x74,
+ 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52,
+ 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65, 0x71,
+ 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36,
+ 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53,
+ 0x74, 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x60, 0x0a,
+ 0x11, 0x4d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61,
+ 0x74, 0x65, 0x12, 0x24, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x4d,
+ 0x6f, 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65,
+ 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x4d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12,
+ 0x57, 0x0a, 0x0e, 0x52, 0x65, 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x12, 0x21, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x52, 0x65,
+ 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x71,
+ 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36,
+ 0x2e, 0x52, 0x65, 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e,
+ 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x8a, 0x01, 0x0a, 0x1f, 0x56, 0x61, 0x6c,
+ 0x69, 0x64, 0x61, 0x74, 0x65, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65,
+ 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x32, 0x2e, 0x74,
0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74,
- 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e,
- 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x7b, 0x0a, 0x1a, 0x56, 0x61, 0x6c, 0x69,
- 0x64, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
- 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x2d, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x36, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x52,
- 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65,
- 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2e, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x36, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65,
+ 0x65, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72,
+ 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
+ 0x1a, 0x33, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x56, 0x61, 0x6c,
+ 0x69, 0x64, 0x61, 0x74, 0x65, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x73,
- 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x69, 0x0a, 0x14, 0x55, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65,
- 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x27, 0x2e,
- 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x55, 0x70, 0x67, 0x72, 0x61, 0x64,
- 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x52,
- 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x28, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69,
- 0x6e, 0x36, 0x2e, 0x55, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72,
- 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
- 0x12, 0x60, 0x0a, 0x11, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x65, 0x50, 0x72, 0x6f,
- 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x24, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x36, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69,
- 0x64, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x74, 0x66,
- 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72,
- 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
- 0x73, 0x65, 0x12, 0x51, 0x0a, 0x0c, 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72,
- 0x63, 0x65, 0x12, 0x1f, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x52,
- 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75,
- 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e,
- 0x52, 0x65, 0x61, 0x64, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x73,
- 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x63, 0x0a, 0x12, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73,
- 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x25, 0x2e, 0x74, 0x66,
- 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x6f,
- 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65,
- 0x73, 0x74, 0x1a, 0x26, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x50,
- 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67,
- 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x66, 0x0a, 0x13, 0x41, 0x70,
- 0x70, 0x6c, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67,
- 0x65, 0x12, 0x26, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x41, 0x70,
- 0x70, 0x6c, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67,
- 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x74, 0x66, 0x70, 0x6c,
- 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x73, 0x6f, 0x75,
- 0x72, 0x63, 0x65, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
- 0x73, 0x65, 0x12, 0x66, 0x0a, 0x13, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x6f,
- 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x26, 0x2e, 0x74, 0x66, 0x70, 0x6c,
- 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x6f,
- 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
- 0x74, 0x1a, 0x27, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x49, 0x6d,
- 0x70, 0x6f, 0x72, 0x74, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74,
- 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x60, 0x0a, 0x11, 0x4d, 0x6f,
- 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12,
- 0x24, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x4d, 0x6f, 0x76, 0x65,
- 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65,
- 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x25, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
- 0x36, 0x2e, 0x4d, 0x6f, 0x76, 0x65, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74,
- 0x61, 0x74, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x57, 0x0a, 0x0e,
- 0x52, 0x65, 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x21,
- 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x52, 0x65, 0x61, 0x64, 0x44,
- 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
- 0x74, 0x1a, 0x22, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x52, 0x65,
- 0x61, 0x64, 0x44, 0x61, 0x74, 0x61, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x73,
+ 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x6c, 0x0a, 0x15, 0x4f, 0x70, 0x65, 0x6e, 0x45, 0x70, 0x68,
+ 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x28,
+ 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x4f, 0x70, 0x65, 0x6e, 0x45,
+ 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
+ 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x29, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x4f, 0x70, 0x65, 0x6e, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72,
+ 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f,
+ 0x6e, 0x73, 0x65, 0x12, 0x6f, 0x0a, 0x16, 0x52, 0x65, 0x6e, 0x65, 0x77, 0x45, 0x70, 0x68, 0x65,
+ 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x29, 0x2e,
+ 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x52, 0x65, 0x6e, 0x65, 0x77, 0x45,
+ 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65,
+ 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75,
+ 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x52, 0x65, 0x6e, 0x65, 0x77, 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65,
+ 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70,
+ 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x6f, 0x0a, 0x16, 0x43, 0x6c, 0x6f, 0x73, 0x65, 0x45, 0x70, 0x68,
+ 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x29,
+ 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x43, 0x6c, 0x6f, 0x73, 0x65,
+ 0x45, 0x70, 0x68, 0x65, 0x6d, 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
+ 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e, 0x74, 0x66, 0x70, 0x6c,
+ 0x75, 0x67, 0x69, 0x6e, 0x36, 0x2e, 0x43, 0x6c, 0x6f, 0x73, 0x65, 0x45, 0x70, 0x68, 0x65, 0x6d,
+ 0x65, 0x72, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x52, 0x65, 0x73,
0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x51, 0x0a, 0x0c, 0x47, 0x65, 0x74, 0x46, 0x75, 0x6e, 0x63,
0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1f, 0x2e, 0x74, 0x66, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e,
0x36, 0x2e, 0x47, 0x65, 0x74, 0x46, 0x75, 0x6e, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x52,
@@ -5113,214 +5748,252 @@ func file_tfplugin6_proto_rawDescGZIP() []byte {
}
var file_tfplugin6_proto_enumTypes = make([]protoimpl.EnumInfo, 5)
-var file_tfplugin6_proto_msgTypes = make([]protoimpl.MessageInfo, 74)
-var file_tfplugin6_proto_goTypes = []interface{}{
- (StringKind)(0), // 0: tfplugin6.StringKind
- (Diagnostic_Severity)(0), // 1: tfplugin6.Diagnostic.Severity
- (Schema_NestedBlock_NestingMode)(0), // 2: tfplugin6.Schema.NestedBlock.NestingMode
- (Schema_Object_NestingMode)(0), // 3: tfplugin6.Schema.Object.NestingMode
- (Deferred_Reason)(0), // 4: tfplugin6.Deferred.Reason
- (*DynamicValue)(nil), // 5: tfplugin6.DynamicValue
- (*Diagnostic)(nil), // 6: tfplugin6.Diagnostic
- (*FunctionError)(nil), // 7: tfplugin6.FunctionError
- (*AttributePath)(nil), // 8: tfplugin6.AttributePath
- (*StopProvider)(nil), // 9: tfplugin6.StopProvider
- (*RawState)(nil), // 10: tfplugin6.RawState
- (*Schema)(nil), // 11: tfplugin6.Schema
- (*Function)(nil), // 12: tfplugin6.Function
- (*ServerCapabilities)(nil), // 13: tfplugin6.ServerCapabilities
- (*ClientCapabilities)(nil), // 14: tfplugin6.ClientCapabilities
- (*Deferred)(nil), // 15: tfplugin6.Deferred
- (*GetMetadata)(nil), // 16: tfplugin6.GetMetadata
- (*GetProviderSchema)(nil), // 17: tfplugin6.GetProviderSchema
- (*ValidateProviderConfig)(nil), // 18: tfplugin6.ValidateProviderConfig
- (*UpgradeResourceState)(nil), // 19: tfplugin6.UpgradeResourceState
- (*ValidateResourceConfig)(nil), // 20: tfplugin6.ValidateResourceConfig
- (*ValidateDataResourceConfig)(nil), // 21: tfplugin6.ValidateDataResourceConfig
- (*ConfigureProvider)(nil), // 22: tfplugin6.ConfigureProvider
- (*ReadResource)(nil), // 23: tfplugin6.ReadResource
- (*PlanResourceChange)(nil), // 24: tfplugin6.PlanResourceChange
- (*ApplyResourceChange)(nil), // 25: tfplugin6.ApplyResourceChange
- (*ImportResourceState)(nil), // 26: tfplugin6.ImportResourceState
- (*MoveResourceState)(nil), // 27: tfplugin6.MoveResourceState
- (*ReadDataSource)(nil), // 28: tfplugin6.ReadDataSource
- (*GetFunctions)(nil), // 29: tfplugin6.GetFunctions
- (*CallFunction)(nil), // 30: tfplugin6.CallFunction
- (*AttributePath_Step)(nil), // 31: tfplugin6.AttributePath.Step
- (*StopProvider_Request)(nil), // 32: tfplugin6.StopProvider.Request
- (*StopProvider_Response)(nil), // 33: tfplugin6.StopProvider.Response
- nil, // 34: tfplugin6.RawState.FlatmapEntry
- (*Schema_Block)(nil), // 35: tfplugin6.Schema.Block
- (*Schema_Attribute)(nil), // 36: tfplugin6.Schema.Attribute
- (*Schema_NestedBlock)(nil), // 37: tfplugin6.Schema.NestedBlock
- (*Schema_Object)(nil), // 38: tfplugin6.Schema.Object
- (*Function_Parameter)(nil), // 39: tfplugin6.Function.Parameter
- (*Function_Return)(nil), // 40: tfplugin6.Function.Return
- (*GetMetadata_Request)(nil), // 41: tfplugin6.GetMetadata.Request
- (*GetMetadata_Response)(nil), // 42: tfplugin6.GetMetadata.Response
- (*GetMetadata_FunctionMetadata)(nil), // 43: tfplugin6.GetMetadata.FunctionMetadata
- (*GetMetadata_DataSourceMetadata)(nil), // 44: tfplugin6.GetMetadata.DataSourceMetadata
- (*GetMetadata_ResourceMetadata)(nil), // 45: tfplugin6.GetMetadata.ResourceMetadata
- (*GetProviderSchema_Request)(nil), // 46: tfplugin6.GetProviderSchema.Request
- (*GetProviderSchema_Response)(nil), // 47: tfplugin6.GetProviderSchema.Response
- nil, // 48: tfplugin6.GetProviderSchema.Response.ResourceSchemasEntry
- nil, // 49: tfplugin6.GetProviderSchema.Response.DataSourceSchemasEntry
- nil, // 50: tfplugin6.GetProviderSchema.Response.FunctionsEntry
- (*ValidateProviderConfig_Request)(nil), // 51: tfplugin6.ValidateProviderConfig.Request
- (*ValidateProviderConfig_Response)(nil), // 52: tfplugin6.ValidateProviderConfig.Response
- (*UpgradeResourceState_Request)(nil), // 53: tfplugin6.UpgradeResourceState.Request
- (*UpgradeResourceState_Response)(nil), // 54: tfplugin6.UpgradeResourceState.Response
- (*ValidateResourceConfig_Request)(nil), // 55: tfplugin6.ValidateResourceConfig.Request
- (*ValidateResourceConfig_Response)(nil), // 56: tfplugin6.ValidateResourceConfig.Response
- (*ValidateDataResourceConfig_Request)(nil), // 57: tfplugin6.ValidateDataResourceConfig.Request
- (*ValidateDataResourceConfig_Response)(nil), // 58: tfplugin6.ValidateDataResourceConfig.Response
- (*ConfigureProvider_Request)(nil), // 59: tfplugin6.ConfigureProvider.Request
- (*ConfigureProvider_Response)(nil), // 60: tfplugin6.ConfigureProvider.Response
- (*ReadResource_Request)(nil), // 61: tfplugin6.ReadResource.Request
- (*ReadResource_Response)(nil), // 62: tfplugin6.ReadResource.Response
- (*PlanResourceChange_Request)(nil), // 63: tfplugin6.PlanResourceChange.Request
- (*PlanResourceChange_Response)(nil), // 64: tfplugin6.PlanResourceChange.Response
- (*ApplyResourceChange_Request)(nil), // 65: tfplugin6.ApplyResourceChange.Request
- (*ApplyResourceChange_Response)(nil), // 66: tfplugin6.ApplyResourceChange.Response
- (*ImportResourceState_Request)(nil), // 67: tfplugin6.ImportResourceState.Request
- (*ImportResourceState_ImportedResource)(nil), // 68: tfplugin6.ImportResourceState.ImportedResource
- (*ImportResourceState_Response)(nil), // 69: tfplugin6.ImportResourceState.Response
- (*MoveResourceState_Request)(nil), // 70: tfplugin6.MoveResourceState.Request
- (*MoveResourceState_Response)(nil), // 71: tfplugin6.MoveResourceState.Response
- (*ReadDataSource_Request)(nil), // 72: tfplugin6.ReadDataSource.Request
- (*ReadDataSource_Response)(nil), // 73: tfplugin6.ReadDataSource.Response
- (*GetFunctions_Request)(nil), // 74: tfplugin6.GetFunctions.Request
- (*GetFunctions_Response)(nil), // 75: tfplugin6.GetFunctions.Response
- nil, // 76: tfplugin6.GetFunctions.Response.FunctionsEntry
- (*CallFunction_Request)(nil), // 77: tfplugin6.CallFunction.Request
- (*CallFunction_Response)(nil), // 78: tfplugin6.CallFunction.Response
+var file_tfplugin6_proto_msgTypes = make([]protoimpl.MessageInfo, 88)
+var file_tfplugin6_proto_goTypes = []any{
+ (StringKind)(0), // 0: tfplugin6.StringKind
+ (Diagnostic_Severity)(0), // 1: tfplugin6.Diagnostic.Severity
+ (Schema_NestedBlock_NestingMode)(0), // 2: tfplugin6.Schema.NestedBlock.NestingMode
+ (Schema_Object_NestingMode)(0), // 3: tfplugin6.Schema.Object.NestingMode
+ (Deferred_Reason)(0), // 4: tfplugin6.Deferred.Reason
+ (*DynamicValue)(nil), // 5: tfplugin6.DynamicValue
+ (*Diagnostic)(nil), // 6: tfplugin6.Diagnostic
+ (*FunctionError)(nil), // 7: tfplugin6.FunctionError
+ (*AttributePath)(nil), // 8: tfplugin6.AttributePath
+ (*StopProvider)(nil), // 9: tfplugin6.StopProvider
+ (*RawState)(nil), // 10: tfplugin6.RawState
+ (*Schema)(nil), // 11: tfplugin6.Schema
+ (*Function)(nil), // 12: tfplugin6.Function
+ (*ServerCapabilities)(nil), // 13: tfplugin6.ServerCapabilities
+ (*ClientCapabilities)(nil), // 14: tfplugin6.ClientCapabilities
+ (*Deferred)(nil), // 15: tfplugin6.Deferred
+ (*GetMetadata)(nil), // 16: tfplugin6.GetMetadata
+ (*GetProviderSchema)(nil), // 17: tfplugin6.GetProviderSchema
+ (*ValidateProviderConfig)(nil), // 18: tfplugin6.ValidateProviderConfig
+ (*UpgradeResourceState)(nil), // 19: tfplugin6.UpgradeResourceState
+ (*ValidateResourceConfig)(nil), // 20: tfplugin6.ValidateResourceConfig
+ (*ValidateDataResourceConfig)(nil), // 21: tfplugin6.ValidateDataResourceConfig
+ (*ConfigureProvider)(nil), // 22: tfplugin6.ConfigureProvider
+ (*ReadResource)(nil), // 23: tfplugin6.ReadResource
+ (*PlanResourceChange)(nil), // 24: tfplugin6.PlanResourceChange
+ (*ApplyResourceChange)(nil), // 25: tfplugin6.ApplyResourceChange
+ (*ImportResourceState)(nil), // 26: tfplugin6.ImportResourceState
+ (*MoveResourceState)(nil), // 27: tfplugin6.MoveResourceState
+ (*ReadDataSource)(nil), // 28: tfplugin6.ReadDataSource
+ (*GetFunctions)(nil), // 29: tfplugin6.GetFunctions
+ (*CallFunction)(nil), // 30: tfplugin6.CallFunction
+ (*ValidateEphemeralResourceConfig)(nil), // 31: tfplugin6.ValidateEphemeralResourceConfig
+ (*OpenEphemeralResource)(nil), // 32: tfplugin6.OpenEphemeralResource
+ (*RenewEphemeralResource)(nil), // 33: tfplugin6.RenewEphemeralResource
+ (*CloseEphemeralResource)(nil), // 34: tfplugin6.CloseEphemeralResource
+ (*AttributePath_Step)(nil), // 35: tfplugin6.AttributePath.Step
+ (*StopProvider_Request)(nil), // 36: tfplugin6.StopProvider.Request
+ (*StopProvider_Response)(nil), // 37: tfplugin6.StopProvider.Response
+ nil, // 38: tfplugin6.RawState.FlatmapEntry
+ (*Schema_Block)(nil), // 39: tfplugin6.Schema.Block
+ (*Schema_Attribute)(nil), // 40: tfplugin6.Schema.Attribute
+ (*Schema_NestedBlock)(nil), // 41: tfplugin6.Schema.NestedBlock
+ (*Schema_Object)(nil), // 42: tfplugin6.Schema.Object
+ (*Function_Parameter)(nil), // 43: tfplugin6.Function.Parameter
+ (*Function_Return)(nil), // 44: tfplugin6.Function.Return
+ (*GetMetadata_Request)(nil), // 45: tfplugin6.GetMetadata.Request
+ (*GetMetadata_Response)(nil), // 46: tfplugin6.GetMetadata.Response
+ (*GetMetadata_FunctionMetadata)(nil), // 47: tfplugin6.GetMetadata.FunctionMetadata
+ (*GetMetadata_DataSourceMetadata)(nil), // 48: tfplugin6.GetMetadata.DataSourceMetadata
+ (*GetMetadata_ResourceMetadata)(nil), // 49: tfplugin6.GetMetadata.ResourceMetadata
+ (*GetMetadata_EphemeralResourceMetadata)(nil), // 50: tfplugin6.GetMetadata.EphemeralResourceMetadata
+ (*GetProviderSchema_Request)(nil), // 51: tfplugin6.GetProviderSchema.Request
+ (*GetProviderSchema_Response)(nil), // 52: tfplugin6.GetProviderSchema.Response
+ nil, // 53: tfplugin6.GetProviderSchema.Response.ResourceSchemasEntry
+ nil, // 54: tfplugin6.GetProviderSchema.Response.DataSourceSchemasEntry
+ nil, // 55: tfplugin6.GetProviderSchema.Response.FunctionsEntry
+ nil, // 56: tfplugin6.GetProviderSchema.Response.EphemeralResourceSchemasEntry
+ (*ValidateProviderConfig_Request)(nil), // 57: tfplugin6.ValidateProviderConfig.Request
+ (*ValidateProviderConfig_Response)(nil), // 58: tfplugin6.ValidateProviderConfig.Response
+ (*UpgradeResourceState_Request)(nil), // 59: tfplugin6.UpgradeResourceState.Request
+ (*UpgradeResourceState_Response)(nil), // 60: tfplugin6.UpgradeResourceState.Response
+ (*ValidateResourceConfig_Request)(nil), // 61: tfplugin6.ValidateResourceConfig.Request
+ (*ValidateResourceConfig_Response)(nil), // 62: tfplugin6.ValidateResourceConfig.Response
+ (*ValidateDataResourceConfig_Request)(nil), // 63: tfplugin6.ValidateDataResourceConfig.Request
+ (*ValidateDataResourceConfig_Response)(nil), // 64: tfplugin6.ValidateDataResourceConfig.Response
+ (*ConfigureProvider_Request)(nil), // 65: tfplugin6.ConfigureProvider.Request
+ (*ConfigureProvider_Response)(nil), // 66: tfplugin6.ConfigureProvider.Response
+ (*ReadResource_Request)(nil), // 67: tfplugin6.ReadResource.Request
+ (*ReadResource_Response)(nil), // 68: tfplugin6.ReadResource.Response
+ (*PlanResourceChange_Request)(nil), // 69: tfplugin6.PlanResourceChange.Request
+ (*PlanResourceChange_Response)(nil), // 70: tfplugin6.PlanResourceChange.Response
+ (*ApplyResourceChange_Request)(nil), // 71: tfplugin6.ApplyResourceChange.Request
+ (*ApplyResourceChange_Response)(nil), // 72: tfplugin6.ApplyResourceChange.Response
+ (*ImportResourceState_Request)(nil), // 73: tfplugin6.ImportResourceState.Request
+ (*ImportResourceState_ImportedResource)(nil), // 74: tfplugin6.ImportResourceState.ImportedResource
+ (*ImportResourceState_Response)(nil), // 75: tfplugin6.ImportResourceState.Response
+ (*MoveResourceState_Request)(nil), // 76: tfplugin6.MoveResourceState.Request
+ (*MoveResourceState_Response)(nil), // 77: tfplugin6.MoveResourceState.Response
+ (*ReadDataSource_Request)(nil), // 78: tfplugin6.ReadDataSource.Request
+ (*ReadDataSource_Response)(nil), // 79: tfplugin6.ReadDataSource.Response
+ (*GetFunctions_Request)(nil), // 80: tfplugin6.GetFunctions.Request
+ (*GetFunctions_Response)(nil), // 81: tfplugin6.GetFunctions.Response
+ nil, // 82: tfplugin6.GetFunctions.Response.FunctionsEntry
+ (*CallFunction_Request)(nil), // 83: tfplugin6.CallFunction.Request
+ (*CallFunction_Response)(nil), // 84: tfplugin6.CallFunction.Response
+ (*ValidateEphemeralResourceConfig_Request)(nil), // 85: tfplugin6.ValidateEphemeralResourceConfig.Request
+ (*ValidateEphemeralResourceConfig_Response)(nil), // 86: tfplugin6.ValidateEphemeralResourceConfig.Response
+ (*OpenEphemeralResource_Request)(nil), // 87: tfplugin6.OpenEphemeralResource.Request
+ (*OpenEphemeralResource_Response)(nil), // 88: tfplugin6.OpenEphemeralResource.Response
+ (*RenewEphemeralResource_Request)(nil), // 89: tfplugin6.RenewEphemeralResource.Request
+ (*RenewEphemeralResource_Response)(nil), // 90: tfplugin6.RenewEphemeralResource.Response
+ (*CloseEphemeralResource_Request)(nil), // 91: tfplugin6.CloseEphemeralResource.Request
+ (*CloseEphemeralResource_Response)(nil), // 92: tfplugin6.CloseEphemeralResource.Response
+ (*timestamppb.Timestamp)(nil), // 93: google.protobuf.Timestamp
}
var file_tfplugin6_proto_depIdxs = []int32{
1, // 0: tfplugin6.Diagnostic.severity:type_name -> tfplugin6.Diagnostic.Severity
8, // 1: tfplugin6.Diagnostic.attribute:type_name -> tfplugin6.AttributePath
- 31, // 2: tfplugin6.AttributePath.steps:type_name -> tfplugin6.AttributePath.Step
- 34, // 3: tfplugin6.RawState.flatmap:type_name -> tfplugin6.RawState.FlatmapEntry
- 35, // 4: tfplugin6.Schema.block:type_name -> tfplugin6.Schema.Block
- 39, // 5: tfplugin6.Function.parameters:type_name -> tfplugin6.Function.Parameter
- 39, // 6: tfplugin6.Function.variadic_parameter:type_name -> tfplugin6.Function.Parameter
- 40, // 7: tfplugin6.Function.return:type_name -> tfplugin6.Function.Return
+ 35, // 2: tfplugin6.AttributePath.steps:type_name -> tfplugin6.AttributePath.Step
+ 38, // 3: tfplugin6.RawState.flatmap:type_name -> tfplugin6.RawState.FlatmapEntry
+ 39, // 4: tfplugin6.Schema.block:type_name -> tfplugin6.Schema.Block
+ 43, // 5: tfplugin6.Function.parameters:type_name -> tfplugin6.Function.Parameter
+ 43, // 6: tfplugin6.Function.variadic_parameter:type_name -> tfplugin6.Function.Parameter
+ 44, // 7: tfplugin6.Function.return:type_name -> tfplugin6.Function.Return
0, // 8: tfplugin6.Function.description_kind:type_name -> tfplugin6.StringKind
4, // 9: tfplugin6.Deferred.reason:type_name -> tfplugin6.Deferred.Reason
- 36, // 10: tfplugin6.Schema.Block.attributes:type_name -> tfplugin6.Schema.Attribute
- 37, // 11: tfplugin6.Schema.Block.block_types:type_name -> tfplugin6.Schema.NestedBlock
+ 40, // 10: tfplugin6.Schema.Block.attributes:type_name -> tfplugin6.Schema.Attribute
+ 41, // 11: tfplugin6.Schema.Block.block_types:type_name -> tfplugin6.Schema.NestedBlock
0, // 12: tfplugin6.Schema.Block.description_kind:type_name -> tfplugin6.StringKind
- 38, // 13: tfplugin6.Schema.Attribute.nested_type:type_name -> tfplugin6.Schema.Object
+ 42, // 13: tfplugin6.Schema.Attribute.nested_type:type_name -> tfplugin6.Schema.Object
0, // 14: tfplugin6.Schema.Attribute.description_kind:type_name -> tfplugin6.StringKind
- 35, // 15: tfplugin6.Schema.NestedBlock.block:type_name -> tfplugin6.Schema.Block
+ 39, // 15: tfplugin6.Schema.NestedBlock.block:type_name -> tfplugin6.Schema.Block
2, // 16: tfplugin6.Schema.NestedBlock.nesting:type_name -> tfplugin6.Schema.NestedBlock.NestingMode
- 36, // 17: tfplugin6.Schema.Object.attributes:type_name -> tfplugin6.Schema.Attribute
+ 40, // 17: tfplugin6.Schema.Object.attributes:type_name -> tfplugin6.Schema.Attribute
3, // 18: tfplugin6.Schema.Object.nesting:type_name -> tfplugin6.Schema.Object.NestingMode
0, // 19: tfplugin6.Function.Parameter.description_kind:type_name -> tfplugin6.StringKind
13, // 20: tfplugin6.GetMetadata.Response.server_capabilities:type_name -> tfplugin6.ServerCapabilities
6, // 21: tfplugin6.GetMetadata.Response.diagnostics:type_name -> tfplugin6.Diagnostic
- 44, // 22: tfplugin6.GetMetadata.Response.data_sources:type_name -> tfplugin6.GetMetadata.DataSourceMetadata
- 45, // 23: tfplugin6.GetMetadata.Response.resources:type_name -> tfplugin6.GetMetadata.ResourceMetadata
- 43, // 24: tfplugin6.GetMetadata.Response.functions:type_name -> tfplugin6.GetMetadata.FunctionMetadata
- 11, // 25: tfplugin6.GetProviderSchema.Response.provider:type_name -> tfplugin6.Schema
- 48, // 26: tfplugin6.GetProviderSchema.Response.resource_schemas:type_name -> tfplugin6.GetProviderSchema.Response.ResourceSchemasEntry
- 49, // 27: tfplugin6.GetProviderSchema.Response.data_source_schemas:type_name -> tfplugin6.GetProviderSchema.Response.DataSourceSchemasEntry
- 6, // 28: tfplugin6.GetProviderSchema.Response.diagnostics:type_name -> tfplugin6.Diagnostic
- 11, // 29: tfplugin6.GetProviderSchema.Response.provider_meta:type_name -> tfplugin6.Schema
- 13, // 30: tfplugin6.GetProviderSchema.Response.server_capabilities:type_name -> tfplugin6.ServerCapabilities
- 50, // 31: tfplugin6.GetProviderSchema.Response.functions:type_name -> tfplugin6.GetProviderSchema.Response.FunctionsEntry
- 11, // 32: tfplugin6.GetProviderSchema.Response.ResourceSchemasEntry.value:type_name -> tfplugin6.Schema
- 11, // 33: tfplugin6.GetProviderSchema.Response.DataSourceSchemasEntry.value:type_name -> tfplugin6.Schema
- 12, // 34: tfplugin6.GetProviderSchema.Response.FunctionsEntry.value:type_name -> tfplugin6.Function
- 5, // 35: tfplugin6.ValidateProviderConfig.Request.config:type_name -> tfplugin6.DynamicValue
- 6, // 36: tfplugin6.ValidateProviderConfig.Response.diagnostics:type_name -> tfplugin6.Diagnostic
- 10, // 37: tfplugin6.UpgradeResourceState.Request.raw_state:type_name -> tfplugin6.RawState
- 5, // 38: tfplugin6.UpgradeResourceState.Response.upgraded_state:type_name -> tfplugin6.DynamicValue
- 6, // 39: tfplugin6.UpgradeResourceState.Response.diagnostics:type_name -> tfplugin6.Diagnostic
- 5, // 40: tfplugin6.ValidateResourceConfig.Request.config:type_name -> tfplugin6.DynamicValue
- 6, // 41: tfplugin6.ValidateResourceConfig.Response.diagnostics:type_name -> tfplugin6.Diagnostic
- 5, // 42: tfplugin6.ValidateDataResourceConfig.Request.config:type_name -> tfplugin6.DynamicValue
- 6, // 43: tfplugin6.ValidateDataResourceConfig.Response.diagnostics:type_name -> tfplugin6.Diagnostic
- 5, // 44: tfplugin6.ConfigureProvider.Request.config:type_name -> tfplugin6.DynamicValue
- 14, // 45: tfplugin6.ConfigureProvider.Request.client_capabilities:type_name -> tfplugin6.ClientCapabilities
- 6, // 46: tfplugin6.ConfigureProvider.Response.diagnostics:type_name -> tfplugin6.Diagnostic
- 5, // 47: tfplugin6.ReadResource.Request.current_state:type_name -> tfplugin6.DynamicValue
- 5, // 48: tfplugin6.ReadResource.Request.provider_meta:type_name -> tfplugin6.DynamicValue
- 14, // 49: tfplugin6.ReadResource.Request.client_capabilities:type_name -> tfplugin6.ClientCapabilities
- 5, // 50: tfplugin6.ReadResource.Response.new_state:type_name -> tfplugin6.DynamicValue
- 6, // 51: tfplugin6.ReadResource.Response.diagnostics:type_name -> tfplugin6.Diagnostic
- 15, // 52: tfplugin6.ReadResource.Response.deferred:type_name -> tfplugin6.Deferred
- 5, // 53: tfplugin6.PlanResourceChange.Request.prior_state:type_name -> tfplugin6.DynamicValue
- 5, // 54: tfplugin6.PlanResourceChange.Request.proposed_new_state:type_name -> tfplugin6.DynamicValue
- 5, // 55: tfplugin6.PlanResourceChange.Request.config:type_name -> tfplugin6.DynamicValue
- 5, // 56: tfplugin6.PlanResourceChange.Request.provider_meta:type_name -> tfplugin6.DynamicValue
- 14, // 57: tfplugin6.PlanResourceChange.Request.client_capabilities:type_name -> tfplugin6.ClientCapabilities
- 5, // 58: tfplugin6.PlanResourceChange.Response.planned_state:type_name -> tfplugin6.DynamicValue
- 8, // 59: tfplugin6.PlanResourceChange.Response.requires_replace:type_name -> tfplugin6.AttributePath
- 6, // 60: tfplugin6.PlanResourceChange.Response.diagnostics:type_name -> tfplugin6.Diagnostic
- 15, // 61: tfplugin6.PlanResourceChange.Response.deferred:type_name -> tfplugin6.Deferred
- 5, // 62: tfplugin6.ApplyResourceChange.Request.prior_state:type_name -> tfplugin6.DynamicValue
- 5, // 63: tfplugin6.ApplyResourceChange.Request.planned_state:type_name -> tfplugin6.DynamicValue
- 5, // 64: tfplugin6.ApplyResourceChange.Request.config:type_name -> tfplugin6.DynamicValue
- 5, // 65: tfplugin6.ApplyResourceChange.Request.provider_meta:type_name -> tfplugin6.DynamicValue
- 5, // 66: tfplugin6.ApplyResourceChange.Response.new_state:type_name -> tfplugin6.DynamicValue
- 6, // 67: tfplugin6.ApplyResourceChange.Response.diagnostics:type_name -> tfplugin6.Diagnostic
- 14, // 68: tfplugin6.ImportResourceState.Request.client_capabilities:type_name -> tfplugin6.ClientCapabilities
- 5, // 69: tfplugin6.ImportResourceState.ImportedResource.state:type_name -> tfplugin6.DynamicValue
- 68, // 70: tfplugin6.ImportResourceState.Response.imported_resources:type_name -> tfplugin6.ImportResourceState.ImportedResource
- 6, // 71: tfplugin6.ImportResourceState.Response.diagnostics:type_name -> tfplugin6.Diagnostic
- 15, // 72: tfplugin6.ImportResourceState.Response.deferred:type_name -> tfplugin6.Deferred
- 10, // 73: tfplugin6.MoveResourceState.Request.source_state:type_name -> tfplugin6.RawState
- 5, // 74: tfplugin6.MoveResourceState.Response.target_state:type_name -> tfplugin6.DynamicValue
- 6, // 75: tfplugin6.MoveResourceState.Response.diagnostics:type_name -> tfplugin6.Diagnostic
- 5, // 76: tfplugin6.ReadDataSource.Request.config:type_name -> tfplugin6.DynamicValue
- 5, // 77: tfplugin6.ReadDataSource.Request.provider_meta:type_name -> tfplugin6.DynamicValue
- 14, // 78: tfplugin6.ReadDataSource.Request.client_capabilities:type_name -> tfplugin6.ClientCapabilities
- 5, // 79: tfplugin6.ReadDataSource.Response.state:type_name -> tfplugin6.DynamicValue
- 6, // 80: tfplugin6.ReadDataSource.Response.diagnostics:type_name -> tfplugin6.Diagnostic
- 15, // 81: tfplugin6.ReadDataSource.Response.deferred:type_name -> tfplugin6.Deferred
- 76, // 82: tfplugin6.GetFunctions.Response.functions:type_name -> tfplugin6.GetFunctions.Response.FunctionsEntry
- 6, // 83: tfplugin6.GetFunctions.Response.diagnostics:type_name -> tfplugin6.Diagnostic
- 12, // 84: tfplugin6.GetFunctions.Response.FunctionsEntry.value:type_name -> tfplugin6.Function
- 5, // 85: tfplugin6.CallFunction.Request.arguments:type_name -> tfplugin6.DynamicValue
- 5, // 86: tfplugin6.CallFunction.Response.result:type_name -> tfplugin6.DynamicValue
- 7, // 87: tfplugin6.CallFunction.Response.error:type_name -> tfplugin6.FunctionError
- 41, // 88: tfplugin6.Provider.GetMetadata:input_type -> tfplugin6.GetMetadata.Request
- 46, // 89: tfplugin6.Provider.GetProviderSchema:input_type -> tfplugin6.GetProviderSchema.Request
- 51, // 90: tfplugin6.Provider.ValidateProviderConfig:input_type -> tfplugin6.ValidateProviderConfig.Request
- 55, // 91: tfplugin6.Provider.ValidateResourceConfig:input_type -> tfplugin6.ValidateResourceConfig.Request
- 57, // 92: tfplugin6.Provider.ValidateDataResourceConfig:input_type -> tfplugin6.ValidateDataResourceConfig.Request
- 53, // 93: tfplugin6.Provider.UpgradeResourceState:input_type -> tfplugin6.UpgradeResourceState.Request
- 59, // 94: tfplugin6.Provider.ConfigureProvider:input_type -> tfplugin6.ConfigureProvider.Request
- 61, // 95: tfplugin6.Provider.ReadResource:input_type -> tfplugin6.ReadResource.Request
- 63, // 96: tfplugin6.Provider.PlanResourceChange:input_type -> tfplugin6.PlanResourceChange.Request
- 65, // 97: tfplugin6.Provider.ApplyResourceChange:input_type -> tfplugin6.ApplyResourceChange.Request
- 67, // 98: tfplugin6.Provider.ImportResourceState:input_type -> tfplugin6.ImportResourceState.Request
- 70, // 99: tfplugin6.Provider.MoveResourceState:input_type -> tfplugin6.MoveResourceState.Request
- 72, // 100: tfplugin6.Provider.ReadDataSource:input_type -> tfplugin6.ReadDataSource.Request
- 74, // 101: tfplugin6.Provider.GetFunctions:input_type -> tfplugin6.GetFunctions.Request
- 77, // 102: tfplugin6.Provider.CallFunction:input_type -> tfplugin6.CallFunction.Request
- 32, // 103: tfplugin6.Provider.StopProvider:input_type -> tfplugin6.StopProvider.Request
- 42, // 104: tfplugin6.Provider.GetMetadata:output_type -> tfplugin6.GetMetadata.Response
- 47, // 105: tfplugin6.Provider.GetProviderSchema:output_type -> tfplugin6.GetProviderSchema.Response
- 52, // 106: tfplugin6.Provider.ValidateProviderConfig:output_type -> tfplugin6.ValidateProviderConfig.Response
- 56, // 107: tfplugin6.Provider.ValidateResourceConfig:output_type -> tfplugin6.ValidateResourceConfig.Response
- 58, // 108: tfplugin6.Provider.ValidateDataResourceConfig:output_type -> tfplugin6.ValidateDataResourceConfig.Response
- 54, // 109: tfplugin6.Provider.UpgradeResourceState:output_type -> tfplugin6.UpgradeResourceState.Response
- 60, // 110: tfplugin6.Provider.ConfigureProvider:output_type -> tfplugin6.ConfigureProvider.Response
- 62, // 111: tfplugin6.Provider.ReadResource:output_type -> tfplugin6.ReadResource.Response
- 64, // 112: tfplugin6.Provider.PlanResourceChange:output_type -> tfplugin6.PlanResourceChange.Response
- 66, // 113: tfplugin6.Provider.ApplyResourceChange:output_type -> tfplugin6.ApplyResourceChange.Response
- 69, // 114: tfplugin6.Provider.ImportResourceState:output_type -> tfplugin6.ImportResourceState.Response
- 71, // 115: tfplugin6.Provider.MoveResourceState:output_type -> tfplugin6.MoveResourceState.Response
- 73, // 116: tfplugin6.Provider.ReadDataSource:output_type -> tfplugin6.ReadDataSource.Response
- 75, // 117: tfplugin6.Provider.GetFunctions:output_type -> tfplugin6.GetFunctions.Response
- 78, // 118: tfplugin6.Provider.CallFunction:output_type -> tfplugin6.CallFunction.Response
- 33, // 119: tfplugin6.Provider.StopProvider:output_type -> tfplugin6.StopProvider.Response
- 104, // [104:120] is the sub-list for method output_type
- 88, // [88:104] is the sub-list for method input_type
- 88, // [88:88] is the sub-list for extension type_name
- 88, // [88:88] is the sub-list for extension extendee
- 0, // [0:88] is the sub-list for field type_name
+ 48, // 22: tfplugin6.GetMetadata.Response.data_sources:type_name -> tfplugin6.GetMetadata.DataSourceMetadata
+ 49, // 23: tfplugin6.GetMetadata.Response.resources:type_name -> tfplugin6.GetMetadata.ResourceMetadata
+ 47, // 24: tfplugin6.GetMetadata.Response.functions:type_name -> tfplugin6.GetMetadata.FunctionMetadata
+ 50, // 25: tfplugin6.GetMetadata.Response.ephemeral_resources:type_name -> tfplugin6.GetMetadata.EphemeralResourceMetadata
+ 11, // 26: tfplugin6.GetProviderSchema.Response.provider:type_name -> tfplugin6.Schema
+ 53, // 27: tfplugin6.GetProviderSchema.Response.resource_schemas:type_name -> tfplugin6.GetProviderSchema.Response.ResourceSchemasEntry
+ 54, // 28: tfplugin6.GetProviderSchema.Response.data_source_schemas:type_name -> tfplugin6.GetProviderSchema.Response.DataSourceSchemasEntry
+ 6, // 29: tfplugin6.GetProviderSchema.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 11, // 30: tfplugin6.GetProviderSchema.Response.provider_meta:type_name -> tfplugin6.Schema
+ 13, // 31: tfplugin6.GetProviderSchema.Response.server_capabilities:type_name -> tfplugin6.ServerCapabilities
+ 55, // 32: tfplugin6.GetProviderSchema.Response.functions:type_name -> tfplugin6.GetProviderSchema.Response.FunctionsEntry
+ 56, // 33: tfplugin6.GetProviderSchema.Response.ephemeral_resource_schemas:type_name -> tfplugin6.GetProviderSchema.Response.EphemeralResourceSchemasEntry
+ 11, // 34: tfplugin6.GetProviderSchema.Response.ResourceSchemasEntry.value:type_name -> tfplugin6.Schema
+ 11, // 35: tfplugin6.GetProviderSchema.Response.DataSourceSchemasEntry.value:type_name -> tfplugin6.Schema
+ 12, // 36: tfplugin6.GetProviderSchema.Response.FunctionsEntry.value:type_name -> tfplugin6.Function
+ 11, // 37: tfplugin6.GetProviderSchema.Response.EphemeralResourceSchemasEntry.value:type_name -> tfplugin6.Schema
+ 5, // 38: tfplugin6.ValidateProviderConfig.Request.config:type_name -> tfplugin6.DynamicValue
+ 6, // 39: tfplugin6.ValidateProviderConfig.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 10, // 40: tfplugin6.UpgradeResourceState.Request.raw_state:type_name -> tfplugin6.RawState
+ 5, // 41: tfplugin6.UpgradeResourceState.Response.upgraded_state:type_name -> tfplugin6.DynamicValue
+ 6, // 42: tfplugin6.UpgradeResourceState.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 5, // 43: tfplugin6.ValidateResourceConfig.Request.config:type_name -> tfplugin6.DynamicValue
+ 14, // 44: tfplugin6.ValidateResourceConfig.Request.client_capabilities:type_name -> tfplugin6.ClientCapabilities
+ 6, // 45: tfplugin6.ValidateResourceConfig.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 5, // 46: tfplugin6.ValidateDataResourceConfig.Request.config:type_name -> tfplugin6.DynamicValue
+ 6, // 47: tfplugin6.ValidateDataResourceConfig.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 5, // 48: tfplugin6.ConfigureProvider.Request.config:type_name -> tfplugin6.DynamicValue
+ 14, // 49: tfplugin6.ConfigureProvider.Request.client_capabilities:type_name -> tfplugin6.ClientCapabilities
+ 6, // 50: tfplugin6.ConfigureProvider.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 5, // 51: tfplugin6.ReadResource.Request.current_state:type_name -> tfplugin6.DynamicValue
+ 5, // 52: tfplugin6.ReadResource.Request.provider_meta:type_name -> tfplugin6.DynamicValue
+ 14, // 53: tfplugin6.ReadResource.Request.client_capabilities:type_name -> tfplugin6.ClientCapabilities
+ 5, // 54: tfplugin6.ReadResource.Response.new_state:type_name -> tfplugin6.DynamicValue
+ 6, // 55: tfplugin6.ReadResource.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 15, // 56: tfplugin6.ReadResource.Response.deferred:type_name -> tfplugin6.Deferred
+ 5, // 57: tfplugin6.PlanResourceChange.Request.prior_state:type_name -> tfplugin6.DynamicValue
+ 5, // 58: tfplugin6.PlanResourceChange.Request.proposed_new_state:type_name -> tfplugin6.DynamicValue
+ 5, // 59: tfplugin6.PlanResourceChange.Request.config:type_name -> tfplugin6.DynamicValue
+ 5, // 60: tfplugin6.PlanResourceChange.Request.provider_meta:type_name -> tfplugin6.DynamicValue
+ 14, // 61: tfplugin6.PlanResourceChange.Request.client_capabilities:type_name -> tfplugin6.ClientCapabilities
+ 5, // 62: tfplugin6.PlanResourceChange.Response.planned_state:type_name -> tfplugin6.DynamicValue
+ 8, // 63: tfplugin6.PlanResourceChange.Response.requires_replace:type_name -> tfplugin6.AttributePath
+ 6, // 64: tfplugin6.PlanResourceChange.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 15, // 65: tfplugin6.PlanResourceChange.Response.deferred:type_name -> tfplugin6.Deferred
+ 5, // 66: tfplugin6.ApplyResourceChange.Request.prior_state:type_name -> tfplugin6.DynamicValue
+ 5, // 67: tfplugin6.ApplyResourceChange.Request.planned_state:type_name -> tfplugin6.DynamicValue
+ 5, // 68: tfplugin6.ApplyResourceChange.Request.config:type_name -> tfplugin6.DynamicValue
+ 5, // 69: tfplugin6.ApplyResourceChange.Request.provider_meta:type_name -> tfplugin6.DynamicValue
+ 5, // 70: tfplugin6.ApplyResourceChange.Response.new_state:type_name -> tfplugin6.DynamicValue
+ 6, // 71: tfplugin6.ApplyResourceChange.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 14, // 72: tfplugin6.ImportResourceState.Request.client_capabilities:type_name -> tfplugin6.ClientCapabilities
+ 5, // 73: tfplugin6.ImportResourceState.ImportedResource.state:type_name -> tfplugin6.DynamicValue
+ 74, // 74: tfplugin6.ImportResourceState.Response.imported_resources:type_name -> tfplugin6.ImportResourceState.ImportedResource
+ 6, // 75: tfplugin6.ImportResourceState.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 15, // 76: tfplugin6.ImportResourceState.Response.deferred:type_name -> tfplugin6.Deferred
+ 10, // 77: tfplugin6.MoveResourceState.Request.source_state:type_name -> tfplugin6.RawState
+ 5, // 78: tfplugin6.MoveResourceState.Response.target_state:type_name -> tfplugin6.DynamicValue
+ 6, // 79: tfplugin6.MoveResourceState.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 5, // 80: tfplugin6.ReadDataSource.Request.config:type_name -> tfplugin6.DynamicValue
+ 5, // 81: tfplugin6.ReadDataSource.Request.provider_meta:type_name -> tfplugin6.DynamicValue
+ 14, // 82: tfplugin6.ReadDataSource.Request.client_capabilities:type_name -> tfplugin6.ClientCapabilities
+ 5, // 83: tfplugin6.ReadDataSource.Response.state:type_name -> tfplugin6.DynamicValue
+ 6, // 84: tfplugin6.ReadDataSource.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 15, // 85: tfplugin6.ReadDataSource.Response.deferred:type_name -> tfplugin6.Deferred
+ 82, // 86: tfplugin6.GetFunctions.Response.functions:type_name -> tfplugin6.GetFunctions.Response.FunctionsEntry
+ 6, // 87: tfplugin6.GetFunctions.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 12, // 88: tfplugin6.GetFunctions.Response.FunctionsEntry.value:type_name -> tfplugin6.Function
+ 5, // 89: tfplugin6.CallFunction.Request.arguments:type_name -> tfplugin6.DynamicValue
+ 5, // 90: tfplugin6.CallFunction.Response.result:type_name -> tfplugin6.DynamicValue
+ 7, // 91: tfplugin6.CallFunction.Response.error:type_name -> tfplugin6.FunctionError
+ 5, // 92: tfplugin6.ValidateEphemeralResourceConfig.Request.config:type_name -> tfplugin6.DynamicValue
+ 6, // 93: tfplugin6.ValidateEphemeralResourceConfig.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 5, // 94: tfplugin6.OpenEphemeralResource.Request.config:type_name -> tfplugin6.DynamicValue
+ 14, // 95: tfplugin6.OpenEphemeralResource.Request.client_capabilities:type_name -> tfplugin6.ClientCapabilities
+ 6, // 96: tfplugin6.OpenEphemeralResource.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 93, // 97: tfplugin6.OpenEphemeralResource.Response.renew_at:type_name -> google.protobuf.Timestamp
+ 5, // 98: tfplugin6.OpenEphemeralResource.Response.result:type_name -> tfplugin6.DynamicValue
+ 15, // 99: tfplugin6.OpenEphemeralResource.Response.deferred:type_name -> tfplugin6.Deferred
+ 6, // 100: tfplugin6.RenewEphemeralResource.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 93, // 101: tfplugin6.RenewEphemeralResource.Response.renew_at:type_name -> google.protobuf.Timestamp
+ 6, // 102: tfplugin6.CloseEphemeralResource.Response.diagnostics:type_name -> tfplugin6.Diagnostic
+ 45, // 103: tfplugin6.Provider.GetMetadata:input_type -> tfplugin6.GetMetadata.Request
+ 51, // 104: tfplugin6.Provider.GetProviderSchema:input_type -> tfplugin6.GetProviderSchema.Request
+ 57, // 105: tfplugin6.Provider.ValidateProviderConfig:input_type -> tfplugin6.ValidateProviderConfig.Request
+ 61, // 106: tfplugin6.Provider.ValidateResourceConfig:input_type -> tfplugin6.ValidateResourceConfig.Request
+ 63, // 107: tfplugin6.Provider.ValidateDataResourceConfig:input_type -> tfplugin6.ValidateDataResourceConfig.Request
+ 59, // 108: tfplugin6.Provider.UpgradeResourceState:input_type -> tfplugin6.UpgradeResourceState.Request
+ 65, // 109: tfplugin6.Provider.ConfigureProvider:input_type -> tfplugin6.ConfigureProvider.Request
+ 67, // 110: tfplugin6.Provider.ReadResource:input_type -> tfplugin6.ReadResource.Request
+ 69, // 111: tfplugin6.Provider.PlanResourceChange:input_type -> tfplugin6.PlanResourceChange.Request
+ 71, // 112: tfplugin6.Provider.ApplyResourceChange:input_type -> tfplugin6.ApplyResourceChange.Request
+ 73, // 113: tfplugin6.Provider.ImportResourceState:input_type -> tfplugin6.ImportResourceState.Request
+ 76, // 114: tfplugin6.Provider.MoveResourceState:input_type -> tfplugin6.MoveResourceState.Request
+ 78, // 115: tfplugin6.Provider.ReadDataSource:input_type -> tfplugin6.ReadDataSource.Request
+ 85, // 116: tfplugin6.Provider.ValidateEphemeralResourceConfig:input_type -> tfplugin6.ValidateEphemeralResourceConfig.Request
+ 87, // 117: tfplugin6.Provider.OpenEphemeralResource:input_type -> tfplugin6.OpenEphemeralResource.Request
+ 89, // 118: tfplugin6.Provider.RenewEphemeralResource:input_type -> tfplugin6.RenewEphemeralResource.Request
+ 91, // 119: tfplugin6.Provider.CloseEphemeralResource:input_type -> tfplugin6.CloseEphemeralResource.Request
+ 80, // 120: tfplugin6.Provider.GetFunctions:input_type -> tfplugin6.GetFunctions.Request
+ 83, // 121: tfplugin6.Provider.CallFunction:input_type -> tfplugin6.CallFunction.Request
+ 36, // 122: tfplugin6.Provider.StopProvider:input_type -> tfplugin6.StopProvider.Request
+ 46, // 123: tfplugin6.Provider.GetMetadata:output_type -> tfplugin6.GetMetadata.Response
+ 52, // 124: tfplugin6.Provider.GetProviderSchema:output_type -> tfplugin6.GetProviderSchema.Response
+ 58, // 125: tfplugin6.Provider.ValidateProviderConfig:output_type -> tfplugin6.ValidateProviderConfig.Response
+ 62, // 126: tfplugin6.Provider.ValidateResourceConfig:output_type -> tfplugin6.ValidateResourceConfig.Response
+ 64, // 127: tfplugin6.Provider.ValidateDataResourceConfig:output_type -> tfplugin6.ValidateDataResourceConfig.Response
+ 60, // 128: tfplugin6.Provider.UpgradeResourceState:output_type -> tfplugin6.UpgradeResourceState.Response
+ 66, // 129: tfplugin6.Provider.ConfigureProvider:output_type -> tfplugin6.ConfigureProvider.Response
+ 68, // 130: tfplugin6.Provider.ReadResource:output_type -> tfplugin6.ReadResource.Response
+ 70, // 131: tfplugin6.Provider.PlanResourceChange:output_type -> tfplugin6.PlanResourceChange.Response
+ 72, // 132: tfplugin6.Provider.ApplyResourceChange:output_type -> tfplugin6.ApplyResourceChange.Response
+ 75, // 133: tfplugin6.Provider.ImportResourceState:output_type -> tfplugin6.ImportResourceState.Response
+ 77, // 134: tfplugin6.Provider.MoveResourceState:output_type -> tfplugin6.MoveResourceState.Response
+ 79, // 135: tfplugin6.Provider.ReadDataSource:output_type -> tfplugin6.ReadDataSource.Response
+ 86, // 136: tfplugin6.Provider.ValidateEphemeralResourceConfig:output_type -> tfplugin6.ValidateEphemeralResourceConfig.Response
+ 88, // 137: tfplugin6.Provider.OpenEphemeralResource:output_type -> tfplugin6.OpenEphemeralResource.Response
+ 90, // 138: tfplugin6.Provider.RenewEphemeralResource:output_type -> tfplugin6.RenewEphemeralResource.Response
+ 92, // 139: tfplugin6.Provider.CloseEphemeralResource:output_type -> tfplugin6.CloseEphemeralResource.Response
+ 81, // 140: tfplugin6.Provider.GetFunctions:output_type -> tfplugin6.GetFunctions.Response
+ 84, // 141: tfplugin6.Provider.CallFunction:output_type -> tfplugin6.CallFunction.Response
+ 37, // 142: tfplugin6.Provider.StopProvider:output_type -> tfplugin6.StopProvider.Response
+ 123, // [123:143] is the sub-list for method output_type
+ 103, // [103:123] is the sub-list for method input_type
+ 103, // [103:103] is the sub-list for extension type_name
+ 103, // [103:103] is the sub-list for extension extendee
+ 0, // [0:103] is the sub-list for field type_name
}
func init() { file_tfplugin6_proto_init() }
@@ -5328,849 +6001,23 @@ func file_tfplugin6_proto_init() {
if File_tfplugin6_proto != nil {
return
}
- if !protoimpl.UnsafeEnabled {
- file_tfplugin6_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*DynamicValue); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Diagnostic); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*FunctionError); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*AttributePath); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*StopProvider); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*RawState); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Schema); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Function); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ServerCapabilities); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ClientCapabilities); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Deferred); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetMetadata); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetProviderSchema); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateProviderConfig); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*UpgradeResourceState); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateResourceConfig); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateDataResourceConfig); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ConfigureProvider); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ReadResource); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*PlanResourceChange); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ApplyResourceChange); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ImportResourceState); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*MoveResourceState); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ReadDataSource); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetFunctions); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*CallFunction); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*AttributePath_Step); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*StopProvider_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*StopProvider_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Schema_Block); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Schema_Attribute); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Schema_NestedBlock); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Schema_Object); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Function_Parameter); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[35].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*Function_Return); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[36].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetMetadata_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetMetadata_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[38].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetMetadata_FunctionMetadata); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[39].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetMetadata_DataSourceMetadata); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[40].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetMetadata_ResourceMetadata); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[41].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetProviderSchema_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[42].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetProviderSchema_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[46].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateProviderConfig_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[47].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateProviderConfig_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[48].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*UpgradeResourceState_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[49].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*UpgradeResourceState_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[50].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateResourceConfig_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[51].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateResourceConfig_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[52].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateDataResourceConfig_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[53].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ValidateDataResourceConfig_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[54].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ConfigureProvider_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[55].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ConfigureProvider_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[56].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ReadResource_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[57].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ReadResource_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[58].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*PlanResourceChange_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[59].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*PlanResourceChange_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[60].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ApplyResourceChange_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[61].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ApplyResourceChange_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[62].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ImportResourceState_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[63].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ImportResourceState_ImportedResource); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[64].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ImportResourceState_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[65].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*MoveResourceState_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[66].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*MoveResourceState_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[67].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ReadDataSource_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[68].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*ReadDataSource_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[69].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetFunctions_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[70].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*GetFunctions_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[72].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*CallFunction_Request); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- file_tfplugin6_proto_msgTypes[73].Exporter = func(v interface{}, i int) interface{} {
- switch v := v.(*CallFunction_Response); i {
- case 0:
- return &v.state
- case 1:
- return &v.sizeCache
- case 2:
- return &v.unknownFields
- default:
- return nil
- }
- }
- }
- file_tfplugin6_proto_msgTypes[2].OneofWrappers = []interface{}{}
- file_tfplugin6_proto_msgTypes[26].OneofWrappers = []interface{}{
+ file_tfplugin6_proto_msgTypes[2].OneofWrappers = []any{}
+ file_tfplugin6_proto_msgTypes[30].OneofWrappers = []any{
(*AttributePath_Step_AttributeName)(nil),
(*AttributePath_Step_ElementKeyString)(nil),
(*AttributePath_Step_ElementKeyInt)(nil),
}
+ file_tfplugin6_proto_msgTypes[83].OneofWrappers = []any{}
+ file_tfplugin6_proto_msgTypes[84].OneofWrappers = []any{}
+ file_tfplugin6_proto_msgTypes[85].OneofWrappers = []any{}
+ file_tfplugin6_proto_msgTypes[86].OneofWrappers = []any{}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_tfplugin6_proto_rawDesc,
NumEnums: 5,
- NumMessages: 74,
+ NumMessages: 88,
NumExtensions: 0,
NumServices: 1,
},
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tfplugin6/tfplugin6.proto b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tfplugin6/tfplugin6.proto
index 8504e12dcc..8546ef5b6e 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tfplugin6/tfplugin6.proto
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tfplugin6/tfplugin6.proto
@@ -1,9 +1,9 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0
-// Terraform Plugin RPC protocol version 6.6
+// Terraform Plugin RPC protocol version 6.8
//
-// This file defines version 6.6 of the RPC protocol. To implement a plugin
+// This file defines version 6.8 of the RPC protocol. To implement a plugin
// against this protocol, copy this definition into your own codebase and
// use protoc to generate stubs for your target language.
//
@@ -22,6 +22,8 @@
syntax = "proto3";
option go_package = "github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tfplugin6";
+import "google/protobuf/timestamp.proto";
+
package tfplugin6;
// DynamicValue is an opaque encoding of terraform data, with the field name
@@ -108,6 +110,11 @@ message Schema {
bool sensitive = 7;
StringKind description_kind = 8;
bool deprecated = 9;
+ // write_only indicates that the attribute value will be provided via
+ // configuration and must be omitted from state. write_only must be
+ // combined with optional or required, and is only valid for managed
+ // resource schemas.
+ bool write_only = 11;
}
message NestedBlock {
@@ -238,6 +245,9 @@ message ClientCapabilities {
// The deferral_allowed capability signals that the client is able to
// handle deferred responses from the provider.
bool deferral_allowed = 1;
+ // The write_only_attributes_allowed capability signals that the client
+ // is able to handle write_only attributes for managed resources.
+ bool write_only_attributes_allowed = 2;
}
// Deferred is a message that indicates that change is deferred for a reason.
@@ -288,6 +298,12 @@ service Provider {
rpc MoveResourceState(MoveResourceState.Request) returns (MoveResourceState.Response);
rpc ReadDataSource(ReadDataSource.Request) returns (ReadDataSource.Response);
+ //////// Ephemeral Resource Lifecycle
+ rpc ValidateEphemeralResourceConfig(ValidateEphemeralResourceConfig.Request) returns (ValidateEphemeralResourceConfig.Response);
+ rpc OpenEphemeralResource(OpenEphemeralResource.Request) returns (OpenEphemeralResource.Response);
+ rpc RenewEphemeralResource(RenewEphemeralResource.Request) returns (RenewEphemeralResource.Response);
+ rpc CloseEphemeralResource(CloseEphemeralResource.Request) returns (CloseEphemeralResource.Response);
+
// Functions
// GetFunctions returns the definitions of all functions.
@@ -313,6 +329,7 @@ message GetMetadata {
// functions returns metadata for any functions.
repeated FunctionMetadata functions = 5;
+ repeated EphemeralResourceMetadata ephemeral_resources = 6;
}
message FunctionMetadata {
@@ -327,6 +344,10 @@ message GetMetadata {
message ResourceMetadata {
string type_name = 1;
}
+
+ message EphemeralResourceMetadata {
+ string type_name = 1;
+ }
}
message GetProviderSchema {
@@ -342,6 +363,7 @@ message GetProviderSchema {
// functions is a mapping of function names to definitions.
map functions = 7;
+ map ephemeral_resource_schemas = 8;
}
}
@@ -394,6 +416,7 @@ message ValidateResourceConfig {
message Request {
string type_name = 1;
DynamicValue config = 2;
+ ClientCapabilities client_capabilities = 3;
}
message Response {
repeated Diagnostic diagnostics = 1;
@@ -464,6 +487,7 @@ message PlanResourceChange {
bytes planned_private = 3;
repeated Diagnostic diagnostics = 4;
+
// This may be set only by the helper/schema "SDK" in the main Terraform
// repository, to request that Terraform Core >=0.12 permit additional
// inconsistencies that can result from the legacy SDK type system
@@ -610,7 +634,56 @@ message CallFunction {
// result is result value after running the function logic.
DynamicValue result = 1;
- // error is any errors from the function logic.
+ // error is any error from the function logic.
FunctionError error = 2;
}
}
+
+message ValidateEphemeralResourceConfig {
+ message Request {
+ string type_name = 1;
+ DynamicValue config = 2;
+ }
+ message Response {
+ repeated Diagnostic diagnostics = 1;
+ }
+}
+
+message OpenEphemeralResource {
+ message Request {
+ string type_name = 1;
+ DynamicValue config = 2;
+ ClientCapabilities client_capabilities = 3;
+ }
+ message Response {
+ repeated Diagnostic diagnostics = 1;
+ optional google.protobuf.Timestamp renew_at = 2;
+ DynamicValue result = 3;
+ optional bytes private = 4;
+ // deferred is set if the provider is deferring the change. If set the caller
+ // needs to handle the deferral.
+ Deferred deferred = 5;
+ }
+}
+
+message RenewEphemeralResource {
+ message Request {
+ string type_name = 1;
+ optional bytes private = 2;
+ }
+ message Response {
+ repeated Diagnostic diagnostics = 1;
+ optional google.protobuf.Timestamp renew_at = 2;
+ optional bytes private = 3;
+ }
+}
+
+message CloseEphemeralResource {
+ message Request {
+ string type_name = 1;
+ optional bytes private = 2;
+ }
+ message Response {
+ repeated Diagnostic diagnostics = 1;
+ }
+}
\ No newline at end of file
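
The four RPCs added above follow the same request/response shape as the existing resource lifecycle. As a rough sketch (not part of the vendored change), a provider built directly on tfprotov6 might satisfy the new ephemeral resource methods like this; the exampleEphemeralServer type, the 30-minute renewal window, and the private payload are illustrative assumptions only:

package provider // hypothetical package name

import (
	"context"
	"time"

	"github.com/hashicorp/terraform-plugin-go/tfprotov6"
)

// exampleEphemeralServer is a hypothetical type; a real provider would fold
// this logic into its tfprotov6.ProviderServer implementation.
type exampleEphemeralServer struct{}

var _ tfprotov6.EphemeralResourceServer = exampleEphemeralServer{}

func (exampleEphemeralServer) ValidateEphemeralResourceConfig(ctx context.Context, req *tfprotov6.ValidateEphemeralResourceConfigRequest) (*tfprotov6.ValidateEphemeralResourceConfigResponse, error) {
	// No validation rules in this sketch; empty Diagnostics means success.
	return &tfprotov6.ValidateEphemeralResourceConfigResponse{}, nil
}

func (exampleEphemeralServer) OpenEphemeralResource(ctx context.Context, req *tfprotov6.OpenEphemeralResourceRequest) (*tfprotov6.OpenEphemeralResourceResponse, error) {
	// A real provider would build Result from req.Config. RenewAt asks Terraform
	// to call RenewEphemeralResource before the opened value expires.
	return &tfprotov6.OpenEphemeralResourceResponse{
		RenewAt: time.Now().Add(30 * time.Minute),
		Private: []byte(`{"lease_id":"example"}`), // opaque provider-private data
	}, nil
}

func (exampleEphemeralServer) RenewEphemeralResource(ctx context.Context, req *tfprotov6.RenewEphemeralResourceRequest) (*tfprotov6.RenewEphemeralResourceResponse, error) {
	// Extend the lease and hand the private payload back for the next renewal.
	return &tfprotov6.RenewEphemeralResourceResponse{
		RenewAt: time.Now().Add(30 * time.Minute),
		Private: req.Private,
	}, nil
}

func (exampleEphemeralServer) CloseEphemeralResource(ctx context.Context, req *tfprotov6.CloseEphemeralResourceRequest) (*tfprotov6.CloseEphemeralResourceResponse, error) {
	// Release whatever was acquired in OpenEphemeralResource.
	return &tfprotov6.CloseEphemeralResourceResponse{}, nil
}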
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tfplugin6/tfplugin6_grpc.pb.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tfplugin6/tfplugin6_grpc.pb.go
index d1d31e196c..e757b68b81 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tfplugin6/tfplugin6_grpc.pb.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tfplugin6/tfplugin6_grpc.pb.go
@@ -1,9 +1,9 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0
-// Terraform Plugin RPC protocol version 6.6
+// Terraform Plugin RPC protocol version 6.8
//
-// This file defines version 6.6 of the RPC protocol. To implement a plugin
+// This file defines version 6.8 of the RPC protocol. To implement a plugin
// against this protocol, copy this definition into your own codebase and
// use protoc to generate stubs for your target language.
//
@@ -22,8 +22,8 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
-// - protoc-gen-go-grpc v1.3.0
-// - protoc v5.26.1
+// - protoc-gen-go-grpc v1.5.1
+// - protoc v5.29.3
// source: tfplugin6.proto
package tfplugin6
@@ -37,26 +37,30 @@ import (
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
-// Requires gRPC-Go v1.32.0 or later.
-const _ = grpc.SupportPackageIsVersion7
+// Requires gRPC-Go v1.64.0 or later.
+const _ = grpc.SupportPackageIsVersion9
const (
- Provider_GetMetadata_FullMethodName = "/tfplugin6.Provider/GetMetadata"
- Provider_GetProviderSchema_FullMethodName = "/tfplugin6.Provider/GetProviderSchema"
- Provider_ValidateProviderConfig_FullMethodName = "/tfplugin6.Provider/ValidateProviderConfig"
- Provider_ValidateResourceConfig_FullMethodName = "/tfplugin6.Provider/ValidateResourceConfig"
- Provider_ValidateDataResourceConfig_FullMethodName = "/tfplugin6.Provider/ValidateDataResourceConfig"
- Provider_UpgradeResourceState_FullMethodName = "/tfplugin6.Provider/UpgradeResourceState"
- Provider_ConfigureProvider_FullMethodName = "/tfplugin6.Provider/ConfigureProvider"
- Provider_ReadResource_FullMethodName = "/tfplugin6.Provider/ReadResource"
- Provider_PlanResourceChange_FullMethodName = "/tfplugin6.Provider/PlanResourceChange"
- Provider_ApplyResourceChange_FullMethodName = "/tfplugin6.Provider/ApplyResourceChange"
- Provider_ImportResourceState_FullMethodName = "/tfplugin6.Provider/ImportResourceState"
- Provider_MoveResourceState_FullMethodName = "/tfplugin6.Provider/MoveResourceState"
- Provider_ReadDataSource_FullMethodName = "/tfplugin6.Provider/ReadDataSource"
- Provider_GetFunctions_FullMethodName = "/tfplugin6.Provider/GetFunctions"
- Provider_CallFunction_FullMethodName = "/tfplugin6.Provider/CallFunction"
- Provider_StopProvider_FullMethodName = "/tfplugin6.Provider/StopProvider"
+ Provider_GetMetadata_FullMethodName = "/tfplugin6.Provider/GetMetadata"
+ Provider_GetProviderSchema_FullMethodName = "/tfplugin6.Provider/GetProviderSchema"
+ Provider_ValidateProviderConfig_FullMethodName = "/tfplugin6.Provider/ValidateProviderConfig"
+ Provider_ValidateResourceConfig_FullMethodName = "/tfplugin6.Provider/ValidateResourceConfig"
+ Provider_ValidateDataResourceConfig_FullMethodName = "/tfplugin6.Provider/ValidateDataResourceConfig"
+ Provider_UpgradeResourceState_FullMethodName = "/tfplugin6.Provider/UpgradeResourceState"
+ Provider_ConfigureProvider_FullMethodName = "/tfplugin6.Provider/ConfigureProvider"
+ Provider_ReadResource_FullMethodName = "/tfplugin6.Provider/ReadResource"
+ Provider_PlanResourceChange_FullMethodName = "/tfplugin6.Provider/PlanResourceChange"
+ Provider_ApplyResourceChange_FullMethodName = "/tfplugin6.Provider/ApplyResourceChange"
+ Provider_ImportResourceState_FullMethodName = "/tfplugin6.Provider/ImportResourceState"
+ Provider_MoveResourceState_FullMethodName = "/tfplugin6.Provider/MoveResourceState"
+ Provider_ReadDataSource_FullMethodName = "/tfplugin6.Provider/ReadDataSource"
+ Provider_ValidateEphemeralResourceConfig_FullMethodName = "/tfplugin6.Provider/ValidateEphemeralResourceConfig"
+ Provider_OpenEphemeralResource_FullMethodName = "/tfplugin6.Provider/OpenEphemeralResource"
+ Provider_RenewEphemeralResource_FullMethodName = "/tfplugin6.Provider/RenewEphemeralResource"
+ Provider_CloseEphemeralResource_FullMethodName = "/tfplugin6.Provider/CloseEphemeralResource"
+ Provider_GetFunctions_FullMethodName = "/tfplugin6.Provider/GetFunctions"
+ Provider_CallFunction_FullMethodName = "/tfplugin6.Provider/CallFunction"
+ Provider_StopProvider_FullMethodName = "/tfplugin6.Provider/StopProvider"
)
// ProviderClient is the client API for Provider service.
@@ -85,6 +89,11 @@ type ProviderClient interface {
ImportResourceState(ctx context.Context, in *ImportResourceState_Request, opts ...grpc.CallOption) (*ImportResourceState_Response, error)
MoveResourceState(ctx context.Context, in *MoveResourceState_Request, opts ...grpc.CallOption) (*MoveResourceState_Response, error)
ReadDataSource(ctx context.Context, in *ReadDataSource_Request, opts ...grpc.CallOption) (*ReadDataSource_Response, error)
+ // ////// Ephemeral Resource Lifecycle
+ ValidateEphemeralResourceConfig(ctx context.Context, in *ValidateEphemeralResourceConfig_Request, opts ...grpc.CallOption) (*ValidateEphemeralResourceConfig_Response, error)
+ OpenEphemeralResource(ctx context.Context, in *OpenEphemeralResource_Request, opts ...grpc.CallOption) (*OpenEphemeralResource_Response, error)
+ RenewEphemeralResource(ctx context.Context, in *RenewEphemeralResource_Request, opts ...grpc.CallOption) (*RenewEphemeralResource_Response, error)
+ CloseEphemeralResource(ctx context.Context, in *CloseEphemeralResource_Request, opts ...grpc.CallOption) (*CloseEphemeralResource_Response, error)
// GetFunctions returns the definitions of all functions.
GetFunctions(ctx context.Context, in *GetFunctions_Request, opts ...grpc.CallOption) (*GetFunctions_Response, error)
// CallFunction runs the provider-defined function logic and returns
@@ -103,8 +112,9 @@ func NewProviderClient(cc grpc.ClientConnInterface) ProviderClient {
}
func (c *providerClient) GetMetadata(ctx context.Context, in *GetMetadata_Request, opts ...grpc.CallOption) (*GetMetadata_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(GetMetadata_Response)
- err := c.cc.Invoke(ctx, Provider_GetMetadata_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_GetMetadata_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -112,8 +122,9 @@ func (c *providerClient) GetMetadata(ctx context.Context, in *GetMetadata_Reques
}
func (c *providerClient) GetProviderSchema(ctx context.Context, in *GetProviderSchema_Request, opts ...grpc.CallOption) (*GetProviderSchema_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(GetProviderSchema_Response)
- err := c.cc.Invoke(ctx, Provider_GetProviderSchema_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_GetProviderSchema_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -121,8 +132,9 @@ func (c *providerClient) GetProviderSchema(ctx context.Context, in *GetProviderS
}
func (c *providerClient) ValidateProviderConfig(ctx context.Context, in *ValidateProviderConfig_Request, opts ...grpc.CallOption) (*ValidateProviderConfig_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ValidateProviderConfig_Response)
- err := c.cc.Invoke(ctx, Provider_ValidateProviderConfig_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_ValidateProviderConfig_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -130,8 +142,9 @@ func (c *providerClient) ValidateProviderConfig(ctx context.Context, in *Validat
}
func (c *providerClient) ValidateResourceConfig(ctx context.Context, in *ValidateResourceConfig_Request, opts ...grpc.CallOption) (*ValidateResourceConfig_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ValidateResourceConfig_Response)
- err := c.cc.Invoke(ctx, Provider_ValidateResourceConfig_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_ValidateResourceConfig_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -139,8 +152,9 @@ func (c *providerClient) ValidateResourceConfig(ctx context.Context, in *Validat
}
func (c *providerClient) ValidateDataResourceConfig(ctx context.Context, in *ValidateDataResourceConfig_Request, opts ...grpc.CallOption) (*ValidateDataResourceConfig_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ValidateDataResourceConfig_Response)
- err := c.cc.Invoke(ctx, Provider_ValidateDataResourceConfig_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_ValidateDataResourceConfig_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -148,8 +162,9 @@ func (c *providerClient) ValidateDataResourceConfig(ctx context.Context, in *Val
}
func (c *providerClient) UpgradeResourceState(ctx context.Context, in *UpgradeResourceState_Request, opts ...grpc.CallOption) (*UpgradeResourceState_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(UpgradeResourceState_Response)
- err := c.cc.Invoke(ctx, Provider_UpgradeResourceState_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_UpgradeResourceState_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -157,8 +172,9 @@ func (c *providerClient) UpgradeResourceState(ctx context.Context, in *UpgradeRe
}
func (c *providerClient) ConfigureProvider(ctx context.Context, in *ConfigureProvider_Request, opts ...grpc.CallOption) (*ConfigureProvider_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ConfigureProvider_Response)
- err := c.cc.Invoke(ctx, Provider_ConfigureProvider_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_ConfigureProvider_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -166,8 +182,9 @@ func (c *providerClient) ConfigureProvider(ctx context.Context, in *ConfigurePro
}
func (c *providerClient) ReadResource(ctx context.Context, in *ReadResource_Request, opts ...grpc.CallOption) (*ReadResource_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ReadResource_Response)
- err := c.cc.Invoke(ctx, Provider_ReadResource_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_ReadResource_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -175,8 +192,9 @@ func (c *providerClient) ReadResource(ctx context.Context, in *ReadResource_Requ
}
func (c *providerClient) PlanResourceChange(ctx context.Context, in *PlanResourceChange_Request, opts ...grpc.CallOption) (*PlanResourceChange_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(PlanResourceChange_Response)
- err := c.cc.Invoke(ctx, Provider_PlanResourceChange_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_PlanResourceChange_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -184,8 +202,9 @@ func (c *providerClient) PlanResourceChange(ctx context.Context, in *PlanResourc
}
func (c *providerClient) ApplyResourceChange(ctx context.Context, in *ApplyResourceChange_Request, opts ...grpc.CallOption) (*ApplyResourceChange_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ApplyResourceChange_Response)
- err := c.cc.Invoke(ctx, Provider_ApplyResourceChange_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_ApplyResourceChange_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -193,8 +212,9 @@ func (c *providerClient) ApplyResourceChange(ctx context.Context, in *ApplyResou
}
func (c *providerClient) ImportResourceState(ctx context.Context, in *ImportResourceState_Request, opts ...grpc.CallOption) (*ImportResourceState_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ImportResourceState_Response)
- err := c.cc.Invoke(ctx, Provider_ImportResourceState_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_ImportResourceState_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -202,8 +222,9 @@ func (c *providerClient) ImportResourceState(ctx context.Context, in *ImportReso
}
func (c *providerClient) MoveResourceState(ctx context.Context, in *MoveResourceState_Request, opts ...grpc.CallOption) (*MoveResourceState_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(MoveResourceState_Response)
- err := c.cc.Invoke(ctx, Provider_MoveResourceState_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_MoveResourceState_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -211,8 +232,49 @@ func (c *providerClient) MoveResourceState(ctx context.Context, in *MoveResource
}
func (c *providerClient) ReadDataSource(ctx context.Context, in *ReadDataSource_Request, opts ...grpc.CallOption) (*ReadDataSource_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ReadDataSource_Response)
- err := c.cc.Invoke(ctx, Provider_ReadDataSource_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_ReadDataSource_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *providerClient) ValidateEphemeralResourceConfig(ctx context.Context, in *ValidateEphemeralResourceConfig_Request, opts ...grpc.CallOption) (*ValidateEphemeralResourceConfig_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(ValidateEphemeralResourceConfig_Response)
+ err := c.cc.Invoke(ctx, Provider_ValidateEphemeralResourceConfig_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *providerClient) OpenEphemeralResource(ctx context.Context, in *OpenEphemeralResource_Request, opts ...grpc.CallOption) (*OpenEphemeralResource_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(OpenEphemeralResource_Response)
+ err := c.cc.Invoke(ctx, Provider_OpenEphemeralResource_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *providerClient) RenewEphemeralResource(ctx context.Context, in *RenewEphemeralResource_Request, opts ...grpc.CallOption) (*RenewEphemeralResource_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(RenewEphemeralResource_Response)
+ err := c.cc.Invoke(ctx, Provider_RenewEphemeralResource_FullMethodName, in, out, cOpts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *providerClient) CloseEphemeralResource(ctx context.Context, in *CloseEphemeralResource_Request, opts ...grpc.CallOption) (*CloseEphemeralResource_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
+ out := new(CloseEphemeralResource_Response)
+ err := c.cc.Invoke(ctx, Provider_CloseEphemeralResource_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -220,8 +282,9 @@ func (c *providerClient) ReadDataSource(ctx context.Context, in *ReadDataSource_
}
func (c *providerClient) GetFunctions(ctx context.Context, in *GetFunctions_Request, opts ...grpc.CallOption) (*GetFunctions_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(GetFunctions_Response)
- err := c.cc.Invoke(ctx, Provider_GetFunctions_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_GetFunctions_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -229,8 +292,9 @@ func (c *providerClient) GetFunctions(ctx context.Context, in *GetFunctions_Requ
}
func (c *providerClient) CallFunction(ctx context.Context, in *CallFunction_Request, opts ...grpc.CallOption) (*CallFunction_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(CallFunction_Response)
- err := c.cc.Invoke(ctx, Provider_CallFunction_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_CallFunction_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -238,8 +302,9 @@ func (c *providerClient) CallFunction(ctx context.Context, in *CallFunction_Requ
}
func (c *providerClient) StopProvider(ctx context.Context, in *StopProvider_Request, opts ...grpc.CallOption) (*StopProvider_Response, error) {
+ cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(StopProvider_Response)
- err := c.cc.Invoke(ctx, Provider_StopProvider_FullMethodName, in, out, opts...)
+ err := c.cc.Invoke(ctx, Provider_StopProvider_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -248,7 +313,7 @@ func (c *providerClient) StopProvider(ctx context.Context, in *StopProvider_Requ
// ProviderServer is the server API for Provider service.
// All implementations must embed UnimplementedProviderServer
-// for forward compatibility
+// for forward compatibility.
type ProviderServer interface {
// GetMetadata returns upfront information about server capabilities and
// supported resource types without requiring the server to instantiate all
@@ -272,6 +337,11 @@ type ProviderServer interface {
ImportResourceState(context.Context, *ImportResourceState_Request) (*ImportResourceState_Response, error)
MoveResourceState(context.Context, *MoveResourceState_Request) (*MoveResourceState_Response, error)
ReadDataSource(context.Context, *ReadDataSource_Request) (*ReadDataSource_Response, error)
+ // ////// Ephemeral Resource Lifecycle
+ ValidateEphemeralResourceConfig(context.Context, *ValidateEphemeralResourceConfig_Request) (*ValidateEphemeralResourceConfig_Response, error)
+ OpenEphemeralResource(context.Context, *OpenEphemeralResource_Request) (*OpenEphemeralResource_Response, error)
+ RenewEphemeralResource(context.Context, *RenewEphemeralResource_Request) (*RenewEphemeralResource_Response, error)
+ CloseEphemeralResource(context.Context, *CloseEphemeralResource_Request) (*CloseEphemeralResource_Response, error)
// GetFunctions returns the definitions of all functions.
GetFunctions(context.Context, *GetFunctions_Request) (*GetFunctions_Response, error)
// CallFunction runs the provider-defined function logic and returns
@@ -282,9 +352,12 @@ type ProviderServer interface {
mustEmbedUnimplementedProviderServer()
}
-// UnimplementedProviderServer must be embedded to have forward compatible implementations.
-type UnimplementedProviderServer struct {
-}
+// UnimplementedProviderServer must be embedded to have
+// forward compatible implementations.
+//
+// NOTE: this should be embedded by value instead of pointer to avoid a nil
+// pointer dereference when methods are called.
+type UnimplementedProviderServer struct{}
func (UnimplementedProviderServer) GetMetadata(context.Context, *GetMetadata_Request) (*GetMetadata_Response, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetMetadata not implemented")
@@ -325,6 +398,18 @@ func (UnimplementedProviderServer) MoveResourceState(context.Context, *MoveResou
func (UnimplementedProviderServer) ReadDataSource(context.Context, *ReadDataSource_Request) (*ReadDataSource_Response, error) {
return nil, status.Errorf(codes.Unimplemented, "method ReadDataSource not implemented")
}
+func (UnimplementedProviderServer) ValidateEphemeralResourceConfig(context.Context, *ValidateEphemeralResourceConfig_Request) (*ValidateEphemeralResourceConfig_Response, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method ValidateEphemeralResourceConfig not implemented")
+}
+func (UnimplementedProviderServer) OpenEphemeralResource(context.Context, *OpenEphemeralResource_Request) (*OpenEphemeralResource_Response, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method OpenEphemeralResource not implemented")
+}
+func (UnimplementedProviderServer) RenewEphemeralResource(context.Context, *RenewEphemeralResource_Request) (*RenewEphemeralResource_Response, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method RenewEphemeralResource not implemented")
+}
+func (UnimplementedProviderServer) CloseEphemeralResource(context.Context, *CloseEphemeralResource_Request) (*CloseEphemeralResource_Response, error) {
+ return nil, status.Errorf(codes.Unimplemented, "method CloseEphemeralResource not implemented")
+}
func (UnimplementedProviderServer) GetFunctions(context.Context, *GetFunctions_Request) (*GetFunctions_Response, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetFunctions not implemented")
}
@@ -335,6 +420,7 @@ func (UnimplementedProviderServer) StopProvider(context.Context, *StopProvider_R
return nil, status.Errorf(codes.Unimplemented, "method StopProvider not implemented")
}
func (UnimplementedProviderServer) mustEmbedUnimplementedProviderServer() {}
+func (UnimplementedProviderServer) testEmbeddedByValue() {}
// UnsafeProviderServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to ProviderServer will
@@ -344,6 +430,13 @@ type UnsafeProviderServer interface {
}
func RegisterProviderServer(s grpc.ServiceRegistrar, srv ProviderServer) {
+ // If the following call panics, it indicates UnimplementedProviderServer was
+ // embedded by pointer and is nil. This will cause panics if an
+ // unimplemented method is ever invoked, so we test this at initialization
+ // time to prevent it from happening at runtime later due to I/O.
+ if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
+ t.testEmbeddedByValue()
+ }
s.RegisterService(&Provider_ServiceDesc, srv)
}
@@ -581,6 +674,78 @@ func _Provider_ReadDataSource_Handler(srv interface{}, ctx context.Context, dec
return interceptor(ctx, in, info, handler)
}
+func _Provider_ValidateEphemeralResourceConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(ValidateEphemeralResourceConfig_Request)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(ProviderServer).ValidateEphemeralResourceConfig(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: Provider_ValidateEphemeralResourceConfig_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(ProviderServer).ValidateEphemeralResourceConfig(ctx, req.(*ValidateEphemeralResourceConfig_Request))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _Provider_OpenEphemeralResource_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(OpenEphemeralResource_Request)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(ProviderServer).OpenEphemeralResource(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: Provider_OpenEphemeralResource_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(ProviderServer).OpenEphemeralResource(ctx, req.(*OpenEphemeralResource_Request))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _Provider_RenewEphemeralResource_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(RenewEphemeralResource_Request)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(ProviderServer).RenewEphemeralResource(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: Provider_RenewEphemeralResource_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(ProviderServer).RenewEphemeralResource(ctx, req.(*RenewEphemeralResource_Request))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _Provider_CloseEphemeralResource_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(CloseEphemeralResource_Request)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(ProviderServer).CloseEphemeralResource(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: Provider_CloseEphemeralResource_FullMethodName,
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(ProviderServer).CloseEphemeralResource(ctx, req.(*CloseEphemeralResource_Request))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
func _Provider_GetFunctions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetFunctions_Request)
if err := dec(in); err != nil {
@@ -694,6 +859,22 @@ var Provider_ServiceDesc = grpc.ServiceDesc{
MethodName: "ReadDataSource",
Handler: _Provider_ReadDataSource_Handler,
},
+ {
+ MethodName: "ValidateEphemeralResourceConfig",
+ Handler: _Provider_ValidateEphemeralResourceConfig_Handler,
+ },
+ {
+ MethodName: "OpenEphemeralResource",
+ Handler: _Provider_OpenEphemeralResource_Handler,
+ },
+ {
+ MethodName: "RenewEphemeralResource",
+ Handler: _Provider_RenewEphemeralResource_Handler,
+ },
+ {
+ MethodName: "CloseEphemeralResource",
+ Handler: _Provider_CloseEphemeralResource_Handler,
+ },
{
MethodName: "GetFunctions",
Handler: _Provider_GetFunctions_Handler,
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/toproto/ephemeral_resource.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/toproto/ephemeral_resource.go
new file mode 100644
index 0000000000..0c0439b6bd
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/toproto/ephemeral_resource.go
@@ -0,0 +1,65 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package toproto
+
+import (
+ "github.com/hashicorp/terraform-plugin-go/tfprotov6"
+ "github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/tfplugin6"
+)
+
+func GetMetadata_EphemeralResourceMetadata(in *tfprotov6.EphemeralResourceMetadata) *tfplugin6.GetMetadata_EphemeralResourceMetadata {
+ if in == nil {
+ return nil
+ }
+
+ return &tfplugin6.GetMetadata_EphemeralResourceMetadata{
+ TypeName: in.TypeName,
+ }
+}
+
+func ValidateEphemeralResourceConfig_Response(in *tfprotov6.ValidateEphemeralResourceConfigResponse) *tfplugin6.ValidateEphemeralResourceConfig_Response {
+ if in == nil {
+ return nil
+ }
+
+ return &tfplugin6.ValidateEphemeralResourceConfig_Response{
+ Diagnostics: Diagnostics(in.Diagnostics),
+ }
+}
+
+func OpenEphemeralResource_Response(in *tfprotov6.OpenEphemeralResourceResponse) *tfplugin6.OpenEphemeralResource_Response {
+ if in == nil {
+ return nil
+ }
+
+ return &tfplugin6.OpenEphemeralResource_Response{
+ Result: DynamicValue(in.Result),
+ Diagnostics: Diagnostics(in.Diagnostics),
+ Private: in.Private,
+ RenewAt: Timestamp(in.RenewAt),
+ Deferred: Deferred(in.Deferred),
+ }
+}
+
+func RenewEphemeralResource_Response(in *tfprotov6.RenewEphemeralResourceResponse) *tfplugin6.RenewEphemeralResource_Response {
+ if in == nil {
+ return nil
+ }
+
+ return &tfplugin6.RenewEphemeralResource_Response{
+ Diagnostics: Diagnostics(in.Diagnostics),
+ Private: in.Private,
+ RenewAt: Timestamp(in.RenewAt),
+ }
+}
+
+func CloseEphemeralResource_Response(in *tfprotov6.CloseEphemeralResourceResponse) *tfplugin6.CloseEphemeralResource_Response {
+ if in == nil {
+ return nil
+ }
+
+ return &tfplugin6.CloseEphemeralResource_Response{
+ Diagnostics: Diagnostics(in.Diagnostics),
+ }
+}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/toproto/provider.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/toproto/provider.go
index 7b283c9d47..b0a4c31490 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/toproto/provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/toproto/provider.go
@@ -16,6 +16,7 @@ func GetMetadata_Response(in *tfprotov6.GetMetadataResponse) *tfplugin6.GetMetad
resp := &tfplugin6.GetMetadata_Response{
DataSources: make([]*tfplugin6.GetMetadata_DataSourceMetadata, 0, len(in.DataSources)),
Diagnostics: Diagnostics(in.Diagnostics),
+ EphemeralResources: make([]*tfplugin6.GetMetadata_EphemeralResourceMetadata, 0, len(in.EphemeralResources)),
Functions: make([]*tfplugin6.GetMetadata_FunctionMetadata, 0, len(in.Functions)),
Resources: make([]*tfplugin6.GetMetadata_ResourceMetadata, 0, len(in.Resources)),
ServerCapabilities: ServerCapabilities(in.ServerCapabilities),
@@ -25,6 +26,10 @@ func GetMetadata_Response(in *tfprotov6.GetMetadataResponse) *tfplugin6.GetMetad
resp.DataSources = append(resp.DataSources, GetMetadata_DataSourceMetadata(&datasource))
}
+ for _, ephemeralResource := range in.EphemeralResources {
+ resp.EphemeralResources = append(resp.EphemeralResources, GetMetadata_EphemeralResourceMetadata(&ephemeralResource))
+ }
+
for _, function := range in.Functions {
resp.Functions = append(resp.Functions, GetMetadata_FunctionMetadata(&function))
}
@@ -42,13 +47,18 @@ func GetProviderSchema_Response(in *tfprotov6.GetProviderSchemaResponse) *tfplug
}
resp := &tfplugin6.GetProviderSchema_Response{
- DataSourceSchemas: make(map[string]*tfplugin6.Schema, len(in.DataSourceSchemas)),
- Diagnostics: Diagnostics(in.Diagnostics),
- Functions: make(map[string]*tfplugin6.Function, len(in.Functions)),
- Provider: Schema(in.Provider),
- ProviderMeta: Schema(in.ProviderMeta),
- ResourceSchemas: make(map[string]*tfplugin6.Schema, len(in.ResourceSchemas)),
- ServerCapabilities: ServerCapabilities(in.ServerCapabilities),
+ DataSourceSchemas: make(map[string]*tfplugin6.Schema, len(in.DataSourceSchemas)),
+ Diagnostics: Diagnostics(in.Diagnostics),
+ EphemeralResourceSchemas: make(map[string]*tfplugin6.Schema, len(in.EphemeralResourceSchemas)),
+ Functions: make(map[string]*tfplugin6.Function, len(in.Functions)),
+ Provider: Schema(in.Provider),
+ ProviderMeta: Schema(in.ProviderMeta),
+ ResourceSchemas: make(map[string]*tfplugin6.Schema, len(in.ResourceSchemas)),
+ ServerCapabilities: ServerCapabilities(in.ServerCapabilities),
+ }
+
+ for name, schema := range in.EphemeralResourceSchemas {
+ resp.EphemeralResourceSchemas[name] = Schema(schema)
}
for name, schema := range in.ResourceSchemas {
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/toproto/schema.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/toproto/schema.go
index fb46bd676d..d5f118ffef 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/toproto/schema.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/toproto/schema.go
@@ -54,6 +54,7 @@ func Schema_Attribute(in *tfprotov6.SchemaAttribute) *tfplugin6.Schema_Attribute
Required: in.Required,
Sensitive: in.Sensitive,
Type: CtyType(in.Type),
+ WriteOnly: in.WriteOnly,
}
return resp
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/toproto/timestamp.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/toproto/timestamp.go
new file mode 100644
index 0000000000..3ee28365e9
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/internal/toproto/timestamp.go
@@ -0,0 +1,18 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package toproto
+
+import (
+ "time"
+
+ "google.golang.org/protobuf/types/known/timestamppb"
+)
+
+func Timestamp(in time.Time) *timestamppb.Timestamp {
+ if in.IsZero() {
+ return nil
+ }
+
+ return timestamppb.New(in)
+}
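
As context for the helper above (a sketch only, mirroring the vendored function rather than importing the internal toproto package): the time.Time zero value maps to nil, so an unset RenewAt simply omits the optional renew_at field on the wire.

package provider // hypothetical package name

import (
	"fmt"
	"time"

	"google.golang.org/protobuf/types/known/timestamppb"
)

// toTimestamp mirrors the vendored Timestamp helper: the time.Time zero value
// becomes nil, so optional timestamp fields such as renew_at are omitted.
func toTimestamp(in time.Time) *timestamppb.Timestamp {
	if in.IsZero() {
		return nil
	}
	return timestamppb.New(in)
}

func demoTimestamps() {
	var unset time.Time
	fmt.Println(toTimestamp(unset))                     // <nil>: renew_at omitted
	fmt.Println(toTimestamp(time.Now().Add(time.Hour))) // populated protobuf timestamp
}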
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/provider.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/provider.go
index a5185138f0..6776a9d0b2 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/provider.go
@@ -54,6 +54,13 @@ type ProviderServer interface {
// terraform-plugin-go, so they are their own interface that is composed
// into ProviderServer.
FunctionServer
+
+ // EphemeralResourceServer is an interface encapsulating all the ephemeral
+ // resource-related RPC requests. ProviderServer implementations must
+ // implement them, but they are a handy interface for defining what an
+ // ephemeral resource is to terraform-plugin-go, so they're their own
+ // interface that is composed into ProviderServer.
+ EphemeralResourceServer
}
// GetMetadataRequest represents a GetMetadata RPC request.
@@ -78,6 +85,9 @@ type GetMetadataResponse struct {
// Resources returns metadata for all managed resources.
Resources []ResourceMetadata
+
+ // EphemeralResources returns metadata for all ephemeral resources.
+ EphemeralResources []EphemeralResourceMetadata
}
// GetProviderSchemaRequest represents a Terraform RPC request for the
@@ -124,6 +134,13 @@ type GetProviderSchemaResponse struct {
// includes the provider name.
Functions map[string]*Function
+ // EphemeralResourceSchemas is a map of ephemeral resource names to the schema for
+ // the configuration specified in the ephemeral resource. The name should be an
+ // ephemeral resource name, and should be prefixed with your provider's
+ // shortname and an underscore. It should match the first label after
+ // `ephemeral` in a user's configuration.
+ EphemeralResourceSchemas map[string]*Schema
+
// Diagnostics report errors or warnings related to returning the
// provider's schemas. Returning an empty slice indicates success, with
// no errors or warnings generated.
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/resource.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/resource.go
index bf1a6e387b..4ae1d372df 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/resource.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/resource.go
@@ -65,26 +65,6 @@ type ResourceServer interface {
MoveResourceState(context.Context, *MoveResourceStateRequest) (*MoveResourceStateResponse, error)
}
-// ResourceServerWithMoveResourceState is a temporary interface for servers
-// to implement MoveResourceState RPC handling.
-//
-// Deprecated: This interface will be removed in a future version. Use
-// ResourceServer instead.
-type ResourceServerWithMoveResourceState interface {
- ResourceServer
-
- // MoveResourceState is called when Terraform is asked to change a resource
- // type for an existing resource. The provider must accept the change as
- // valid by ensuring the source resource type, schema version, and provider
- // address are compatible to convert the source state into the target
- // resource type and latest state version.
- //
- // This functionality is only supported in Terraform 1.8 and later. The
- // provider must have enabled the MoveResourceState server capability to
- // enable these requests.
- MoveResourceState(context.Context, *MoveResourceStateRequest) (*MoveResourceStateResponse, error)
-}
-
// ValidateResourceConfigRequest is the request Terraform sends when it
// wants to validate a resource's configuration.
type ValidateResourceConfigRequest struct {
@@ -103,6 +83,10 @@ type ValidateResourceConfigRequest struct {
// from knowing the value at request time. Any attributes not directly
// set in the configuration will be null.
Config *DynamicValue
+
+ // ClientCapabilities defines optionally supported protocol features for the
+ // ValidateResourceConfig RPC, such as forward-compatible Terraform behavior changes.
+ ClientCapabilities *ValidateResourceConfigClientCapabilities
}
// ValidateResourceConfigResponse is the response from the provider about
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/schema.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/schema.go
index b368c620fb..c287de966f 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/schema.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/schema.go
@@ -253,6 +253,12 @@ type SchemaAttribute struct {
// experiences. Providers should set it when deprecating attributes in
// preparation for these tools.
Deprecated bool
+
+ // WriteOnly, when set to true, indicates that the attribute value will
+ // be provided via configuration and must be omitted from plan and state response objects. WriteOnly
+ // must be combined with Optional or Required, and is only valid for managed
+ // resource schemas.
+ WriteOnly bool
}
// ValueType returns the tftypes.Type for a SchemaAttribute.
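
A brief sketch of the new flag in use (illustrative only; the attribute name is hypothetical): a write-only attribute is supplied in configuration but never persisted to plan or state, and it must also be marked Required or Optional.

package provider // hypothetical package name

import (
	"github.com/hashicorp/terraform-plugin-go/tfprotov6"
	"github.com/hashicorp/terraform-plugin-go/tftypes"
)

// adminPasswordAttribute shows WriteOnly combined with Required and Sensitive
// on a managed resource schema attribute.
var adminPasswordAttribute = &tfprotov6.SchemaAttribute{
	Name:      "admin_password",
	Type:      tftypes.String,
	Required:  true, // WriteOnly must be paired with Required or Optional
	Sensitive: true,
	WriteOnly: true, // provided via configuration, omitted from plan and state
}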
diff --git a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/tf6server/server.go b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/tf6server/server.go
index cb79928c17..6c50b47336 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/tf6server/server.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-go/tfprotov6/tf6server/server.go
@@ -49,7 +49,7 @@ const (
//
// In the future, it may be possible to include this information directly
// in the protocol buffers rather than recreating a constant here.
- protocolVersionMinor uint = 6
+ protocolVersionMinor uint = 8
)
// protocolVersion represents the combined major and minor version numbers of
@@ -717,6 +717,7 @@ func (s *server) ValidateResourceConfig(ctx context.Context, protoReq *tfplugin6
req := fromproto.ValidateResourceConfigRequest(protoReq)
+ tf6serverlogging.ValidateResourceConfigClientCapabilities(ctx, req.ClientCapabilities)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "Config", req.Config)
ctx = tf6serverlogging.DownstreamRequest(ctx)
@@ -1003,6 +1004,121 @@ func (s *server) GetFunctions(ctx context.Context, protoReq *tfplugin6.GetFuncti
return protoResp, nil
}
+func (s *server) ValidateEphemeralResourceConfig(ctx context.Context, protoReq *tfplugin6.ValidateEphemeralResourceConfig_Request) (*tfplugin6.ValidateEphemeralResourceConfig_Response, error) {
+ rpc := "ValidateEphemeralResourceConfig"
+ ctx = s.loggingContext(ctx)
+ ctx = logging.RpcContext(ctx, rpc)
+ ctx = logging.EphemeralResourceContext(ctx, protoReq.TypeName)
+ ctx = s.stoppableContext(ctx)
+ logging.ProtocolTrace(ctx, "Received request")
+ defer logging.ProtocolTrace(ctx, "Served request")
+
+ req := fromproto.ValidateEphemeralResourceConfigRequest(protoReq)
+
+ logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "Config", req.Config)
+
+ ctx = tf6serverlogging.DownstreamRequest(ctx)
+
+ resp, err := s.downstream.ValidateEphemeralResourceConfig(ctx, req)
+ if err != nil {
+ logging.ProtocolError(ctx, "Error from downstream", map[string]any{logging.KeyError: err})
+ return nil, err
+ }
+
+ tf6serverlogging.DownstreamResponse(ctx, resp.Diagnostics)
+
+ protoResp := toproto.ValidateEphemeralResourceConfig_Response(resp)
+
+ return protoResp, nil
+}
+
+func (s *server) OpenEphemeralResource(ctx context.Context, protoReq *tfplugin6.OpenEphemeralResource_Request) (*tfplugin6.OpenEphemeralResource_Response, error) {
+ rpc := "OpenEphemeralResource"
+ ctx = s.loggingContext(ctx)
+ ctx = logging.RpcContext(ctx, rpc)
+ ctx = logging.EphemeralResourceContext(ctx, protoReq.TypeName)
+ ctx = s.stoppableContext(ctx)
+ logging.ProtocolTrace(ctx, "Received request")
+ defer logging.ProtocolTrace(ctx, "Served request")
+
+ req := fromproto.OpenEphemeralResourceRequest(protoReq)
+
+ tf6serverlogging.OpenEphemeralResourceClientCapabilities(ctx, req.ClientCapabilities)
+ logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "Config", req.Config)
+
+ ctx = tf6serverlogging.DownstreamRequest(ctx)
+
+ resp, err := s.downstream.OpenEphemeralResource(ctx, req)
+ if err != nil {
+ logging.ProtocolError(ctx, "Error from downstream", map[string]any{logging.KeyError: err})
+ return nil, err
+ }
+
+ tf6serverlogging.DownstreamResponse(ctx, resp.Diagnostics)
+ logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response", "Result", resp.Result)
+ tf6serverlogging.Deferred(ctx, resp.Deferred)
+
+ if resp.Deferred != nil && (req.ClientCapabilities == nil || !req.ClientCapabilities.DeferralAllowed) {
+ resp.Diagnostics = append(resp.Diagnostics, invalidDeferredResponseDiag(resp.Deferred.Reason))
+ }
+
+ protoResp := toproto.OpenEphemeralResource_Response(resp)
+
+ return protoResp, nil
+}
+
+func (s *server) RenewEphemeralResource(ctx context.Context, protoReq *tfplugin6.RenewEphemeralResource_Request) (*tfplugin6.RenewEphemeralResource_Response, error) {
+ rpc := "RenewEphemeralResource"
+ ctx = s.loggingContext(ctx)
+ ctx = logging.RpcContext(ctx, rpc)
+ ctx = logging.EphemeralResourceContext(ctx, protoReq.TypeName)
+ ctx = s.stoppableContext(ctx)
+ logging.ProtocolTrace(ctx, "Received request")
+ defer logging.ProtocolTrace(ctx, "Served request")
+
+ req := fromproto.RenewEphemeralResourceRequest(protoReq)
+
+ ctx = tf6serverlogging.DownstreamRequest(ctx)
+
+ resp, err := s.downstream.RenewEphemeralResource(ctx, req)
+ if err != nil {
+ logging.ProtocolError(ctx, "Error from downstream", map[string]any{logging.KeyError: err})
+ return nil, err
+ }
+
+ tf6serverlogging.DownstreamResponse(ctx, resp.Diagnostics)
+
+ protoResp := toproto.RenewEphemeralResource_Response(resp)
+
+ return protoResp, nil
+}
+
+func (s *server) CloseEphemeralResource(ctx context.Context, protoReq *tfplugin6.CloseEphemeralResource_Request) (*tfplugin6.CloseEphemeralResource_Response, error) {
+ rpc := "CloseEphemeralResource"
+ ctx = s.loggingContext(ctx)
+ ctx = logging.RpcContext(ctx, rpc)
+ ctx = logging.EphemeralResourceContext(ctx, protoReq.TypeName)
+ ctx = s.stoppableContext(ctx)
+ logging.ProtocolTrace(ctx, "Received request")
+ defer logging.ProtocolTrace(ctx, "Served request")
+
+ req := fromproto.CloseEphemeralResourceRequest(protoReq)
+
+ ctx = tf6serverlogging.DownstreamRequest(ctx)
+
+ resp, err := s.downstream.CloseEphemeralResource(ctx, req)
+ if err != nil {
+ logging.ProtocolError(ctx, "Error from downstream", map[string]any{logging.KeyError: err})
+ return nil, err
+ }
+
+ tf6serverlogging.DownstreamResponse(ctx, resp.Diagnostics)
+
+ protoResp := toproto.CloseEphemeralResource_Response(resp)
+
+ return protoResp, nil
+}
+
func invalidDeferredResponseDiag(reason tfprotov6.DeferredReason) *tfprotov6.Diagnostic {
return &tfprotov6.Diagnostic{
Severity: tfprotov6.DiagnosticSeverityError,
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest/random.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest/random.go
index c26303eb60..382163cbc2 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest/random.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest/random.go
@@ -34,9 +34,9 @@ func RandomWithPrefix(name string) string {
return fmt.Sprintf("%s-%d", name, RandInt())
}
-// RandIntRange returns a random integer between min (inclusive) and max (exclusive)
-func RandIntRange(min int, max int) int {
- return rand.Intn(max-min) + min
+// RandIntRange returns a random integer between minVal (inclusive) and maxVal (exclusive)
+func RandIntRange(minVal int, maxVal int) int {
+ return rand.Intn(maxVal-minVal) + minVal
}
// RandString generates a random alphanumeric string of the length specified
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging/logging.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging/logging.go
index ea0764d13a..d012245f03 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging/logging.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging/logging.go
@@ -90,7 +90,7 @@ func LogOutput(t testing.T) (logOutput io.Writer, err error) {
// SetOutput checks for a log destination with LogOutput, and calls
// log.SetOutput with the result. If LogOutput returns nil, SetOutput uses
-// io.Discard. Any error from LogOutout is fatal.
+// io.Discard. Any error from LogOutput is fatal.
func SetOutput(t testing.T) {
out, err := LogOutput(t)
if err != nil {
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource/testing.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource/testing.go
index 9bde8e22aa..ac575ed43c 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource/testing.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource/testing.go
@@ -28,7 +28,7 @@ import (
)
// flagSweep is a flag available when running tests on the command line. It
-// contains a comma seperated list of regions to for the sweeper functions to
+// contains a comma separated list of regions for the sweeper functions to
// run in. This flag bypasses the normal Test path and instead runs functions designed to
// clean up any leaked resources a testing environment could have created. It is
// a best effort attempt, and relies on Provider authors to implement "Sweeper"
@@ -49,7 +49,7 @@ import (
var flagSweep = flag.String("sweep", "", "List of Regions to run available Sweepers")
var flagSweepAllowFailures = flag.Bool("sweep-allow-failures", false, "Enable to allow Sweeper Tests to continue after failures")
-var flagSweepRun = flag.String("sweep-run", "", "Comma seperated list of Sweeper Tests to run")
+var flagSweepRun = flag.String("sweep-run", "", "Comma separated list of Sweeper Tests to run")
var sweeperFuncs map[string]*Sweeper
// SweeperFunc is a signature for a function that acts as a sweeper. It
@@ -103,7 +103,7 @@ func AddTestSweepers(name string, s *Sweeper) {
// Sweeper flags added to the "go test" command:
//
// -sweep: Comma-separated list of locations/regions to run available sweepers.
-// -sweep-allow-failues: Enable to allow other sweepers to run after failures.
+// -sweep-allow-failures: Enable to allow other sweepers to run after failures.
// -sweep-run: Comma-separated list of resource type sweepers to run. Defaults
// to all sweepers.
//
@@ -183,7 +183,7 @@ func runSweepers(regions []string, sweepers map[string]*Sweeper, allowFailures b
return sweeperRunList, nil
}
-// filterSweepers takes a comma seperated string listing the names of sweepers
+// filterSweepers takes a comma separated string listing the names of sweepers
// to be run, and returns a filtered set from the list of all sweepers to
// run based on the names given.
func filterSweepers(f string, source map[string]*Sweeper) map[string]*Sweeper {
@@ -230,7 +230,7 @@ func filterSweeperWithDependencies(name string, source map[string]*Sweeper) map[
return result
}
-// runSweeperWithRegion recieves a sweeper and a region, and recursively calls
+// runSweeperWithRegion receives a sweeper and a region, and recursively calls
// itself with that region for every dependency found for that sweeper. If there
// are no dependencies, invoke the contained sweeper func with the region, and
// add the success/fail status to the sweeperRunList.
@@ -812,7 +812,7 @@ func testResource(c TestStep, state *terraform.State) (*terraform.ResourceState,
// into smaller pieces more easily.
//
// ComposeTestCheckFunc returns immediately on the first TestCheckFunc error.
-// To aggregrate all errors, use ComposeAggregateTestCheckFunc instead.
+// To aggregate all errors, use ComposeAggregateTestCheckFunc instead.
func ComposeTestCheckFunc(fs ...TestCheckFunc) TestCheckFunc {
return func(s *terraform.State) error {
for i, f := range fs {
@@ -831,7 +831,7 @@ func ComposeTestCheckFunc(fs ...TestCheckFunc) TestCheckFunc {
// As a user testing their provider, this lets you decompose your checks
// into smaller pieces more easily.
//
-// Unlike ComposeTestCheckFunc, ComposeAggergateTestCheckFunc runs _all_ of the
+// Unlike ComposeTestCheckFunc, ComposeAggregateTestCheckFunc runs _all_ of the
// TestCheckFuncs and aggregates failures.
func ComposeAggregateTestCheckFunc(fs ...TestCheckFunc) TestCheckFunc {
return func(s *terraform.State) error {
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource/testing_sets.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource/testing_sets.go
index 8f5a731c32..45cce95732 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource/testing_sets.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource/testing_sets.go
@@ -20,7 +20,7 @@ const (
// TestCheckTypeSetElemNestedAttrs ensures a subset map of values is stored in
// state for the given name and key combination of attributes nested under a
// list or set block. Use this TestCheckFunc in preference over non-set
-// variants to simplify testing code and ensure compatibility with indicies,
+// variants to simplify testing code and ensure compatibility with indices,
// which can easily change with schema changes. State value checking is only
// recommended for testing Computed attributes and attribute defaults.
//
@@ -92,7 +92,7 @@ func TestCheckTypeSetElemNestedAttrs(name, attr string, values map[string]string
// regular expressions, is stored in state for the given name and key
// combination of attributes nested under a list or set block. Use this
// TestCheckFunc in preference over non-set variants to simplify testing code
-// and ensure compatibility with indicies, which can easily change with schema
+// and ensure compatibility with indices, which can easily change with schema
// changes. State value checking is only recommended for testing Computed
// attributes and attribute defaults.
//
@@ -171,7 +171,7 @@ func TestMatchTypeSetElemNestedAttrs(name, attr string, values map[string]*regex
// TestCheckTypeSetElemAttr ensures a specific value is stored in state for the
// given name and key combination under a list or set. Use this TestCheckFunc
// in preference over non-set variants to simplify testing code and ensure
-// compatibility with indicies, which can easily change with schema changes.
+// compatibility with indices, which can easily change with schema changes.
// State value checking is only recommended for testing Computed attributes and
// attribute defaults.
//
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/core_schema.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/core_schema.go
index 736af218da..9247adde7e 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/core_schema.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/core_schema.go
@@ -123,7 +123,7 @@ func (m schemaMap) CoreConfigSchema() *configschema.Block {
// whose elem is a whole resource.
func (s *Schema) coreConfigSchemaAttribute() *configschema.Attribute {
// The Schema.DefaultFunc capability adds some extra weirdness here since
- // it can be combined with "Required: true" to create a sitution where
+ // it can be combined with "Required: true" to create a situation where
// required-ness is conditional. Terraform Core doesn't share this concept,
// so we must sniff for this possibility here and conditionally turn
// off the "Required" flag if it looks like the DefaultFunc is going
@@ -167,6 +167,7 @@ func (s *Schema) coreConfigSchemaAttribute() *configschema.Attribute {
Description: desc,
DescriptionKind: descKind,
Deprecated: s.Deprecated != "",
+ WriteOnly: s.WriteOnly,
}
}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/field_reader_config.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/field_reader_config.go
index df317c20bb..91b7412fcc 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/field_reader_config.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/field_reader_config.go
@@ -269,7 +269,7 @@ func (r *ConfigFieldReader) readSet(
return FieldReadResult{Value: set}, nil
}
- // If the list is computed, the set is necessarilly computed
+ // If the list is computed, the set is necessarily computed
if raw.Computed {
return FieldReadResult{
Value: set,
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/grpc_provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/grpc_provider.go
index ec5d74301a..f942814806 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/grpc_provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/grpc_provider.go
@@ -84,6 +84,7 @@ func (s *GRPCProviderServer) GetMetadata(ctx context.Context, req *tfprotov5.Get
resp := &tfprotov5.GetMetadataResponse{
DataSources: make([]tfprotov5.DataSourceMetadata, 0, len(s.provider.DataSourcesMap)),
+ EphemeralResources: make([]tfprotov5.EphemeralResourceMetadata, 0),
Functions: make([]tfprotov5.FunctionMetadata, 0),
Resources: make([]tfprotov5.ResourceMetadata, 0, len(s.provider.ResourcesMap)),
ServerCapabilities: s.serverCapabilities(),
@@ -110,10 +111,11 @@ func (s *GRPCProviderServer) GetProviderSchema(ctx context.Context, req *tfproto
logging.HelperSchemaTrace(ctx, "Getting provider schema")
resp := &tfprotov5.GetProviderSchemaResponse{
- DataSourceSchemas: make(map[string]*tfprotov5.Schema, len(s.provider.DataSourcesMap)),
- Functions: make(map[string]*tfprotov5.Function, 0),
- ResourceSchemas: make(map[string]*tfprotov5.Schema, len(s.provider.ResourcesMap)),
- ServerCapabilities: s.serverCapabilities(),
+ DataSourceSchemas: make(map[string]*tfprotov5.Schema, len(s.provider.DataSourcesMap)),
+ EphemeralResourceSchemas: make(map[string]*tfprotov5.Schema, 0),
+ Functions: make(map[string]*tfprotov5.Function, 0),
+ ResourceSchemas: make(map[string]*tfprotov5.Schema, len(s.provider.ResourcesMap)),
+ ServerCapabilities: s.serverCapabilities(),
}
resp.Provider = &tfprotov5.Schema{
@@ -281,6 +283,32 @@ func (s *GRPCProviderServer) ValidateResourceTypeConfig(ctx context.Context, req
resp.Diagnostics = convert.AppendProtoDiag(ctx, resp.Diagnostics, err)
return resp, nil
}
+ if req.ClientCapabilities == nil || !req.ClientCapabilities.WriteOnlyAttributesAllowed {
+ resp.Diagnostics = convert.AppendProtoDiag(ctx, resp.Diagnostics, validateWriteOnlyNullValues(configVal, schemaBlock, cty.Path{}))
+ }
+
+ r := s.provider.ResourcesMap[req.TypeName]
+
+	// Calling all ValidateRawResourceConfigFuncs here since they validate on the raw go-cty config value
+ // and were introduced after the public provider.ValidateResource method.
+ if r.ValidateRawResourceConfigFuncs != nil {
+ writeOnlyAllowed := false
+
+ if req.ClientCapabilities != nil {
+ writeOnlyAllowed = req.ClientCapabilities.WriteOnlyAttributesAllowed
+ }
+
+ validateReq := ValidateResourceConfigFuncRequest{
+ WriteOnlyAttributesAllowed: writeOnlyAllowed,
+ RawConfig: configVal,
+ }
+
+ for _, validateFunc := range r.ValidateRawResourceConfigFuncs {
+ validateResp := &ValidateResourceConfigFuncResponse{}
+ validateFunc(ctx, validateReq, validateResp)
+ resp.Diagnostics = convert.AppendProtoDiag(ctx, resp.Diagnostics, validateResp.Diagnostics)
+ }
+ }
config := terraform.NewResourceConfigShimmed(configVal, schemaBlock)
@@ -392,6 +420,9 @@ func (s *GRPCProviderServer) UpgradeResourceState(ctx context.Context, req *tfpr
// Normalize the value and fill in any missing blocks.
val = objchange.NormalizeObjectFromLegacySDK(val, schemaBlock)
+ // Set any write-only attribute values to null
+ val = setWriteOnlyNullValues(val, schemaBlock)
+
// encode the final state to the expected msgpack format
newStateMP, err := msgpack.Marshal(val, schemaBlock.ImpliedType())
if err != nil {
@@ -501,7 +532,7 @@ func (s *GRPCProviderServer) upgradeJSONState(ctx context.Context, version int,
// Remove any attributes no longer present in the schema, so that the json can
// be correctly decoded.
func (s *GRPCProviderServer) removeAttributes(ctx context.Context, v interface{}, ty cty.Type) {
- // we're only concerned with finding maps that corespond to object
+ // we're only concerned with finding maps that correspond to object
// attributes
switch v := v.(type) {
case []interface{}:
@@ -736,6 +767,7 @@ func (s *GRPCProviderServer) ReadResource(ctx context.Context, req *tfprotov5.Re
newStateVal = normalizeNullValues(newStateVal, stateVal, false)
newStateVal = copyTimeoutValues(newStateVal, stateVal)
+ newStateVal = setWriteOnlyNullValues(newStateVal, schemaBlock)
newStateMP, err := msgpack.Marshal(newStateVal, schemaBlock.ImpliedType())
if err != nil {
@@ -935,6 +967,9 @@ func (s *GRPCProviderServer) PlanResourceChange(ctx context.Context, req *tfprot
plannedStateVal = SetUnknowns(plannedStateVal, schemaBlock)
}
+ // Set any write-only attribute values to null
+ plannedStateVal = setWriteOnlyNullValues(plannedStateVal, schemaBlock)
+
plannedMP, err := msgpack.Marshal(plannedStateVal, schemaBlock.ImpliedType())
if err != nil {
resp.Diagnostics = convert.AppendProtoDiag(ctx, resp.Diagnostics, err)
@@ -1182,6 +1217,8 @@ func (s *GRPCProviderServer) ApplyResourceChange(ctx context.Context, req *tfpro
newStateVal = copyTimeoutValues(newStateVal, plannedStateVal)
+ newStateVal = setWriteOnlyNullValues(newStateVal, schemaBlock)
+
newStateMP, err := msgpack.Marshal(newStateVal, schemaBlock.ImpliedType())
if err != nil {
resp.Diagnostics = convert.AppendProtoDiag(ctx, resp.Diagnostics, err)
@@ -1303,6 +1340,9 @@ func (s *GRPCProviderServer) ImportResourceState(ctx context.Context, req *tfpro
newStateVal = cty.ObjectVal(newStateValueMap)
}
+ // Set any write-only attribute values to null
+ newStateVal = setWriteOnlyNullValues(newStateVal, schemaBlock)
+
newStateMP, err := msgpack.Marshal(newStateVal, schemaBlock.ImpliedType())
if err != nil {
resp.Diagnostics = convert.AppendProtoDiag(ctx, resp.Diagnostics, err)
@@ -1482,6 +1522,78 @@ func (s *GRPCProviderServer) GetFunctions(ctx context.Context, req *tfprotov5.Ge
return resp, nil
}
+func (s *GRPCProviderServer) ValidateEphemeralResourceConfig(ctx context.Context, req *tfprotov5.ValidateEphemeralResourceConfigRequest) (*tfprotov5.ValidateEphemeralResourceConfigResponse, error) {
+ ctx = logging.InitContext(ctx)
+
+ logging.HelperSchemaTrace(ctx, "Returning error for ephemeral resource validate")
+
+ resp := &tfprotov5.ValidateEphemeralResourceConfigResponse{
+ Diagnostics: []*tfprotov5.Diagnostic{
+ {
+ Severity: tfprotov5.DiagnosticSeverityError,
+ Summary: "Unknown Ephemeral Resource Type",
+ Detail: fmt.Sprintf("The %q ephemeral resource type is not supported by this provider.", req.TypeName),
+ },
+ },
+ }
+
+ return resp, nil
+}
+
+func (s *GRPCProviderServer) OpenEphemeralResource(ctx context.Context, req *tfprotov5.OpenEphemeralResourceRequest) (*tfprotov5.OpenEphemeralResourceResponse, error) {
+ ctx = logging.InitContext(ctx)
+
+ logging.HelperSchemaTrace(ctx, "Returning error for ephemeral resource open")
+
+ resp := &tfprotov5.OpenEphemeralResourceResponse{
+ Diagnostics: []*tfprotov5.Diagnostic{
+ {
+ Severity: tfprotov5.DiagnosticSeverityError,
+ Summary: "Unknown Ephemeral Resource Type",
+ Detail: fmt.Sprintf("The %q ephemeral resource type is not supported by this provider.", req.TypeName),
+ },
+ },
+ }
+
+ return resp, nil
+}
+
+func (s *GRPCProviderServer) RenewEphemeralResource(ctx context.Context, req *tfprotov5.RenewEphemeralResourceRequest) (*tfprotov5.RenewEphemeralResourceResponse, error) {
+ ctx = logging.InitContext(ctx)
+
+ logging.HelperSchemaTrace(ctx, "Returning error for ephemeral resource renew")
+
+ resp := &tfprotov5.RenewEphemeralResourceResponse{
+ Diagnostics: []*tfprotov5.Diagnostic{
+ {
+ Severity: tfprotov5.DiagnosticSeverityError,
+ Summary: "Unknown Ephemeral Resource Type",
+ Detail: fmt.Sprintf("The %q ephemeral resource type is not supported by this provider.", req.TypeName),
+ },
+ },
+ }
+
+ return resp, nil
+}
+
+func (s *GRPCProviderServer) CloseEphemeralResource(ctx context.Context, req *tfprotov5.CloseEphemeralResourceRequest) (*tfprotov5.CloseEphemeralResourceResponse, error) {
+ ctx = logging.InitContext(ctx)
+
+ logging.HelperSchemaTrace(ctx, "Returning error for ephemeral resource close")
+
+ resp := &tfprotov5.CloseEphemeralResourceResponse{
+ Diagnostics: []*tfprotov5.Diagnostic{
+ {
+ Severity: tfprotov5.DiagnosticSeverityError,
+ Summary: "Unknown Ephemeral Resource Type",
+ Detail: fmt.Sprintf("The %q ephemeral resource type is not supported by this provider.", req.TypeName),
+ },
+ },
+ }
+
+ return resp, nil
+}
+
func pathToAttributePath(path cty.Path) *tftypes.AttributePath {
var steps []tftypes.AttributePathStep
@@ -1593,7 +1705,7 @@ func stripSchema(s *Schema) *Schema {
}
// Zero values and empty containers may be interchanged by the apply process.
-// When there is a discrepency between src and dst value being null or empty,
+// When there is a discrepancy between src and dst value being null or empty,
// prefer the src value. This takes a little more liberty with set types, since
// we can't correlate modified set values. In the case of sets, if the src set
// was wholly known we assume the value was correctly applied and copy that
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/provider.go
index a75ae2fc28..45f1e0d466 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/provider.go
@@ -13,6 +13,7 @@ import (
"strings"
"github.com/hashicorp/terraform-plugin-go/tfprotov5"
+
"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/configschema"
"github.com/hashicorp/terraform-plugin-sdk/v2/internal/logging"
@@ -192,11 +193,23 @@ func (p *Provider) InternalValidate() error {
}
var validationErrors []error
+
+ // Provider schema validation
sm := schemaMap(p.Schema)
if err := sm.InternalValidate(sm); err != nil {
validationErrors = append(validationErrors, err)
}
+ if sm.hasWriteOnly() {
+ validationErrors = append(validationErrors, fmt.Errorf("provider schema cannot contain write-only attributes"))
+ }
+
+ // Provider meta schema validation
+ providerMeta := schemaMap(p.ProviderMetaSchema)
+ if providerMeta.hasWriteOnly() {
+ validationErrors = append(validationErrors, fmt.Errorf("provider meta schema cannot contain write-only attributes"))
+ }
+
// Provider-specific checks
for k := range sm {
if isReservedProviderFieldName(k) {
@@ -214,6 +227,15 @@ func (p *Provider) InternalValidate() error {
if err := r.InternalValidate(nil, false); err != nil {
validationErrors = append(validationErrors, fmt.Errorf("data source %s: %s", k, err))
}
+
+ if len(r.ValidateRawResourceConfigFuncs) > 0 {
+ validationErrors = append(validationErrors, fmt.Errorf("data source %s cannot contain ValidateRawResourceConfigFuncs", k))
+ }
+
+ dataSourceSchema := schemaMap(r.SchemaMap())
+ if dataSourceSchema.hasWriteOnly() {
+ validationErrors = append(validationErrors, fmt.Errorf("data source %s cannot contain write-only attributes", k))
+ }
}
return errors.Join(validationErrors...)
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/resource.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/resource.go
index 1c944c9b48..32a21d2edf 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/resource.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/resource.go
@@ -189,7 +189,7 @@ type Resource struct {
// This implementation is optional. If omitted, all Schema must enable
// the ForceNew field and any practitioner changes that would have
// caused an update will instead destroy and recreate the infrastructure
- // compontent.
+ // component.
//
// The *ResourceData parameter contains the plan and state data for this
// managed resource instance. The available data in the Get* methods is the
@@ -321,7 +321,7 @@ type Resource struct {
// This implementation is optional. If omitted, all Schema must enable
// the ForceNew field and any practitioner changes that would have
// caused an update will instead destroy and recreate the infrastructure
- // compontent.
+ // component.
//
// The Context parameter stores SDK information, such as loggers and
// timeout deadlines. It also is wired to receive any cancellation from
@@ -460,7 +460,7 @@ type Resource struct {
// This implementation is optional. If omitted, all Schema must enable
// the ForceNew field and any practitioner changes that would have
// caused an update will instead destroy and recreate the infrastructure
- // compontent.
+ // component.
//
// The Context parameter stores SDK information, such as loggers. It also
// is wired to receive any cancellation from Terraform such as a system or
@@ -644,6 +644,19 @@ type Resource struct {
// ResourceBehavior is used to control SDK-specific logic when
// interacting with this resource.
ResourceBehavior ResourceBehavior
+
+ // ValidateRawResourceConfigFuncs allows functions to define arbitrary validation
+ // logic during the ValidateResourceTypeConfig RPC. ValidateRawResourceConfigFunc receives
+ // the client capabilities from the ValidateResourceTypeConfig RPC and the raw cty
+ // config value for the entire resource before it is shimmed, and it can return error
+ // diagnostics based on the inspection of those values.
+ //
+ // ValidateRawResourceConfigFuncs is only valid for Managed Resource types and will not be
+ // called for Data Resource or Provider types.
+ //
+ // Developers should prefer other validation methods first as this validation function
+ // deals with raw cty values.
+ ValidateRawResourceConfigFuncs []ValidateRawResourceConfigFunc
}
// ResourceBehavior controls SDK-specific logic when interacting
@@ -664,12 +677,31 @@ type ResourceBehavior struct {
// to change or break without warning. It is not protected by version compatibility guarantees.
type ProviderDeferredBehavior struct {
// When EnablePlanModification is true, the SDK will execute provider-defined logic
- // during plan (CustomizeDiff, Default, DiffSupressFunc, etc.) if ConfigureProvider
+ // during plan (CustomizeDiff, Default, DiffSuppressFunc, etc.) if ConfigureProvider
// returns a deferred response. The SDK will then automatically return a deferred response
// along with the modified plan.
EnablePlanModification bool
}
+// ValidateRawResourceConfigFunc is a function used to validate the raw resource config
+// and has Diagnostic support. It is only valid for Managed Resource types and will not be
+// called for Data Resource or Block types.
+type ValidateRawResourceConfigFunc func(context.Context, ValidateResourceConfigFuncRequest, *ValidateResourceConfigFuncResponse)
+
+type ValidateResourceConfigFuncRequest struct {
+ // WriteOnlyAttributesAllowed indicates that the Terraform client
+ // initiating the request supports write-only attributes for managed
+ // resources.
+ WriteOnlyAttributesAllowed bool
+
+ // The raw config value provided by Terraform core
+ RawConfig cty.Value
+}
+
+type ValidateResourceConfigFuncResponse struct {
+ Diagnostics diag.Diagnostics
+}
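// Editor's illustrative sketch, not part of the vendored upstream change: a
// minimal ValidateRawResourceConfigFunc that rejects a non-null value for a
// hypothetical write-only attribute named "password_wo" when the connecting
// Terraform client does not support write-only attributes. It assumes the
// enclosing resource schema actually defines that attribute.
func exampleValidatePasswordWriteOnly(ctx context.Context, req ValidateResourceConfigFuncRequest, resp *ValidateResourceConfigFuncResponse) {
	// Clients that advertise write-only support need no extra guard.
	if req.WriteOnlyAttributesAllowed {
		return
	}

	// RawConfig is the whole resource configuration as a cty object value;
	// guard against null or non-object values before inspecting attributes.
	if req.RawConfig.IsNull() || !req.RawConfig.Type().IsObjectType() || !req.RawConfig.Type().HasAttribute("password_wo") {
		return
	}

	if !req.RawConfig.GetAttr("password_wo").IsNull() {
		resp.Diagnostics = append(resp.Diagnostics, diag.Diagnostic{
			Severity: diag.Error,
			Summary:  "Write-only attribute not supported",
			Detail:   "password_wo requires a Terraform client that supports write-only attributes (Terraform 1.11 or later).",
		})
	}
}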
+
// SchemaMap returns the schema information for this Resource whether it is
// defined via the SchemaFunc field or Schema field. The SchemaFunc field, if
// defined, takes precedence over the Schema field.
@@ -748,7 +780,7 @@ type StateUpgrader struct {
// Upgrade takes the JSON encoded state and the provider meta value, and
// upgrades the state one single schema version. The provided state is
- // deocded into the default json types using a map[string]interface{}. It
+ // decoded into the default json types using a map[string]interface{}. It
// is up to the StateUpgradeFunc to ensure that the returned value can be
// encoded using the new schema.
Upgrade StateUpgradeFunc
@@ -873,7 +905,7 @@ func (r *Resource) Apply(
data.providerMeta = s.ProviderMeta
}
- // Instance Diff shoould have the timeout info, need to copy it over to the
+ // Instance Diff should have the timeout info, need to copy it over to the
// ResourceData meta
rt := ResourceTimeout{}
if _, ok := d.Meta[TimeoutKey]; ok {
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/resource_data.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/resource_data.go
index 4380db7e1a..5129c925c4 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/resource_data.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/resource_data.go
@@ -4,6 +4,7 @@
package schema
import (
+ "fmt"
"log"
"reflect"
"strings"
@@ -13,6 +14,8 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/hashicorp/go-cty/cty"
"github.com/hashicorp/go-cty/cty/gocty"
+
+ "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
)
@@ -82,7 +85,7 @@ func (d *ResourceData) GetChange(key string) (interface{}, interface{}) {
// GetOk returns the data for the given key and whether or not the key
// has been set to a non-zero value at some point.
//
-// The first result will not necessarilly be nil if the value doesn't exist.
+// The first result will not necessarily be nil if the value doesn't exist.
// The second result should be checked to determine this information.
func (d *ResourceData) GetOk(key string) (interface{}, bool) {
r := d.getRaw(key, getSourceSet)
@@ -604,6 +607,67 @@ func (d *ResourceData) GetRawConfig() cty.Value {
return cty.NullVal(schemaMap(d.schema).CoreConfigSchema().ImpliedType())
}
+// GetRawConfigAt is a helper method for retrieving specific values
+// from the RawConfig returned from GetRawConfig. It returns the cty.Value
+// for a given cty.Path or an error diagnostic if the value at the given path does not exist.
+//
+// GetRawConfigAt is considered advanced functionality, and
+// familiarity with the Terraform protocol is suggested when using it.
+func (d *ResourceData) GetRawConfigAt(valPath cty.Path) (cty.Value, diag.Diagnostics) {
+ rawConfig := d.GetRawConfig()
+ configVal := cty.DynamicVal
+
+ if rawConfig.IsNull() {
+ return configVal, diag.Diagnostics{
+ {
+ Severity: diag.Error,
+ Summary: "Empty Raw Config",
+ Detail: "The Terraform Provider unexpectedly received an empty configuration. " +
+ "This is almost always an issue with the Terraform Plugin SDK used to create providers. " +
+ "Please report this to the provider developers. \n\n" +
+ "The RawConfig is empty.",
+ AttributePath: valPath,
+ },
+ }
+ }
+ err := cty.Walk(rawConfig, func(path cty.Path, value cty.Value) (bool, error) {
+ if path.Equals(valPath) {
+ configVal = value
+ return false, nil
+ }
+ return true, nil
+ })
+ if err != nil {
+ return configVal, diag.Diagnostics{
+ {
+ Severity: diag.Error,
+ Summary: "Invalid config path",
+ Detail: "The Terraform Provider unexpectedly provided a path that does not match the current schema. " +
+ "This can happen if the path does not correctly follow the schema in structure or types. " +
+ "Please report this to the provider developers. \n\n" +
+ fmt.Sprintf("Encountered error while retrieving config value %s", err.Error()),
+ AttributePath: valPath,
+ },
+ }
+ }
+
+ if configVal.RawEquals(cty.DynamicVal) {
+ return configVal, diag.Diagnostics{
+ {
+ Severity: diag.Error,
+ Summary: "Invalid config path",
+ Detail: "The Terraform Provider unexpectedly provided a path that does not match the current schema. " +
+ "This can happen if the path does not correctly follow the schema in structure or types. " +
+ "Please report this to the provider developers. \n\n" +
+ "Cannot find config value for given path.",
+ AttributePath: valPath,
+ },
+ }
+ }
+
+ return configVal, nil
+}
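// Editor's illustrative sketch, not part of the vendored upstream change:
// retrieving a single attribute from the raw config by path. The attribute
// name "password_wo" is hypothetical, and the caller remains responsible for
// null and type checks because the result is a raw cty.Value.
func exampleGetWriteOnlyPassword(d *ResourceData) (string, diag.Diagnostics) {
	val, diags := d.GetRawConfigAt(cty.Path{cty.GetAttrStep{Name: "password_wo"}})
	if diags.HasError() {
		return "", diags
	}

	// Write-only values are never persisted to plan or state, so the raw
	// config is the only place they can be read from.
	if val.IsNull() || !val.Type().Equals(cty.String) {
		return "", nil
	}

	return val.AsString(), nil
}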
+
// GetRawState returns the cty.Value that Terraform sent the SDK for the state.
// If no value was sent, or if a null value was sent, the value will be a null
// value of the resource's type.
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/resource_diff.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/resource_diff.go
index 6af9490b9e..9f7dab683b 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/resource_diff.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/resource_diff.go
@@ -11,6 +11,8 @@ import (
"sync"
"github.com/hashicorp/go-cty/cty"
+
+ "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
)
@@ -480,6 +482,67 @@ func (d *ResourceDiff) GetRawConfig() cty.Value {
return cty.NullVal(schemaMap(d.schema).CoreConfigSchema().ImpliedType())
}
+// GetRawConfigAt is a helper method for retrieving specific values
+// from the RawConfig returned from GetRawConfig. It returns the cty.Value
+// for a given cty.Path or an error diagnostic if the value at the given path does not exist.
+//
+// GetRawConfigAt is considered advanced functionality, and
+// familiarity with the Terraform protocol is suggested when using it.
+func (d *ResourceDiff) GetRawConfigAt(valPath cty.Path) (cty.Value, diag.Diagnostics) {
+ rawConfig := d.GetRawConfig()
+ configVal := cty.DynamicVal
+
+ if rawConfig.IsNull() {
+ return configVal, diag.Diagnostics{
+ {
+ Severity: diag.Error,
+ Summary: "Empty Raw Config",
+ Detail: "The Terraform Provider unexpectedly received an empty configuration. " +
+ "This is almost always an issue with the Terraform Plugin SDK used to create providers. " +
+ "Please report this to the provider developers. \n\n" +
+ "The RawConfig is empty.",
+ AttributePath: valPath,
+ },
+ }
+ }
+ err := cty.Walk(rawConfig, func(path cty.Path, value cty.Value) (bool, error) {
+ if path.Equals(valPath) {
+ configVal = value
+ return false, nil
+ }
+ return true, nil
+ })
+ if err != nil {
+ return configVal, diag.Diagnostics{
+ {
+ Severity: diag.Error,
+ Summary: "Invalid config path",
+ Detail: "The Terraform Provider unexpectedly provided a path that does not match the current schema. " +
+ "This can happen if the path does not correctly follow the schema in structure or types. " +
+ "Please report this to the provider developers. \n\n" +
+ fmt.Sprintf("Encountered error while retrieving config value %s", err.Error()),
+ AttributePath: valPath,
+ },
+ }
+ }
+
+ if configVal.RawEquals(cty.DynamicVal) {
+ return configVal, diag.Diagnostics{
+ {
+ Severity: diag.Error,
+ Summary: "Invalid config path",
+ Detail: "The Terraform Provider unexpectedly provided a path that does not match the current schema. " +
+ "This can happen if the path does not correctly follow the schema in structure or types. " +
+ "Please report this to the provider developers. \n\n" +
+ "Cannot find config value for given path.",
+ AttributePath: valPath,
+ },
+ }
+ }
+
+ return configVal, nil
+}
+
// GetRawState returns the cty.Value that Terraform sent the SDK for the state.
// If no value was sent, or if a null value was sent, the value will be a null
// value of the resource's type.
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/resource_timeout.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/resource_timeout.go
index 90d29e6259..72fd6602ea 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/resource_timeout.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/resource_timeout.go
@@ -132,7 +132,7 @@ func (t *ResourceTimeout) ConfigDecode(s *Resource, c *terraform.ResourceConfig)
timeout = t.Default
}
- // If the resource has not delcared this in the definition, then error
+ // If the resource has not declared this in the definition, then error
// with an unsupported message
if timeout == nil {
return unsupportedTimeoutKeyError(timeKey)
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/schema.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/schema.go
index 176288b0cd..ea6cd768d1 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/schema.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/schema.go
@@ -145,7 +145,7 @@ type Schema struct {
//
// The key benefit of activating this flag is that the result of Read or
// ReadContext will be cleaned of normalization-only changes in the same
- // way as the planning result would normaly be, which therefore prevents
+ // way as the planning result would normally be, which therefore prevents
// churn for downstream expressions deriving from this attribute and
// prevents incorrect "Values changed outside of Terraform" messages
// when the remote API returns values which have the same meaning as the
@@ -395,6 +395,18 @@ type Schema struct {
// as sensitive. Any outputs containing a sensitive value must enable the
// output sensitive argument.
Sensitive bool
+
+ // WriteOnly indicates that the practitioner can choose a value for this
+ // attribute, but Terraform will not store this attribute in plan or state.
+ // WriteOnly can only be set for managed resource schemas. If WriteOnly is true,
+ // either Optional or Required must also be true. WriteOnly cannot be set with ForceNew.
+ //
+ // WriteOnly cannot be set to true for TypeList, TypeMap, or TypeSet.
+ //
+ // This functionality is only supported in Terraform 1.11 and later.
+ // Practitioners that choose a value for this attribute with older
+ // versions of Terraform will receive an error.
+ WriteOnly bool
}
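// Editor's illustrative sketch, not part of the vendored upstream change: a
// managed resource schema declaring a write-only attribute. Per the rules
// documented above, WriteOnly is paired with Required (Optional would also
// work) and is not combined with Computed, ForceNew, or Default. The
// attribute name is hypothetical.
var exampleWriteOnlySchema = map[string]*Schema{
	"password_wo": {
		Type:      TypeString,
		Required:  true,
		WriteOnly: true,
		// Sensitive additionally redacts the value in Terraform's UI output;
		// WriteOnly on its own already keeps it out of plan and state.
		Sensitive: true,
	},
}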
// SchemaConfigMode is used to influence how a schema item is mapped into a
@@ -838,6 +850,18 @@ func (m schemaMap) internalValidate(topSchemaMap schemaMap, attrsOnly bool) erro
return fmt.Errorf("%s: One of optional, required, or computed must be set", k)
}
+ if v.WriteOnly && v.Required && v.Optional {
+ return fmt.Errorf("%s: WriteOnly must be set with either Required or Optional", k)
+ }
+
+ if v.WriteOnly && v.Computed {
+ return fmt.Errorf("%s: WriteOnly cannot be set with Computed", k)
+ }
+
+ if v.WriteOnly && v.ForceNew {
+ return fmt.Errorf("%s: WriteOnly cannot be set with ForceNew", k)
+ }
+
computedOnly := v.Computed && !v.Optional
switch v.ConfigMode {
@@ -874,6 +898,14 @@ func (m schemaMap) internalValidate(topSchemaMap schemaMap, attrsOnly bool) erro
return fmt.Errorf("%s: Default cannot be set with Required", k)
}
+ if v.WriteOnly && v.Default != nil {
+ return fmt.Errorf("%s: Default cannot be set with WriteOnly", k)
+ }
+
+ if v.WriteOnly && v.DefaultFunc != nil {
+ return fmt.Errorf("%s: DefaultFunc cannot be set with WriteOnly", k)
+ }
+
if len(v.ComputedWhen) > 0 && !v.Computed {
return fmt.Errorf("%s: ComputedWhen can only be set with Computed", k)
}
@@ -923,6 +955,10 @@ func (m schemaMap) internalValidate(topSchemaMap schemaMap, attrsOnly bool) erro
}
if v.Type == TypeList || v.Type == TypeSet {
+ if v.WriteOnly {
+ return fmt.Errorf("%s: WriteOnly is not valid for lists or sets", k)
+ }
+
if v.Elem == nil {
return fmt.Errorf("%s: Elem must be set for lists", k)
}
@@ -939,6 +975,16 @@ func (m schemaMap) internalValidate(topSchemaMap schemaMap, attrsOnly bool) erro
case *Resource:
attrsOnly := attrsOnly || v.ConfigMode == SchemaConfigModeAttr
+ blockHasWriteOnly := schemaMap(t.SchemaMap()).hasWriteOnly()
+
+ if v.Type == TypeSet && blockHasWriteOnly {
+ return fmt.Errorf("%s: Set Block type cannot contain WriteOnly attributes", k)
+ }
+
+ if v.Computed && blockHasWriteOnly {
+ return fmt.Errorf("%s: Block types with Computed set to true cannot contain WriteOnly attributes", k)
+ }
+
if err := schemaMap(t.SchemaMap()).internalValidate(topSchemaMap, attrsOnly); err != nil {
return err
}
@@ -956,6 +1002,10 @@ func (m schemaMap) internalValidate(topSchemaMap schemaMap, attrsOnly bool) erro
}
if v.Type == TypeMap && v.Elem != nil {
+ if v.WriteOnly {
+ return fmt.Errorf("%s: WriteOnly is not valid for maps", k)
+ }
+
switch v.Elem.(type) {
case *Resource:
return fmt.Errorf("%s: TypeMap with Elem *Resource not supported,"+
@@ -1099,7 +1149,7 @@ func isValidFieldName(name string) bool {
}
// resourceDiffer is an interface that is used by the private diff functions.
-// This helps facilitate diff logic for both ResourceData and ResoureDiff with
+// This helps facilitate diff logic for both ResourceData and ResourceDiff with
// minimal divergence in code.
type resourceDiffer interface {
diffChange(string) (interface{}, interface{}, bool, bool, bool)
@@ -1119,24 +1169,24 @@ func (m schemaMap) diff(
d resourceDiffer,
all bool) error {
- unsupressedDiff := new(terraform.InstanceDiff)
- unsupressedDiff.Attributes = make(map[string]*terraform.ResourceAttrDiff)
+ unsuppressedDiff := new(terraform.InstanceDiff)
+ unsuppressedDiff.Attributes = make(map[string]*terraform.ResourceAttrDiff)
var err error
switch schema.Type {
case TypeBool, TypeInt, TypeFloat, TypeString:
- err = m.diffString(k, schema, unsupressedDiff, d, all)
+ err = m.diffString(k, schema, unsuppressedDiff, d, all)
case TypeList:
- err = m.diffList(ctx, k, schema, unsupressedDiff, d, all)
+ err = m.diffList(ctx, k, schema, unsuppressedDiff, d, all)
case TypeMap:
- err = m.diffMap(k, schema, unsupressedDiff, d, all)
+ err = m.diffMap(k, schema, unsuppressedDiff, d, all)
case TypeSet:
- err = m.diffSet(ctx, k, schema, unsupressedDiff, d, all)
+ err = m.diffSet(ctx, k, schema, unsuppressedDiff, d, all)
default:
err = fmt.Errorf("%s: unknown type %#v", k, schema.Type)
}
- for attrK, attrV := range unsupressedDiff.Attributes {
+ for attrK, attrV := range unsuppressedDiff.Attributes {
switch rd := d.(type) {
case *ResourceData:
if schema.DiffSuppressFunc != nil && attrV != nil &&
@@ -2353,6 +2403,36 @@ func (m schemaMap) validateType(
return diags
}
+// hasWriteOnly returns true if the schemaMap contains any WriteOnly attributes.
+func (m schemaMap) hasWriteOnly() bool {
+ for _, v := range m {
+ if v.WriteOnly {
+ return true
+ }
+
+ if v.Elem != nil {
+ switch t := v.Elem.(type) {
+ case *Resource:
+ return schemaMap(t.SchemaMap()).hasWriteOnly()
+ case *Schema:
+ if t.WriteOnly {
+ return true
+ }
+
+ // Test the edge case where elements in a collection are set to writeOnly.
+ // Technically, this is an invalid schema as collections cannot have write-only
+ // attributes. However, this method is not concerned with the validity of the schema.
+ isNestedWriteOnly := schemaMap(map[string]*Schema{"nested": t}).hasWriteOnly()
+ if isNestedWriteOnly {
+ return true
+ }
+ }
+ }
+ }
+
+ return false
+}
+
// Zero returns the zero value for a type.
func (t ValueType) Zero() interface{} {
switch t {
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/set.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/set.go
index e897817fd3..6ef786bd0c 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/set.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/set.go
@@ -219,7 +219,7 @@ func (s *Set) add(item interface{}, computed bool) string {
func (s *Set) hash(item interface{}) string {
code := s.F(item)
- // Always return a nonnegative hashcode.
+ // Always return a non-negative hashcode.
if code < 0 {
code = -code
}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/write_only.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/write_only.go
new file mode 100644
index 0000000000..287c8bd8fd
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema/write_only.go
@@ -0,0 +1,214 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package schema
+
+import (
+ "fmt"
+ "sort"
+
+ "github.com/hashicorp/go-cty/cty"
+
+ "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/configschema"
+)
+
+// setWriteOnlyNullValues takes a cty.Value and compares it to the schema, setting any non-null
+// values that are writeOnly to null.
+func setWriteOnlyNullValues(val cty.Value, schema *configschema.Block) cty.Value {
+ if !val.IsKnown() || val.IsNull() {
+ return val
+ }
+
+ valMap := val.AsValueMap()
+ newVals := make(map[string]cty.Value)
+
+ for name, attr := range schema.Attributes {
+ v := valMap[name]
+
+ if attr.WriteOnly && !v.IsNull() {
+ newVals[name] = cty.NullVal(attr.Type)
+ continue
+ }
+
+ newVals[name] = v
+ }
+
+ for name, blockS := range schema.BlockTypes {
+ blockVal := valMap[name]
+ if blockVal.IsNull() || !blockVal.IsKnown() {
+ newVals[name] = blockVal
+ continue
+ }
+
+ blockValType := blockVal.Type()
+ blockElementType := blockS.Block.ImpliedType()
+
+ // This switches on the value type here, so we can correctly switch
+ // between Tuples/Lists and Maps/Objects.
+ switch {
+ case blockS.Nesting == configschema.NestingSingle || blockS.Nesting == configschema.NestingGroup:
+ // NestingSingle is the only exception here, where we treat the
+ // block directly as an object
+ newVals[name] = setWriteOnlyNullValues(blockVal, &blockS.Block)
+
+ case blockValType.IsSetType(), blockValType.IsListType(), blockValType.IsTupleType():
+ listVals := blockVal.AsValueSlice()
+ newListVals := make([]cty.Value, 0, len(listVals))
+
+ for _, v := range listVals {
+ newListVals = append(newListVals, setWriteOnlyNullValues(v, &blockS.Block))
+ }
+
+ switch {
+ case blockValType.IsSetType():
+ switch len(newListVals) {
+ case 0:
+ newVals[name] = cty.SetValEmpty(blockElementType)
+ default:
+ newVals[name] = cty.SetVal(newListVals)
+ }
+ case blockValType.IsListType():
+ switch len(newListVals) {
+ case 0:
+ newVals[name] = cty.ListValEmpty(blockElementType)
+ default:
+ newVals[name] = cty.ListVal(newListVals)
+ }
+ case blockValType.IsTupleType():
+ newVals[name] = cty.TupleVal(newListVals)
+ }
+
+ case blockValType.IsMapType(), blockValType.IsObjectType():
+ mapVals := blockVal.AsValueMap()
+ newMapVals := make(map[string]cty.Value)
+
+ for k, v := range mapVals {
+ newMapVals[k] = setWriteOnlyNullValues(v, &blockS.Block)
+ }
+
+ switch {
+ case blockValType.IsMapType():
+ switch len(newMapVals) {
+ case 0:
+ newVals[name] = cty.MapValEmpty(blockElementType)
+ default:
+ newVals[name] = cty.MapVal(newMapVals)
+ }
+ case blockValType.IsObjectType():
+ if len(newMapVals) == 0 {
+ // We need to populate empty values to make a valid object.
+ for attr, ty := range blockElementType.AttributeTypes() {
+ newMapVals[attr] = cty.NullVal(ty)
+ }
+ }
+ newVals[name] = cty.ObjectVal(newMapVals)
+ }
+
+ default:
+ panic(fmt.Sprintf("failed to set null values for nested block %q:%#v", name, blockValType))
+ }
+ }
+
+ return cty.ObjectVal(newVals)
+}
+
+// validateWriteOnlyNullValues validates that write-only attribute values
+// are null to ensure that write-only values are not sent to unsupported
+// Terraform client versions.
+//
+// It takes a cty.Value, compares it to the schema, and returns an error
+// diagnostic for each non-null writeOnly attribute value.
+func validateWriteOnlyNullValues(val cty.Value, schema *configschema.Block, path cty.Path) diag.Diagnostics {
+ if !val.IsKnown() || val.IsNull() {
+ return diag.Diagnostics{}
+ }
+
+ valMap := val.AsValueMap()
+ diags := make([]diag.Diagnostic, 0)
+
+ var attrNames []string
+ for k := range schema.Attributes {
+ attrNames = append(attrNames, k)
+ }
+
+ // Sort the attribute names to produce diags in a consistent order.
+ sort.Strings(attrNames)
+
+ for _, name := range attrNames {
+ attr := schema.Attributes[name]
+ v := valMap[name]
+
+ if attr.WriteOnly && !v.IsNull() {
+ diags = append(diags, diag.Diagnostic{
+ Severity: diag.Error,
+ Summary: "Write-only Attribute Not Allowed",
+ Detail: fmt.Sprintf("The resource contains a non-null value for write-only attribute %q ", name) +
+ "Write-only attributes are only supported in Terraform 1.11 and later.",
+ AttributePath: append(path, cty.GetAttrStep{Name: name}),
+ })
+ }
+ }
+
+ var blockNames []string
+ for k := range schema.BlockTypes {
+ blockNames = append(blockNames, k)
+ }
+
+ // Sort the block names to produce diags in a consistent order.
+ sort.Strings(blockNames)
+
+ for _, name := range blockNames {
+ blockS := schema.BlockTypes[name]
+ blockVal := valMap[name]
+ if blockVal.IsNull() || !blockVal.IsKnown() {
+ continue
+ }
+
+ blockValType := blockVal.Type()
+ blockPath := append(path, cty.GetAttrStep{Name: name})
+
+ // This switches on the value type here, so we can correctly switch
+ // between Tuples/Lists and Maps/Objects.
+ switch {
+ case blockS.Nesting == configschema.NestingSingle || blockS.Nesting == configschema.NestingGroup:
+ // NestingSingle is the only exception here, where we treat the
+ // block directly as an object
+ diags = append(diags, validateWriteOnlyNullValues(blockVal, &blockS.Block, blockPath)...)
+ case blockValType.IsSetType():
+ setVals := blockVal.AsValueSlice()
+
+ for _, v := range setVals {
+ setBlockPath := append(blockPath, cty.IndexStep{
+ Key: v,
+ })
+ diags = append(diags, validateWriteOnlyNullValues(v, &blockS.Block, setBlockPath)...)
+ }
+
+ case blockValType.IsListType(), blockValType.IsTupleType():
+ listVals := blockVal.AsValueSlice()
+
+ for i, v := range listVals {
+ listBlockPath := append(blockPath, cty.IndexStep{
+ Key: cty.NumberIntVal(int64(i)),
+ })
+ diags = append(diags, validateWriteOnlyNullValues(v, &blockS.Block, listBlockPath)...)
+ }
+
+ case blockValType.IsMapType(), blockValType.IsObjectType():
+ mapVals := blockVal.AsValueMap()
+
+ for k, v := range mapVals {
+ mapBlockPath := append(blockPath, cty.IndexStep{
+ Key: cty.StringVal(k),
+ })
+ diags = append(diags, validateWriteOnlyNullValues(v, &blockS.Block, mapBlockPath)...)
+ }
+
+ default:
+ panic(fmt.Sprintf("failed to validate WriteOnly values for nested block %q:%#v", name, blockValType))
+ }
+ }
+
+ return diags
+}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/float.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/float.go
index dfc261842d..2573c33786 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/float.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/float.go
@@ -10,8 +10,8 @@ import (
)
// FloatBetween returns a SchemaValidateFunc which tests if the provided value
-// is of type float64 and is between min and max (inclusive).
-func FloatBetween(min, max float64) schema.SchemaValidateFunc {
+// is of type float64 and is between minVal and maxVal (inclusive).
+func FloatBetween(minVal, maxVal float64) schema.SchemaValidateFunc {
return func(i interface{}, k string) (s []string, es []error) {
v, ok := i.(float64)
if !ok {
@@ -19,8 +19,8 @@ func FloatBetween(min, max float64) schema.SchemaValidateFunc {
return
}
- if v < min || v > max {
- es = append(es, fmt.Errorf("expected %s to be in the range (%f - %f), got %f", k, min, max, v))
+ if v < minVal || v > maxVal {
+ es = append(es, fmt.Errorf("expected %s to be in the range (%f - %f), got %f", k, minVal, maxVal, v))
return
}
@@ -29,8 +29,8 @@ func FloatBetween(min, max float64) schema.SchemaValidateFunc {
}
// FloatAtLeast returns a SchemaValidateFunc which tests if the provided value
-// is of type float and is at least min (inclusive)
-func FloatAtLeast(min float64) schema.SchemaValidateFunc {
+// is of type float and is at least minVal (inclusive)
+func FloatAtLeast(minVal float64) schema.SchemaValidateFunc {
return func(i interface{}, k string) (s []string, es []error) {
v, ok := i.(float64)
if !ok {
@@ -38,8 +38,8 @@ func FloatAtLeast(min float64) schema.SchemaValidateFunc {
return
}
- if v < min {
- es = append(es, fmt.Errorf("expected %s to be at least (%f), got %f", k, min, v))
+ if v < minVal {
+ es = append(es, fmt.Errorf("expected %s to be at least (%f), got %f", k, minVal, v))
return
}
@@ -48,8 +48,8 @@ func FloatAtLeast(min float64) schema.SchemaValidateFunc {
}
// FloatAtMost returns a SchemaValidateFunc which tests if the provided value
-// is of type float and is at most max (inclusive)
-func FloatAtMost(max float64) schema.SchemaValidateFunc {
+// is of type float and is at most maxVal (inclusive)
+func FloatAtMost(maxVal float64) schema.SchemaValidateFunc {
return func(i interface{}, k string) (s []string, es []error) {
v, ok := i.(float64)
if !ok {
@@ -57,8 +57,8 @@ func FloatAtMost(max float64) schema.SchemaValidateFunc {
return
}
- if v > max {
- es = append(es, fmt.Errorf("expected %s to be at most (%f), got %f", k, max, v))
+ if v > maxVal {
+ es = append(es, fmt.Errorf("expected %s to be at most (%f), got %f", k, maxVal, v))
return
}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/int.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/int.go
index 2873897f27..a240e447a9 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/int.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/int.go
@@ -11,8 +11,8 @@ import (
)
// IntBetween returns a SchemaValidateFunc which tests if the provided value
-// is of type int and is between min and max (inclusive)
-func IntBetween(min, max int) schema.SchemaValidateFunc {
+// is of type int and is between minVal and maxVal (inclusive)
+func IntBetween(minVal, maxVal int) schema.SchemaValidateFunc {
return func(i interface{}, k string) (warnings []string, errors []error) {
v, ok := i.(int)
if !ok {
@@ -20,8 +20,8 @@ func IntBetween(min, max int) schema.SchemaValidateFunc {
return warnings, errors
}
- if v < min || v > max {
- errors = append(errors, fmt.Errorf("expected %s to be in the range (%d - %d), got %d", k, min, max, v))
+ if v < minVal || v > maxVal {
+ errors = append(errors, fmt.Errorf("expected %s to be in the range (%d - %d), got %d", k, minVal, maxVal, v))
return warnings, errors
}
@@ -30,8 +30,8 @@ func IntBetween(min, max int) schema.SchemaValidateFunc {
}
// IntAtLeast returns a SchemaValidateFunc which tests if the provided value
-// is of type int and is at least min (inclusive)
-func IntAtLeast(min int) schema.SchemaValidateFunc {
+// is of type int and is at least minVal (inclusive)
+func IntAtLeast(minVal int) schema.SchemaValidateFunc {
return func(i interface{}, k string) (warnings []string, errors []error) {
v, ok := i.(int)
if !ok {
@@ -39,8 +39,8 @@ func IntAtLeast(min int) schema.SchemaValidateFunc {
return warnings, errors
}
- if v < min {
- errors = append(errors, fmt.Errorf("expected %s to be at least (%d), got %d", k, min, v))
+ if v < minVal {
+ errors = append(errors, fmt.Errorf("expected %s to be at least (%d), got %d", k, minVal, v))
return warnings, errors
}
@@ -49,8 +49,8 @@ func IntAtLeast(min int) schema.SchemaValidateFunc {
}
// IntAtMost returns a SchemaValidateFunc which tests if the provided value
-// is of type int and is at most max (inclusive)
-func IntAtMost(max int) schema.SchemaValidateFunc {
+// is of type int and is at most maxVal (inclusive)
+func IntAtMost(maxVal int) schema.SchemaValidateFunc {
return func(i interface{}, k string) (warnings []string, errors []error) {
v, ok := i.(int)
if !ok {
@@ -58,8 +58,8 @@ func IntAtMost(max int) schema.SchemaValidateFunc {
return warnings, errors
}
- if v > max {
- errors = append(errors, fmt.Errorf("expected %s to be at most (%d), got %d", k, max, v))
+ if v > maxVal {
+ errors = append(errors, fmt.Errorf("expected %s to be at most (%d), got %d", k, maxVal, v))
return warnings, errors
}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/map.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/map.go
index 7c92509054..1465859870 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/map.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/map.go
@@ -9,23 +9,24 @@ import (
"sort"
"github.com/hashicorp/go-cty/cty"
+
"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)
// MapKeyLenBetween returns a SchemaValidateDiagFunc which tests if the provided value
-// is of type map and the length of all keys are between min and max (inclusive)
-func MapKeyLenBetween(min, max int) schema.SchemaValidateDiagFunc {
+// is of type map and the length of all keys are between minVal and maxVal (inclusive)
+func MapKeyLenBetween(minVal, maxVal int) schema.SchemaValidateDiagFunc {
return func(v interface{}, path cty.Path) diag.Diagnostics {
var diags diag.Diagnostics
for _, key := range sortedKeys(v.(map[string]interface{})) {
keyLen := len(key)
- if keyLen < min || keyLen > max {
+ if keyLen < minVal || keyLen > maxVal {
diags = append(diags, diag.Diagnostic{
Severity: diag.Error,
Summary: "Bad map key length",
- Detail: fmt.Sprintf("Map key lengths should be in the range (%d - %d): %s (length = %d)", min, max, key, keyLen),
+ Detail: fmt.Sprintf("Map key lengths should be in the range (%d - %d): %s (length = %d)", minVal, maxVal, key, keyLen),
AttributePath: append(path, cty.IndexStep{Key: cty.StringVal(key)}),
})
}
@@ -36,8 +37,8 @@ func MapKeyLenBetween(min, max int) schema.SchemaValidateDiagFunc {
}
// MapValueLenBetween returns a SchemaValidateDiagFunc which tests if the provided value
-// is of type map and the length of all values are between min and max (inclusive)
-func MapValueLenBetween(min, max int) schema.SchemaValidateDiagFunc {
+// is of type map and the length of all values are between minVal and maxVal (inclusive)
+func MapValueLenBetween(minVal, maxVal int) schema.SchemaValidateDiagFunc {
return func(v interface{}, path cty.Path) diag.Diagnostics {
var diags diag.Diagnostics
@@ -57,11 +58,11 @@ func MapValueLenBetween(min, max int) schema.SchemaValidateDiagFunc {
}
valLen := len(val.(string))
- if valLen < min || valLen > max {
+ if valLen < minVal || valLen > maxVal {
diags = append(diags, diag.Diagnostic{
Severity: diag.Error,
Summary: "Bad map value length",
- Detail: fmt.Sprintf("Map value lengths should be in the range (%d - %d): %s => %v (length = %d)", min, max, key, val, valLen),
+ Detail: fmt.Sprintf("Map value lengths should be in the range (%d - %d): %s => %v (length = %d)", minVal, maxVal, key, val, valLen),
AttributePath: append(path, cty.IndexStep{Key: cty.StringVal(key)}),
})
}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/network.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/network.go
index 9bc6da2b8e..1aadcdb981 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/network.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/network.go
@@ -99,8 +99,8 @@ func IsCIDR(i interface{}, k string) (warnings []string, errors []error) {
}
// IsCIDRNetwork returns a SchemaValidateFunc which tests if the provided value
-// is of type string, is in valid Value network notation, and has significant bits between min and max (inclusive)
-func IsCIDRNetwork(min, max int) schema.SchemaValidateFunc {
+// is of type string, is in valid Value network notation, and has significant bits between minVal and maxVal (inclusive)
+func IsCIDRNetwork(minVal, maxVal int) schema.SchemaValidateFunc {
return func(i interface{}, k string) (warnings []string, errors []error) {
v, ok := i.(string)
if !ok {
@@ -120,8 +120,8 @@ func IsCIDRNetwork(min, max int) schema.SchemaValidateFunc {
}
sigbits, _ := ipnet.Mask.Size()
- if sigbits < min || sigbits > max {
- errors = append(errors, fmt.Errorf("expected %q to contain a network Value with between %d and %d significant bits, got: %d", k, min, max, sigbits))
+ if sigbits < minVal || sigbits > maxVal {
+ errors = append(errors, fmt.Errorf("expected %q to contain a network Value with between %d and %d significant bits, got: %d", k, minVal, maxVal, sigbits))
}
return warnings, errors
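
IsCIDRNetwork keeps the same inclusive significant-bits check after the rename. A small sketch of invoking the validator directly, with illustrative inputs:

package main

import (
	"fmt"

	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
)

func main() {
	validate := validation.IsCIDRNetwork(16, 28)

	// Only 8 significant bits, outside the 16-28 range: errs is non-empty.
	_, errs := validate("10.0.0.0/8", "cidr_block")
	fmt.Println(errs)

	// 24 significant bits, inside the range: errs is empty.
	_, errs = validate("10.1.0.0/24", "cidr_block")
	fmt.Println(errs)
}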
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/path.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/path.go
new file mode 100644
index 0000000000..b8707330d0
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/path.go
@@ -0,0 +1,55 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package validation
+
+import (
+ "github.com/hashicorp/go-cty/cty"
+)
+
+// PathMatches compares two Paths for equality. For cty.IndexStep,
+// unknown key values are treated as an Any qualifier and will
+// match any index step of the same type.
+func PathMatches(p cty.Path, other cty.Path) bool {
+ if len(p) != len(other) {
+ return false
+ }
+
+ for i := range p {
+ pv := p[i]
+ switch pv := pv.(type) {
+ case cty.GetAttrStep:
+ ov, ok := other[i].(cty.GetAttrStep)
+ if !ok || pv != ov {
+ return false
+ }
+ case cty.IndexStep:
+ ov, ok := other[i].(cty.IndexStep)
+ if !ok {
+ return false
+ }
+
+ // Sets need special handling since their Type is the entire object
+ // with attributes.
+ if pv.Key.Type().IsObjectType() && ov.Key.Type().IsObjectType() {
+ if !pv.Key.IsKnown() || !ov.Key.IsKnown() {
+ break
+ }
+ }
+ if !pv.Key.Type().Equals(ov.Key.Type()) {
+ return false
+ }
+
+ if pv.Key.IsKnown() && ov.Key.IsKnown() {
+ if !pv.Key.RawEquals(ov.Key) {
+ return false
+ }
+ }
+ default:
+ // Any invalid steps default to evaluating false.
+ return false
+ }
+ }
+
+ return true
+}
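
PathMatches treats an unknown index key as a wildcard for that step, which is what lets the PreferWriteOnlyAttribute validator added later in this update match every element of a list, map, or set. A minimal sketch with hypothetical attribute names:

package main

import (
	"fmt"

	"github.com/hashicorp/go-cty/cty"

	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
)

func main() {
	// Pattern: any element of the "rule" list, attribute "secret".
	pattern := cty.GetAttrPath("rule").
		Index(cty.UnknownVal(cty.Number)).
		GetAttr("secret")

	// Concrete path as it would appear while walking a raw config value.
	concrete := cty.GetAttrPath("rule").
		Index(cty.NumberIntVal(3)).
		GetAttr("secret")

	// The unknown index in the pattern matches the concrete index 3.
	fmt.Println(validation.PathMatches(concrete, pattern)) // true
}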
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/strings.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/strings.go
index 375a698f2c..d8c2243937 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/strings.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/strings.go
@@ -70,8 +70,8 @@ func StringIsWhiteSpace(i interface{}, k string) ([]string, []error) {
}
// StringLenBetween returns a SchemaValidateFunc which tests if the provided value
-// is of type string and has length between min and max (inclusive)
-func StringLenBetween(min, max int) schema.SchemaValidateFunc {
+// is of type string and has length between minVal and maxVal (inclusive)
+func StringLenBetween(minVal, maxVal int) schema.SchemaValidateFunc {
return func(i interface{}, k string) (warnings []string, errors []error) {
v, ok := i.(string)
if !ok {
@@ -79,8 +79,8 @@ func StringLenBetween(min, max int) schema.SchemaValidateFunc {
return warnings, errors
}
- if len(v) < min || len(v) > max {
- errors = append(errors, fmt.Errorf("expected length of %s to be in the range (%d - %d), got %s", k, min, max, v))
+ if len(v) < minVal || len(v) > maxVal {
+ errors = append(errors, fmt.Errorf("expected length of %s to be in the range (%d - %d), got %s", k, minVal, maxVal, v))
}
return warnings, errors
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/write_only.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/write_only.go
new file mode 100644
index 0000000000..303e72dd2d
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation/write_only.go
@@ -0,0 +1,116 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: MPL-2.0
+
+package validation
+
+import (
+ "context"
+ "fmt"
+
+ "github.com/hashicorp/go-cty/cty"
+
+ "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
+ "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+)
+
+// PreferWriteOnlyAttribute is a ValidateRawResourceConfigFunc that returns a warning
+// if the Terraform client supports write-only attributes and the old attribute is
+// not null.
+// The last step in the path must be a cty.GetAttrStep{}.
+// When creating a cty.IndexStep{} to index into a nested attribute, use an unknown value
+// of the index type to indicate any key value.
+// For lists: cty.Index(cty.UnknownVal(cty.Number)),
+// For maps: cty.Index(cty.UnknownVal(cty.String)),
+// For sets: cty.Index(cty.UnknownVal(cty.Object(nil))),
+func PreferWriteOnlyAttribute(oldAttribute cty.Path, writeOnlyAttribute cty.Path) schema.ValidateRawResourceConfigFunc {
+ return func(ctx context.Context, req schema.ValidateResourceConfigFuncRequest, resp *schema.ValidateResourceConfigFuncResponse) {
+ if !req.WriteOnlyAttributesAllowed {
+ return
+ }
+
+ pathLen := len(writeOnlyAttribute)
+
+ if pathLen == 0 {
+ return
+ }
+
+ lastStep := writeOnlyAttribute[pathLen-1]
+
+ // Only attribute steps have a Name field
+ writeOnlyAttrStep, ok := lastStep.(cty.GetAttrStep)
+ if !ok {
+ resp.Diagnostics = diag.Diagnostics{
+ {
+ Severity: diag.Error,
+ Summary: "Invalid writeOnlyAttribute path",
+ Detail: "The Terraform Provider unexpectedly provided a path that does not match the current schema. " +
+ "This can happen if the path does not correctly follow the schema in structure or types. " +
+ "Please report this to the provider developers. \n\n" +
+ "The writeOnlyAttribute path provided is invalid. The last step in the path must be a cty.GetAttrStep{}",
+ AttributePath: writeOnlyAttribute,
+ },
+ }
+ return
+ }
+
+ var oldAttrs []attribute
+
+ err := cty.Walk(req.RawConfig, func(path cty.Path, value cty.Value) (bool, error) {
+ if PathMatches(path, oldAttribute) {
+ oldAttrs = append(oldAttrs, attribute{
+ value: value,
+ path: path,
+ })
+ }
+
+ return true, nil
+ })
+ if err != nil {
+ return
+ }
+
+ for _, attr := range oldAttrs {
+ attrPath := attr.path.Copy()
+
+ pathLen = len(attrPath)
+
+ if pathLen == 0 {
+ return
+ }
+
+ lastStep = attrPath[pathLen-1]
+
+ // Only attribute steps have a Name field
+ attrStep, ok := lastStep.(cty.GetAttrStep)
+ if !ok {
+ resp.Diagnostics = diag.Diagnostics{
+ {
+ Severity: diag.Error,
+ Summary: "Invalid oldAttribute path",
+ Detail: "The Terraform Provider unexpectedly provided a path that does not match the current schema. " +
+ "This can happen if the path does not correctly follow the schema in structure or types. " +
+ "Please report this to the provider developers. \n\n" +
+ "The oldAttribute path provided is invalid. The last step in the path must be a cty.GetAttrStep{}",
+ AttributePath: attrPath,
+ },
+ }
+ return
+ }
+
+ if !attr.value.IsNull() {
+ resp.Diagnostics = append(resp.Diagnostics, diag.Diagnostic{
+ Severity: diag.Warning,
+ Summary: "Available Write-only Attribute Alternative",
+ Detail: fmt.Sprintf("The attribute %s has a write-only alternative %s available. "+
+ "Use the write-only alternative of the attribute when possible.", attrStep.Name, writeOnlyAttrStep.Name),
+ AttributePath: attr.path,
+ })
+ }
+ }
+ }
+}
+
+type attribute struct {
+ value cty.Value
+ path cty.Path
+}
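
A sketch of how a provider could adopt PreferWriteOnlyAttribute to nudge users from a stored secret toward its write-only counterpart. It assumes the helper/schema Resource.ValidateRawResourceConfigFuncs and Schema.WriteOnly fields that ship alongside this SDK release; the password/password_wo attribute names are illustrative:

package example

import (
	"github.com/hashicorp/go-cty/cty"

	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
)

// resourceExample sketches a resource with both a stored "password" attribute
// and a write-only "password_wo" alternative. The validator emits a warning
// when a practitioner on Terraform 1.11+ still configures "password".
func resourceExample() *schema.Resource {
	return &schema.Resource{
		Schema: map[string]*schema.Schema{
			"password": {
				Type:      schema.TypeString,
				Optional:  true,
				Sensitive: true,
			},
			"password_wo": {
				Type:      schema.TypeString,
				Optional:  true,
				WriteOnly: true, // assumed public mirror of the WriteOnly flag added below
			},
		},
		ValidateRawResourceConfigFuncs: []schema.ValidateRawResourceConfigFunc{
			validation.PreferWriteOnlyAttribute(
				cty.GetAttrPath("password"),
				cty.GetAttrPath("password_wo"),
			),
		},
	}
}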
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/addrs/instance_key.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/addrs/instance_key.go
index 8373297f87..56700fc057 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/addrs/instance_key.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/addrs/instance_key.go
@@ -20,7 +20,7 @@ type instanceKey interface {
String() string
}
-// NoKey represents the absense of an instanceKey, for the single instance
+// NoKey represents the absence of an instanceKey, for the single instance
// of a configuration object that does not use "count" or "for_each" at all.
var NoKey instanceKey
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/configschema/coerce_value.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/configschema/coerce_value.go
index d12ff8cced..dab927c8dd 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/configschema/coerce_value.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/configschema/coerce_value.go
@@ -11,7 +11,7 @@ import (
)
// CoerceValue attempts to force the given value to conform to the type
-// implied by the receiever.
+// implied by the receiver.
//
// This is useful in situations where a configuration must be derived from
// an already-decoded value. It is always better to decode directly from
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/configschema/empty_value.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/configschema/empty_value.go
index 3c9573bc56..cc1107fa0b 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/configschema/empty_value.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/configschema/empty_value.go
@@ -7,12 +7,12 @@ import (
"github.com/hashicorp/go-cty/cty"
)
-// EmptyValue returns the "empty value" for the recieving block, which for
+// EmptyValue returns the "empty value" for the receiving block, which for
// a block type is a non-null object where all of the attribute values are
// the empty values of the block's attributes and nested block types.
//
// In other words, it returns the value that would be returned if an empty
-// block were decoded against the recieving schema, assuming that no required
+// block were decoded against the receiving schema, assuming that no required
// attribute or block constraints were honored.
func (b *Block) EmptyValue() cty.Value {
vals := make(map[string]cty.Value)
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/configschema/schema.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/configschema/schema.go
index c445b4ba55..983d20bdf0 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/configschema/schema.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/configschema/schema.go
@@ -83,6 +83,18 @@ type Attribute struct {
// Deprecated indicates whether the attribute has been marked as deprecated in the
// provider and usage should be discouraged.
Deprecated bool
+
+ // WriteOnly indicates that the practitioner can choose a value for this
+ // attribute, but Terraform will not store this attribute in plan or state.
+ // WriteOnly can only be set for managed resource schemas. If WriteOnly is true,
+ // either Optional or Required must also be true. WriteOnly cannot be set with ForceNew.
+ //
+ // WriteOnly cannot be set to true for TypeList, TypeMap, or TypeSet.
+ //
+ // This functionality is only supported in Terraform 1.11 and later.
+ // Practitioners that choose a value for this attribute with older
+ // versions of Terraform will receive an error.
+ WriteOnly bool
}
// NestedBlock represents the embedding of one block within another.
@@ -125,7 +137,7 @@ const (
NestingSingle
// NestingGroup is similar to NestingSingle in that it calls for only a
- // single instance of a given block type with no labels, but it additonally
+ // single instance of a given block type with no labels, but it additionally
// guarantees that its result will never be null, even if the block is
// absent, and instead the nested attributes and blocks will be treated
// as absent in that case. (Any required attributes or blocks within the
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/hcl2shim/values_equiv.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/hcl2shim/values_equiv.go
index 6b2be2239d..37d3104975 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/hcl2shim/values_equiv.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/hcl2shim/values_equiv.go
@@ -187,8 +187,8 @@ func valuesSDKEquivalentMappings(a, b cty.Value) bool {
// precision in the round-trip.
//
// This does _not_ attempt to allow for an epsilon difference that may be
-// caused by accumulated innacuracy in a float calculation, under the
-// expectation that providers generally do not actually do compuations on
+// caused by accumulated inaccuracy in a float calculation, under the
+// expectation that providers generally do not actually do computations on
// floats and instead just pass string representations of them on verbatim
// to remote APIs. A remote API _itself_ may introduce inaccuracy, but that's
// a problem for the provider itself to deal with, based on its knowledge of
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/logging/environment_variables.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/logging/environment_variables.go
index 2ffc73eee6..5919d373a7 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/logging/environment_variables.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/logging/environment_variables.go
@@ -10,7 +10,7 @@ const (
// usage, this environment variable is handled by terraform-plugin-go.
//
// Terraform CLI's logging must be explicitly turned on before this
- // environment varable can be used to reduce the SDK logging levels. It
+ // environment variable can be used to reduce the SDK logging levels. It
// cannot be used to show only SDK logging unless all other logging levels
// are turned off.
EnvTfLogSdk = "TF_LOG_SDK"
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/plugin/convert/diagnostics.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/plugin/convert/diagnostics.go
index 672f75e6d8..6c1ce3f454 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/plugin/convert/diagnostics.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/plugin/convert/diagnostics.go
@@ -10,6 +10,7 @@ import (
"github.com/hashicorp/terraform-plugin-go/tfprotov5"
"github.com/hashicorp/terraform-plugin-go/tftypes"
+
"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/internal/logging"
)
@@ -136,7 +137,7 @@ func AttributePathToPath(ap *tftypes.AttributePath) cty.Path {
// PathToAttributePath takes a cty.Path and converts it to a proto-encoded path.
func PathToAttributePath(p cty.Path) *tftypes.AttributePath {
- if p == nil || len(p) < 1 {
+ if len(p) < 1 {
return nil
}
ap := tftypes.NewAttributePath()
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/plugin/convert/schema.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/plugin/convert/schema.go
index e2b4e431ce..a02aaec007 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/plugin/convert/schema.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/internal/plugin/convert/schema.go
@@ -12,6 +12,7 @@ import (
"github.com/hashicorp/go-cty/cty"
"github.com/hashicorp/terraform-plugin-go/tfprotov5"
"github.com/hashicorp/terraform-plugin-go/tftypes"
+
"github.com/hashicorp/terraform-plugin-sdk/v2/internal/configs/configschema"
"github.com/hashicorp/terraform-plugin-sdk/v2/internal/logging"
)
@@ -151,6 +152,7 @@ func ConfigSchemaToProto(ctx context.Context, b *configschema.Block) *tfprotov5.
Required: a.Required,
Sensitive: a.Sensitive,
Deprecated: a.Deprecated,
+ WriteOnly: a.WriteOnly,
}
var err error
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/meta/meta.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/meta/meta.go
index 8a3247afb7..0a928c8b04 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/meta/meta.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/meta/meta.go
@@ -17,7 +17,7 @@ import (
//
// Deprecated: Use Go standard library [runtime/debug] package build information
// instead.
-var SDKVersion = "2.34.0"
+var SDKVersion = "2.36.1"
// A pre-release marker for the version. If this is "" (empty string)
// then it means that it is a final release. Otherwise, this is a pre-release
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/terraform/diff.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/terraform/diff.go
index 7b988d9f3d..3b4179b4b3 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/terraform/diff.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/terraform/diff.go
@@ -183,7 +183,7 @@ func (d *InstanceDiff) applyBlockDiff(path []string, attrs map[string]string, sc
// check each set candidate to see if it was removed.
// we need to do this, because when entire sets are removed, they may
- // have the wrong key, and ony show diffs going to ""
+ // have the wrong key, and only show diffs going to ""
if block.Nesting == configschema.NestingSet {
for k := range candidateKeys {
indexPrefix := strings.Join(append(path, n, k), ".") + "."
@@ -359,7 +359,7 @@ func (d *InstanceDiff) applySingleAttrDiff(path []string, attrs map[string]strin
return result, nil
}
- // check for missmatched diff values
+ // check for mismatched diff values
if exists &&
old != diff.Old &&
old != hcl2shim.UnknownVariableValue &&
@@ -892,7 +892,7 @@ func (d *InstanceDiff) Same(d2 *InstanceDiff) (bool, string) {
continue
}
- // If the last diff was a computed value then the absense of
+ // If the last diff was a computed value then the absence of
// that value is allowed since it may mean the value ended up
// being the same.
if diffOld.NewComputed {
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/terraform/state.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/terraform/state.go
index 7d2179358a..60723de772 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/terraform/state.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/terraform/state.go
@@ -46,7 +46,7 @@ var rootModulePath = []string{"root"}
// normalizeModulePath takes a raw module path and returns a path that
// has the rootModulePath prepended to it. If I could go back in time I
// would've never had a rootModulePath (empty path would be root). We can
-// still fix this but thats a big refactor that my branch doesn't make sense
+// still fix this but that's a big refactor that my branch doesn't make sense
// for. Instead, this function normalizes paths.
func normalizeModulePath(p []string) addrs.ModuleInstance {
// FIXME: Remove this once everyone is using addrs.ModuleInstance.
@@ -799,7 +799,7 @@ func (s *OutputState) Equal(other *OutputState) bool {
// module.
type ModuleState struct {
// Path is the import path from the root module. Modules imports are
- // always disjoint, so the path represents amodule tree
+ // always disjoint, so the path represents a module tree
Path []string `json:"path"`
// Locals are kept only transiently in-memory, because we can always
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/terraform/state_filter.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/terraform/state_filter.go
index caf2c79674..3cb5c57e51 100644
--- a/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/terraform/state_filter.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/v2/terraform/state_filter.go
@@ -263,7 +263,7 @@ func (s stateFilterResultSlice) Less(i, j int) bool {
return addrA.Index < addrB.Index
}
- // If the addresses are different it is just lexographic sorting
+ // If the addresses are different it is just lexicographic sorting
if a.Address != b.Address {
return a.Address < b.Address
}
diff --git a/vendor/github.com/jjti/go-spancheck/.gitignore b/vendor/github.com/jjti/go-spancheck/.gitignore
index 1f83be414c..04b66d911b 100644
--- a/vendor/github.com/jjti/go-spancheck/.gitignore
+++ b/vendor/github.com/jjti/go-spancheck/.gitignore
@@ -17,3 +17,5 @@
# Dependency directories (remove the comment below to include it)
# vendor/
src/
+
+.vscode
\ No newline at end of file
diff --git a/vendor/github.com/jjti/go-spancheck/.golangci.yml b/vendor/github.com/jjti/go-spancheck/.golangci.yml
index 15d8513d68..5d6ab12875 100644
--- a/vendor/github.com/jjti/go-spancheck/.golangci.yml
+++ b/vendor/github.com/jjti/go-spancheck/.golangci.yml
@@ -17,7 +17,6 @@ linters:
- errcheck
- errname
- errorlint
- - exhaustive # checks exhaustiveness of enum switch statements
- exportloopref # checks for pointers to enclosing loop variables
- gci
- gochecknoinits # checks that no init functions are present in Go code
@@ -59,12 +58,6 @@ linters-settings:
- standard # Standard section: captures all standard packages.
- default # Default section: contains all imports that could not be matched to another section type.
- prefix(github.com/jjti)
- exhaustive:
- # Program elements to check for exhaustiveness.
- # Default: [ switch ]
- check:
- - switch
- - map
gocritic:
settings:
captLocal:
diff --git a/vendor/github.com/jjti/go-spancheck/Makefile b/vendor/github.com/jjti/go-spancheck/Makefile
index 39d80f7c61..8e9d07be31 100644
--- a/vendor/github.com/jjti/go-spancheck/Makefile
+++ b/vendor/github.com/jjti/go-spancheck/Makefile
@@ -14,12 +14,12 @@ test: testvendor
# Follow https://github.com/golang/go/issues/37054 for more details.
.PHONY: testvendor
testvendor:
- @rm -rf base/src
- @cd testdata/base && go mod vendor
- @cp -r testdata/base/vendor testdata/base/src
- @cp -r testdata/base/vendor testdata/disableerrorchecks/src
- @cp -r testdata/base/vendor testdata/enableall/src
- @rm -rf testdata/base/vendor
+ rm -rf testdata/base/src
+ cd testdata/base && GOWORK=off go mod vendor
+ cp -r testdata/base/vendor testdata/base/src
+ cp -r testdata/base/vendor testdata/disableerrorchecks/src
+ cp -r testdata/base/vendor testdata/enableall/src
+ rm -rf testdata/base/vendor
.PHONY: install
install:
diff --git a/vendor/github.com/jjti/go-spancheck/go.work b/vendor/github.com/jjti/go-spancheck/go.work
index 7d0a87b9e1..ff04ca17e2 100644
--- a/vendor/github.com/jjti/go-spancheck/go.work
+++ b/vendor/github.com/jjti/go-spancheck/go.work
@@ -1,4 +1,4 @@
-go 1.20
+go 1.22.1
use (
.
diff --git a/vendor/github.com/jjti/go-spancheck/go.work.sum b/vendor/github.com/jjti/go-spancheck/go.work.sum
index 85e99bad5a..c96d590d61 100644
--- a/vendor/github.com/jjti/go-spancheck/go.work.sum
+++ b/vendor/github.com/jjti/go-spancheck/go.work.sum
@@ -1,4 +1,11 @@
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+golang.org/x/mod v0.18.0 h1:5+9lSbEzPSdWkH32vYPBwEpX8KwDbM52Ud9xBUvNlb0=
+golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0=
+golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U=
+golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU=
golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
-golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
+golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
+golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457/go.mod h1:pRgIJT+bRLFKnoM1ldnzKoxTIn14Yxz928LQRYYgIN0=
diff --git a/vendor/github.com/jjti/go-spancheck/spancheck.go b/vendor/github.com/jjti/go-spancheck/spancheck.go
index d5d35a5b11..49e5817285 100644
--- a/vendor/github.com/jjti/go-spancheck/spancheck.go
+++ b/vendor/github.com/jjti/go-spancheck/spancheck.go
@@ -309,6 +309,11 @@ outer:
}
seen[b] = true
+ // Skip successors that are not nested within this current block.
+ if _, ok := nestedBlockTypes[b.Kind]; !ok {
+ continue
+ }
+
// Prune the search if the block uses v.
if blockUses(pass, b) {
continue
@@ -330,6 +335,21 @@ outer:
return search(defBlock.Succs)
}
+var nestedBlockTypes = map[cfg.BlockKind]struct{}{
+ cfg.KindBody: {},
+ cfg.KindForBody: {},
+ cfg.KindForLoop: {},
+ cfg.KindIfElse: {},
+ cfg.KindIfThen: {},
+ cfg.KindLabel: {},
+ cfg.KindRangeBody: {},
+ cfg.KindRangeLoop: {},
+ cfg.KindSelectCaseBody: {},
+ cfg.KindSelectAfterCase: {},
+ cfg.KindSwitchCaseBody: {},
+ cfg.KindSwitchNextCase: {},
+}
+
// usesCall reports whether stmts contain a use of the selName call on variable v.
func usesCall(
pass *analysis.Pass,
@@ -340,10 +360,12 @@ func usesCall(
startSpanMatchers []spanStartMatcher,
depth int,
) bool {
- if depth > 1 { // for perf reasons, do not dive too deep thru func literals, just one level deep check.
+ if depth > 1 { // for perf reasons, do not dive too deep thru func literals, just two levels deep.
return false
}
+ cfgs := pass.ResultOf[ctrlflow.Analyzer].(*ctrlflow.CFGs)
+
found, reAssigned := false, false
for _, subStmt := range stmts {
stack := []ast.Node{}
@@ -351,7 +373,6 @@ func usesCall(
switch n := n.(type) {
case *ast.FuncLit:
if len(stack) > 0 {
- cfgs := pass.ResultOf[ctrlflow.Analyzer].(*ctrlflow.CFGs)
g := cfgs.FuncLit(n)
if g != nil && len(g.Blocks) > 0 {
return usesCall(pass, g.Blocks[0].Nodes, sv, selName, ignoreCheckSig, startSpanMatchers, depth+1)
@@ -367,6 +388,32 @@ func usesCall(
return false
}
}
+ case *ast.DeferStmt:
+ if n.Call == nil {
+ break
+ }
+
+ f, ok := n.Call.Fun.(*ast.FuncLit)
+ if !ok {
+ break
+ }
+
+ if g := cfgs.FuncLit(f); g != nil && len(g.Blocks) > 0 {
+ for _, b := range g.Blocks {
+ if usesCall(
+ pass,
+ b.Nodes,
+ sv,
+ selName,
+ ignoreCheckSig,
+ startSpanMatchers,
+ depth+1,
+ ) {
+ found = true
+ return false
+ }
+ }
+ }
case nil:
if len(stack) > 0 {
stack = stack[:len(stack)-1] // pop
@@ -389,7 +436,7 @@ func usesCall(
// Selector (End, SetStatus, RecordError) hit.
if n.Sel.Name == selName {
id, ok := n.X.(*ast.Ident)
- found = ok && id.Obj.Decl == sv.id.Obj.Decl
+ found = ok && id.Obj != nil && id.Obj.Decl == sv.id.Obj.Decl
}
// Check if an ignore signature matches.
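
The new *ast.DeferStmt branch lets spancheck follow a span that is finished inside a deferred closure. A minimal sketch of the pattern this appears to cover, assuming an OpenTelemetry-style tracer:

package example

import (
	"context"

	"go.opentelemetry.io/otel"
	"go.opentelemetry.io/otel/codes"
)

func do(ctx context.Context) (err error) {
	ctx, span := otel.Tracer("example").Start(ctx, "do")
	// The span is ended (and annotated) inside a deferred func literal;
	// the analyzer now walks the closure's CFG instead of missing the calls.
	defer func() {
		if err != nil {
			span.SetStatus(codes.Error, err.Error())
		}
		span.End()
	}()

	return work(ctx)
}

func work(ctx context.Context) error { return nil }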
diff --git a/vendor/github.com/julz/importas/Makefile b/vendor/github.com/julz/importas/Makefile
new file mode 100644
index 0000000000..e9838b43bd
--- /dev/null
+++ b/vendor/github.com/julz/importas/Makefile
@@ -0,0 +1,17 @@
+# default task since it's first
+.PHONY: all
+all: build test
+
+BINARY = importas
+$(BINARY): *.go go.mod go.sum
+ go build -o $(BINARY)
+
+.PHONY: build
+build: $(BINARY) ## Build binary
+
+.PHONY: test
+test: build ## Unit test
+ go test -v ./...
+
+install: ## Install binary
+ go install
diff --git a/vendor/github.com/julz/importas/analyzer.go b/vendor/github.com/julz/importas/analyzer.go
index f196534784..25bc09b82f 100644
--- a/vendor/github.com/julz/importas/analyzer.go
+++ b/vendor/github.com/julz/importas/analyzer.go
@@ -13,7 +13,7 @@ import (
)
var config = &Config{
- RequiredAlias: make(map[string]string),
+ RequiredAlias: make([][]string, 0),
}
var Analyzer = &analysis.Analyzer{
@@ -129,11 +129,19 @@ func findEdits(node ast.Node, uses map[*ast.Ident]types.Object, importPath, orig
// skip identifiers pointing to a different import statement.
continue
}
+ pos := use.Pos()
+ end := use.End()
+ replacement := packageReplacement
+
+ if packageReplacement == "." {
+ replacement = ""
+ end = end + 1
+ }
result = append(result, analysis.TextEdit{
- Pos: use.Pos(),
- End: use.End(),
- NewText: []byte(packageReplacement),
+ Pos: pos,
+ End: end,
+ NewText: []byte(replacement),
})
}
diff --git a/vendor/github.com/julz/importas/config.go b/vendor/github.com/julz/importas/config.go
index 8c9c76d916..58be86c75f 100644
--- a/vendor/github.com/julz/importas/config.go
+++ b/vendor/github.com/julz/importas/config.go
@@ -4,18 +4,26 @@ import (
"errors"
"fmt"
"regexp"
+ "sync"
)
type Config struct {
- RequiredAlias map[string]string
+ RequiredAlias aliasList
Rules []*Rule
DisallowUnaliased bool
DisallowExtraAliases bool
+ muRules sync.Mutex
}
func (c *Config) CompileRegexp() error {
+ c.muRules.Lock()
+ defer c.muRules.Unlock()
+ if c.Rules != nil {
+ return nil
+ }
rules := make([]*Rule, 0, len(c.RequiredAlias))
- for path, alias := range c.RequiredAlias {
+ for _, aliases := range c.RequiredAlias {
+ path, alias := aliases[0], aliases[1]
reg, err := regexp.Compile(fmt.Sprintf("^%s$", path))
if err != nil {
return err
@@ -26,13 +34,15 @@ func (c *Config) CompileRegexp() error {
Alias: alias,
})
}
-
c.Rules = rules
return nil
}
func (c *Config) findRule(path string) *Rule {
- for _, rule := range c.Rules {
+ c.muRules.Lock()
+ rules := c.Rules
+ c.muRules.Unlock()
+ for _, rule := range rules {
if rule.Regexp.MatchString(path) {
return rule
}
diff --git a/vendor/github.com/julz/importas/flags.go b/vendor/github.com/julz/importas/flags.go
index f8107104ad..cc3f1f3aae 100644
--- a/vendor/github.com/julz/importas/flags.go
+++ b/vendor/github.com/julz/importas/flags.go
@@ -7,26 +7,27 @@ import (
"strings"
)
+var errWrongAlias = errors.New("import flag must be of form path:alias")
+
func flags(config *Config) flag.FlagSet {
fs := flag.FlagSet{}
- fs.Var(stringMap(config.RequiredAlias), "alias", "required import alias in form path:alias")
+ fs.Var(&config.RequiredAlias, "alias", "required import alias in form path:alias")
fs.BoolVar(&config.DisallowUnaliased, "no-unaliased", false, "do not allow unaliased imports of aliased packages")
fs.BoolVar(&config.DisallowExtraAliases, "no-extra-aliases", false, "do not allow non-required aliases")
return fs
}
-type stringMap map[string]string
+type aliasList [][]string
-func (v stringMap) Set(val string) error {
- spl := strings.SplitN(val, ":", 2)
- if len(spl) != 2 {
- return errors.New("import flag must be of form path:alias")
+func (v *aliasList) Set(val string) error {
+ lastColon := strings.LastIndex(val, ":")
+ if lastColon <= 1 {
+ return errWrongAlias
}
-
- v[spl[0]] = spl[1]
+ *v = append(*v, []string{val[:lastColon], val[lastColon+1:]})
return nil
}
-func (v stringMap) String() string {
- return fmt.Sprintf("%v", (map[string]string)(v))
+func (v *aliasList) String() string {
+ return fmt.Sprintf("%v", ([][]string)(*v))
}
diff --git a/vendor/github.com/karamaru-alpha/copyloopvar/copyloopvar.go b/vendor/github.com/karamaru-alpha/copyloopvar/copyloopvar.go
index 79dc6afcc4..00c8e0e3dc 100644
--- a/vendor/github.com/karamaru-alpha/copyloopvar/copyloopvar.go
+++ b/vendor/github.com/karamaru-alpha/copyloopvar/copyloopvar.go
@@ -15,7 +15,7 @@ var checkAlias bool
func NewAnalyzer() *analysis.Analyzer {
analyzer := &analysis.Analyzer{
Name: "copyloopvar",
- Doc: "copyloopvar is a linter detects places where loop variables are copied",
+ Doc: "a linter detects places where loop variables are copied",
Run: run,
Requires: []*analysis.Analyzer{
inspect.Analyzer,
@@ -77,10 +77,8 @@ func checkRangeStmt(pass *analysis.Pass, rangeStmt *ast.RangeStmt) {
continue
}
}
- pass.Report(analysis.Diagnostic{
- Pos: assignStmt.Pos(),
- Message: fmt.Sprintf(`The copy of the 'for' variable "%s" can be deleted (Go 1.22+)`, right.Name),
- })
+
+ report(pass, assignStmt, right, i)
}
}
}
@@ -124,10 +122,40 @@ func checkForStmt(pass *analysis.Pass, forStmt *ast.ForStmt) {
continue
}
}
- pass.Report(analysis.Diagnostic{
- Pos: assignStmt.Pos(),
- Message: fmt.Sprintf(`The copy of the 'for' variable "%s" can be deleted (Go 1.22+)`, right.Name),
- })
+
+ report(pass, assignStmt, right, i)
}
}
}
+
+func report(pass *analysis.Pass, assignStmt *ast.AssignStmt, right *ast.Ident, i int) {
+ diagnostic := analysis.Diagnostic{
+ Pos: assignStmt.Pos(),
+ Message: fmt.Sprintf(`The copy of the 'for' variable "%s" can be deleted (Go 1.22+)`, right.Name),
+ }
+
+ if i == 0 && isSimpleAssignStmt(assignStmt, right) {
+ diagnostic.SuggestedFixes = append(diagnostic.SuggestedFixes, analysis.SuggestedFix{
+ TextEdits: []analysis.TextEdit{{
+ Pos: assignStmt.Pos(),
+ End: assignStmt.End(),
+ NewText: nil,
+ }},
+ })
+ }
+
+ pass.Report(diagnostic)
+}
+
+func isSimpleAssignStmt(assignStmt *ast.AssignStmt, rhs *ast.Ident) bool {
+ if len(assignStmt.Lhs) != 1 {
+ return false
+ }
+
+ lhs, ok := assignStmt.Lhs[0].(*ast.Ident)
+ if !ok {
+ return false
+ }
+
+ return rhs.Name == lhs.Name
+}
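
copyloopvar still reports redundant loop-variable copies under Go 1.22 semantics; the refactor above additionally attaches a suggested fix that deletes the copy when it is the first statement in the loop body and has the simple x := x form. An illustrative case:

package example

// With Go 1.22+ loop-variable scoping, the shadowing copy below is redundant.
// copyloopvar reports it, and because it is the first statement and a simple
// single-identifier assignment, it now also carries a suggested fix that
// removes the line.
func sum(xs []int) int {
	total := 0
	for _, x := range xs {
		x := x // reported: the copy of the 'for' variable "x" can be deleted (Go 1.22+)
		total += x
	}
	return total
}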
diff --git a/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go
index dff391797d..98f28e9a6b 100644
--- a/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go
+++ b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go
@@ -84,7 +84,8 @@ func walkThroughEmbeddedInterfaces(sel *types.Selection) ([]types.Type, bool) {
}
func getTypeAtFieldIndex(startingAt types.Type, fieldIndex int) types.Type {
- t := maybeUnname(maybeDereference(startingAt))
+ t := maybeDereference(maybeUnalias(startingAt))
+ t = maybeUnname(maybeUnalias(t))
s, ok := t.(*types.Struct)
if !ok {
panic(fmt.Sprintf("cannot get Field of a type that is not a struct, got a %T", t))
diff --git a/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_121.go b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_121.go
new file mode 100644
index 0000000000..f2df6849bb
--- /dev/null
+++ b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_121.go
@@ -0,0 +1,10 @@
+//go:build !go1.22
+// +build !go1.22
+
+package errcheck
+
+import "go/types"
+
+func maybeUnalias(t types.Type) types.Type {
+ return t
+}
diff --git a/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_122.go b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_122.go
new file mode 100644
index 0000000000..cbff3cd434
--- /dev/null
+++ b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_122.go
@@ -0,0 +1,10 @@
+//go:build go1.22
+// +build go1.22
+
+package errcheck
+
+import "go/types"
+
+func maybeUnalias(t types.Type) types.Type {
+ return types.Unalias(t)
+}
diff --git a/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go b/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go
index d61d348f77..325aeec98b 100644
--- a/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go
+++ b/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go
@@ -23,7 +23,9 @@ func init() {
}
var (
- // ErrNoGoFiles is returned when CheckPackage is run on a package with no Go source files
+ // ErrNoGoFiles is returned when CheckPackage is run on a package with no Go source files.
+ //
+ // Deprecated: this error is no longer returned by errcheck.LoadPackages.
ErrNoGoFiles = errors.New("package contains no go source files")
)
@@ -80,7 +82,7 @@ func (r *Result) Append(other Result) {
r.UncheckedErrors = append(r.UncheckedErrors, other.UncheckedErrors...)
}
-// Returns the unique errors that have been accumulated. Duplicates may occur
+// Unique returns the unique errors that have been accumulated. Duplicates may occur
// when a file containing an unchecked error belongs to > 1 package.
//
// The method receiver remains unmodified after the call to Unique.
@@ -162,7 +164,7 @@ var loadPackages = func(cfg *packages.Config, paths ...string) ([]*packages.Pack
// LoadPackages loads all the packages in all the paths provided. It uses the
// exclusions and build tags provided to by the user when loading the packages.
func (c *Checker) LoadPackages(paths ...string) ([]*packages.Package, error) {
- buildFlags := []string{fmtTags(c.Tags)}
+ buildFlags := []string{fmt.Sprintf("-tags=%s", strings.Join(c.Tags, ","))}
if c.Mod != "" {
buildFlags = append(buildFlags, fmt.Sprintf("-mod=%s", c.Mod))
}
@@ -338,7 +340,7 @@ func (v *visitor) selectorName(call *ast.CallExpr) string {
// then just that function's fullName is returned.
//
// Otherwise, we walk through all the potentially embedded interfaces of the receiver
-// the collect a list of type-qualified function names that we will check.
+// to collect a list of type-qualified function names that we will check.
func (v *visitor) namesForExcludeCheck(call *ast.CallExpr) []string {
sel, fn, ok := v.selectorAndFunc(call)
if !ok {
@@ -351,7 +353,7 @@ func (v *visitor) namesForExcludeCheck(call *ast.CallExpr) []string {
}
// This will be missing for functions without a receiver (like fmt.Printf),
- // so just fall back to the the function's fullName in that case.
+ // so just fall back to the function's fullName in that case.
selection, ok := v.typesInfo.Selections[sel]
if !ok {
return []string{name}
@@ -420,9 +422,9 @@ func (v *visitor) ignoreCall(call *ast.CallExpr) bool {
// 2. x.y.f()
var id *ast.Ident
switch exp := call.Fun.(type) {
- case (*ast.Ident):
+ case *ast.Ident:
id = exp
- case (*ast.SelectorExpr):
+ case *ast.SelectorExpr:
id = exp.Sel
default:
// eg: *ast.SliceExpr, *ast.IndexExpr
@@ -586,26 +588,38 @@ func (v *visitor) Visit(node ast.Node) ast.Visitor {
for _, name := range vspec.Names {
lhs = append(lhs, ast.Expr(name))
}
- v.checkAssignment(lhs, vspec.Values)
+ followed := v.checkAssignment(lhs, vspec.Values)
+ if !followed {
+ return nil
+ }
}
case *ast.AssignStmt:
- v.checkAssignment(stmt.Lhs, stmt.Rhs)
+ followed := v.checkAssignment(stmt.Lhs, stmt.Rhs)
+ if !followed {
+ return nil
+ }
+
+ case *ast.TypeAssertExpr:
+ v.checkAssertExpr(stmt)
+ return nil
default:
}
return v
}
-func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) {
+// checkAssignment checks the assignment statement and returns a boolean value
+// indicating whether to continue checking the substructure in AssignStmt or not
+func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) (followed bool) {
if len(rhs) == 1 {
// single value on rhs; check against lhs identifiers
if call, ok := rhs[0].(*ast.CallExpr); ok {
if !v.blank {
- return
+ return true
}
if v.ignoreCall(call) {
- return
+ return true
}
isError := v.errorsByArg(call)
for i := 0; i < len(lhs); i++ {
@@ -619,11 +633,11 @@ func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) {
}
} else if assert, ok := rhs[0].(*ast.TypeAssertExpr); ok {
if !v.asserts {
- return
+ return false
}
if assert.Type == nil {
// type switch
- return
+ return false
}
if len(lhs) < 2 {
// assertion result not read
@@ -632,6 +646,7 @@ func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) {
// assertion result ignored
v.addErrorAtPosition(id.NamePos, nil)
}
+ return false
}
} else {
// multiple value on rhs; in this case a call can't return
@@ -661,6 +676,19 @@ func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) {
}
}
}
+
+ return true
+}
+
+func (v *visitor) checkAssertExpr(expr *ast.TypeAssertExpr) {
+ if !v.asserts {
+ return
+ }
+ if expr.Type == nil {
+ // type switch
+ return
+ }
+ v.addErrorAtPosition(expr.Pos(), nil)
}
func isErrorType(t types.Type) bool {
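
With the new *ast.TypeAssertExpr case, errcheck (when assertion checking is enabled) also reports type assertions used as bare expressions, not only those on the right-hand side of assignments. A small illustrative sketch:

package example

import "fmt"

func describe(v interface{}) {
	// With -asserts enabled, the single-result assertion below is now
	// reported as an expression in its own right: it panics if v is not a
	// string and there is no ", ok" result to check.
	fmt.Println(v.(string))

	// The comma-ok form remains the checked, non-panicking alternative and
	// is not reported.
	if s, ok := v.(string); ok {
		fmt.Println(s)
	}
}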
diff --git a/vendor/github.com/kisielk/errcheck/errcheck/excludes.go b/vendor/github.com/kisielk/errcheck/errcheck/excludes.go
index a783b5a763..450b798e4e 100644
--- a/vendor/github.com/kisielk/errcheck/errcheck/excludes.go
+++ b/vendor/github.com/kisielk/errcheck/errcheck/excludes.go
@@ -47,6 +47,11 @@ var DefaultExcludedSymbols = []string{
// hash
"(hash.Hash).Write",
+
+ // hash/maphash
+ "(*hash/maphash.Hash).Write",
+ "(*hash/maphash.Hash).WriteByte",
+ "(*hash/maphash.Hash).WriteString",
}
// ReadExcludes reads an excludes file, a newline delimited file that lists
diff --git a/vendor/github.com/kisielk/errcheck/errcheck/tags.go b/vendor/github.com/kisielk/errcheck/errcheck/tags.go
deleted file mode 100644
index 7b423ca69c..0000000000
--- a/vendor/github.com/kisielk/errcheck/errcheck/tags.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// +build go1.13
-
-package errcheck
-
-import (
- "fmt"
- "strings"
-)
-
-func fmtTags(tags []string) string {
- return fmt.Sprintf("-tags=%s", strings.Join(tags, ","))
-}
diff --git a/vendor/github.com/kisielk/errcheck/errcheck/tags_compat.go b/vendor/github.com/kisielk/errcheck/errcheck/tags_compat.go
deleted file mode 100644
index 2f534f40a8..0000000000
--- a/vendor/github.com/kisielk/errcheck/errcheck/tags_compat.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// +build go1.11
-// +build !go1.13
-
-package errcheck
-
-import (
- "fmt"
- "strings"
-)
-
-func fmtTags(tags []string) string {
- return fmt.Sprintf("-tags=%s", strings.Join(tags, " "))
-}
diff --git a/vendor/github.com/kkHAIKE/contextcheck/contextcheck.go b/vendor/github.com/kkHAIKE/contextcheck/contextcheck.go
index 62696351ad..c62909a873 100644
--- a/vendor/github.com/kkHAIKE/contextcheck/contextcheck.go
+++ b/vendor/github.com/kkHAIKE/contextcheck/contextcheck.go
@@ -727,6 +727,14 @@ func (r *runner) getFunction(instr ssa.Instruction) (f *ssa.Function) {
}
func (r *runner) isCtxType(tp types.Type) bool {
+ if p, ok := tp.(*types.Pointer); ok {
+ // opaqueType is not exposed and lead to unreachable error.
+ // Related to https://github.com/golang/tools/blob/63229bc79404d8cf2fe4e88ad569168fe251d993/go/ssa/builder.go#L107
+ if p.Elem().String() == "deferStack" {
+ return false
+ }
+ }
+
return types.Identical(tp, r.ctxTyp) || types.Identical(tp, r.ctxPTyp)
}
diff --git a/vendor/github.com/kyoh86/exportloopref/.golangci.yml b/vendor/github.com/kyoh86/exportloopref/.golangci.yml
deleted file mode 100644
index e876057f3f..0000000000
--- a/vendor/github.com/kyoh86/exportloopref/.golangci.yml
+++ /dev/null
@@ -1,4 +0,0 @@
-linters:
- enable:
- - unparam
- - exportloopref
diff --git a/vendor/github.com/kyoh86/exportloopref/.goreleaser.yml b/vendor/github.com/kyoh86/exportloopref/.goreleaser.yml
deleted file mode 100644
index 95d44aaac3..0000000000
--- a/vendor/github.com/kyoh86/exportloopref/.goreleaser.yml
+++ /dev/null
@@ -1,51 +0,0 @@
-# yaml-language-server: $schema=https://goreleaser.com/static/schema.json
-
-project_name: exportloopref
-builds:
- - id: default
- goos:
- - linux
- - darwin
- - windows
- goarch:
- - amd64
- - arm64
- - "386"
- main: ./cmd/exportloopref
- binary: exportloopref
-brews:
- - install: |
- bin.install "exportloopref"
- tap:
- owner: kyoh86
- name: homebrew-tap
- folder: Formula
- homepage: https://github.com/kyoh86/exportloopref
- description: An analyzer that finds exporting pointers for loop variables.
- license: MIT
-nfpms:
- - builds:
- - default
- maintainer: kyoh86
- homepage: https://github.com/kyoh86/exportloopref
- description: An analyzer that finds exporting pointers for loop variables.
- license: MIT
- formats:
- - apk
- - deb
- - rpm
-archives:
- - id: gzip
- format: tar.gz
- format_overrides:
- - goos: windows
- format: zip
- files:
- - licence*
- - LICENCE*
- - license*
- - LICENSE*
- - readme*
- - README*
- - changelog*
- - CHANGELOG*
diff --git a/vendor/github.com/kyoh86/exportloopref/LICENSE b/vendor/github.com/kyoh86/exportloopref/LICENSE
deleted file mode 100644
index 7ac9dba4a0..0000000000
--- a/vendor/github.com/kyoh86/exportloopref/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2020 kyoh86
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
-OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/kyoh86/exportloopref/Makefile b/vendor/github.com/kyoh86/exportloopref/Makefile
deleted file mode 100644
index 4d3ef22f7f..0000000000
--- a/vendor/github.com/kyoh86/exportloopref/Makefile
+++ /dev/null
@@ -1,16 +0,0 @@
-.PHONY: gen lint test install man
-
-VERSION := `git vertag get`
-COMMIT := `git rev-parse HEAD`
-
-gen:
- go generate ./...
-
-lint: gen
- golangci-lint run
-
-test: lint
- go test -v --race ./...
-
-install: test
- go install -a -ldflags "-X=main.version=$(VERSION) -X=main.commit=$(COMMIT)" ./...
diff --git a/vendor/github.com/kyoh86/exportloopref/README.md b/vendor/github.com/kyoh86/exportloopref/README.md
deleted file mode 100644
index 0f581ffcee..0000000000
--- a/vendor/github.com/kyoh86/exportloopref/README.md
+++ /dev/null
@@ -1,223 +0,0 @@
-# exportloopref
-
-An analyzer that finds exporting pointers for loop variables.
-
-Pin them all!
-
-[](https://pkg.go.dev/kyoh86/exportloopref)
-[](https://goreportcard.com/report/github.com/kyoh86/exportloopref)
-[](https://codecov.io/gh/kyoh86/exportloopref)
-[](https://github.com/kyoh86/exportloopref/releases)
-
-## What's this?
-
-Sample problem code from: https://github.com/kyoh86/exportloopref/blob/main/testdata/src/simple/simple.go
-
-```go
-package main
-
-func main() {
- var intArray [4]*int
- var intSlice []*int
- var intRef *int
- var intStr struct{ x *int }
-
- println("loop expecting 10, 11, 12, 13")
- for i, p := range []int{10, 11, 12, 13} {
- printp(&p) // not a diagnostic
- intSlice = append(intSlice, &p) // want "exporting a pointer for the loop variable p"
- intArray[i] = &p // want "exporting a pointer for the loop variable p"
- if i%2 == 0 {
- intRef = &p // want "exporting a pointer for the loop variable p"
- intStr.x = &p // want "exporting a pointer for the loop variable p"
- }
- var vStr struct{ x *int }
- var vArray [4]*int
- var v *int
- if i%2 == 0 {
- v = &p // not a diagnostic (x is local variable)
- vArray[1] = &p // not a diagnostic (x is local variable)
- vStr.x = &p
- }
- _ = v
- }
-
- println(`slice expecting "10, 11, 12, 13" but "13, 13, 13, 13"`)
- for _, p := range intSlice {
- printp(p)
- }
- println(`array expecting "10, 11, 12, 13" but "13, 13, 13, 13"`)
- for _, p := range intArray {
- printp(p)
- }
- println(`captured value expecting "12" but "13"`)
- printp(intRef)
-}
-
-func printp(p *int) {
- println(*p)
-}
-```
-
-In Go, the `p` variable in the above loops is actually a single variable.
-So in many case (like the above), using it makes for us annoying bugs.
-
-You can find them with `exportloopref`, and fix it.
-
-```go
-package main
-
-func main() {
- var intArray [4]*int
- var intSlice []*int
- var intRef *int
- var intStr struct{ x *int }
-
- println("loop expecting 10, 11, 12, 13")
- for i, p := range []int{10, 11, 12, 13} {
- p := p // FIX variable into the local variable
- printp(&p)
- intSlice = append(intSlice, &p)
- intArray[i] = &p
- if i%2 == 0 {
- intRef = &p
- intStr.x = &p
- }
- var vStr struct{ x *int }
- var vArray [4]*int
- var v *int
- if i%2 == 0 {
- v = &p
- vArray[1] = &p
- vStr.x = &p
- }
- _ = v
- }
-
- println(`slice expecting "10, 11, 12, 13"`)
- for _, p := range intSlice {
- printp(p)
- }
- println(`array expecting "10, 11, 12, 13"`)
- for _, p := range intArray {
- printp(p)
- }
- println(`captured value expecting "12"`)
- printp(intRef)
-}
-
-func printp(p *int) {
- println(*p)
-}
-```
-
-ref: https://github.com/kyoh86/exportloopref/blob/main/testdata/src/fixed/fixed.go
-
-## Sensing policy
-
-I want to make exportloopref as accurately as possible.
-So some cases of lints will be false-negative.
-
-e.g.
-
-```go
-var s Foo
-for _, p := range []int{10, 11, 12, 13} {
- s.Bar(&p) // If s stores the pointer, it will be bug.
-}
-```
-
-If you want to report all of lints (with some false-positives),
-you should use [looppointer](https://github.com/kyoh86/looppointer).
-
-### Known false negatives
-
-Case 1: pass the pointer to function to export.
-
-Case 2: pass the pointer to local variable, and export it.
-
-```go
-package main
-
-type List []*int
-
-func (l *List) AppendP(p *int) {
- *l = append(*l, p)
-}
-
-func main() {
- var slice []*int
- list := List{}
-
- println("loop expect exporting 10, 11, 12, 13")
- for _, v := range []int{10, 11, 12, 13} {
- list.AppendP(&v) // Case 1: wanted "exporting a pointer for the loop variable v", but cannot be found
-
- p := &v // p is the local variable
- slice = append(slice, p) // Case 2: wanted "exporting a pointer for the loop variable v", but cannot be found
- }
-
- println(`slice expecting "10, 11, 12, 13" but "13, 13, 13, 13"`)
- for _, p := range slice {
- printp(p)
- }
- println(`array expecting "10, 11, 12, 13" but "13, 13, 13, 13"`)
- for _, p := range ([]*int)(list) {
- printp(p)
- }
-}
-
-func printp(p *int) {
- println(*p)
-}
-```
-
-## Install
-
-go:
-
-```console
-$ go get github.com/kyoh86/exportloopref/cmd/exportloopref
-```
-
-[homebrew](https://brew.sh/):
-
-```console
-$ brew install kyoh86/tap/exportloopref
-```
-
-[gordon](https://github.com/kyoh86/gordon):
-
-```console
-$ gordon install kyoh86/exportloopref
-```
-
-## Usage
-
-```
-exportloopref [-flag] [package]
-```
-
-### Flags
-
-| Flag | Description |
-| --- | --- |
-| -V | print version and exit |
-| -all | no effect (deprecated) |
-| -c int | display offending line with this many lines of context (default -1) |
-| -cpuprofile string | write CPU profile to this file |
-| -debug string | debug flags, any subset of "fpstv" |
-| -fix | apply all suggested fixes |
-| -flags | print analyzer flags in JSON |
-| -json | emit JSON output |
-| -memprofile string | write memory profile to this file |
-| -source | no effect (deprecated) |
-| -tags string | no effect (deprecated) |
-| -trace string | write trace log to this file |
-| -v | no effect (deprecated) |
-
-# LICENSE
-
-[](http://www.opensource.org/licenses/MIT)
-
-This is distributed under the [MIT License](http://www.opensource.org/licenses/MIT).
diff --git a/vendor/github.com/kyoh86/exportloopref/exportloopref.go b/vendor/github.com/kyoh86/exportloopref/exportloopref.go
deleted file mode 100644
index d071d5c35f..0000000000
--- a/vendor/github.com/kyoh86/exportloopref/exportloopref.go
+++ /dev/null
@@ -1,334 +0,0 @@
-package exportloopref
-
-import (
- "fmt"
- "go/ast"
- "go/token"
- "go/types"
-
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/analysis/passes/inspect"
- "golang.org/x/tools/go/ast/inspector"
-)
-
-var Analyzer = &analysis.Analyzer{
- Name: "exportloopref",
- Doc: "checks for pointers to enclosing loop variables",
- Run: run,
- RunDespiteErrors: true,
- Requires: []*analysis.Analyzer{inspect.Analyzer},
-}
-
-func run(pass *analysis.Pass) (interface{}, error) {
- inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
-
- search := &Searcher{
- LoopVars: map[token.Pos]struct{}{},
- LocalVars: map[token.Pos]map[token.Pos]struct{}{},
- Pass: pass,
- }
-
- nodeFilter := []ast.Node{
- (*ast.RangeStmt)(nil),
- (*ast.ForStmt)(nil),
- (*ast.DeclStmt)(nil),
- (*ast.AssignStmt)(nil),
- (*ast.UnaryExpr)(nil),
- }
-
- inspect.WithStack(nodeFilter, search.CheckAndReport)
-
- return nil, nil
-}
-
-type Searcher struct {
- // LoopVars is positions that loop-variables are declared like below.
- // - for , := range ...
- // - for := ; ;
- LoopVars map[token.Pos]struct{}
- // LocalVars is positions of loops and the variables declared in them.
- // Use this to determine if a point assignment is an export outside the loop.
- LocalVars map[token.Pos]map[token.Pos]struct{}
-
- Pass *analysis.Pass
-}
-
-// CheckAndReport inspects each node with stack.
-// It is implemented as the I/F of the "golang.org/x/tools/go/analysis/passes/inspect".Analysis.WithStack.
-func (s *Searcher) CheckAndReport(n ast.Node, push bool, stack []ast.Node) bool {
- id, insert, digg := s.Check(n, stack)
- if id == nil {
- // no prob.
- return digg
- }
-
- // suggests fix
- var suggest []analysis.SuggestedFix
- if insert != token.NoPos {
- suggest = []analysis.SuggestedFix{{
- Message: fmt.Sprintf("loop variable %s should be pinned", id.Name),
- TextEdits: []analysis.TextEdit{{
- Pos: insert,
- End: insert,
- NewText: []byte(fmt.Sprintf("%[1]s := %[1]s\n", id.Name)),
- }},
- }}
- }
-
- // report a diagnostic
- d := analysis.Diagnostic{Pos: id.Pos(),
- End: id.End(),
- Message: fmt.Sprintf("exporting a pointer for the loop variable %s", id.Name),
- Category: "exportloopref",
- SuggestedFixes: suggest,
- }
- s.Pass.Report(d)
- return digg
-}
-
-// Check each node and stack, whether it exports loop variables or not.
-// Finding export, report the *ast.Ident of exported loop variable,
-// and token.Pos to insert assignment to fix the diagnostic.
-func (s *Searcher) Check(n ast.Node, stack []ast.Node) (loopVar *ast.Ident, insertPos token.Pos, digg bool) {
- switch typed := n.(type) {
- case *ast.RangeStmt:
- s.parseRangeStmt(typed)
- case *ast.ForStmt:
- s.parseForStmt(typed)
- case *ast.DeclStmt:
- s.parseDeclStmt(typed, stack)
- case *ast.AssignStmt:
- s.parseAssignStmt(typed, stack)
-
- case *ast.UnaryExpr:
- return s.checkUnaryExpr(typed, stack)
- }
- return nil, token.NoPos, true
-}
-
-// parseRangeStmt will check range statement (i.e. `for , := range ...`),
-// and collect positions of and .
-func (s *Searcher) parseRangeStmt(n *ast.RangeStmt) {
- s.storeLoopVars(n.Key)
- s.storeLoopVars(n.Value)
-}
-
-// parseForStmt will check for statement (i.e. `for := ; ; `),
-// and collect positions of .
-func (s *Searcher) parseForStmt(n *ast.ForStmt) {
- switch post := n.Post.(type) {
- case *ast.AssignStmt:
- // e.g. for p = head; p != nil; p = p.next
- for _, lhs := range post.Lhs {
- s.storeLoopVars(lhs)
- }
- case *ast.IncDecStmt:
- // e.g. for i := 0; i < n; i++
- s.storeLoopVars(post.X)
- }
-}
-
-func (s *Searcher) storeLoopVars(expr ast.Expr) {
- if id, ok := expr.(*ast.Ident); ok {
- s.LoopVars[id.Pos()] = struct{}{}
- }
-}
-
-// parseDeclStmt will parse declaring statement (i.e. `var`, `type`, `const`),
-// and store the position if it is "var" declaration and is in any loop.
-func (s *Searcher) parseDeclStmt(n *ast.DeclStmt, stack []ast.Node) {
- genDecl, ok := n.Decl.(*ast.GenDecl)
- if !ok {
- // (dead branch)
- // if the Decl is not GenDecl (i.e. `var`, `type` or `const` statement), it is ignored
- return
- }
- if genDecl.Tok != token.VAR {
- // if the Decl is not `var` (may be `type` or `const`), it is ignored
- return
- }
-
- loop, _ := s.innermostLoop(stack)
- if loop == nil {
- return
- }
-
- // Register declared variables
- for _, spec := range genDecl.Specs {
- for _, name := range spec.(*ast.ValueSpec).Names {
- s.storeLocalVar(loop, name)
- }
- }
-}
-
-// parseDeclStmt will parse assignment statement (i.e. ` = `),
-// and store the position if it is .
-func (s *Searcher) parseAssignStmt(n *ast.AssignStmt, stack []ast.Node) {
- if n.Tok != token.DEFINE {
- // if the statement is simple assignment (without definement), it is ignored
- return
- }
-
- loop, _ := s.innermostLoop(stack)
- if loop == nil {
- return
- }
-
- // Find statements declaring local variable
- for _, h := range n.Lhs {
- s.storeLocalVar(loop, h)
- }
-}
-
-func (s *Searcher) storeLocalVar(loop ast.Node, expr ast.Expr) {
- loopPos := loop.Pos()
- id, ok := expr.(*ast.Ident)
- if !ok {
- return
- }
- vars, ok := s.LocalVars[loopPos]
- if !ok {
- vars = map[token.Pos]struct{}{}
- }
- vars[id.Obj.Pos()] = struct{}{}
- s.LocalVars[loopPos] = vars
-}
-
-func insertionPosition(block *ast.BlockStmt) token.Pos {
- if len(block.List) > 0 {
- return block.List[0].Pos()
- }
- return token.NoPos
-}
-
-func (s *Searcher) innermostLoop(stack []ast.Node) (ast.Node, token.Pos) {
- for i := len(stack) - 1; i >= 0; i-- {
- switch typed := stack[i].(type) {
- case *ast.RangeStmt:
- return typed, insertionPosition(typed.Body)
- case *ast.ForStmt:
- return typed, insertionPosition(typed.Body)
- }
- }
- return nil, token.NoPos
-}
-
-// checkUnaryExpr check unary expression (i.e. like `-x`, `*p` or `&v`) and stack.
-// THIS IS THE ESSENTIAL PART OF THIS PARSER.
-func (s *Searcher) checkUnaryExpr(n *ast.UnaryExpr, stack []ast.Node) (*ast.Ident, token.Pos, bool) {
- if n.Op != token.AND {
- return nil, token.NoPos, true
- }
-
- loop, insert := s.innermostLoop(stack)
- if loop == nil {
- return nil, token.NoPos, true
- }
-
- // Get identity of the referred item
- id := s.getIdentity(n.X)
- if id == nil {
- return nil, token.NoPos, true
- }
-
- // If the identity is not the loop statement variable,
- // it will not be reported.
- if _, isDecl := s.LoopVars[id.Obj.Pos()]; !isDecl {
- return nil, token.NoPos, true
- }
-
- // check stack append(), []X{}, map[Type]X{}, Struct{}, &Struct{}, X.(Type), (X)
- // in the =
- var mayRHPos token.Pos
- for i := len(stack) - 2; i >= 0; i-- {
- switch typed := stack[i].(type) {
- case (*ast.UnaryExpr):
- // noop
- case (*ast.CompositeLit):
- // noop
- case (*ast.KeyValueExpr):
- // noop
- case (*ast.CallExpr):
- fun, ok := typed.Fun.(*ast.Ident)
- if !ok {
- return nil, token.NoPos, false // it's calling a function other of `append`. It cannot be checked
- }
-
- if fun.Name != "append" {
- return nil, token.NoPos, false // it's calling a function other of `append`. It cannot be checked
- }
-
- case (*ast.AssignStmt):
- if len(typed.Rhs) != len(typed.Lhs) {
- return nil, token.NoPos, false // dead logic
- }
-
- // search x where Rhs[x].Pos() == mayRHPos
- var index int
- for ri, rh := range typed.Rhs {
- if rh.Pos() == mayRHPos {
- index = ri
- break
- }
- }
-
- // check that Lhs[x] is not a local variable
- lh := typed.Lhs[index]
- isVar := s.isVar(loop, lh)
- if !isVar {
- return id, insert, false
- }
-
- return nil, token.NoPos, true
- default:
- // Other statements cannot be checked.
- return nil, token.NoPos, false
- }
-
- // remember an expr that may be the right-hand side of the AssignStmt
- mayRHPos = stack[i].Pos()
- }
- return nil, token.NoPos, true
-}
-
-func (s *Searcher) isVar(loop ast.Node, expr ast.Expr) bool {
- vars := s.LocalVars[loop.Pos()] // map[token.Pos]struct{}
- if vars == nil {
- return false
- }
- switch typed := expr.(type) {
- case (*ast.Ident):
- if typed.Obj == nil {
- return false // global var in another file (ref: #13)
- }
- _, isVar := vars[typed.Obj.Pos()]
- return isVar
- case (*ast.IndexExpr): // like X[Y], check X
- return s.isVar(loop, typed.X)
- case (*ast.SelectorExpr): // like X.Y, check X
- return s.isVar(loop, typed.X)
- }
- return false
-}
-
-// Get variable identity
-func (s *Searcher) getIdentity(expr ast.Expr) *ast.Ident {
- switch typed := expr.(type) {
- case *ast.SelectorExpr:
- // Ignore if the parent is pointer ref (fix for #2)
- if _, ok := s.Pass.TypesInfo.Types[typed.X].Type.(*types.Pointer); ok {
- return nil
- }
-
- // Get parent identity; i.e. `a.b` of the `a.b.c`.
- return s.getIdentity(typed.X)
-
- case *ast.Ident:
- // Get simple identity; i.e. `a` of the `a`.
- if typed.Obj == nil {
- return nil
- }
- return typed
- }
- return nil
-}
diff --git a/vendor/github.com/lasiar/canonicalheader/.golangci.yaml b/vendor/github.com/lasiar/canonicalheader/.golangci.yaml
index 5652c8d6cc..997ec0cb01 100644
--- a/vendor/github.com/lasiar/canonicalheader/.golangci.yaml
+++ b/vendor/github.com/lasiar/canonicalheader/.golangci.yaml
@@ -40,6 +40,9 @@ linters:
fast: false
enable:
+ # Globals and init() are not OK, because this linter is used in golangci-lint.
+ - gochecknoglobals
+ - gochecknoinits
# Check for pass []any as any in variadic func(...any).
# Rare case but saved me from debugging a few times.
- asasalint
@@ -58,6 +61,12 @@ linters:
# Check whether the function uses a non-inherited context.
- contextcheck
+ # After Go 1.22 there is no need to copy the loop variable in a for range.
+ - copyloopvar
+
+ # Find duplicate words, rare.
+ - dupword
+
# Check for two durations multiplied together.
- durationcheck
@@ -73,6 +82,10 @@ linters:
# Checks for pointers to enclosing loop variables.
- exportloopref
+
+ # Imports order.
+ - gci
+
# As you already know I'm a co-author. It would be strange to not use
# one of my warmly loved projects.
- gocritic
@@ -104,9 +117,15 @@ linters:
# Last week I caught a bug with it.
- ineffassign
+ # Range over int, works after Go 1.22.
+ - intrange
+
# Fix all the misspells, amazing thing.
- misspell
+ # Reports wrong mirror patterns of bytes/strings usage.
+ - mirror
+
# Finds naked/bare returns and requires change them.
- nakedret
@@ -121,6 +140,9 @@ linters:
# Better not to have //nolint: at all ;)
- nolintlint
+ # Aims at usages of fmt.Sprintf which have faster alternatives.
+ - perfsprint
+
# Finds slices that could potentially be pre-allocated.
# Small performance win + cleaner code.
- prealloc
@@ -144,6 +166,9 @@ linters:
- rowserrcheck
- sqlclosecheck
+ # Ensure consistent code style when using log/slog.
+ - sloglint
+
# I have found that it's not the same as staticcheck binary :\
- staticcheck
@@ -156,6 +181,7 @@ linters:
# Test-related checks. All of them are good.
- tenv
- testableexamples
+ - testifylint
- thelper
- tparallel
@@ -185,9 +211,6 @@ linters:
# (c) Bryan C. Mills / https://github.com/bcmills
- cyclop
- # Abandoned, replaced by `unused`.
- - deadcode
-
# Check declaration order of types, consts, vars and funcs.
# I like it but I don't use it.
- decorder
@@ -202,9 +225,6 @@ linters:
# Tool for code clone detection.
- dupl
- # Find duplicate words, rare.
- - dupword
-
# I'm fine to check the error from json.Marshal ¯\_(ツ)_/¯
- errchkjson
@@ -213,7 +233,6 @@ linters:
# Forces to handle more cases. Cool but noisy.
- exhaustive
- - exhaustivestruct # Deprecated, replaced by check below.
- exhaustruct
# Forbids some identifiers. I don't have a case for it.
@@ -225,19 +244,12 @@ linters:
# I might have long but a simple function.
- funlen
- # Imports order. I do this manually ¯\_(ツ)_/¯
- - gci
-
# I'm not a fan of ginkgo and gomega packages.
- ginkgolinter
# Checks that compiler directive comments (//go:) are valid. Rare.
- gocheckcompilerdirectives
- # Globals and init() are ok.
- - gochecknoglobals
- - gochecknoinits
-
# Same as `cyclop` linter (see above)
- gocognit
- goconst
@@ -247,16 +259,13 @@ linters:
- godox
# Check the error handling expressions. Too noisy.
- - goerr113
+ - err113
# I don't use file headers.
- goheader
- # 1st Go linter, deprecated :( use `revive`.
- - golint
-
# Reports magic consts. Might be noisy but still good.
- - gomnd
+ - mnd
# Allowed/blocked packages to import. I prefer to do it manually.
- gomodguard
@@ -267,9 +276,6 @@ linters:
# Groupt declarations, I prefer manually.
- grouper
- # Deprecated.
- - ifshort
-
# Checks imports aliases, rare.
- importas
@@ -291,9 +297,6 @@ linters:
# Slice declarations with non-zero initial length. Not my case.
- makezero
- # Deprecated. Use govet `fieldalignment`.
- - maligned
-
# Enforce tags in un/marshaled structs. Cool but not my case.
- musttag
@@ -306,9 +309,6 @@ linters:
# Reports all named returns, not that bad.
- nonamedreturns
- # Deprecated. Replaced by `revive`.
- - nosnakecase
-
# Finds misuse of Sprintf with host:port in a URL. Cool but rare.
- nosprintfhostport
@@ -335,6 +335,12 @@ linters:
- wsl
linters-settings:
+ gci:
+ sections:
+ - standard
+ - default
+ - localmodule
+
revive:
# Maximum number of open files at the same time.
# See https://github.com/mgechev/revive#command-line-flags
@@ -378,7 +384,6 @@ linters-settings:
- name: banned-characters
severity: warning
disabled: false
- arguments: ["Ω", "Σ", "σ", "7"]
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#bare-return
- name: bare-return
severity: warning
@@ -404,9 +409,6 @@ linters-settings:
- name: comment-spacings
severity: warning
disabled: false
- arguments:
- - mypragma
- - otherpragma
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#confusing-naming
- name: confusing-naming
severity: warning
@@ -444,8 +446,6 @@ linters-settings:
- name: defer
severity: warning
disabled: false
- arguments:
- - ["call-chain", "loop"]
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#dot-imports
- name: dot-imports
severity: warning
@@ -470,8 +470,6 @@ linters-settings:
- name: enforce-map-style
severity: warning
disabled: false
- arguments:
- - "make"
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#error-naming
- name: error-naming
severity: warning
@@ -530,8 +528,6 @@ linters-settings:
- name: indent-error-flow
severity: warning
disabled: false
- arguments:
- - "preserveScope"
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#import-alias-naming
- name: import-alias-naming
severity: warning
@@ -542,9 +538,6 @@ linters-settings:
- name: imports-blacklist
severity: warning
disabled: false
- arguments:
- - "crypto/md5"
- - "crypto/sha1"
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#import-shadowing
- name: import-shadowing
severity: warning
@@ -632,8 +625,6 @@ linters-settings:
- name: superfluous-else
severity: warning
disabled: false
- arguments:
- - "preserveScope"
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#time-equal
- name: time-equal
severity: warning
@@ -646,10 +637,6 @@ linters-settings:
- name: var-naming
severity: warning
disabled: false
- arguments:
- - ["ID"] # AllowList
- - ["VM"] # DenyList
- - - upperCaseConst: true
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#var-declaration
- name: var-declaration
severity: warning
@@ -670,9 +657,6 @@ linters-settings:
- name: unhandled-error
severity: warning
disabled: false
- arguments:
- - "fmt.Printf"
- - "myFunction"
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unnecessary-stmt
- name: unnecessary-stmt
severity: warning
@@ -691,8 +675,6 @@ linters-settings:
- name: unused-receiver
severity: warning
disabled: false
- arguments:
- - allowRegex: "^_"
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#useless-break
- name: useless-break
severity: warning
diff --git a/vendor/github.com/lasiar/canonicalheader/analyzer.go b/vendor/github.com/lasiar/canonicalheader/analyzer.go
index d3fb529ebd..258ebdfd4d 100644
--- a/vendor/github.com/lasiar/canonicalheader/analyzer.go
+++ b/vendor/github.com/lasiar/canonicalheader/analyzer.go
@@ -18,6 +18,7 @@ const (
name = "Header"
)
+//nolint:gochecknoglobals // struct is not big, can be skipped.
var Analyzer = &analysis.Analyzer{
Name: "canonicalheader",
Doc: "canonicalheader checks whether net/http.Header uses canonical header",
diff --git a/vendor/github.com/ldez/exptostd/.gitignore b/vendor/github.com/ldez/exptostd/.gitignore
new file mode 100644
index 0000000000..ec3a603988
--- /dev/null
+++ b/vendor/github.com/ldez/exptostd/.gitignore
@@ -0,0 +1,2 @@
+/exptostd
+.idea
diff --git a/vendor/github.com/ldez/exptostd/.golangci.yml b/vendor/github.com/ldez/exptostd/.golangci.yml
new file mode 100644
index 0000000000..e615d3e5c1
--- /dev/null
+++ b/vendor/github.com/ldez/exptostd/.golangci.yml
@@ -0,0 +1,83 @@
+linters:
+ enable-all: true
+ disable:
+ - exportloopref # deprecated
+ - sqlclosecheck # not relevant (SQL)
+ - rowserrcheck # not relevant (SQL)
+ - cyclop # duplicate of gocyclo
+ - lll
+ - dupl
+ - nlreturn
+ - exhaustive
+ - exhaustruct
+ - testpackage
+ - tparallel
+ - paralleltest
+ - prealloc
+ - varnamelen
+ - nilnil
+ - errchkjson
+ - nonamedreturns
+
+linters-settings:
+ govet:
+ enable-all: true
+ disable:
+ - fieldalignment
+ gocyclo:
+ min-complexity: 20
+ goconst:
+ min-len: 5
+ min-occurrences: 3
+ misspell:
+ locale: US
+ funlen:
+ lines: -1
+ statements: 40
+ godox:
+ keywords:
+ - FIXME
+ gofumpt:
+ extra-rules: true
+ depguard:
+ rules:
+ main:
+ deny:
+ - pkg: "github.com/instana/testify"
+ desc: not allowed
+ - pkg: "github.com/pkg/errors"
+ desc: Should be replaced by standard lib errors package
+ wsl:
+ force-case-trailing-whitespace: 1
+ allow-trailing-comment: true
+ gocritic:
+ enabled-tags:
+ - diagnostic
+ - style
+ - performance
+ disabled-checks:
+ - sloppyReassign
+ - rangeValCopy
+ - octalLiteral
+ - paramTypeCombine # already handled by gofumpt.extra-rules
+ settings:
+ hugeParam:
+ sizeThreshold: 100
+ nolintlint:
+ require-specific: true
+ require-explanation: true
+
+issues:
+ exclude-use-default: false
+ max-issues-per-linter: 0
+ max-same-issues: 0
+
+output:
+ show-stats: true
+ sort-results: true
+ sort-order:
+ - linter
+ - file
+
+run:
+ timeout: 5m
diff --git a/vendor/github.com/ldez/exptostd/LICENSE b/vendor/github.com/ldez/exptostd/LICENSE
new file mode 100644
index 0000000000..c1bf0c3288
--- /dev/null
+++ b/vendor/github.com/ldez/exptostd/LICENSE
@@ -0,0 +1,190 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ Copyright 2024 Fernandez Ludovic
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/ldez/exptostd/Makefile b/vendor/github.com/ldez/exptostd/Makefile
new file mode 100644
index 0000000000..ad72751490
--- /dev/null
+++ b/vendor/github.com/ldez/exptostd/Makefile
@@ -0,0 +1,15 @@
+.PHONY: clean check test build
+
+default: clean check test build
+
+clean:
+ rm -rf dist/ cover.out
+
+test: clean
+ go test -v -cover ./...
+
+check:
+ golangci-lint run
+
+build:
+ go build -ldflags "-s -w" -trimpath ./cmd/exptostd/
diff --git a/vendor/github.com/ldez/exptostd/exptostd.go b/vendor/github.com/ldez/exptostd/exptostd.go
new file mode 100644
index 0000000000..cf6c5e8421
--- /dev/null
+++ b/vendor/github.com/ldez/exptostd/exptostd.go
@@ -0,0 +1,475 @@
+// Package exptostd is an analyzer that detects functions from golang.org/x/exp/ that can be replaced by std functions.
+package exptostd
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/build"
+ "go/printer"
+ "go/token"
+ "go/types"
+ "os"
+ "slices"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+)
+
+const (
+ pkgExpMaps = "golang.org/x/exp/maps"
+ pkgExpSlices = "golang.org/x/exp/slices"
+ pkgExpConstraints = "golang.org/x/exp/constraints"
+)
+
+const (
+ pkgMaps = "maps"
+ pkgSlices = "slices"
+ pkgComp = "cmp"
+)
+
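+// Go versions are encoded as major and minor digits concatenated (e.g. go1.21 -> 121);
+// goDevel is a sentinel for development versions of Go.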
+const (
+ go123 = 123
+ go121 = 121
+ goDevel = 666
+)
+
+// Result holds the results of one analysis step.
+type Result struct {
+ shouldKeepImport bool
+ Diagnostics []analysis.Diagnostic
+}
+
+type stdReplacement[T ast.Expr] struct {
+ MinGo int
+ Text string
+ Suggested func(callExpr T) (analysis.SuggestedFix, error)
+}
+
+type analyzer struct {
+ mapsPkgReplacements map[string]stdReplacement[*ast.CallExpr]
+ slicesPkgReplacements map[string]stdReplacement[*ast.CallExpr]
+ constraintsPkgReplacements map[string]stdReplacement[*ast.SelectorExpr]
+
+ skipGoVersionDetection bool
+ goVersion int
+}
+
+// NewAnalyzer creates a new Analyzer.
+func NewAnalyzer() *analysis.Analyzer {
+ _, skip := os.LookupEnv("EXPTOSTD_SKIP_GO_VERSION_CHECK")
+
+ l := &analyzer{
+ skipGoVersionDetection: skip,
+ mapsPkgReplacements: map[string]stdReplacement[*ast.CallExpr]{
+ "Keys": {MinGo: go123, Text: "slices.AppendSeq(make([]T, 0, len(data)), maps.Keys(data))", Suggested: suggestedFixForKeysOrValues},
+ "Values": {MinGo: go123, Text: "slices.AppendSeq(make([]T, 0, len(data)), maps.Values(data))", Suggested: suggestedFixForKeysOrValues},
+ "Equal": {MinGo: go121, Text: "maps.Equal()"},
+ "EqualFunc": {MinGo: go121, Text: "maps.EqualFunc()"},
+ "Clone": {MinGo: go121, Text: "maps.Clone()"},
+ "Copy": {MinGo: go121, Text: "maps.Copy()"},
+ "DeleteFunc": {MinGo: go121, Text: "maps.DeleteFunc()"},
+ "Clear": {MinGo: go121, Text: "clear()", Suggested: suggestedFixForClear},
+ },
+ slicesPkgReplacements: map[string]stdReplacement[*ast.CallExpr]{
+ "Equal": {MinGo: go121, Text: "slices.Equal()"},
+ "EqualFunc": {MinGo: go121, Text: "slices.EqualFunc()"},
+ "Compare": {MinGo: go121, Text: "slices.Compare()"},
+ "CompareFunc": {MinGo: go121, Text: "slices.CompareFunc()"},
+ "Index": {MinGo: go121, Text: "slices.Index()"},
+ "IndexFunc": {MinGo: go121, Text: "slices.IndexFunc()"},
+ "Contains": {MinGo: go121, Text: "slices.Contains()"},
+ "ContainsFunc": {MinGo: go121, Text: "slices.ContainsFunc()"},
+ "Insert": {MinGo: go121, Text: "slices.Insert()"},
+ "Delete": {MinGo: go121, Text: "slices.Delete()"},
+ "DeleteFunc": {MinGo: go121, Text: "slices.DeleteFunc()"},
+ "Replace": {MinGo: go121, Text: "slices.Replace()"},
+ "Clone": {MinGo: go121, Text: "slices.Clone()"},
+ "Compact": {MinGo: go121, Text: "slices.Compact()"},
+ "CompactFunc": {MinGo: go121, Text: "slices.CompactFunc()"},
+ "Grow": {MinGo: go121, Text: "slices.Grow()"},
+ "Clip": {MinGo: go121, Text: "slices.Clip()"},
+ "Reverse": {MinGo: go121, Text: "slices.Reverse()"},
+
+ "Sort": {MinGo: go121, Text: "slices.Sort()"},
+ "SortFunc": {MinGo: go121, Text: "slices.SortFunc()"},
+ "SortStableFunc": {MinGo: go121, Text: "slices.SortStableFunc()"},
+ "IsSorted": {MinGo: go121, Text: "slices.IsSorted()"},
+ "IsSortedFunc": {MinGo: go121, Text: "slices.IsSortedFunc()"},
+ "Min": {MinGo: go121, Text: "slices.Min()"},
+ "MinFunc": {MinGo: go121, Text: "slices.MinFunc()"},
+ "Max": {MinGo: go121, Text: "slices.Max()"},
+ "MaxFunc": {MinGo: go121, Text: "slices.MaxFunc()"},
+ "BinarySearch": {MinGo: go121, Text: "slices.BinarySearch()"},
+ "BinarySearchFunc": {MinGo: go121, Text: "slices.BinarySearchFunc()"},
+ },
+ constraintsPkgReplacements: map[string]stdReplacement[*ast.SelectorExpr]{
+ "Ordered": {MinGo: go121, Text: "cmp.Ordered", Suggested: suggestedFixForConstraintsOrder},
+ },
+ }
+
+ return &analysis.Analyzer{
+ Name: "exptostd",
+ Doc: "Detects functions from golang.org/x/exp/ that can be replaced by std functions.",
+ Run: l.run,
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ }
+}
+
+//nolint:gocognit,gocyclo // The complexity is expected because of the number of cases to handle.
+func (a *analyzer) run(pass *analysis.Pass) (any, error) {
+ insp, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+ if !ok {
+ return nil, nil
+ }
+
+ a.goVersion = getGoVersion(pass)
+
+ nodeFilter := []ast.Node{
+ (*ast.CallExpr)(nil),
+ (*ast.FuncDecl)(nil),
+ (*ast.TypeSpec)(nil),
+ (*ast.ImportSpec)(nil),
+ }
+
+ imports := map[string]*ast.ImportSpec{}
+
+ var shouldKeepExpMaps bool
+
+ var resultExpSlices Result
+
+ resultExpConstraints := &Result{}
+
+ insp.Preorder(nodeFilter, func(n ast.Node) {
+ switch node := n.(type) {
+ case *ast.ImportSpec:
+ // skip aliases
+ if node.Name == nil || node.Name.Name == "" {
+ imports[trimImportPath(node)] = node
+ }
+
+ return
+
+ case *ast.CallExpr:
+ selExpr, ok := node.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return
+ }
+
+ ident, ok := selExpr.X.(*ast.Ident)
+ if !ok {
+ return
+ }
+
+ switch ident.Name {
+ case pkgMaps:
+ diagnostic, usage := a.detectPackageUsage(pass, a.mapsPkgReplacements, selExpr, ident, node, pkgExpMaps)
+ if usage {
+ pass.Report(diagnostic)
+ }
+
+ shouldKeepExpMaps = shouldKeepExpMaps || !usage
+
+ case pkgSlices:
+ diagnostic, usage := a.detectPackageUsage(pass, a.slicesPkgReplacements, selExpr, ident, node, pkgExpSlices)
+ if usage {
+ resultExpSlices.Diagnostics = append(resultExpSlices.Diagnostics, diagnostic)
+ }
+
+ resultExpSlices.shouldKeepImport = resultExpSlices.shouldKeepImport || !usage
+ }
+
+ case *ast.FuncDecl:
+ if node.Type.TypeParams != nil {
+ for _, field := range node.Type.TypeParams.List {
+ a.detectConstraintsUsage(pass, field.Type, resultExpConstraints)
+ }
+ }
+
+ case *ast.TypeSpec:
+ if node.TypeParams != nil {
+ for _, field := range node.TypeParams.List {
+ a.detectConstraintsUsage(pass, field.Type, resultExpConstraints)
+ }
+ }
+
+ interfaceType, ok := node.Type.(*ast.InterfaceType)
+ if !ok {
+ return
+ }
+
+ for _, method := range interfaceType.Methods.List {
+ switch exp := method.Type.(type) {
+ case *ast.BinaryExpr:
+ a.detectConstraintsUsage(pass, exp.X, resultExpConstraints)
+ a.detectConstraintsUsage(pass, exp.Y, resultExpConstraints)
+
+ case *ast.SelectorExpr:
+ a.detectConstraintsUsage(pass, exp, resultExpConstraints)
+ }
+ }
+ }
+ })
+
+ // maps
+ a.suggestReplaceImport(pass, imports, shouldKeepExpMaps, pkgExpMaps, pkgMaps)
+
+ // slices
+ if resultExpSlices.shouldKeepImport {
+ for _, diagnostic := range resultExpSlices.Diagnostics {
+ pass.Report(diagnostic)
+ }
+ } else {
+ a.suggestReplaceImport(pass, imports, resultExpSlices.shouldKeepImport, pkgExpSlices, pkgSlices)
+ }
+
+ // constraints
+ a.suggestReplaceImport(pass, imports, resultExpConstraints.shouldKeepImport, pkgExpConstraints, pkgComp)
+
+ return nil, nil
+}
+
+func (a *analyzer) detectPackageUsage(pass *analysis.Pass,
+ replacements map[string]stdReplacement[*ast.CallExpr],
+ selExpr *ast.SelectorExpr, ident *ast.Ident, callExpr *ast.CallExpr,
+ importPath string,
+) (analysis.Diagnostic, bool) {
+ rp, ok := replacements[selExpr.Sel.Name]
+ if !ok {
+ return analysis.Diagnostic{}, false
+ }
+
+ if !a.skipGoVersionDetection && rp.MinGo > a.goVersion {
+ return analysis.Diagnostic{}, false
+ }
+
+ if !isPackageUsed(pass, ident, importPath) {
+ return analysis.Diagnostic{}, false
+ }
+
+ diagnostic := analysis.Diagnostic{
+ Pos: callExpr.Pos(),
+ Message: fmt.Sprintf("%s.%s() can be replaced by %s", importPath, selExpr.Sel.Name, rp.Text),
+ }
+
+ if rp.Suggested != nil {
+ fix, err := rp.Suggested(callExpr)
+ if err != nil {
+ diagnostic.Message = fmt.Sprintf("Suggested fix error: %v", err)
+ } else {
+ diagnostic.SuggestedFixes = append(diagnostic.SuggestedFixes, fix)
+ }
+ }
+
+ return diagnostic, true
+}
+
+func (a *analyzer) detectConstraintsUsage(pass *analysis.Pass, expr ast.Expr, result *Result) {
+ selExpr, ok := expr.(*ast.SelectorExpr)
+ if !ok {
+ return
+ }
+
+ ident, ok := selExpr.X.(*ast.Ident)
+ if !ok {
+ return
+ }
+
+ if !isPackageUsed(pass, ident, pkgExpConstraints) {
+ return
+ }
+
+ rp, ok := a.constraintsPkgReplacements[selExpr.Sel.Name]
+ if !ok {
+ result.shouldKeepImport = true
+ return
+ }
+
+ if !a.skipGoVersionDetection && rp.MinGo > a.goVersion {
+ result.shouldKeepImport = true
+ return
+ }
+
+ diagnostic := analysis.Diagnostic{
+ Pos: selExpr.Pos(),
+ Message: fmt.Sprintf("%s.%s can be replaced by %s", pkgExpConstraints, selExpr.Sel.Name, rp.Text),
+ }
+
+ if rp.Suggested != nil {
+ fix, err := rp.Suggested(selExpr)
+ if err != nil {
+ diagnostic.Message = fmt.Sprintf("Suggested fix error: %v", err)
+ } else {
+ diagnostic.SuggestedFixes = append(diagnostic.SuggestedFixes, fix)
+ }
+ }
+
+ pass.Report(diagnostic)
+}
+
+func (a *analyzer) suggestReplaceImport(pass *analysis.Pass, imports map[string]*ast.ImportSpec, shouldKeep bool, importPath, stdPackage string) {
+ imp, ok := imports[importPath]
+ if !ok || shouldKeep {
+ return
+ }
+
+ src := trimImportPath(imp)
+
+ pass.Report(analysis.Diagnostic{
+ Pos: imp.Pos(),
+ End: imp.End(),
+ Message: fmt.Sprintf("Import statement '%s' can be replaced by '%s'", src, stdPackage),
+ SuggestedFixes: []analysis.SuggestedFix{{
+ TextEdits: []analysis.TextEdit{{
+ Pos: imp.Path.Pos(),
+ End: imp.Path.End(),
+ NewText: []byte(string(imp.Path.Value[0]) + stdPackage + string(imp.Path.Value[0])),
+ }},
+ }},
+ })
+}
+
+func suggestedFixForClear(callExpr *ast.CallExpr) (analysis.SuggestedFix, error) {
+ s := &ast.CallExpr{
+ Fun: ast.NewIdent("clear"),
+ Args: callExpr.Args,
+ Ellipsis: callExpr.Ellipsis,
+ }
+
+ buf := bytes.NewBuffer(nil)
+
+ err := printer.Fprint(buf, token.NewFileSet(), s)
+ if err != nil {
+ return analysis.SuggestedFix{}, fmt.Errorf("print suggested fix: %w", err)
+ }
+
+ return analysis.SuggestedFix{
+ TextEdits: []analysis.TextEdit{{
+ Pos: callExpr.Pos(),
+ End: callExpr.End(),
+ NewText: buf.Bytes(),
+ }},
+ }, nil
+}
+
+func suggestedFixForKeysOrValues(callExpr *ast.CallExpr) (analysis.SuggestedFix, error) {
+ s := &ast.CallExpr{
+ Fun: &ast.SelectorExpr{
+ X: &ast.Ident{Name: "slices"},
+ Sel: &ast.Ident{Name: "AppendSeq"},
+ },
+ Args: []ast.Expr{
+ &ast.CallExpr{
+ Fun: &ast.Ident{Name: "make"},
+ Args: []ast.Expr{
+ &ast.ArrayType{
+ Elt: &ast.Ident{Name: "T"}, // TODO(ldez) improve the type detection.
+ },
+ &ast.BasicLit{Kind: token.INT, Value: "0"},
+ &ast.CallExpr{
+ Fun: &ast.Ident{Name: "len"},
+ Args: callExpr.Args,
+ },
+ },
+ },
+ callExpr,
+ },
+ }
+
+ buf := bytes.NewBuffer(nil)
+
+ err := printer.Fprint(buf, token.NewFileSet(), s)
+ if err != nil {
+ return analysis.SuggestedFix{}, fmt.Errorf("print suggested fix: %w", err)
+ }
+
+ return analysis.SuggestedFix{
+ TextEdits: []analysis.TextEdit{{
+ Pos: callExpr.Pos(),
+ End: callExpr.End(),
+ NewText: buf.Bytes(),
+ }},
+ }, nil
+}
+
+func suggestedFixForConstraintsOrder(selExpr *ast.SelectorExpr) (analysis.SuggestedFix, error) {
+ s := &ast.SelectorExpr{
+ X: &ast.Ident{Name: pkgComp},
+ Sel: &ast.Ident{Name: "Ordered"},
+ }
+
+ buf := bytes.NewBuffer(nil)
+
+ err := printer.Fprint(buf, token.NewFileSet(), s)
+ if err != nil {
+ return analysis.SuggestedFix{}, fmt.Errorf("print suggested fix: %w", err)
+ }
+
+ return analysis.SuggestedFix{
+ TextEdits: []analysis.TextEdit{{
+ Pos: selExpr.Pos(),
+ End: selExpr.End(),
+ NewText: buf.Bytes(),
+ }},
+ }, nil
+}
+
+func isPackageUsed(pass *analysis.Pass, ident *ast.Ident, importPath string) bool {
+ obj := pass.TypesInfo.Uses[ident]
+ if obj == nil {
+ return false
+ }
+
+ pkg, ok := obj.(*types.PkgName)
+ if !ok {
+ return false
+ }
+
+ if pkg.Imported().Path() != importPath {
+ return false
+ }
+
+ return true
+}
+
+func getGoVersion(pass *analysis.Pass) int {
+ // Prior to go1.22, versions.FileVersion returns only the toolchain version,
+ // which is of no use to us,
+ // so disable this analyzer on earlier versions.
+ if !slices.Contains(build.Default.ReleaseTags, "go1.22") {
+ return 0 // false
+ }
+
+ pkgVersion := pass.Pkg.GoVersion()
+ if pkgVersion == "" {
+ // Empty means Go devel.
+ return goDevel // true
+ }
+
+ raw := strings.TrimPrefix(pkgVersion, "go")
+
+ // prerelease version (go1.24rc1)
+ idx := strings.IndexFunc(raw, func(r rune) bool {
+ return (r < '0' || r > '9') && r != '.'
+ })
+
+ if idx != -1 {
+ raw = raw[:idx]
+ }
+
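+ // e.g. "go1.24rc1" -> 124, "go1.21.5" -> 121 (major and minor digits concatenated below)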
+ vParts := strings.Split(raw, ".")
+
+ v, err := strconv.Atoi(strings.Join(vParts[:2], ""))
+ if err != nil {
+ v = 116
+ }
+
+ return v
+}
+
+func trimImportPath(spec *ast.ImportSpec) string {
+ return spec.Path.Value[1 : len(spec.Path.Value)-1]
+}
diff --git a/vendor/github.com/ldez/exptostd/readme.md b/vendor/github.com/ldez/exptostd/readme.md
new file mode 100644
index 0000000000..bd1df8d547
--- /dev/null
+++ b/vendor/github.com/ldez/exptostd/readme.md
@@ -0,0 +1,116 @@
+# ExpToStd
+
+Detects functions from golang.org/x/exp/ that can be replaced by std functions.
+
+[](https://github.com/sponsors/ldez)
+
+Current detections:
+
+- `golang.org/x/exp/maps`:
+ - `Keys`
+ - `Values`
+ - `Equal`
+ - `EqualFunc`
+ - `Clone`
+ - `Copy`
+ - `DeleteFunc`
+ - `Clear`
+
+- `golang.org/x/exp/slices`:
+ - `Equal`
+ - `EqualFunc`
+ - `Compare`
+ - `CompareFunc`
+ - `Index`
+ - `IndexFunc`
+ - `Contains`
+ - `ContainsFunc`
+ - `Insert`
+ - `Delete`
+ - `DeleteFunc`
+ - `Replace`
+ - `Clone`
+ - `Compact`
+ - `CompactFunc`
+ - `Grow`
+ - `Clip`
+ - `Reverse`
+ - `Sort`
+ - `SortFunc`
+ - `SortStableFunc`
+ - `IsSorted`
+ - `IsSortedFunc`
+ - `Min`
+ - `MinFunc`
+ - `Max`
+ - `MaxFunc`
+ - `BinarySearch`
+ - `BinarySearchFunc`
+
+- `golang.org/x/exp/constraints`:
+ - `Ordered`
+
+## Usage
+
+### Inside golangci-lint
+
+Recommended.
+
+```yaml
+linters:
+ enable:
+ - exptostd
+```
+
+### As a CLI
+
+```bash
+go install github.com/ldez/exptostd/cmd/exptostd@latest
+```
+
+```bash
+./exptostd ./...
+```
+
+## Examples
+
+```go
+package foo
+
+import (
+ "fmt"
+
+ "golang.org/x/exp/maps"
+)
+
+func foo(m map[string]string) {
+ clone := maps.Clone(m)
+
+ fmt.Println(clone)
+}
+```
+
+It can be replaced by:
+
+```go
+package foo
+
+import (
+ "fmt"
+ "maps"
+)
+
+func foo(m map[string]string) {
+ clone := maps.Clone(m)
+
+ fmt.Println(clone)
+}
+
+```
+
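+For `maps.Keys` and `maps.Values`, the standard library returns iterators since Go 1.23, so the suggested rewrite goes through `slices.AppendSeq` (an illustrative sketch, not output produced by the tool):
+
+```go
+package foo
+
+import (
+	"maps"
+	"slices"
+)
+
+func keys(m map[string]string) []string {
+	// x/exp maps.Keys returned a []string; the std maps.Keys returns an iterator,
+	// so collect it into a pre-sized slice.
+	return slices.AppendSeq(make([]string, 0, len(m)), maps.Keys(m))
+}
+```
+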
+## References
+
+- https://tip.golang.org/doc/go1.21#maps
+- https://tip.golang.org/doc/go1.21#slices
+- https://tip.golang.org/doc/go1.23#iterators
+- https://tip.golang.org/doc/go1.21#cmp
diff --git a/vendor/github.com/ldez/gomoddirectives/.golangci.yml b/vendor/github.com/ldez/gomoddirectives/.golangci.yml
index 034745570a..7f25666569 100644
--- a/vendor/github.com/ldez/gomoddirectives/.golangci.yml
+++ b/vendor/github.com/ldez/gomoddirectives/.golangci.yml
@@ -1,9 +1,31 @@
-run:
- timeout: 2m
+linters:
+ enable-all: true
+ disable:
+ - exportloopref # deprecated
+ - sqlclosecheck # not relevant (SQL)
+ - rowserrcheck # not relevant (SQL)
+ - cyclop # duplicate of gocyclo
+ - lll
+ - dupl
+ - prealloc
+ - bodyclose
+ - wsl
+ - nlreturn
+ - mnd
+ - testpackage
+ - paralleltest
+ - tparallel
+ - err113
+ - wrapcheck
+ - exhaustive
+ - exhaustruct
+ - varnamelen
linters-settings:
govet:
enable-all: true
+ disable:
+ - fieldalignment
gocyclo:
min-complexity: 12
goconst:
@@ -49,39 +71,6 @@ linters-settings:
rules:
json: pascal
-linters:
- enable-all: true
- disable:
- - deadcode # deprecated
- - exhaustivestruct # deprecated
- - golint # deprecated
- - ifshort # deprecated
- - interfacer # deprecated
- - maligned # deprecated
- - nosnakecase # deprecated
- - scopelint # deprecated
- - structcheck # deprecated
- - varcheck # deprecated
- - sqlclosecheck # not relevant (SQL)
- - rowserrcheck # not relevant (SQL)
- - execinquery # not relevant (SQL)
- - cyclop # duplicate of gocyclo
- - lll
- - dupl
- - prealloc
- - bodyclose
- - wsl
- - nlreturn
- - gomnd
- - testpackage
- - paralleltest
- - tparallel
- - goerr113
- - wrapcheck
- - exhaustive
- - exhaustruct
- - varnamelen
-
issues:
exclude-use-default: false
max-issues-per-linter: 0
@@ -92,11 +81,20 @@ issues:
exclude-rules:
- path: "(.+)_test.go"
linters:
- - funlen
- - goconst
+ - funlen
+ - goconst
+ - maintidx
- path: cmd/gomoddirectives/gomoddirectives.go
+ linters:
+ - forbidigo
text: 'use of `fmt.Println` forbidden'
output:
show-stats: true
sort-results: true
+ sort-order:
+ - linter
+ - file
+
+run:
+ timeout: 2m
diff --git a/vendor/github.com/ldez/gomoddirectives/LICENSE b/vendor/github.com/ldez/gomoddirectives/LICENSE
index caed523b49..c1bf0c3288 100644
--- a/vendor/github.com/ldez/gomoddirectives/LICENSE
+++ b/vendor/github.com/ldez/gomoddirectives/LICENSE
@@ -175,7 +175,7 @@
END OF TERMS AND CONDITIONS
- Copyright 2021 Fernandez Ludovic
+ Copyright 2024 Fernandez Ludovic
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
diff --git a/vendor/github.com/ldez/gomoddirectives/gomoddirectives.go b/vendor/github.com/ldez/gomoddirectives/gomoddirectives.go
index 2a4c904746..857d22f9b2 100644
--- a/vendor/github.com/ldez/gomoddirectives/gomoddirectives.go
+++ b/vendor/github.com/ldez/gomoddirectives/gomoddirectives.go
@@ -1,17 +1,26 @@
-// Package gomoddirectives a linter that handle `replace`, `retract`, `exclude` directives into `go.mod`.
+// Package gomoddirectives is a linter that handles directives in `go.mod`.
package gomoddirectives
import (
+ "context"
"fmt"
"go/token"
+ "regexp"
"strings"
+ "github.com/ldez/grignotin/gomod"
"golang.org/x/mod/modfile"
+ "golang.org/x/tools/go/analysis"
)
const (
reasonRetract = "a comment is mandatory to explain why the version has been retracted"
reasonExclude = "exclude directive is not allowed"
+ reasonToolchain = "toolchain directive is not allowed"
+ reasonToolchainPattern = "toolchain directive (%s) doesn't match the pattern '%s'"
+ reasonTool = "tool directive is not allowed"
+ reasonGoDebug = "godebug directive is not allowed"
+ reasonGoVersion = "go directive (%s) doesn't match the pattern '%s'"
reasonReplaceLocal = "local replacement are not allowed"
reasonReplace = "replacement are not allowed"
reasonReplaceIdentical = "the original module and the replacement are identical"
@@ -44,6 +53,36 @@ type Options struct {
ReplaceAllowLocal bool
ExcludeForbidden bool
RetractAllowNoExplanation bool
+ ToolchainForbidden bool
+ ToolchainPattern *regexp.Regexp
+ ToolForbidden bool
+ GoDebugForbidden bool
+ GoVersionPattern *regexp.Regexp
+}
+
+// AnalyzePass analyzes a pass.
+func AnalyzePass(pass *analysis.Pass, opts Options) ([]Result, error) {
+ info, err := gomod.GetModuleInfo(context.Background())
+ if err != nil {
+ return nil, fmt.Errorf("get information about modules: %w", err)
+ }
+
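+ // Default to the first module's go.mod, then prefer the module matching the analyzed package's module path.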
+ goMod := info[0].GoMod
+ if pass.Module != nil && pass.Module.Path != "" {
+ for _, m := range info {
+ if m.Path == pass.Module.Path {
+ goMod = m.GoMod
+ break
+ }
+ }
+ }
+
+ f, err := parseGoMod(goMod)
+ if err != nil {
+ return nil, fmt.Errorf("parse %s: %w", goMod, err)
+ }
+
+ return AnalyzeFile(f, opts), nil
}
// Analyze analyzes a project.
@@ -58,58 +97,135 @@ func Analyze(opts Options) ([]Result, error) {
// AnalyzeFile analyzes a mod file.
func AnalyzeFile(file *modfile.File, opts Options) []Result {
+ checks := []func(file *modfile.File, opts Options) []Result{
+ checkRetractDirectives,
+ checkExcludeDirectives,
+ checkToolDirectives,
+ checkReplaceDirectives,
+ checkToolchainDirective,
+ checkGoDebugDirectives,
+ checkGoVersionDirectives,
+ }
+
var results []Result
+ for _, check := range checks {
+ results = append(results, check(file, opts)...)
+ }
- if !opts.RetractAllowNoExplanation {
- for _, r := range file.Retract {
- if r.Rationale != "" {
- continue
- }
+ return results
+}
- results = append(results, NewResult(file, r.Syntax, reasonRetract))
- }
+func checkGoVersionDirectives(file *modfile.File, opts Options) []Result {
+ if file == nil || file.Go == nil || opts.GoVersionPattern == nil || opts.GoVersionPattern.MatchString(file.Go.Version) {
+ return nil
}
- if opts.ExcludeForbidden {
- for _, e := range file.Exclude {
- results = append(results, NewResult(file, e.Syntax, reasonExclude))
+ return []Result{NewResult(file, file.Go.Syntax, fmt.Sprintf(reasonGoVersion, file.Go.Version, opts.GoVersionPattern.String()))}
+}
+
+func checkToolchainDirective(file *modfile.File, opts Options) []Result {
+ if file.Toolchain == nil {
+ return nil
+ }
+
+ if opts.ToolchainForbidden {
+ return []Result{NewResult(file, file.Toolchain.Syntax, reasonToolchain)}
+ }
+
+ if opts.ToolchainPattern == nil {
+ return nil
+ }
+
+ if !opts.ToolchainPattern.MatchString(file.Toolchain.Name) {
+ return []Result{NewResult(file, file.Toolchain.Syntax, fmt.Sprintf(reasonToolchainPattern, file.Toolchain.Name, opts.ToolchainPattern.String()))}
+ }
+
+ return nil
+}
+
+func checkRetractDirectives(file *modfile.File, opts Options) []Result {
+ if opts.RetractAllowNoExplanation {
+ return nil
+ }
+
+ var results []Result
+
+ for _, retract := range file.Retract {
+ if retract.Rationale != "" {
+ continue
}
+
+ results = append(results, NewResult(file, retract.Syntax, reasonRetract))
+ }
+
+ return results
+}
+
+func checkExcludeDirectives(file *modfile.File, opts Options) []Result {
+ if !opts.ExcludeForbidden {
+ return nil
+ }
+
+ var results []Result
+
+ for _, exclude := range file.Exclude {
+ results = append(results, NewResult(file, exclude.Syntax, reasonExclude))
}
+ return results
+}
+
+func checkToolDirectives(file *modfile.File, opts Options) []Result {
+ if !opts.ToolForbidden {
+ return nil
+ }
+
+ var results []Result
+
+ for _, tool := range file.Tool {
+ results = append(results, NewResult(file, tool.Syntax, reasonTool))
+ }
+
+ return results
+}
+
+func checkReplaceDirectives(file *modfile.File, opts Options) []Result {
+ var results []Result
+
uniqReplace := map[string]struct{}{}
- for _, r := range file.Replace {
- reason := check(opts, r)
+ for _, replace := range file.Replace {
+ reason := checkReplaceDirective(opts, replace)
if reason != "" {
- results = append(results, NewResult(file, r.Syntax, reason))
+ results = append(results, NewResult(file, replace.Syntax, reason))
continue
}
- if r.Old.Path == r.New.Path && r.Old.Version == r.New.Version {
- results = append(results, NewResult(file, r.Syntax, reasonReplaceIdentical))
+ if replace.Old.Path == replace.New.Path && replace.Old.Version == replace.New.Version {
+ results = append(results, NewResult(file, replace.Syntax, reasonReplaceIdentical))
continue
}
- if _, ok := uniqReplace[r.Old.Path+r.Old.Version]; ok {
- results = append(results, NewResult(file, r.Syntax, reasonReplaceDuplicate))
+ if _, ok := uniqReplace[replace.Old.Path+replace.Old.Version]; ok {
+ results = append(results, NewResult(file, replace.Syntax, reasonReplaceDuplicate))
}
- uniqReplace[r.Old.Path+r.Old.Version] = struct{}{}
+ uniqReplace[replace.Old.Path+replace.Old.Version] = struct{}{}
}
return results
}
-func check(o Options, r *modfile.Replace) string {
+func checkReplaceDirective(opts Options, r *modfile.Replace) string {
if isLocal(r) {
- if o.ReplaceAllowLocal {
+ if opts.ReplaceAllowLocal {
return ""
}
return fmt.Sprintf("%s: %s", reasonReplaceLocal, r.Old.Path)
}
- for _, v := range o.ReplaceAllowList {
+ for _, v := range opts.ReplaceAllowList {
if r.Old.Path == v {
return ""
}
@@ -118,6 +234,20 @@ func check(o Options, r *modfile.Replace) string {
return fmt.Sprintf("%s: %s", reasonReplace, r.Old.Path)
}
+func checkGoDebugDirectives(file *modfile.File, opts Options) []Result {
+ if !opts.GoDebugForbidden {
+ return nil
+ }
+
+ var results []Result
+
+ for _, goDebug := range file.Godebug {
+ results = append(results, NewResult(file, goDebug.Syntax, reasonGoDebug))
+ }
+
+ return results
+}
+
// Filesystem paths found in "replace" directives are represented by a path with an empty version.
// https://github.com/golang/mod/blob/bc388b264a244501debfb9caea700c6dcaff10e2/module/module.go#L122-L124
func isLocal(r *modfile.Replace) bool {
diff --git a/vendor/github.com/ldez/gomoddirectives/module.go b/vendor/github.com/ldez/gomoddirectives/module.go
index 4cb3653794..c3e47c8a4f 100644
--- a/vendor/github.com/ldez/gomoddirectives/module.go
+++ b/vendor/github.com/ldez/gomoddirectives/module.go
@@ -1,45 +1,32 @@
package gomoddirectives
import (
- "bytes"
- "encoding/json"
- "errors"
+ "context"
"fmt"
"os"
- "os/exec"
+ "path/filepath"
+ "github.com/ldez/grignotin/goenv"
"golang.org/x/mod/modfile"
)
-type modInfo struct {
- Path string `json:"Path"`
- Dir string `json:"Dir"`
- GoMod string `json:"GoMod"`
- GoVersion string `json:"GoVersion"`
- Main bool `json:"Main"`
-}
-
// GetModuleFile gets module file.
func GetModuleFile() (*modfile.File, error) {
- // https://github.com/golang/go/issues/44753#issuecomment-790089020
- cmd := exec.Command("go", "list", "-m", "-json")
-
- raw, err := cmd.Output()
+ goMod, err := goenv.GetOne(context.Background(), goenv.GOMOD)
if err != nil {
- return nil, fmt.Errorf("command go list: %w: %s", err, string(raw))
+ return nil, err
}
- var v modInfo
- err = json.NewDecoder(bytes.NewBuffer(raw)).Decode(&v)
+ mod, err := parseGoMod(goMod)
if err != nil {
- return nil, fmt.Errorf("unmarshaling error: %w: %s", err, string(raw))
+ return nil, fmt.Errorf("failed to parse go.mod (%s): %w", goMod, err)
}
- if v.GoMod == "" {
- return nil, errors.New("working directory is not part of a module")
- }
+ return mod, nil
+}
- raw, err = os.ReadFile(v.GoMod)
+func parseGoMod(goMod string) (*modfile.File, error) {
+ raw, err := os.ReadFile(filepath.Clean(goMod))
if err != nil {
return nil, fmt.Errorf("reading go.mod file: %w", err)
}
diff --git a/vendor/github.com/ldez/gomoddirectives/readme.md b/vendor/github.com/ldez/gomoddirectives/readme.md
index 510c8502e2..7d6d2765b9 100644
--- a/vendor/github.com/ldez/gomoddirectives/readme.md
+++ b/vendor/github.com/ldez/gomoddirectives/readme.md
@@ -1,16 +1,196 @@
# gomoddirectives
+A linter that handles directives in `go.mod`.
+
[](https://github.com/sponsors/ldez)
[](https://github.com/ldez/gomoddirectives/actions)
-A linter that handle [`replace`](https://golang.org/ref/mod#go-mod-file-replace), [`retract`](https://golang.org/ref/mod#go-mod-file-retract), [`exclude`](https://golang.org/ref/mod#go-mod-file-exclude) directives into `go.mod`.
+## Usage
+
+### Inside golangci-lint
+
+Recommended.
+
+```yml
+linters:
+ enable:
+ - gomoddirectives
+
+linters-settings:
+ gomoddirectives:
+ # Allow local `replace` directives.
+ # Default: false
+ replace-local: true
+
+ # List of allowed `replace` directives.
+ # Default: []
+ replace-allow-list:
+ - launchpad.net/gocheck
+ # Allow `retract` directives without an explanation of why the version has been retracted.
+ # Default: false
+ retract-allow-no-explanation: true
+
+ # Forbid the use of the `exclude` directives.
+ # Default: false
+ exclude-forbidden: true
+
+ # Forbid the use of the `toolchain` directive.
+ # Default: false
+ toolchain-forbidden: true
+
+ # Defines a pattern to validate `toolchain` directive.
+ # Default: '' (no match)
+ toolchain-pattern: 'go1\.22\.\d+$'
+
+ # Forbid the use of the `tool` directives.
+ # Default: false
+ tool-forbidden: true
+
+ # Forbid the use of the `godebug` directive.
+ # Default: false
+ go-debug-forbidden: true
+
+ # Defines a pattern to validate `go` minimum version directive.
+ # Default: '' (no match)
+ go-version-pattern: '1\.\d+(\.0)?$'
+```
+
+### As a CLI
+
+```
+gomoddirectives [flags]
+
+Flags:
+ -exclude
+ Forbid the use of exclude directives
+ -godebug
+ Forbid the use of godebug directives
+ -goversion string
+ Pattern to validate go min version directive
+ -h Show this help.
+ -list value
+ List of allowed replace directives
+ -local
+ Allow local replace directives
+ -retract-no-explanation
+ Allow to use retract directives without explanation
+ -tool
+ Forbid the use of tool directives
+ -toolchain
+ Forbid the use of toolchain directive
+ -toolchain-pattern string
+ Pattern to validate toolchain directive
+```
+
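+For example, a hypothetical invocation that forbids `exclude` directives while allowing local `replace` directives:
+
+```
+gomoddirectives -exclude -local
+```
+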
+## Details
+
+### [`retract`](https://golang.org/ref/mod#go-mod-file-retract) directives
+
+- Force explanation for `retract` directives.
+
+```go
+module example.com/foo
+
+go 1.22
+
+require (
+ github.com/ldez/grignotin v0.4.1
+)
+
+retract (
+ v1.0.0 // Explanation
+)
+```
+
+### [`replace`](https://golang.org/ref/mod#go-mod-file-replace) directives
+
+- Ban all `replace` directives.
+- Allow only local `replace` directives.
+- Allow only some `replace` directives.
+- Detect duplicated `replace` directives.
+- Detect identical `replace` directives.
+
+```go
+module example.com/foo
+
+go 1.22
+
+require (
+ github.com/ldez/grignotin v0.4.1
+)
+
+replace github.com/ldez/grignotin => ../grignotin/
+```
+
+### [`exclude`](https://golang.org/ref/mod#go-mod-file-exclude) directives
+
+- Ban all `exclude` directives.
+
+```go
+module example.com/foo
+
+go 1.22
+
+require (
+ github.com/ldez/grignotin v0.4.1
+)
+
+exclude (
+ golang.org/x/crypto v1.4.5
+ golang.org/x/text v1.6.7
+)
+```
+
+### [`tool`](https://golang.org/ref/mod#go-mod-file-tool) directives
+
+- Ban all `tool` directives.
+
+```go
+module example.com/foo
+
+go 1.24
+
+tool (
+ example.com/module/cmd/a
+ example.com/module/cmd/b
+)
+```
+
+### [`toolchain`](https://golang.org/ref/mod#go-mod-file-toolchain) directive
+
+- Ban the `toolchain` directive.
+- Use a regular expression to constrain the accepted toolchain version.
+
+```go
+module example.com/foo
+
+go 1.22
+
+toolchain go1.23.3
+```
+
+### [`godebug`](https://go.dev/ref/mod#go-mod-file-godebug) directives
+
+- Ban `godebug` directive.
+
+```go
+module example.com/foo
+
+go 1.22
+
+godebug default=go1.21
+godebug (
+ panicnil=1
+ asynctimerchan=0
+)
+```
+
+### [`go`](https://go.dev/ref/mod#go-mod-file-go) directive
+
+- Use a regular expression to constrain the Go minimum version.
-Features:
+```go
+module example.com/foo
-- ban all [`replace`](https://golang.org/ref/mod#go-mod-file-replace) directives
-- allow only local [`replace`](https://golang.org/ref/mod#go-mod-file-replace) directives
-- allow only some [`replace`](https://golang.org/ref/mod#go-mod-file-replace) directives
-- force explanation for [`retract`](https://golang.org/ref/mod#go-mod-file-retract) directives
-- ban all [`exclude`](https://golang.org/ref/mod#go-mod-file-exclude) directives
-- detect duplicated [`replace`](https://golang.org/ref/mod#go-mod-file-replace) directives
-- detect identical [`replace`](https://golang.org/ref/mod#go-mod-file-replace) directives
+go 1.22.0
+```
diff --git a/vendor/github.com/ldez/grignotin/goenv/goenv.go b/vendor/github.com/ldez/grignotin/goenv/goenv.go
new file mode 100644
index 0000000000..1f0c31e4e5
--- /dev/null
+++ b/vendor/github.com/ldez/grignotin/goenv/goenv.go
@@ -0,0 +1,50 @@
+// Package goenv provides a set of functions to get information from `go env`.
+package goenv
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "os/exec"
+ "strings"
+)
+
+// GetAll gets information from "go env".
+func GetAll(ctx context.Context) (map[string]string, error) {
+ v, err := Get(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ return v, nil
+}
+
+// GetOne gets information from "go env" for one environment variable.
+func GetOne(ctx context.Context, name string) (string, error) {
+ v, err := Get(ctx, name)
+ if err != nil {
+ return "", err
+ }
+
+ return v[name], nil
+}
+
+// Get gets information from "go env" for one or several environment variables.
+func Get(ctx context.Context, name ...string) (map[string]string, error) {
+ args := append([]string{"env", "-json"}, name...)
+ cmd := exec.CommandContext(ctx, "go", args...) //nolint:gosec // The env var names must be checked by the user.
+
+ out, err := cmd.Output()
+ if err != nil {
+ return nil, fmt.Errorf("command %q: %w: %s", strings.Join(cmd.Args, " "), err, string(out))
+ }
+
+ v := map[string]string{}
+ err = json.NewDecoder(bytes.NewBuffer(out)).Decode(&v)
+ if err != nil {
+ return nil, err
+ }
+
+ return v, nil
+}
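+
+// Usage sketch (illustrative only), e.g. to locate the active go.mod file:
+//
+//	gomod, err := goenv.GetOne(context.Background(), goenv.GOMOD)
+//	if err != nil {
+//		// handle the error
+//	}
+//	_ = gomod // path of the main module's go.mod file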
diff --git a/vendor/github.com/ldez/grignotin/goenv/names.go b/vendor/github.com/ldez/grignotin/goenv/names.go
new file mode 100644
index 0000000000..a5d6eeeebd
--- /dev/null
+++ b/vendor/github.com/ldez/grignotin/goenv/names.go
@@ -0,0 +1,276 @@
+package goenv
+
+// General-purpose environment variables.
+// Reference: https://github.com/golang/go/blob/0afd7e85e5d7154161770f06a17d09bf1ffa3e94/src/cmd/go/internal/help/helpdoc.go#L490
+const (
+ // GCCGO The gccgo command to run for 'go build -compiler=gccgo'.
+ GCCGO = "GCCGO"
+ // GO111MODULE Controls whether the go command runs in module-aware mode or GOPATH mode.
+ // May be "off", "on", or "auto".
+ // See https://golang.org/ref/mod#mod-commands.
+ GO111MODULE = "GO111MODULE"
+ // GOARCH The architecture, or processor, for which to compile code.
+ // Examples are amd64, 386, arm, ppc64.
+ GOARCH = "GOARCH"
+ // GOAUTH Controls authentication for go-import and HTTPS module mirror interactions.
+ // See 'go help goauth'.
+ GOAUTH = "GOAUTH"
+ // GOBIN The directory where 'go install' will install a command.
+ GOBIN = "GOBIN"
+ // GOCACHE The directory where the go command will store cached
+ // information for reuse in future builds.
+ GOCACHE = "GOCACHE"
+ // GOCACHEPROG A command (with optional space-separated flags) that implements an
+ // external go command build cache.
+ // See 'go doc cmd/go/internal/cacheprog'.
+ GOCACHEPROG = "GOCACHEPROG"
+ // GODEBUG Enable various debugging facilities. See https://go.dev/doc/godebug
+ // for details.
+ GODEBUG = "GODEBUG"
+ // GOENV The location of the Go environment configuration file.
+ // Cannot be set using 'go env -w'.
+ // Setting GOENV=off in the environment disables the use of the
+ // default configuration file.
+ GOENV = "GOENV"
+ // GOFLAGS A space-separated list of -flag=value settings to apply
+ // to go commands by default, when the given flag is known by
+ // the current command. Each entry must be a standalone flag.
+ // Because the entries are space-separated, flag values must
+ // not contain spaces. Flags listed on the command line
+ // are applied after this list and therefore override it.
+ GOFLAGS = "GOFLAGS"
+ // GOINSECURE Comma-separated list of glob patterns (in the syntax of Go's path.Match)
+ // of module path prefixes that should always be fetched in an insecure
+ // manner. Only applies to dependencies that are being fetched directly.
+ // GOINSECURE does not disable checksum database validation. GOPRIVATE or
+ // GONOSUMDB may be used to achieve that.
+ GOINSECURE = "GOINSECURE"
+ // GOMODCACHE The directory where the go command will store downloaded modules.
+ GOMODCACHE = "GOMODCACHE"
+ // GOOS The operating system for which to compile code.
+ // Examples are linux, darwin, windows, netbsd.
+ GOOS = "GOOS"
+ // GOPATH Controls where various files are stored. See: 'go help gopath'.
+ GOPATH = "GOPATH"
+ // GOPROXY URL of Go module proxy. See https://golang.org/ref/mod#environment-variables
+ // and https://golang.org/ref/mod#module-proxy for details.
+ GOPROXY = "GOPROXY"
+ // GOROOT The root of the go tree.
+ GOROOT = "GOROOT"
+ // GOSUMDB The name of checksum database to use and optionally its public key and
+ // URL. See https://golang.org/ref/mod#authenticating.
+ GOSUMDB = "GOSUMDB"
+ // GOTMPDIR The directory where the go command will write
+ // temporary source files, packages, and binaries.
+ GOTMPDIR = "GOTMPDIR"
+ // GOTOOLCHAIN Controls which Go toolchain is used. See https://go.dev/doc/toolchain.
+ GOTOOLCHAIN = "GOTOOLCHAIN"
+ // GOVCS Lists version control commands that may be used with matching servers.
+ // See 'go help vcs'.
+ GOVCS = "GOVCS"
+ // GOWORK In module aware mode, use the given go.work file as a workspace file.
+ // By default or when GOWORK is "auto", the go command searches for a
+ // file named go.work in the current directory and then containing directories
+ // until one is found. If a valid go.work file is found, the modules
+ // specified will collectively be used as the main modules. If GOWORK
+ // is "off", or a go.work file is not found in "auto" mode, workspace
+ // mode is disabled.
+ GOWORK = "GOWORK"
+
+ // GOPRIVATE Comma-separated list of glob patterns (in the syntax of Go's path.Match)
+ // of module path prefixes that should always be fetched directly
+ // or that should not be compared against the checksum database.
+ // See https://golang.org/ref/mod#private-modules.
+ GOPRIVATE = "GOPRIVATE"
+ // GONOPROXY Comma-separated list of glob patterns (in the syntax of Go's path.Match)
+ // of module path prefixes that should always be fetched directly
+ // or that should not be compared against the checksum database.
+ // See https://golang.org/ref/mod#private-modules.
+ GONOPROXY = "GONOPROXY"
+ // GONOSUMDB Comma-separated list of glob patterns (in the syntax of Go's path.Match)
+ // of module path prefixes that should always be fetched directly
+ // or that should not be compared against the checksum database.
+ // See https://golang.org/ref/mod#private-modules.
+ GONOSUMDB = "GONOSUMDB"
+)
+
+// Environment variables for use with cgo.
+// Reference: https://github.com/golang/go/blob/0afd7e85e5d7154161770f06a17d09bf1ffa3e94/src/cmd/go/internal/help/helpdoc.go#L571
+const (
+ // AR The command to use to manipulate library archives when
+ // building with the gccgo compiler.
+ // The default is 'ar'.
+ AR = "AR"
+ // CC The command to use to compile C code.
+ CC = "CC"
+ // CGO_CFLAGS Flags that cgo will pass to the compiler when compiling
+ // C code.
+ CGO_CFLAGS = "CGO_CFLAGS"
+ // CGO_CFLAGS_ALLOW A regular expression specifying additional flags to allow
+ // to appear in #cgo CFLAGS source code directives.
+ // Does not apply to the CGO_CFLAGS environment variable.
+ CGO_CFLAGS_ALLOW = "CGO_CFLAGS_ALLOW"
+ // CGO_CFLAGS_DISALLOW A regular expression specifying flags that must be disallowed
+ // from appearing in #cgo CFLAGS source code directives.
+ // Does not apply to the CGO_CFLAGS environment variable.
+ CGO_CFLAGS_DISALLOW = "CGO_CFLAGS_DISALLOW"
+ // CGO_ENABLED Whether the cgo command is supported. Either 0 or 1.
+ CGO_ENABLED = "CGO_ENABLED"
+ // CXX The command to use to compile C++ code.
+ CXX = "CXX"
+ // FC The command to use to compile Fortran code.
+ FC = "FC"
+ // PKG_CONFIG Path to pkg-config tool.
+ PKG_CONFIG = "PKG_CONFIG"
+
+ // CGO_CPPFLAGS Like CGO_CFLAGS, CGO_CFLAGS_ALLOW, and CGO_CFLAGS_DISALLOW,
+ // but for the C preprocessor.
+ CGO_CPPFLAGS = "CGO_CPPFLAGS"
+ // CGO_CPPFLAGS_ALLOW Like CGO_CFLAGS, CGO_CFLAGS_ALLOW, and CGO_CFLAGS_DISALLOW,
+ // but for the C preprocessor.
+ CGO_CPPFLAGS_ALLOW = "CGO_CPPFLAGS_ALLOW"
+ // CGO_CPPFLAGS_DISALLOW Like CGO_CFLAGS, CGO_CFLAGS_ALLOW, and CGO_CFLAGS_DISALLOW,
+ // but for the C preprocessor.
+ CGO_CPPFLAGS_DISALLOW = "CGO_CPPFLAGS_DISALLOW"
+
+ // CGO_CXXFLAGS Like CGO_CFLAGS, CGO_CFLAGS_ALLOW, and CGO_CFLAGS_DISALLOW,
+ // but for the C++ compiler.
+ CGO_CXXFLAGS = "CGO_CXXFLAGS"
+ // CGO_CXXFLAGS_ALLOW Like CGO_CFLAGS, CGO_CFLAGS_ALLOW, and CGO_CFLAGS_DISALLOW,
+ // but for the C++ compiler.
+ CGO_CXXFLAGS_ALLOW = "CGO_CXXFLAGS_ALLOW"
+ // CGO_CXXFLAGS_DISALLOW Like CGO_CFLAGS, CGO_CFLAGS_ALLOW, and CGO_CFLAGS_DISALLOW,
+ // but for the C++ compiler.
+ CGO_CXXFLAGS_DISALLOW = "CGO_CXXFLAGS_DISALLOW"
+
+ // CGO_FFLAGS Like CGO_CFLAGS, CGO_CFLAGS_ALLOW, and CGO_CFLAGS_DISALLOW,
+ // but for the Fortran compiler.
+ CGO_FFLAGS = "CGO_FFLAGS"
+ // CGO_FFLAGS_ALLOW Like CGO_CFLAGS, CGO_CFLAGS_ALLOW, and CGO_CFLAGS_DISALLOW,
+ // but for the Fortran compiler.
+ CGO_FFLAGS_ALLOW = "CGO_FFLAGS_ALLOW"
+ // CGO_FFLAGS_DISALLOW Like CGO_CFLAGS, CGO_CFLAGS_ALLOW, and CGO_CFLAGS_DISALLOW,
+ // but for the Fortran compiler.
+ CGO_FFLAGS_DISALLOW = "CGO_FFLAGS_DISALLOW"
+
+ // CGO_LDFLAGS Like CGO_CFLAGS, CGO_CFLAGS_ALLOW, and CGO_CFLAGS_DISALLOW,
+ // but for the linker.
+ CGO_LDFLAGS = "CGO_LDFLAGS"
+ // CGO_LDFLAGS_ALLOW Like CGO_CFLAGS, CGO_CFLAGS_ALLOW, and CGO_CFLAGS_DISALLOW,
+ // but for the linker.
+ CGO_LDFLAGS_ALLOW = "CGO_LDFLAGS_ALLOW"
+ // CGO_LDFLAGS_DISALLOW Like CGO_CFLAGS, CGO_CFLAGS_ALLOW, and CGO_CFLAGS_DISALLOW,
+ // but for the linker.
+ CGO_LDFLAGS_DISALLOW = "CGO_LDFLAGS_DISALLOW"
+)
+
+// Architecture-specific environment variables.
+// Reference: https://github.com/golang/go/blob/0afd7e85e5d7154161770f06a17d09bf1ffa3e94/src/cmd/go/internal/help/helpdoc.go#L611
+const (
+ // GO386 For GOARCH=386, how to implement floating point instructions.
+ // Valid values are sse2 (default), softfloat.
+ GO386 = "GO386"
+ // GOAMD64 For GOARCH=amd64, the microarchitecture level for which to compile.
+ // Valid values are v1 (default), v2, v3, v4.
+ // See https://golang.org/wiki/MinimumRequirements#amd64
+ GOAMD64 = "GOAMD64"
+ // GOARM For GOARCH=arm, the ARM architecture for which to compile.
+ // Valid values are 5, 6, 7.
+ // When the Go tools are built on an arm system,
+ // the default value is set based on what the build system supports.
+ // When the Go tools are not built on an arm system
+ // (that is, when building a cross-compiler),
+ // the default value is 7.
+ // The value can be followed by an option specifying how to implement floating point instructions.
+ // Valid options are ,softfloat (default for 5) and ,hardfloat (default for 6 and 7).
+ GOARM = "GOARM"
+ // GOARM64 For GOARCH=arm64, the ARM64 architecture for which to compile.
+ // Valid values are v8.0 (default), v8.{1-9}, v9.{0-5}.
+ // The value can be followed by an option specifying extensions implemented by target hardware.
+ // Valid options are ,lse and ,crypto.
+ // Note that some extensions are enabled by default starting from a certain GOARM64 version;
+ // for example, lse is enabled by default starting from v8.1.
+ GOARM64 = "GOARM64"
+ // GOMIPS For GOARCH=mips{,le}, whether to use floating point instructions.
+ // Valid values are hardfloat (default), softfloat.
+ GOMIPS = "GOMIPS"
+ // GOMIPS64 For GOARCH=mips64{,le}, whether to use floating point instructions.
+ // Valid values are hardfloat (default), softfloat.
+ GOMIPS64 = "GOMIPS64"
+ // GOPPC64 For GOARCH=ppc64{,le}, the target ISA (Instruction Set Architecture).
+ // Valid values are power8 (default), power9, power10.
+ GOPPC64 = "GOPPC64"
+ // GORISCV64 For GOARCH=riscv64, the RISC-V user-mode application profile for which
+ // to compile. Valid values are rva20u64 (default), rva22u64.
+ // See https://github.com/riscv/riscv-profiles/blob/main/src/profiles.adoc
+ GORISCV64 = "GORISCV64"
+ // GOWASM For GOARCH=wasm, comma-separated list of experimental WebAssembly features to use.
+ // Valid values are satconv, signext.
+ GOWASM = "GOWASM"
+)
+
+// Environment variables for use with code coverage.
+// Reference: https://github.com/golang/go/blob/0afd7e85e5d7154161770f06a17d09bf1ffa3e94/src/cmd/go/internal/help/helpdoc.go#L654
+const (
+ // GOCOVERDIR Directory into which to write code coverage data files
+ // generated by running a "go build -cover" binary.
+ // Requires that GOEXPERIMENT=coverageredesign is enabled.
+ GOCOVERDIR = "GOCOVERDIR"
+)
+
+// Special-purpose environment variables.
+// Reference: https://github.com/golang/go/blob/0afd7e85e5d7154161770f06a17d09bf1ffa3e94/src/cmd/go/internal/help/helpdoc.go#L661
+const (
+ // GCCGOTOOLDIR If set, where to find gccgo tools, such as cgo.
+ // The default is based on how gccgo was configured.
+ GCCGOTOOLDIR = "GCCGOTOOLDIR"
+ // GOEXPERIMENT Comma-separated list of toolchain experiments to enable or disable.
+ // The list of available experiments may change arbitrarily over time.
+ // See GOROOT/src/internal/goexperiment/flags.go for currently valid values.
+ // Warning: This variable is provided for the development and testing
+ // of the Go toolchain itself. Use beyond that purpose is unsupported.
+ GOEXPERIMENT = "GOEXPERIMENT"
+ // GOFIPS140 The FIPS-140 cryptography mode to use when building binaries.
+ // The default is GOFIPS140=off, which makes no FIPS-140 changes at all.
+ // Other values enable FIPS-140 compliance measures and select alternate
+ // versions of the cryptography source code.
+ // See https://go.dev/security/fips140 for details.
+ GOFIPS140 = "GOFIPS140"
+ // GO_EXTLINK_ENABLED Whether the linker should use external linking mode
+ // when using -linkmode=auto with code that uses cgo.
+ // Set to 0 to disable external linking mode, 1 to enable it.
+ GO_EXTLINK_ENABLED = "GO_EXTLINK_ENABLED"
+ // GIT_ALLOW_PROTOCOL Defined by Git. A colon-separated list of schemes that are allowed
+ // to be used with git fetch/clone. If set, any scheme not explicitly
+ // mentioned will be considered insecure by 'go get'.
+ // Because the variable is defined by Git, the default value cannot
+ // be set using 'go env -w'.
+ GIT_ALLOW_PROTOCOL = "GIT_ALLOW_PROTOCOL"
+)
+
+// Additional information available from 'go env' but not read from the environment.
+// Reference: https://github.com/golang/go/blob/0afd7e85e5d7154161770f06a17d09bf1ffa3e94/src/cmd/go/internal/help/helpdoc.go#L689
+const (
+ // GOEXE The executable file name suffix (".exe" on Windows, "" on other systems).
+ GOEXE = "GOEXE"
+ // GOGCCFLAGS A space-separated list of arguments supplied to the CC command.
+ GOGCCFLAGS = "GOGCCFLAGS"
+ // GOHOSTARCH The architecture (GOARCH) of the Go toolchain binaries.
+ GOHOSTARCH = "GOHOSTARCH"
+ // GOHOSTOS The operating system (GOOS) of the Go toolchain binaries.
+ GOHOSTOS = "GOHOSTOS"
+ // GOMOD The absolute path to the go.mod of the main module.
+ // If module-aware mode is enabled, but there is no go.mod, GOMOD will be
+ // os.DevNull ("/dev/null" on Unix-like systems, "NUL" on Windows).
+ // If module-aware mode is disabled, GOMOD will be the empty string.
+ GOMOD = "GOMOD"
+ // GOTELEMETRY The current Go telemetry mode ("off", "local", or "on").
+ // See "go help telemetry" for more information.
+ GOTELEMETRY = "GOTELEMETRY"
+ // GOTELEMETRYDIR The directory Go telemetry data is written to.
+ GOTELEMETRYDIR = "GOTELEMETRYDIR"
+ // GOTOOLDIR The directory where the go tools (compile, cover, doc, etc...) are installed.
+ GOTOOLDIR = "GOTOOLDIR"
+ // GOVERSION The version of the installed Go tree, as reported by runtime.Version.
+ GOVERSION = "GOVERSION"
+)
diff --git a/vendor/github.com/ldez/grignotin/gomod/gomod.go b/vendor/github.com/ldez/grignotin/gomod/gomod.go
new file mode 100644
index 0000000000..76e17870df
--- /dev/null
+++ b/vendor/github.com/ldez/grignotin/gomod/gomod.go
@@ -0,0 +1,85 @@
+// Package gomod provides a set of functions to get information about modules (go list).
+package gomod
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "strings"
+
+ "github.com/ldez/grignotin/goenv"
+ "golang.org/x/mod/modfile"
+)
+
+// ModInfo Module information.
+//
+//nolint:tagliatelle // temporary: the next version of golangci-lint will allow configuration by package.
+type ModInfo struct {
+ Path string `json:"Path"`
+ Dir string `json:"Dir"`
+ GoMod string `json:"GoMod"`
+ GoVersion string `json:"GoVersion"`
+ Main bool `json:"Main"`
+}
+
+// GetModuleInfo gets modules information from `go list`.
+func GetModuleInfo(ctx context.Context) ([]ModInfo, error) {
+ // https://github.com/golang/go/issues/44753#issuecomment-790089020
+ cmd := exec.CommandContext(ctx, "go", "list", "-m", "-json")
+
+ out, err := cmd.Output()
+ if err != nil {
+ return nil, fmt.Errorf("command %q: %w: %s", strings.Join(cmd.Args, " "), err, string(out))
+ }
+
+ var infos []ModInfo
+
+ for dec := json.NewDecoder(bytes.NewBuffer(out)); dec.More(); {
+ var v ModInfo
+ if err := dec.Decode(&v); err != nil {
+ return nil, fmt.Errorf("unmarshaling error: %w: %s", err, string(out))
+ }
+
+ if v.GoMod == "" {
+ return nil, errors.New("working directory is not part of a module")
+ }
+
+ if !v.Main || v.Dir == "" {
+ continue
+ }
+
+ infos = append(infos, v)
+ }
+
+ if len(infos) == 0 {
+ return nil, errors.New("go.mod file not found")
+ }
+
+ return infos, nil
+}
+
+// GetModulePath extracts module path from go.mod.
+func GetModulePath(ctx context.Context) (string, error) {
+ p, err := goenv.GetOne(ctx, goenv.GOMOD)
+ if err != nil {
+ return "", err
+ }
+
+ b, err := os.ReadFile(filepath.Clean(p))
+ if err != nil {
+ return "", fmt.Errorf("reading go.mod: %w", err)
+ }
+
+ return modfile.ModulePath(b), nil
+}
+
+// GetGoModPath extracts go.mod path from "go env".
+// Deprecated: use `goenv.GetOne(context.Background(), goenv.GOMOD)` instead.
+func GetGoModPath() (string, error) {
+ return goenv.GetOne(context.Background(), goenv.GOMOD)
+}
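
As a quick, illustrative sketch (not part of this diff), the two exported helpers above can be combined to inspect the enclosing module; the error handling here is deliberately minimal:

```go
package main

import (
	"context"
	"fmt"

	"github.com/ldez/grignotin/gomod"
)

func main() {
	ctx := context.Background()

	// Lists the main module(s) via `go list -m -json` and keeps only
	// entries that have a local directory.
	infos, err := gomod.GetModuleInfo(ctx)
	if err != nil {
		panic(err)
	}

	for _, info := range infos {
		fmt.Println(info.Path, "->", info.GoMod)
	}

	// Reads the module path from go.mod (located via `go env GOMOD`).
	modPath, err := gomod.GetModulePath(ctx)
	if err != nil {
		panic(err)
	}

	fmt.Println("module:", modPath)
}
```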
diff --git a/vendor/github.com/ldez/tagliatelle/.golangci.yml b/vendor/github.com/ldez/tagliatelle/.golangci.yml
index ec5c5c7661..01c76dca99 100644
--- a/vendor/github.com/ldez/tagliatelle/.golangci.yml
+++ b/vendor/github.com/ldez/tagliatelle/.golangci.yml
@@ -1,7 +1,28 @@
-run:
- timeout: 5m
- skip-files: [ ]
- skip-dirs: [ ]
+linters:
+ enable-all: true
+ disable:
+ - exportloopref # deprecated
+ - sqlclosecheck # not relevant (SQL)
+ - rowserrcheck # not relevant (SQL)
+ - cyclop # duplicate of gocyclo
+ - lll
+ - dupl
+ - wsl
+ - nlreturn
+ - mnd
+ - err113
+ - wrapcheck
+ - exhaustive
+ - exhaustruct
+ - testpackage
+ - tparallel
+ - paralleltest
+ - prealloc
+ - forcetypeassert
+ - varnamelen
+ - nilnil
+ - errchkjson
+ - nonamedreturns
linters-settings:
govet:
@@ -9,7 +30,7 @@ linters-settings:
disable:
- fieldalignment
gocyclo:
- min-complexity: 15
+ min-complexity: 20
goconst:
min-len: 5
min-occurrences: 3
@@ -24,11 +45,13 @@ linters-settings:
gofumpt:
extra-rules: true
depguard:
- list-type: denylist
- include-go-root: false
- packages:
- - github.com/sirupsen/logrus
- - github.com/pkg/errors
+ rules:
+ main:
+ deny:
+ - pkg: "github.com/instana/testify"
+ desc: not allowed
+ - pkg: "github.com/pkg/errors"
+ desc: Should be replaced by standard lib errors package
gocritic:
enabled-tags:
- diagnostic
@@ -43,46 +66,12 @@ linters-settings:
hugeParam:
sizeThreshold: 100
-linters:
- enable-all: true
- disable:
- - deadcode # deprecated
- - exhaustivestruct # deprecated
- - golint # deprecated
- - ifshort # deprecated
- - interfacer # deprecated
- - maligned # deprecated
- - nosnakecase # deprecated
- - scopelint # deprecated
- - structcheck # deprecated
- - varcheck # deprecated
- - sqlclosecheck # not relevant (SQL)
- - rowserrcheck # not relevant (SQL)
- - execinquery # not relevant (SQL)
- - cyclop # duplicate of gocyclo
- - lll
- - dupl
- - wsl
- - nlreturn
- - gomnd
- - goerr113
- - wrapcheck
- - exhaustive
- - exhaustruct
- - testpackage
- - tparallel
- - paralleltest
- - prealloc
- - ifshort
- - forcetypeassert
- - varnamelen
- - nilnil
- - errchkjson
- - nonamedreturns
-
issues:
exclude-use-default: false
- max-per-linter: 0
+ max-issues-per-linter: 0
max-same-issues: 0
exclude:
- 'package-comments: should have a package comment'
+
+run:
+ timeout: 5m
diff --git a/vendor/github.com/ldez/tagliatelle/converter.go b/vendor/github.com/ldez/tagliatelle/converter.go
new file mode 100644
index 0000000000..6005f5b755
--- /dev/null
+++ b/vendor/github.com/ldez/tagliatelle/converter.go
@@ -0,0 +1,116 @@
+package tagliatelle
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/ettle/strcase"
+)
+
+// https://github.com/dominikh/go-tools/blob/v0.5.1/config/config.go#L167-L175
+//
+//nolint:gochecknoglobals // For now I'll accept this, but I think will refactor to use a structure.
+var staticcheckInitialisms = map[string]bool{
+ "AMQP": true,
+ "DB": true,
+ "GID": true,
+ "LHS": false,
+ "RHS": false,
+ "RTP": true,
+ "SIP": true,
+ "TS": true,
+}
+
+// Converter is the signature of a case converter.
+type Converter func(s string) string
+
+// ConverterCallback allows abstracting over `getSimpleConverter` and `ruleToConverter`.
+type ConverterCallback func() (Converter, error)
+
+func getSimpleConverter(c string) (Converter, error) {
+ switch c {
+ case "camel":
+ return strcase.ToCamel, nil
+ case "pascal":
+ return strcase.ToPascal, nil
+ case "kebab":
+ return strcase.ToKebab, nil
+ case "snake":
+ return strcase.ToSnake, nil
+ case "goCamel":
+ return strcase.ToGoCamel, nil
+ case "goPascal":
+ return strcase.ToGoPascal, nil
+ case "goKebab":
+ return strcase.ToGoKebab, nil
+ case "goSnake":
+ return strcase.ToGoSnake, nil
+ case "upperSnake":
+ return strcase.ToSNAKE, nil
+ case "header":
+ return toHeader, nil
+ case "upper":
+ return strings.ToUpper, nil
+ case "lower":
+ return strings.ToLower, nil
+ default:
+ return nil, fmt.Errorf("unsupported case: %s", c)
+ }
+}
+
+func toHeader(s string) string {
+ return strcase.ToCase(s, strcase.TitleCase, '-')
+}
+
+func ruleToConverter(rule ExtendedRule) (Converter, error) {
+ if rule.ExtraInitialisms {
+ for k, v := range staticcheckInitialisms {
+ if _, found := rule.InitialismOverrides[k]; found {
+ continue
+ }
+
+ rule.InitialismOverrides[k] = v
+ }
+ }
+
+ caser := strcase.NewCaser(strings.HasPrefix(rule.Case, "go"), rule.InitialismOverrides, nil)
+
+ switch strings.ToLower(strings.TrimPrefix(rule.Case, "go")) {
+ case "camel":
+ return caser.ToCamel, nil
+
+ case "pascal":
+ return caser.ToPascal, nil
+
+ case "kebab":
+ return caser.ToKebab, nil
+
+ case "snake":
+ return caser.ToSnake, nil
+
+ case "uppersnake":
+ return caser.ToSNAKE, nil
+
+ case "header":
+ return toHeaderCase(caser), nil
+
+ case "upper":
+ return func(s string) string {
+ return caser.ToCase(s, strcase.UpperCase, 0)
+ }, nil
+
+ case "lower":
+ return func(s string) string {
+ return caser.ToCase(s, strcase.LowerCase, 0)
+ }, nil
+
+ default:
+ return nil, fmt.Errorf("unsupported case: %s", rule.Case)
+ }
+}
+
+func toHeaderCase(caser *strcase.Caser) Converter {
+ return func(s string) string {
+ return caser.ToCase(s, strcase.TitleCase, '-')
+ }
+}
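
To make the converter mapping above more concrete, here is a hedged sketch (not part of this diff) of the ettle/strcase caser that ruleToConverter builds for a `go*` case with initialism overrides; the input string and printed output are illustrative only:

```go
package main

import (
	"fmt"

	"github.com/ettle/strcase"
)

func main() {
	// Roughly what ruleToConverter does for `case: goCamel` with
	// `initialism-overrides: {DB: true}`: a Go-style caser that also
	// treats "DB" as an initialism.
	caser := strcase.NewCaser(true, map[string]bool{"DB": true}, nil)

	// The exact output depends on the strcase version; the point is that
	// "db" is kept upper-cased as an initialism in the converted tag name.
	fmt.Println(caser.ToCamel("user_db_id"))
}
```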
diff --git a/vendor/github.com/ldez/tagliatelle/readme.md b/vendor/github.com/ldez/tagliatelle/readme.md
index 55a544db81..52d10304b1 100644
--- a/vendor/github.com/ldez/tagliatelle/readme.md
+++ b/vendor/github.com/ldez/tagliatelle/readme.md
@@ -97,15 +97,14 @@ type Foo struct {
}
```
-## What this tool is about
+## What this linter is about
-This tool is about validating tags according to rules you define.
-The tool also allows to fix tags according to the rules you defined.
+This linter is about validating tags according to rules you define.
+The linter also allows you to fix tags according to the rules you define.
-This tool is not intended to validate the fact a tag in valid or not.
-To do that, you can use `go vet`, or use [golangci-lint](https://golangci-lint.run) ["go vet"](https://golangci-lint.run/usage/linters/#govet) linter.
+This linter is not intended to validate whether a tag is valid or not.
-## How to use the tool
+## How to use the linter
### As a golangci-lint linter
@@ -114,17 +113,149 @@ Define the rules, you want via your [golangci-lint](https://golangci-lint.run) c
```yaml
linters-settings:
tagliatelle:
- # Check the struck tag name case.
+ # Checks the struct tag name case.
case:
- # Use the struct field name to check the name of the struct tag.
+ # Defines the association between tag name and case.
+ # Any struct tag name can be used.
+ # Supported string cases:
+ # - `camel`
+ # - `pascal`
+ # - `kebab`
+ # - `snake`
+ # - `upperSnake`
+ # - `goCamel`
+ # - `goPascal`
+ # - `goKebab`
+ # - `goSnake`
+ # - `upper`
+ # - `lower`
+ # - `header`
+ rules:
+ json: camel
+ yaml: camel
+ xml: camel
+ toml: camel
+ bson: camel
+ avro: snake
+ mapstructure: kebab
+ env: upperSnake
+ envconfig: upperSnake
+ whatever: snake
+ # Defines the association between tag name and case.
+ # Important: the `extended-rules` overrides `rules`.
+ # Default: empty
+ extended-rules:
+ json:
+ # Supported string cases:
+ # - `camel`
+ # - `pascal`
+ # - `kebab`
+ # - `snake`
+ # - `upperSnake`
+ # - `goCamel`
+ # - `goPascal`
+ # - `goKebab`
+ # - `goSnake`
+ # - `upper`
+ # - `lower`
+ # - `header`
+ #
+ # Required
+ case: camel
+ # Adds 'AMQP', 'DB', 'GID', 'RTP', 'SIP', 'TS' to initialisms,
+ # and removes 'LHS', 'RHS' from initialisms.
+ # Default: false
+ extra-initialisms: true
+ # Defines initialism additions and overrides.
+ # Default: empty
+ initialism-overrides:
+ DB: true # add a new initialism
+ LHS: false # disable a default initialism.
+ # ...
+ # Uses the struct field name to check the name of the struct tag.
# Default: false
use-field-name: true
+ # The field names to ignore.
+ # Default: []
+ ignored-fields:
+ - Bar
+ - Foo
+ # Overrides the default/root configuration.
+ # Default: []
+ overrides:
+ -
+ # The package path (uses `/` only as a separator).
+ # Required
+ pkg: foo/bar
+ # Default: empty or the same as the default/root configuration.
+ rules:
+ json: snake
+ xml: pascal
+ # Default: empty or the same as the default/root configuration.
+ extended-rules:
+ # same options as the base `extended-rules`.
+ # Default: false (WARNING: it doesn't follow the default/root configuration)
+ use-field-name: true
+ # The field names to ignore.
+ # Default: [] or the same as the default/root configuration.
+ ignored-fields:
+ - Bar
+ - Foo
+ # Ignore the package (takes precedence over all other configurations).
+ # Default: false
+ ignore: true
+
+```
+
+#### Examples
+
+Overrides case rules for the package `foo/bar`:
+
+```yaml
+linters-settings:
+ tagliatelle:
+ case:
rules:
- # Any struct tag type can be used.
- # Support string case: `camel`, `pascal`, `kebab`, `snake`, `upperSnake`, `goCamel`, `goPascal`, `goKebab`, `goSnake`, `upper`, `lower`, `header`.
json: camel
yaml: camel
xml: camel
+ overrides:
+ - pkg: foo/bar
+ rules:
+ json: snake
+ xml: pascal
+```
+
+Ignore fields inside the package `foo/bar`:
+
+```yaml
+linters-settings:
+ tagliatelle:
+ case:
+ rules:
+ json: camel
+ yaml: camel
+ xml: camel
+ overrides:
+ - pkg: foo/bar
+ ignored-fields:
+ - Bar
+ - Foo
+```
+
+Ignore the package `foo/bar`:
+
+```yaml
+linters-settings:
+ tagliatelle:
+ case:
+ rules:
+ json: camel
+ yaml: camel
+ xml: camel
+ overrides:
+ - pkg: foo/bar
+ ignore: true
```
More information here https://golangci-lint.run/usage/linters/#tagliatelle
@@ -149,13 +280,14 @@ Here are the default rules for the well known and used tags, when using tagliate
- `bson`: `camel`
- `avro`: `snake`
- `header`: `header`
+- `env`: `upperSnake`
- `envconfig`: `upperSnake`
### Custom Rules
-The tool is not limited to the tags used in example, you can use it to validate any tag.
+The linter is not limited to the tags used in the examples: **you can use it to validate any tag**.
-You can add your own tag, for example `whatever` and tells the tool you want to use `kebab`.
+You can add your own tag, for example `whatever`, and tell the linter you want to use `kebab`.
This option is only available via [golangci-lint](https://golangci-lint.run).
@@ -164,14 +296,15 @@ linters-settings:
tagliatelle:
# Check the struck tag name case.
case:
- # Use the struct field name to check the name of the struct tag.
- # Default: false
- use-field-name: true
rules:
# Any struct tag type can be used.
# Support string case: `camel`, `pascal`, `kebab`, `snake`, `goCamel`, `goPascal`, `goKebab`, `goSnake`, `upper`, `lower`
- json: camel
- yaml: camel
- xml: camel
+ json: camel
+ yaml: camel
+ xml: camel
+ toml: camel
whatever: kebab
+ # Use the struct field name to check the name of the struct tag.
+ # Default: false
+ use-field-name: true
```
diff --git a/vendor/github.com/ldez/tagliatelle/tagliatelle.go b/vendor/github.com/ldez/tagliatelle/tagliatelle.go
index 22c5feb3d8..99c7da2d04 100644
--- a/vendor/github.com/ldez/tagliatelle/tagliatelle.go
+++ b/vendor/github.com/ldez/tagliatelle/tagliatelle.go
@@ -6,10 +6,14 @@ import (
"errors"
"fmt"
"go/ast"
+ "maps"
+ "path"
+ "path/filepath"
"reflect"
+ "slices"
"strings"
- "github.com/ettle/strcase"
+ iradix "github.com/hashicorp/go-immutable-radix/v2"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
@@ -17,8 +21,30 @@ import (
// Config the tagliatelle configuration.
type Config struct {
- Rules map[string]string
- UseFieldName bool
+ Base
+ Overrides []Overrides
+}
+
+// Overrides applies configuration overrides by package.
+type Overrides struct {
+ Base
+ Package string
+}
+
+// Base shared configuration between rules.
+type Base struct {
+ Rules map[string]string
+ ExtendedRules map[string]ExtendedRule
+ UseFieldName bool
+ IgnoredFields []string
+ Ignore bool
+}
+
+// ExtendedRule allows customizing rules.
+type ExtendedRule struct {
+ Case string
+ ExtraInitialisms bool
+ InitialismOverrides map[string]bool
}
// New creates an analyzer.
@@ -26,20 +52,18 @@ func New(config Config) *analysis.Analyzer {
return &analysis.Analyzer{
Name: "tagliatelle",
Doc: "Checks the struct tags.",
- Run: func(pass *analysis.Pass) (interface{}, error) {
- if len(config.Rules) == 0 {
+ Run: func(pass *analysis.Pass) (any, error) {
+ if len(config.Rules) == 0 && len(config.ExtendedRules) == 0 && len(config.Overrides) == 0 {
return nil, nil
}
return run(pass, config)
},
- Requires: []*analysis.Analyzer{
- inspect.Analyzer,
- },
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
}
}
-func run(pass *analysis.Pass, config Config) (interface{}, error) {
+func run(pass *analysis.Pass, config Config) (any, error) {
isp, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
if !ok {
return nil, errors.New("missing inspect analyser")
@@ -49,6 +73,16 @@ func run(pass *analysis.Pass, config Config) (interface{}, error) {
(*ast.StructType)(nil),
}
+ cfg := config.Base
+ if pass.Module != nil {
+ radixTree := createRadixTree(config, pass.Module.Path)
+ _, cfg, _ = radixTree.Root().LongestPrefix([]byte(pass.Pkg.Path()))
+ }
+
+ if cfg.Ignore {
+ return nil, nil
+ }
+
isp.Preorder(nodeFilter, func(n ast.Node) {
node, ok := n.(*ast.StructType)
if !ok {
@@ -56,14 +90,14 @@ func run(pass *analysis.Pass, config Config) (interface{}, error) {
}
for _, field := range node.Fields.List {
- analyze(pass, config, node, field)
+ analyze(pass, cfg, node, field)
}
})
return nil, nil
}
-func analyze(pass *analysis.Pass, config Config, n *ast.StructType, field *ast.Field) {
+func analyze(pass *analysis.Pass, config Base, n *ast.StructType, field *ast.Field) {
if n.Fields == nil || n.Fields.NumFields() < 1 {
// skip empty structs
return
@@ -80,49 +114,74 @@ func analyze(pass *analysis.Pass, config Config, n *ast.StructType, field *ast.F
return
}
+ cleanRules(config)
+
+ if slices.Contains(config.IgnoredFields, fieldName) {
+ return
+ }
+
+ for key, extRule := range config.ExtendedRules {
+ report(pass, config, key, extRule.Case, fieldName, n, field, func() (Converter, error) {
+ return ruleToConverter(extRule)
+ })
+ }
+
for key, convName := range config.Rules {
- if convName == "" {
- continue
- }
+ report(pass, config, key, convName, fieldName, n, field, func() (Converter, error) {
+ return getSimpleConverter(convName)
+ })
+ }
+}
- value, flags, ok := lookupTagValue(field.Tag, key)
- if !ok {
- // skip when no struct tag for the key
- continue
- }
+func report(pass *analysis.Pass, config Base, key, convName, fieldName string, n *ast.StructType, field *ast.Field, fn ConverterCallback) {
+ if convName == "" {
+ return
+ }
- if value == "-" {
- // skip when skipped :)
- continue
- }
+ value, flags, ok := lookupTagValue(field.Tag, key)
+ if !ok {
+ // skip when no struct tag for the key
+ return
+ }
- // TODO(ldez): need to be rethink.
- // This is an exception because of a bug.
- // https://github.com/ldez/tagliatelle/issues/8
- // For now, tagliatelle should try to remain neutral in terms of format.
- if hasTagFlag(flags, "inline") {
- // skip for inline children (no name to lint)
- continue
- }
+ if value == "-" {
+ // skip when skipped :)
+ return
+ }
- if value == "" {
- value = fieldName
- }
+ // TODO(ldez): needs to be rethought.
+ // tagliatelle should try to remain neutral in terms of format.
+ if key == "xml" && strings.ContainsAny(value, ">:") {
+ // ignore XML names that contain a path
+ return
+ }
- converter, err := getConverter(convName)
- if err != nil {
- pass.Reportf(n.Pos(), "%s(%s): %v", key, convName, err)
- continue
- }
+ // TODO(ldez): needs to be rethought.
+ // This is an exception because of a bug.
+ // https://github.com/ldez/tagliatelle/issues/8
+ // For now, tagliatelle should try to remain neutral in terms of format.
+ if hasTagFlag(flags, "inline") {
+ // skip for inline children (no name to lint)
+ return
+ }
- expected := value
- if config.UseFieldName {
- expected = fieldName
- }
+ if value == "" {
+ value = fieldName
+ }
- if value != converter(expected) {
- pass.Reportf(field.Tag.Pos(), "%s(%s): got '%s' want '%s'", key, convName, value, converter(expected))
- }
+ converter, err := fn()
+ if err != nil {
+ pass.Reportf(n.Pos(), "%s(%s): %v", key, convName, err)
+ return
+ }
+
+ expected := value
+ if config.UseFieldName {
+ expected = fieldName
+ }
+
+ if value != converter(expected) {
+ pass.Reportf(field.Tag.Pos(), "%s(%s): got '%s' want '%s'", key, convName, value, converter(expected))
}
}
@@ -182,37 +241,62 @@ func hasTagFlag(flags []string, query string) bool {
return false
}
-func getConverter(c string) (func(s string) string, error) {
- switch c {
- case "camel":
- return strcase.ToCamel, nil
- case "pascal":
- return strcase.ToPascal, nil
- case "kebab":
- return strcase.ToKebab, nil
- case "snake":
- return strcase.ToSnake, nil
- case "goCamel":
- return strcase.ToGoCamel, nil
- case "goPascal":
- return strcase.ToGoPascal, nil
- case "goKebab":
- return strcase.ToGoKebab, nil
- case "goSnake":
- return strcase.ToGoSnake, nil
- case "header":
- return toHeader, nil
- case "upper":
- return strings.ToUpper, nil
- case "upperSnake":
- return strcase.ToSNAKE, nil
- case "lower":
- return strings.ToLower, nil
- default:
- return nil, fmt.Errorf("unsupported case: %s", c)
+func createRadixTree(config Config, modPath string) *iradix.Tree[Base] {
+ r := iradix.New[Base]()
+
+ defaultRule := Base{
+ Rules: maps.Clone(config.Rules),
+ ExtendedRules: maps.Clone(config.ExtendedRules),
+ UseFieldName: config.UseFieldName,
+ Ignore: config.Ignore,
}
+
+ defaultRule.IgnoredFields = append(defaultRule.IgnoredFields, config.IgnoredFields...)
+
+ r, _, _ = r.Insert([]byte(""), defaultRule)
+
+ for _, override := range config.Overrides {
+ c := Base{
+ UseFieldName: override.UseFieldName,
+ Ignore: override.Ignore,
+ }
+
+ // If there is an override the base configuration is ignored.
+ if len(override.IgnoredFields) == 0 {
+ c.IgnoredFields = append(c.IgnoredFields, config.IgnoredFields...)
+ } else {
+ c.IgnoredFields = append(c.IgnoredFields, override.IgnoredFields...)
+ }
+
+ // Copy the rules from the base.
+ c.Rules = maps.Clone(config.Rules)
+
+ // Overrides the rule from the base.
+ for k, v := range override.Rules {
+ c.Rules[k] = v
+ }
+
+ // Copy the extended rules from the base.
+ c.ExtendedRules = maps.Clone(config.ExtendedRules)
+
+ // Overrides the extended rule from the base.
+ for k, v := range override.ExtendedRules {
+ c.ExtendedRules[k] = v
+ }
+
+ key := path.Join(modPath, override.Package)
+ if filepath.Base(modPath) == override.Package {
+ key = modPath
+ }
+
+ r, _, _ = r.Insert([]byte(key), c)
+ }
+
+ return r
}
-func toHeader(s string) string {
- return strcase.ToCase(s, strcase.TitleCase, '-')
+func cleanRules(config Base) {
+ for k := range config.ExtendedRules {
+ delete(config.Rules, k)
+ }
}
diff --git a/vendor/github.com/ldez/usetesting/.gitignore b/vendor/github.com/ldez/usetesting/.gitignore
new file mode 100644
index 0000000000..0907a9069e
--- /dev/null
+++ b/vendor/github.com/ldez/usetesting/.gitignore
@@ -0,0 +1,2 @@
+/usetesting
+.idea
diff --git a/vendor/github.com/ldez/usetesting/.golangci.yml b/vendor/github.com/ldez/usetesting/.golangci.yml
new file mode 100644
index 0000000000..597647d242
--- /dev/null
+++ b/vendor/github.com/ldez/usetesting/.golangci.yml
@@ -0,0 +1,83 @@
+linters:
+ enable-all: true
+ disable:
+ - exportloopref # deprecated
+ - sqlclosecheck # not relevant (SQL)
+ - rowserrcheck # not relevant (SQL)
+ - cyclop # duplicate of gocyclo
+ - lll
+ - dupl
+ - nlreturn
+ - exhaustive
+ - exhaustruct
+ - testpackage
+ - tparallel
+ - paralleltest
+ - prealloc
+ - varnamelen
+ - nilnil
+ - errchkjson
+ - nonamedreturns
+
+linters-settings:
+ govet:
+ enable-all: true
+ disable:
+ - fieldalignment
+ mnd:
+ ignored-numbers:
+ - "124"
+ gocyclo:
+ min-complexity: 20
+ goconst:
+ min-len: 5
+ min-occurrences: 3
+ misspell:
+ locale: US
+ funlen:
+ lines: -1
+ statements: 40
+ godox:
+ keywords:
+ - FIXME
+ gofumpt:
+ extra-rules: true
+ depguard:
+ rules:
+ main:
+ deny:
+ - pkg: "github.com/instana/testify"
+ desc: not allowed
+ - pkg: "github.com/pkg/errors"
+ desc: Should be replaced by standard lib errors package
+ wsl:
+ force-case-trailing-whitespace: 1
+ allow-trailing-comment: true
+ gocritic:
+ enabled-tags:
+ - diagnostic
+ - style
+ - performance
+ disabled-checks:
+ - sloppyReassign
+ - rangeValCopy
+ - octalLiteral
+ - paramTypeCombine # already handle by gofumpt.extra-rules
+ settings:
+ hugeParam:
+ sizeThreshold: 100
+
+issues:
+ exclude-use-default: false
+ max-issues-per-linter: 0
+ max-same-issues: 0
+
+output:
+ show-stats: true
+ sort-results: true
+ sort-order:
+ - linter
+ - file
+
+run:
+ timeout: 5m
diff --git a/vendor/github.com/ldez/usetesting/LICENSE b/vendor/github.com/ldez/usetesting/LICENSE
new file mode 100644
index 0000000000..c1bf0c3288
--- /dev/null
+++ b/vendor/github.com/ldez/usetesting/LICENSE
@@ -0,0 +1,190 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ Copyright 2024 Fernandez Ludovic
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/ldez/usetesting/Makefile b/vendor/github.com/ldez/usetesting/Makefile
new file mode 100644
index 0000000000..b8eca65980
--- /dev/null
+++ b/vendor/github.com/ldez/usetesting/Makefile
@@ -0,0 +1,15 @@
+.PHONY: clean check test build
+
+default: clean check test build
+
+clean:
+ rm -rf dist/ cover.out
+
+test: clean
+ go test -v -cover ./...
+
+check:
+ golangci-lint run
+
+build:
+ go build -ldflags "-s -w" -trimpath ./cmd/usetesting/
diff --git a/vendor/github.com/ldez/usetesting/readme.md b/vendor/github.com/ldez/usetesting/readme.md
new file mode 100644
index 0000000000..e21ba06e63
--- /dev/null
+++ b/vendor/github.com/ldez/usetesting/readme.md
@@ -0,0 +1,209 @@
+# UseTesting
+
+Detects when some calls can be replaced by methods from the testing package.
+
+[Sponsor](https://github.com/sponsors/ldez)
+
+## Usages
+
+### Inside golangci-lint
+
+Recommended.
+
+```yml
+linters-settings:
+ usetesting:
+ # Enable/disable `os.CreateTemp("", ...)` detections.
+ # Default: true
+ os-create-temp: false
+
+ # Enable/disable `os.MkdirTemp()` detections.
+ # Default: true
+ os-mkdir-temp: false
+
+ # Enable/disable `os.Setenv()` detections.
+ # Default: false
+ os-setenv: true
+
+ # Enable/disable `os.TempDir()` detections.
+ # Default: false
+ os-temp-dir: true
+
+ # Enable/disable `os.Chdir()` detections.
+ # Disabled if Go < 1.24.
+ # Default: true
+ os-chdir: false
+
+ # Enable/disable `context.Background()` detections.
+ # Disabled if Go < 1.24.
+ # Default: true
+ context-background: false
+
+ # Enable/disable `context.TODO()` detections.
+ # Disabled if Go < 1.24.
+ # Default: true
+ context-todo: false
+```
+
+### As a CLI
+
+```shell
+go install github.com/ldez/usetesting/cmd/usetesting@latest
+```
+
+```
+usetesting: Reports uses of functions with replacement inside the testing package.
+
+Usage: usetesting [-flag] [package]
+
+Flags:
+ -contextbackground
+ Enable/disable context.Background() detections (default true)
+ -contexttodo
+ Enable/disable context.TODO() detections (default true)
+ -oschdir
+ Enable/disable os.Chdir() detections (default true)
+ -osmkdirtemp
+ Enable/disable os.MkdirTemp() detections (default true)
+ -ossetenv
+ Enable/disable os.Setenv() detections (default false)
+ -ostempdir
+ Enable/disable os.TempDir() detections (default false)
+ -oscreatetemp
+ Enable/disable os.CreateTemp("", ...) detections (default true)
+...
+```
+
+## Examples
+
+### `os.MkdirTemp`
+
+```go
+func TestExample(t *testing.T) {
+ os.MkdirTemp("a", "b")
+ // ...
+}
+```
+
+It can be replaced by:
+
+```go
+func TestExample(t *testing.T) {
+ t.TempDir()
+ // ...
+}
+```
+
+### `os.TempDir`
+
+```go
+func TestExample(t *testing.T) {
+ os.TempDir()
+ // ...
+}
+```
+
+It can be replaced by:
+
+```go
+func TestExample(t *testing.T) {
+ t.TempDir()
+ // ...
+}
+```
+
+### `os.CreateTemp`
+
+```go
+func TestExample(t *testing.T) {
+ os.CreateTemp("", "x")
+ // ...
+}
+```
+
+It can be replaced by:
+
+```go
+func TestExample(t *testing.T) {
+ os.CreateTemp(t.TempDir(), "x")
+ // ...
+}
+```
+
+### `os.Setenv`
+
+```go
+func TestExample(t *testing.T) {
+ os.Setenv("A", "b")
+ // ...
+}
+```
+
+It can be replaced by:
+
+```go
+func TestExample(t *testing.T) {
+ t.Setenv("A", "b")
+ // ...
+}
+```
+
+### `os.Chdir` (Go >= 1.24)
+
+```go
+func TestExample(t *testing.T) {
+ os.Chdir("x")
+ // ...
+}
+```
+
+It can be replaced by:
+
+```go
+func TestExample(t *testing.T) {
+ t.Chdir("x")
+ // ...
+}
+```
+
+### `context.Background` (Go >= 1.24)
+
+```go
+func TestExample(t *testing.T) {
+ ctx := context.Background()
+ // ...
+}
+```
+
+It can be replaced by:
+
+```go
+func TestExample(t *testing.T) {
+ ctx := t.Context()
+ // ...
+}
+```
+
+### `context.TODO` (Go >= 1.24)
+
+```go
+func TestExample(t *testing.T) {
+ ctx := context.TODO()
+ // ...
+}
+```
+
+It can be replaced by:
+
+```go
+func TestExample(t *testing.T) {
+ ctx := t.Context()
+ // ...
+}
+```
+
+## References
+
+- https://tip.golang.org/doc/go1.15#testingpkgtesting (`TempDir`)
+- https://tip.golang.org/doc/go1.17#testingpkgtesting (`Setenv`)
+- https://tip.golang.org/doc/go1.24#testingpkgtesting (`Chdir`, `Context`)
diff --git a/vendor/github.com/ldez/usetesting/report.go b/vendor/github.com/ldez/usetesting/report.go
new file mode 100644
index 0000000000..3c90b6baec
--- /dev/null
+++ b/vendor/github.com/ldez/usetesting/report.go
@@ -0,0 +1,200 @@
+package usetesting
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/printer"
+ "go/token"
+ "slices"
+ "strings"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+// because [os.CreateTemp] takes 2 args.
+const nbArgCreateTemp = 2
+
+func (a *analyzer) reportCallExpr(pass *analysis.Pass, ce *ast.CallExpr, fnInfo *FuncInfo) bool {
+ if !a.osCreateTemp {
+ return false
+ }
+
+ if len(ce.Args) != nbArgCreateTemp {
+ return false
+ }
+
+ switch fun := ce.Fun.(type) {
+ case *ast.SelectorExpr:
+ if fun.Sel == nil || fun.Sel.Name != createTempName {
+ return false
+ }
+
+ expr, ok := fun.X.(*ast.Ident)
+ if !ok {
+ return false
+ }
+
+ if expr.Name == osPkgName && isFirstArgEmptyString(ce) {
+ pass.Report(diagnosticOSCreateTemp(ce, fnInfo))
+
+ return true
+ }
+
+ case *ast.Ident:
+ if fun.Name != createTempName {
+ return false
+ }
+
+ pkgName := getPkgNameFromType(pass, fun)
+
+ if pkgName == osPkgName && isFirstArgEmptyString(ce) {
+ pass.Report(diagnosticOSCreateTemp(ce, fnInfo))
+
+ return true
+ }
+ }
+
+ return false
+}
+
+func diagnosticOSCreateTemp(ce *ast.CallExpr, fnInfo *FuncInfo) analysis.Diagnostic {
+ diagnostic := analysis.Diagnostic{
+ Pos: ce.Pos(),
+ Message: fmt.Sprintf(
+ `%s.%s("", ...) could be replaced by %[1]s.%[2]s(%s.%s(), ...) in %s`,
+ osPkgName, createTempName, fnInfo.ArgName, tempDirName, fnInfo.Name,
+ ),
+ }
+
+ // Skip `` arg names.
+ if !strings.Contains(fnInfo.ArgName, "<") {
+ g := &ast.CallExpr{
+ Fun: ce.Fun,
+ Args: []ast.Expr{
+ &ast.CallExpr{
+ Fun: &ast.SelectorExpr{
+ X: &ast.Ident{Name: fnInfo.ArgName},
+ Sel: &ast.Ident{Name: tempDirName},
+ },
+ },
+ ce.Args[1],
+ },
+ }
+
+ buf := bytes.NewBuffer(nil)
+
+ err := printer.Fprint(buf, token.NewFileSet(), g)
+ if err != nil {
+ diagnostic.Message = fmt.Sprintf("Suggested fix error: %v", err)
+ return diagnostic
+ }
+
+ diagnostic.SuggestedFixes = append(diagnostic.SuggestedFixes, analysis.SuggestedFix{
+ TextEdits: []analysis.TextEdit{{
+ Pos: ce.Pos(),
+ End: ce.End(),
+ NewText: buf.Bytes(),
+ }},
+ })
+ }
+
+ return diagnostic
+}
+
+func (a *analyzer) reportSelector(pass *analysis.Pass, se *ast.SelectorExpr, fnInfo *FuncInfo) bool {
+ if se.Sel == nil || !se.Sel.IsExported() {
+ return false
+ }
+
+ ident, ok := se.X.(*ast.Ident)
+ if !ok {
+ return false
+ }
+
+ return a.report(pass, se, ident.Name, se.Sel.Name, fnInfo)
+}
+
+func (a *analyzer) reportIdent(pass *analysis.Pass, ident *ast.Ident, fnInfo *FuncInfo) bool {
+ if !ident.IsExported() {
+ return false
+ }
+
+ if !slices.Contains(a.fieldNames, ident.Name) {
+ return false
+ }
+
+ pkgName := getPkgNameFromType(pass, ident)
+
+ return a.report(pass, ident, pkgName, ident.Name, fnInfo)
+}
+
+//nolint:gocyclo // The complexity is expected given the number of cases to check.
+func (a *analyzer) report(pass *analysis.Pass, rg analysis.Range, origPkgName, origName string, fnInfo *FuncInfo) bool {
+ switch {
+ case a.osMkdirTemp && origPkgName == osPkgName && origName == mkdirTempName:
+ report(pass, rg, origPkgName, origName, tempDirName, fnInfo)
+
+ case a.osTempDir && origPkgName == osPkgName && origName == tempDirName:
+ report(pass, rg, origPkgName, origName, tempDirName, fnInfo)
+
+ case a.osSetenv && origPkgName == osPkgName && origName == setenvName:
+ report(pass, rg, origPkgName, origName, setenvName, fnInfo)
+
+ case a.geGo124 && a.osChdir && origPkgName == osPkgName && origName == chdirName:
+ report(pass, rg, origPkgName, origName, chdirName, fnInfo)
+
+ case a.geGo124 && a.contextBackground && origPkgName == contextPkgName && origName == backgroundName:
+ report(pass, rg, origPkgName, origName, contextName, fnInfo)
+
+ case a.geGo124 && a.contextTodo && origPkgName == contextPkgName && origName == todoName:
+ report(pass, rg, origPkgName, origName, contextName, fnInfo)
+
+ default:
+ return false
+ }
+
+ return true
+}
+
+func report(pass *analysis.Pass, rg analysis.Range, origPkgName, origName, expectName string, fnInfo *FuncInfo) {
+ diagnostic := analysis.Diagnostic{
+ Pos: rg.Pos(),
+ Message: fmt.Sprintf("%s.%s() could be replaced by %s.%s() in %s",
+ origPkgName, origName, fnInfo.ArgName, expectName, fnInfo.Name,
+ ),
+ }
+
+ // Skip `` arg names.
+ // Only applies to `context.XXX` because the number of return parameters is the same as for the replacement.
+ if !strings.Contains(fnInfo.ArgName, "<") && origPkgName == contextPkgName {
+ diagnostic.SuggestedFixes = append(diagnostic.SuggestedFixes, analysis.SuggestedFix{
+ TextEdits: []analysis.TextEdit{{
+ Pos: rg.Pos(),
+ End: rg.End(),
+ NewText: []byte(fmt.Sprintf("%s.%s", fnInfo.ArgName, expectName)),
+ }},
+ })
+ }
+
+ pass.Report(diagnostic)
+}
+
+func isFirstArgEmptyString(ce *ast.CallExpr) bool {
+ bl, ok := ce.Args[0].(*ast.BasicLit)
+ if !ok {
+ return false
+ }
+
+ return bl.Kind == token.STRING && bl.Value == `""`
+}
+
+func getPkgNameFromType(pass *analysis.Pass, ident *ast.Ident) string {
+ o := pass.TypesInfo.ObjectOf(ident)
+
+ if o == nil || o.Pkg() == nil {
+ return ""
+ }
+
+ return o.Pkg().Name()
+}
diff --git a/vendor/github.com/ldez/usetesting/usetesting.go b/vendor/github.com/ldez/usetesting/usetesting.go
new file mode 100644
index 0000000000..7258278329
--- /dev/null
+++ b/vendor/github.com/ldez/usetesting/usetesting.go
@@ -0,0 +1,268 @@
+// Package usetesting is an analyzer that detects when some calls can be replaced by methods from the testing package.
+package usetesting
+
+import (
+ "go/ast"
+ "go/build"
+ "os"
+ "slices"
+ "strconv"
+ "strings"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+)
+
+const (
+ chdirName = "Chdir"
+ mkdirTempName = "MkdirTemp"
+ createTempName = "CreateTemp"
+ setenvName = "Setenv"
+ tempDirName = "TempDir"
+ backgroundName = "Background"
+ todoName = "TODO"
+ contextName = "Context"
+)
+
+const (
+ osPkgName = "os"
+ contextPkgName = "context"
+ testingPkgName = "testing"
+)
+
+// FuncInfo information about the test function.
+type FuncInfo struct {
+ Name string
+ ArgName string
+}
+
+// analyzer is the UseTesting linter.
+type analyzer struct {
+ contextBackground bool
+ contextTodo bool
+ osChdir bool
+ osMkdirTemp bool
+ osTempDir bool
+ osSetenv bool
+ osCreateTemp bool
+
+ fieldNames []string
+
+ skipGoVersionDetection bool
+ geGo124 bool
+}
+
+// NewAnalyzer creates a new UseTesting analyzer.
+func NewAnalyzer() *analysis.Analyzer {
+ _, skip := os.LookupEnv("USETESTING_SKIP_GO_VERSION_CHECK") // TODO: should be removed when go1.25 is released.
+
+ l := &analyzer{
+ fieldNames: []string{
+ chdirName,
+ mkdirTempName,
+ tempDirName,
+ setenvName,
+ backgroundName,
+ todoName,
+ createTempName,
+ },
+ skipGoVersionDetection: skip,
+ }
+
+ a := &analysis.Analyzer{
+ Name: "usetesting",
+ Doc: "Reports uses of functions with replacement inside the testing package.",
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: l.run,
+ }
+
+ a.Flags.BoolVar(&l.contextBackground, "contextbackground", true, "Enable/disable context.Background() detections")
+ a.Flags.BoolVar(&l.contextTodo, "contexttodo", true, "Enable/disable context.TODO() detections")
+ a.Flags.BoolVar(&l.osChdir, "oschdir", true, "Enable/disable os.Chdir() detections")
+ a.Flags.BoolVar(&l.osMkdirTemp, "osmkdirtemp", true, "Enable/disable os.MkdirTemp() detections")
+ a.Flags.BoolVar(&l.osSetenv, "ossetenv", false, "Enable/disable os.Setenv() detections")
+ a.Flags.BoolVar(&l.osTempDir, "ostempdir", false, "Enable/disable os.TempDir() detections")
+ a.Flags.BoolVar(&l.osCreateTemp, "oscreatetemp", true, `Enable/disable os.CreateTemp("", ...) detections`)
+
+ return a
+}
+
+func (a *analyzer) run(pass *analysis.Pass) (any, error) {
+ if !a.contextBackground && !a.contextTodo && !a.osChdir && !a.osMkdirTemp && !a.osSetenv && !a.osTempDir && !a.osCreateTemp {
+ return nil, nil
+ }
+
+ a.geGo124 = a.isGoSupported(pass)
+
+ insp, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+ if !ok {
+ return nil, nil
+ }
+
+ nodeFilter := []ast.Node{
+ (*ast.FuncDecl)(nil),
+ (*ast.FuncLit)(nil),
+ }
+
+ insp.WithStack(nodeFilter, func(node ast.Node, push bool, stack []ast.Node) (proceed bool) {
+ if !push {
+ return false
+ }
+
+ switch fn := node.(type) {
+ case *ast.FuncDecl:
+ a.checkFunc(pass, fn.Type, fn.Body, fn.Name.Name)
+
+ case *ast.FuncLit:
+ if hasParentFunc(stack) {
+ return true
+ }
+
+ a.checkFunc(pass, fn.Type, fn.Body, "anonymous function")
+ }
+
+ return true
+ })
+
+ return nil, nil
+}
+
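+// checkFunc inspects the body of a function whose first parameter is a testing type and reports replaceable calls.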
+func (a *analyzer) checkFunc(pass *analysis.Pass, ft *ast.FuncType, block *ast.BlockStmt, fnName string) {
+ if len(ft.Params.List) < 1 {
+ return
+ }
+
+ fnInfo := checkTestFunctionSignature(ft.Params.List[0], fnName)
+ if fnInfo == nil {
+ return
+ }
+
+ ast.Inspect(block, func(n ast.Node) bool {
+ switch v := n.(type) {
+ case *ast.SelectorExpr:
+ return !a.reportSelector(pass, v, fnInfo)
+
+ case *ast.Ident:
+ return !a.reportIdent(pass, v, fnInfo)
+
+ case *ast.CallExpr:
+ return !a.reportCallExpr(pass, v, fnInfo)
+ }
+
+ return true
+ })
+}
+
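+// isGoSupported reports whether the analyzed package is built with at least go1.24, or whether version detection is skipped.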
+func (a *analyzer) isGoSupported(pass *analysis.Pass) bool {
+ if a.skipGoVersionDetection {
+ return true
+ }
+
+ // Prior to go1.22, versions.FileVersion returns only the toolchain version,
+ // which is of no use to us,
+ // so disable this analyzer on earlier versions.
+ if !slices.Contains(build.Default.ReleaseTags, "go1.22") {
+ return false
+ }
+
+ pkgVersion := pass.Pkg.GoVersion()
+ if pkgVersion == "" {
+ // Empty means Go devel.
+ return true
+ }
+
+ raw := strings.TrimPrefix(pkgVersion, "go")
+
+ // prerelease version (go1.24rc1)
+ idx := strings.IndexFunc(raw, func(r rune) bool {
+ return (r < '0' || r > '9') && r != '.'
+ })
+
+ if idx != -1 {
+ raw = raw[:idx]
+ }
+
+ vParts := strings.Split(raw, ".")
+
+ v, err := strconv.Atoi(strings.Join(vParts[:2], ""))
+ if err != nil {
+ v = 116
+ }
+
+ return v >= 124
+}
+
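+// hasParentFunc reports whether an enclosing function in the stack already has a test function signature.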
+func hasParentFunc(stack []ast.Node) bool {
+ // -2 because the last element of the stack is the node itself.
+ const skipSelf = 2
+
+ // skip 0 because it's always [*ast.File].
+ for i := len(stack) - skipSelf; i > 0; i-- {
+ s := stack[i]
+
+ switch fn := s.(type) {
+ case *ast.FuncDecl:
+ if len(fn.Type.Params.List) < 1 {
+ continue
+ }
+
+ if checkTestFunctionSignature(fn.Type.Params.List[0], fn.Name.Name) != nil {
+ return true
+ }
+
+ case *ast.FuncLit:
+ if len(fn.Type.Params.List) < 1 {
+ continue
+ }
+
+ if checkTestFunctionSignature(fn.Type.Params.List[0], "anonymous function") != nil {
+ return true
+ }
+ }
+ }
+
+ return false
+}
+
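+// checkTestFunctionSignature returns information about the function if its first parameter is *testing.T, *testing.B, or testing.TB; otherwise it returns nil.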
+func checkTestFunctionSignature(arg *ast.Field, fnName string) *FuncInfo {
+ switch at := arg.Type.(type) {
+ case *ast.StarExpr:
+ if se, ok := at.X.(*ast.SelectorExpr); ok {
+ return createFuncInfo(arg, "", se, testingPkgName, fnName, "T", "B")
+ }
+
+ case *ast.SelectorExpr:
+ return createFuncInfo(arg, "tb", at, testingPkgName, fnName, "TB")
+ }
+
+ return nil
+}
+
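+// createFuncInfo builds a FuncInfo when the selector matches the expected package and type names.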
+func createFuncInfo(arg *ast.Field, defaultName string, se *ast.SelectorExpr, pkgName, fnName string, selectorNames ...string) *FuncInfo {
+ ok := checkSelectorName(se, pkgName, selectorNames...)
+ if !ok {
+ return nil
+ }
+
+ return &FuncInfo{
+ Name: fnName,
+ ArgName: getTestArgName(arg, defaultName),
+ }
+}
+
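+// checkSelectorName reports whether the selector expression is pkgName.<one of selectorNames>.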
+func checkSelectorName(se *ast.SelectorExpr, pkgName string, selectorNames ...string) bool {
+ if ident, ok := se.X.(*ast.Ident); ok {
+ return pkgName == ident.Name && slices.Contains(selectorNames, se.Sel.Name)
+ }
+
+ return false
+}
+
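+// getTestArgName returns the name of the testing parameter, or defaultName if it is unnamed or the blank identifier.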
+func getTestArgName(arg *ast.Field, defaultName string) string {
+ if len(arg.Names) > 0 && arg.Names[0].Name != "_" {
+ return arg.Names[0].Name
+ }
+
+ return defaultName
+}
diff --git a/vendor/github.com/lufeee/execinquery/.gitignore b/vendor/github.com/lufeee/execinquery/.gitignore
deleted file mode 100644
index 00e1abc31f..0000000000
--- a/vendor/github.com/lufeee/execinquery/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-execinquery
diff --git a/vendor/github.com/lufeee/execinquery/README.md b/vendor/github.com/lufeee/execinquery/README.md
deleted file mode 100644
index 38fa7c8b96..0000000000
--- a/vendor/github.com/lufeee/execinquery/README.md
+++ /dev/null
@@ -1,76 +0,0 @@
-# execinquery - a simple query string checker in Query function
-[](https://github.com/lufeee/execinquery/actions/workflows/go-cross.yml)
-[](https://github.com/lufeee/execinquery/actions/workflows/lint.yml)
-[](LICENSE)
-## About
-
-execinquery is a linter about query string checker in Query function which reads your Go src files and
-warnings it finds.
-
-## Installation
-
-```sh
-go install github.com/lufeee/execinquery/cmd/execinquery
-```
-
-## Usage
-```go
-package main
-
-import (
- "database/sql"
- "log"
-)
-
-func main() {
- db, err := sql.Open("mysql", "test:test@tcp(test:3306)/test")
- if err != nil {
- log.Fatal("Database Connect Error: ", err)
- }
- defer db.Close()
-
- test := "a"
- _, err = db.Query("Update * FROM hoge where id = ?", test)
- if err != nil {
- log.Fatal("Query Error: ", err)
- }
-
-}
-```
-
-```console
-go vet -vettool=$(which execinquery) ./...
-
-# command-line-arguments
-./a.go:16:11: Use Exec instead of Query to execute `UPDATE` query
-```
-
-## CI
-
-### CircleCI
-
-```yaml
-- run:
- name: install execinquery
- command: go install github.com/lufeee/execinquery
-
-- run:
- name: run execinquery
- command: go vet -vettool=`which execinquery` ./...
-```
-
-### GitHub Actions
-
-```yaml
-- name: install execinquery
- run: go install github.com/lufeee/execinquery
-
-- name: run execinquery
- run: go vet -vettool=`which execinquery` ./...
-```
-
-### License
-
-MIT license.
-
-
diff --git a/vendor/github.com/lufeee/execinquery/execinquery.go b/vendor/github.com/lufeee/execinquery/execinquery.go
deleted file mode 100644
index c37dc17010..0000000000
--- a/vendor/github.com/lufeee/execinquery/execinquery.go
+++ /dev/null
@@ -1,135 +0,0 @@
-package execinquery
-
-import (
- "go/ast"
- "regexp"
- "strings"
-
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/analysis/passes/inspect"
- "golang.org/x/tools/go/ast/inspector"
-)
-
-const doc = "execinquery is a linter about query string checker in Query function which reads your Go src files and warning it finds"
-
-// Analyzer is checking database/sql pkg Query's function
-var Analyzer = &analysis.Analyzer{
- Name: "execinquery",
- Doc: doc,
- Run: newLinter().run,
- Requires: []*analysis.Analyzer{
- inspect.Analyzer,
- },
-}
-
-type linter struct {
- commentExp *regexp.Regexp
- multilineCommentExp *regexp.Regexp
-}
-
-func newLinter() *linter {
- return &linter{
- commentExp: regexp.MustCompile(`--[^\n]*\n`),
- multilineCommentExp: regexp.MustCompile(`(?s)/\*.*?\*/`),
- }
-}
-
-func (l linter) run(pass *analysis.Pass) (interface{}, error) {
- result := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
-
- nodeFilter := []ast.Node{
- (*ast.CallExpr)(nil),
- }
-
- result.Preorder(nodeFilter, func(n ast.Node) {
- switch n := n.(type) {
- case *ast.CallExpr:
- selector, ok := n.Fun.(*ast.SelectorExpr)
- if !ok {
- return
- }
-
- if pass.TypesInfo == nil || pass.TypesInfo.Uses[selector.Sel] == nil || pass.TypesInfo.Uses[selector.Sel].Pkg() == nil {
- return
- }
-
- if "database/sql" != pass.TypesInfo.Uses[selector.Sel].Pkg().Path() {
- return
- }
-
- if !strings.Contains(selector.Sel.Name, "Query") {
- return
- }
-
- replacement := "Exec"
- var i int // the index of the query argument
- if strings.Contains(selector.Sel.Name, "Context") {
- replacement += "Context"
- i = 1
- }
-
- if len(n.Args) <= i {
- return
- }
-
- query := l.getQueryString(n.Args[i])
- if query == "" {
- return
- }
-
- query = strings.TrimSpace(l.cleanValue(query))
- parts := strings.SplitN(query, " ", 2)
- cmd := strings.ToUpper(parts[0])
-
- if strings.HasPrefix(cmd, "SELECT") {
- return
- }
-
- pass.Reportf(n.Fun.Pos(), "Use %s instead of %s to execute `%s` query", replacement, selector.Sel.Name, cmd)
- }
- })
-
- return nil, nil
-}
-
-func (l linter) cleanValue(s string) string {
- v := strings.NewReplacer(`"`, "", "`", "").Replace(s)
-
- v = l.multilineCommentExp.ReplaceAllString(v, "")
-
- return l.commentExp.ReplaceAllString(v, "")
-}
-
-func (l linter) getQueryString(exp interface{}) string {
- switch e := exp.(type) {
- case *ast.AssignStmt:
- var v string
- for _, stmt := range e.Rhs {
- v += l.cleanValue(l.getQueryString(stmt))
- }
- return v
-
- case *ast.BasicLit:
- return e.Value
-
- case *ast.ValueSpec:
- var v string
- for _, value := range e.Values {
- v += l.cleanValue(l.getQueryString(value))
- }
- return v
-
- case *ast.Ident:
- if e.Obj == nil {
- return ""
- }
- return l.getQueryString(e.Obj.Decl)
-
- case *ast.BinaryExpr:
- v := l.cleanValue(l.getQueryString(e.X))
- v += l.cleanValue(l.getQueryString(e.Y))
- return v
- }
-
- return ""
-}
diff --git a/vendor/github.com/matoous/godox/.golangci.yml b/vendor/github.com/matoous/godox/.golangci.yml
index 3f0fcdb191..8d080b28aa 100644
--- a/vendor/github.com/matoous/godox/.golangci.yml
+++ b/vendor/github.com/matoous/godox/.golangci.yml
@@ -1,10 +1,4 @@
linters-settings:
- depguard:
- list-type: blacklist
- include-go-root: true
- packages:
- # we are using "github.com/json-iterator/go" instead of json encoder from stdlib
- - "encoding/json"
dupl:
threshold: 100
gocritic:
@@ -19,10 +13,9 @@ linters-settings:
- unnamedResult # it is experimental currently and doesn't handle typed channels correctly
gocyclo:
min-complexity: 14 # TODO go lower
- golint:
- min-confidence: 0
govet:
- check-shadowing: true
+ enable:
+ - shadow
goconst:
min-len: 2
min-occurrences: 3
@@ -30,39 +23,47 @@ linters-settings:
local-prefixes: gitlab.skypicker.com/search-team/gonuts/conveyance-store
lll:
line-length: 140
- maligned:
- suggest-new: true
misspell:
locale: US
linters:
enable-all: true
disable:
+ - depguard
# prealloc is not recommended by `golangci-lint` developers.
- prealloc
- gochecknoglobals
+ # deprecated
+ - maligned
+ - exhaustivestruct
+ - nosnakecase
+ - scopelint
+ - structcheck
+ - ifshort
+ - varcheck
+ - deadcode
+ - golint
+ - interfacer
+
issues:
+ exclude-dirs:
+ - "fixtures"
exclude-rules:
- path: _test\.go
linters:
+ - exhaustruct
- goconst
- dupl
- - path: fixtures
- linters:
- - gocritic
- - varcheck
- - deadcode
- - unused
-
run:
modules-download-mode: readonly
# output configuration options
output:
# colored-line-number|line-number|json|tab|checkstyle|code-climate, default is "colored-line-number"
- format: tab
+ formats:
+ - format: tab
# print lines of code with issue, default is true
print-issued-lines: true
diff --git a/vendor/github.com/matoous/godox/.revive.toml b/vendor/github.com/matoous/godox/.revive.toml
index db0e4edb66..a4a30464da 100644
--- a/vendor/github.com/matoous/godox/.revive.toml
+++ b/vendor/github.com/matoous/godox/.revive.toml
@@ -1,5 +1,6 @@
ignoreGeneratedHeader = false
severity = "warning"
+exclude = ["./fixtures/..."]
# confidence <= 0.2 generates a lot of errors from package-comments rule. It marks files that do not contain
# package-level comments as a warning irrespective of existing package-level comment in one file.
diff --git a/vendor/github.com/matoous/godox/Makefile b/vendor/github.com/matoous/godox/Makefile
new file mode 100644
index 0000000000..694aa21d64
--- /dev/null
+++ b/vendor/github.com/matoous/godox/Makefile
@@ -0,0 +1,20 @@
+## Help display.
+## Pulls comments from beside commands and prints a nicely formatted
+## display with the commands and their usage information.
+
+.DEFAULT_GOAL := help
+
+help: ## Prints this help
+ @grep -h -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
+
+.PHONY: lint
+lint: ## Lint the application
+ golangci-lint run --max-same-issues=0 --timeout=1m ./...
+
+.PHONY: test
+test: ## Run unit tests
+ go test -race -shuffle=on ./...
+
+.PHONY: vet
+vet: ## Run go vet
+ go vet ./...
diff --git a/vendor/github.com/matoous/godox/godox.go b/vendor/github.com/matoous/godox/godox.go
index 3903525c80..5bcc7e980a 100644
--- a/vendor/github.com/matoous/godox/godox.go
+++ b/vendor/github.com/matoous/godox/godox.go
@@ -1,3 +1,5 @@
+// Package godox is a linter that scans Go code for comments containing certain keywords
+// (like TODO, BUG, FIXME) which typically indicate areas that require attention.
package godox
import (
@@ -20,22 +22,17 @@ type Message struct {
Message string
}
-func getMessages(comment *ast.Comment, fset *token.FileSet, keywords []string) []Message {
+func getMessages(comment *ast.Comment, fset *token.FileSet, keywords []string) ([]Message, error) {
commentText := extractComment(comment.Text)
- b := bufio.NewReader(bytes.NewBufferString(commentText))
+ scanner := bufio.NewScanner(bytes.NewBufferString(commentText))
var comments []Message
- for lineNum := 0; ; lineNum++ {
- line, _, err := b.ReadLine()
- if err != nil {
- break
- }
-
+ for lineNum := 0; scanner.Scan(); lineNum++ {
const minimumSize = 4
- sComment := bytes.TrimSpace(line)
+ sComment := bytes.TrimSpace(scanner.Bytes())
if len(sComment) < minimumSize {
continue
}
@@ -68,21 +65,22 @@ func getMessages(comment *ast.Comment, fset *token.FileSet, keywords []string) [
}
}
- return comments
+ if err := scanner.Err(); err != nil {
+ return nil, fmt.Errorf("scan: %w", err)
+ }
+
+ return comments, nil
}
func extractComment(commentText string) string {
switch commentText[1] {
case '/':
- commentText = commentText[2:]
- if len(commentText) > 0 && commentText[0] == ' ' {
- commentText = commentText[1:]
- }
+ return strings.TrimPrefix(commentText[2:], " ")
case '*':
- commentText = commentText[2 : len(commentText)-2]
+ return commentText[2 : len(commentText)-2]
+ default:
+ return commentText
}
-
- return commentText
}
func hasAlphanumRuneAdjacent(rest []byte) bool {
@@ -102,7 +100,7 @@ func hasAlphanumRuneAdjacent(rest []byte) bool {
// Run runs the godox linter on given file.
// Godox searches for comments starting with given keywords and reports them.
-func Run(file *ast.File, fset *token.FileSet, keywords ...string) []Message {
+func Run(file *ast.File, fset *token.FileSet, keywords ...string) ([]Message, error) {
if len(keywords) == 0 {
keywords = defaultKeywords
}
@@ -111,9 +109,14 @@ func Run(file *ast.File, fset *token.FileSet, keywords ...string) []Message {
for _, c := range file.Comments {
for _, ci := range c.List {
- messages = append(messages, getMessages(ci, fset, keywords)...)
+ msgs, err := getMessages(ci, fset, keywords)
+ if err != nil {
+ return nil, err
+ }
+
+ messages = append(messages, msgs...)
}
}
- return messages
+ return messages, nil
}
diff --git a/vendor/github.com/mattn/go-colorable/colorable_appengine.go b/vendor/github.com/mattn/go-colorable/colorable_appengine.go
deleted file mode 100644
index 416d1bbbf8..0000000000
--- a/vendor/github.com/mattn/go-colorable/colorable_appengine.go
+++ /dev/null
@@ -1,38 +0,0 @@
-//go:build appengine
-// +build appengine
-
-package colorable
-
-import (
- "io"
- "os"
-
- _ "github.com/mattn/go-isatty"
-)
-
-// NewColorable returns new instance of Writer which handles escape sequence.
-func NewColorable(file *os.File) io.Writer {
- if file == nil {
- panic("nil passed instead of *os.File to NewColorable()")
- }
-
- return file
-}
-
-// NewColorableStdout returns new instance of Writer which handles escape sequence for stdout.
-func NewColorableStdout() io.Writer {
- return os.Stdout
-}
-
-// NewColorableStderr returns new instance of Writer which handles escape sequence for stderr.
-func NewColorableStderr() io.Writer {
- return os.Stderr
-}
-
-// EnableColorsStdout enable colors if possible.
-func EnableColorsStdout(enabled *bool) func() {
- if enabled != nil {
- *enabled = true
- }
- return func() {}
-}
diff --git a/vendor/github.com/mattn/go-colorable/colorable_others.go b/vendor/github.com/mattn/go-colorable/colorable_others.go
index 766d94603a..c1a78aa94d 100644
--- a/vendor/github.com/mattn/go-colorable/colorable_others.go
+++ b/vendor/github.com/mattn/go-colorable/colorable_others.go
@@ -1,5 +1,5 @@
-//go:build !windows && !appengine
-// +build !windows,!appengine
+//go:build !windows || appengine
+// +build !windows appengine
package colorable
diff --git a/vendor/github.com/mattn/go-colorable/colorable_windows.go b/vendor/github.com/mattn/go-colorable/colorable_windows.go
index 1846ad5ab4..2df7b8598a 100644
--- a/vendor/github.com/mattn/go-colorable/colorable_windows.go
+++ b/vendor/github.com/mattn/go-colorable/colorable_windows.go
@@ -11,7 +11,7 @@ import (
"strconv"
"strings"
"sync"
- "syscall"
+ syscall "golang.org/x/sys/windows"
"unsafe"
"github.com/mattn/go-isatty"
@@ -73,7 +73,7 @@ type consoleCursorInfo struct {
}
var (
- kernel32 = syscall.NewLazyDLL("kernel32.dll")
+ kernel32 = syscall.NewLazySystemDLL("kernel32.dll")
procGetConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo")
procSetConsoleTextAttribute = kernel32.NewProc("SetConsoleTextAttribute")
procSetConsoleCursorPosition = kernel32.NewProc("SetConsoleCursorPosition")
@@ -87,8 +87,8 @@ var (
procCreateConsoleScreenBuffer = kernel32.NewProc("CreateConsoleScreenBuffer")
)
-// Writer provides colorable Writer to the console
-type Writer struct {
+// writer provides colorable Writer to the console
+type writer struct {
out io.Writer
handle syscall.Handle
althandle syscall.Handle
@@ -98,7 +98,7 @@ type Writer struct {
mutex sync.Mutex
}
-// NewColorable returns new instance of Writer which handles escape sequence from File.
+// NewColorable returns new instance of writer which handles escape sequence from File.
func NewColorable(file *os.File) io.Writer {
if file == nil {
panic("nil passed instead of *os.File to NewColorable()")
@@ -112,17 +112,17 @@ func NewColorable(file *os.File) io.Writer {
var csbi consoleScreenBufferInfo
handle := syscall.Handle(file.Fd())
procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
- return &Writer{out: file, handle: handle, oldattr: csbi.attributes, oldpos: coord{0, 0}}
+ return &writer{out: file, handle: handle, oldattr: csbi.attributes, oldpos: coord{0, 0}}
}
return file
}
-// NewColorableStdout returns new instance of Writer which handles escape sequence for stdout.
+// NewColorableStdout returns new instance of writer which handles escape sequence for stdout.
func NewColorableStdout() io.Writer {
return NewColorable(os.Stdout)
}
-// NewColorableStderr returns new instance of Writer which handles escape sequence for stderr.
+// NewColorableStderr returns new instance of writer which handles escape sequence for stderr.
func NewColorableStderr() io.Writer {
return NewColorable(os.Stderr)
}
@@ -434,7 +434,7 @@ func atoiWithDefault(s string, def int) (int, error) {
}
// Write writes data on console
-func (w *Writer) Write(data []byte) (n int, err error) {
+func (w *writer) Write(data []byte) (n int, err error) {
w.mutex.Lock()
defer w.mutex.Unlock()
var csbi consoleScreenBufferInfo
@@ -560,7 +560,7 @@ loop:
}
procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
case 'E':
- n, err = strconv.Atoi(buf.String())
+ n, err = atoiWithDefault(buf.String(), 1)
if err != nil {
continue
}
@@ -569,7 +569,7 @@ loop:
csbi.cursorPosition.y += short(n)
procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
case 'F':
- n, err = strconv.Atoi(buf.String())
+ n, err = atoiWithDefault(buf.String(), 1)
if err != nil {
continue
}
diff --git a/vendor/github.com/mattn/go-runewidth/.travis.yml b/vendor/github.com/mattn/go-runewidth/.travis.yml
deleted file mode 100644
index 6a21813a3e..0000000000
--- a/vendor/github.com/mattn/go-runewidth/.travis.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-language: go
-sudo: false
-go:
- - 1.13.x
- - tip
-
-before_install:
- - go get -t -v ./...
-
-script:
- - go generate
- - git diff --cached --exit-code
- - ./go.test.sh
-
-after_success:
- - bash <(curl -s https://codecov.io/bash)
diff --git a/vendor/github.com/mattn/go-runewidth/README.md b/vendor/github.com/mattn/go-runewidth/README.md
index aa56ab96c2..5e2cfd98cb 100644
--- a/vendor/github.com/mattn/go-runewidth/README.md
+++ b/vendor/github.com/mattn/go-runewidth/README.md
@@ -1,7 +1,7 @@
go-runewidth
============
-[](https://travis-ci.org/mattn/go-runewidth)
+[](https://github.com/mattn/go-runewidth/actions?query=workflow%3Atest)
[](https://codecov.io/gh/mattn/go-runewidth)
[](http://godoc.org/github.com/mattn/go-runewidth)
[](https://goreportcard.com/report/github.com/mattn/go-runewidth)
diff --git a/vendor/github.com/mattn/go-runewidth/go.test.sh b/vendor/github.com/mattn/go-runewidth/go.test.sh
deleted file mode 100644
index 012162b077..0000000000
--- a/vendor/github.com/mattn/go-runewidth/go.test.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-echo "" > coverage.txt
-
-for d in $(go list ./... | grep -v vendor); do
- go test -race -coverprofile=profile.out -covermode=atomic "$d"
- if [ -f profile.out ]; then
- cat profile.out >> coverage.txt
- rm profile.out
- fi
-done
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth.go b/vendor/github.com/mattn/go-runewidth/runewidth.go
index 19f8e0449b..7dfbb3be91 100644
--- a/vendor/github.com/mattn/go-runewidth/runewidth.go
+++ b/vendor/github.com/mattn/go-runewidth/runewidth.go
@@ -2,6 +2,9 @@ package runewidth
import (
"os"
+ "strings"
+
+ "github.com/rivo/uniseg"
)
//go:generate go run script/generate.go
@@ -10,11 +13,14 @@ var (
// EastAsianWidth will be set true if the current locale is CJK
EastAsianWidth bool
- // ZeroWidthJoiner is flag to set to use UTR#51 ZWJ
- ZeroWidthJoiner bool
+ // StrictEmojiNeutral should be set false if handle broken fonts
+ StrictEmojiNeutral bool = true
// DefaultCondition is a condition in current locale
- DefaultCondition = &Condition{}
+ DefaultCondition = &Condition{
+ EastAsianWidth: false,
+ StrictEmojiNeutral: true,
+ }
)
func init() {
@@ -29,8 +35,13 @@ func handleEnv() {
EastAsianWidth = env == "1"
}
// update DefaultCondition
- DefaultCondition.EastAsianWidth = EastAsianWidth
- DefaultCondition.ZeroWidthJoiner = ZeroWidthJoiner
+ if DefaultCondition.EastAsianWidth != EastAsianWidth {
+ DefaultCondition.EastAsianWidth = EastAsianWidth
+ if len(DefaultCondition.combinedLut) > 0 {
+ DefaultCondition.combinedLut = DefaultCondition.combinedLut[:0]
+ CreateLUT()
+ }
+ }
}
type interval struct {
@@ -85,63 +96,97 @@ var nonprint = table{
// Condition have flag EastAsianWidth whether the current locale is CJK or not.
type Condition struct {
- EastAsianWidth bool
- ZeroWidthJoiner bool
+ combinedLut []byte
+ EastAsianWidth bool
+ StrictEmojiNeutral bool
}
// NewCondition return new instance of Condition which is current locale.
func NewCondition() *Condition {
return &Condition{
- EastAsianWidth: EastAsianWidth,
- ZeroWidthJoiner: ZeroWidthJoiner,
+ EastAsianWidth: EastAsianWidth,
+ StrictEmojiNeutral: StrictEmojiNeutral,
}
}
// RuneWidth returns the number of cells in r.
// See http://www.unicode.org/reports/tr11/
func (c *Condition) RuneWidth(r rune) int {
- switch {
- case r < 0 || r > 0x10FFFF || inTables(r, nonprint, combining, notassigned):
+ if r < 0 || r > 0x10FFFF {
return 0
- case (c.EastAsianWidth && IsAmbiguousWidth(r)) || inTables(r, doublewidth):
- return 2
- default:
- return 1
}
-}
-
-func (c *Condition) stringWidth(s string) (width int) {
- for _, r := range []rune(s) {
- width += c.RuneWidth(r)
+ if len(c.combinedLut) > 0 {
+ return int(c.combinedLut[r>>1]>>(uint(r&1)*4)) & 3
}
- return width
-}
-
-func (c *Condition) stringWidthZeroJoiner(s string) (width int) {
- r1, r2 := rune(0), rune(0)
- for _, r := range []rune(s) {
- if r == 0xFE0E || r == 0xFE0F {
- continue
+ // optimized version, verified by TestRuneWidthChecksums()
+ if !c.EastAsianWidth {
+ switch {
+ case r < 0x20:
+ return 0
+ case (r >= 0x7F && r <= 0x9F) || r == 0xAD: // nonprint
+ return 0
+ case r < 0x300:
+ return 1
+ case inTable(r, narrow):
+ return 1
+ case inTables(r, nonprint, combining):
+ return 0
+ case inTable(r, doublewidth):
+ return 2
+ default:
+ return 1
}
- w := c.RuneWidth(r)
- if r2 == 0x200D && inTables(r, emoji) && inTables(r1, emoji) {
- if width < w {
- width = w
- }
- } else {
- width += w
+ } else {
+ switch {
+ case inTables(r, nonprint, combining):
+ return 0
+ case inTable(r, narrow):
+ return 1
+ case inTables(r, ambiguous, doublewidth):
+ return 2
+ case !c.StrictEmojiNeutral && inTables(r, ambiguous, emoji, narrow):
+ return 2
+ default:
+ return 1
}
- r1, r2 = r2, r
}
- return width
+}
+
+// CreateLUT will create an in-memory lookup table of 557056 bytes for faster operation.
+// This should not be called concurrently with other operations on c.
+// If options in c are changed, CreateLUT should be called again.
+func (c *Condition) CreateLUT() {
+ const max = 0x110000
+ lut := c.combinedLut
+ if len(c.combinedLut) != 0 {
+ // Remove so we don't use it.
+ c.combinedLut = nil
+ } else {
+ lut = make([]byte, max/2)
+ }
+ for i := range lut {
+ i32 := int32(i * 2)
+ x0 := c.RuneWidth(i32)
+ x1 := c.RuneWidth(i32 + 1)
+ lut[i] = uint8(x0) | uint8(x1)<<4
+ }
+ c.combinedLut = lut
}
// StringWidth return width as you can see
func (c *Condition) StringWidth(s string) (width int) {
- if c.ZeroWidthJoiner {
- return c.stringWidthZeroJoiner(s)
+ g := uniseg.NewGraphemes(s)
+ for g.Next() {
+ var chWidth int
+ for _, r := range g.Runes() {
+ chWidth = c.RuneWidth(r)
+ if chWidth > 0 {
+ break // Our best guess at this point is to use the width of the first non-zero-width rune.
+ }
+ }
+ width += chWidth
}
- return c.stringWidth(s)
+ return
}
// Truncate return string truncated with w cells
@@ -149,27 +194,69 @@ func (c *Condition) Truncate(s string, w int, tail string) string {
if c.StringWidth(s) <= w {
return s
}
- r := []rune(s)
- tw := c.StringWidth(tail)
- w -= tw
- width := 0
- i := 0
- for ; i < len(r); i++ {
- cw := c.RuneWidth(r[i])
- if width+cw > w {
+ w -= c.StringWidth(tail)
+ var width int
+ pos := len(s)
+ g := uniseg.NewGraphemes(s)
+ for g.Next() {
+ var chWidth int
+ for _, r := range g.Runes() {
+ chWidth = c.RuneWidth(r)
+ if chWidth > 0 {
+ break // See StringWidth() for details.
+ }
+ }
+ if width+chWidth > w {
+ pos, _ = g.Positions()
break
}
- width += cw
+ width += chWidth
+ }
+ return s[:pos] + tail
+}
+
+// TruncateLeft cuts w cells from the beginning of `s`.
+func (c *Condition) TruncateLeft(s string, w int, prefix string) string {
+ if c.StringWidth(s) <= w {
+ return prefix
+ }
+
+ var width int
+ pos := len(s)
+
+ g := uniseg.NewGraphemes(s)
+ for g.Next() {
+ var chWidth int
+ for _, r := range g.Runes() {
+ chWidth = c.RuneWidth(r)
+ if chWidth > 0 {
+ break // See StringWidth() for details.
+ }
+ }
+
+ if width+chWidth > w {
+ if width < w {
+ _, pos = g.Positions()
+ prefix += strings.Repeat(" ", width+chWidth-w)
+ } else {
+ pos, _ = g.Positions()
+ }
+
+ break
+ }
+
+ width += chWidth
}
- return string(r[0:i]) + tail
+
+ return prefix + s[pos:]
}
// Wrap return string wrapped with w cells
func (c *Condition) Wrap(s string, w int) string {
width := 0
out := ""
- for _, r := range []rune(s) {
- cw := RuneWidth(r)
+ for _, r := range s {
+ cw := c.RuneWidth(r)
if r == '\n' {
out += string(r)
width = 0
@@ -241,6 +328,11 @@ func Truncate(s string, w int, tail string) string {
return DefaultCondition.Truncate(s, w, tail)
}
+// TruncateLeft cuts w cells from the beginning of `s`.
+func TruncateLeft(s string, w int, prefix string) string {
+ return DefaultCondition.TruncateLeft(s, w, prefix)
+}
+
// Wrap return string wrapped with w cells
func Wrap(s string, w int) string {
return DefaultCondition.Wrap(s, w)
@@ -255,3 +347,12 @@ func FillLeft(s string, w int) string {
func FillRight(s string, w int) string {
return DefaultCondition.FillRight(s, w)
}
+
+// CreateLUT will create an in-memory lookup table of 557056 bytes for faster operation.
+// This should not be called concurrently with other operations.
+func CreateLUT() {
+ if len(DefaultCondition.combinedLut) > 0 {
+ return
+ }
+ DefaultCondition.CreateLUT()
+}
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go b/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go
index 7d99f6e521..84b6528dfe 100644
--- a/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go
@@ -1,3 +1,4 @@
+//go:build appengine
// +build appengine
package runewidth
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_js.go b/vendor/github.com/mattn/go-runewidth/runewidth_js.go
index c5fdf40baa..c2abbc2db3 100644
--- a/vendor/github.com/mattn/go-runewidth/runewidth_js.go
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_js.go
@@ -1,5 +1,5 @@
-// +build js
-// +build !appengine
+//go:build js && !appengine
+// +build js,!appengine
package runewidth
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_posix.go b/vendor/github.com/mattn/go-runewidth/runewidth_posix.go
index 480ad74853..5a31d738ec 100644
--- a/vendor/github.com/mattn/go-runewidth/runewidth_posix.go
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_posix.go
@@ -1,6 +1,5 @@
-// +build !windows
-// +build !js
-// +build !appengine
+//go:build !windows && !js && !appengine
+// +build !windows,!js,!appengine
package runewidth
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_table.go b/vendor/github.com/mattn/go-runewidth/runewidth_table.go
index b27d77d891..ad025ad529 100644
--- a/vendor/github.com/mattn/go-runewidth/runewidth_table.go
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_table.go
@@ -4,20 +4,21 @@ package runewidth
var combining = table{
{0x0300, 0x036F}, {0x0483, 0x0489}, {0x07EB, 0x07F3},
- {0x0C00, 0x0C00}, {0x0C04, 0x0C04}, {0x0D00, 0x0D01},
- {0x135D, 0x135F}, {0x1A7F, 0x1A7F}, {0x1AB0, 0x1AC0},
- {0x1B6B, 0x1B73}, {0x1DC0, 0x1DF9}, {0x1DFB, 0x1DFF},
+ {0x0C00, 0x0C00}, {0x0C04, 0x0C04}, {0x0CF3, 0x0CF3},
+ {0x0D00, 0x0D01}, {0x135D, 0x135F}, {0x1A7F, 0x1A7F},
+ {0x1AB0, 0x1ACE}, {0x1B6B, 0x1B73}, {0x1DC0, 0x1DFF},
{0x20D0, 0x20F0}, {0x2CEF, 0x2CF1}, {0x2DE0, 0x2DFF},
{0x3099, 0x309A}, {0xA66F, 0xA672}, {0xA674, 0xA67D},
{0xA69E, 0xA69F}, {0xA6F0, 0xA6F1}, {0xA8E0, 0xA8F1},
{0xFE20, 0xFE2F}, {0x101FD, 0x101FD}, {0x10376, 0x1037A},
- {0x10EAB, 0x10EAC}, {0x10F46, 0x10F50}, {0x11300, 0x11301},
- {0x1133B, 0x1133C}, {0x11366, 0x1136C}, {0x11370, 0x11374},
- {0x16AF0, 0x16AF4}, {0x1D165, 0x1D169}, {0x1D16D, 0x1D172},
+ {0x10EAB, 0x10EAC}, {0x10F46, 0x10F50}, {0x10F82, 0x10F85},
+ {0x11300, 0x11301}, {0x1133B, 0x1133C}, {0x11366, 0x1136C},
+ {0x11370, 0x11374}, {0x16AF0, 0x16AF4}, {0x1CF00, 0x1CF2D},
+ {0x1CF30, 0x1CF46}, {0x1D165, 0x1D169}, {0x1D16D, 0x1D172},
{0x1D17B, 0x1D182}, {0x1D185, 0x1D18B}, {0x1D1AA, 0x1D1AD},
{0x1D242, 0x1D244}, {0x1E000, 0x1E006}, {0x1E008, 0x1E018},
{0x1E01B, 0x1E021}, {0x1E023, 0x1E024}, {0x1E026, 0x1E02A},
- {0x1E8D0, 0x1E8D6},
+ {0x1E08F, 0x1E08F}, {0x1E8D0, 0x1E8D6},
}
var doublewidth = table{
@@ -33,33 +34,34 @@ var doublewidth = table{
{0x2753, 0x2755}, {0x2757, 0x2757}, {0x2795, 0x2797},
{0x27B0, 0x27B0}, {0x27BF, 0x27BF}, {0x2B1B, 0x2B1C},
{0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x2E80, 0x2E99},
- {0x2E9B, 0x2EF3}, {0x2F00, 0x2FD5}, {0x2FF0, 0x2FFB},
- {0x3000, 0x303E}, {0x3041, 0x3096}, {0x3099, 0x30FF},
- {0x3105, 0x312F}, {0x3131, 0x318E}, {0x3190, 0x31E3},
- {0x31F0, 0x321E}, {0x3220, 0x3247}, {0x3250, 0x4DBF},
- {0x4E00, 0xA48C}, {0xA490, 0xA4C6}, {0xA960, 0xA97C},
- {0xAC00, 0xD7A3}, {0xF900, 0xFAFF}, {0xFE10, 0xFE19},
- {0xFE30, 0xFE52}, {0xFE54, 0xFE66}, {0xFE68, 0xFE6B},
- {0xFF01, 0xFF60}, {0xFFE0, 0xFFE6}, {0x16FE0, 0x16FE4},
- {0x16FF0, 0x16FF1}, {0x17000, 0x187F7}, {0x18800, 0x18CD5},
- {0x18D00, 0x18D08}, {0x1B000, 0x1B11E}, {0x1B150, 0x1B152},
- {0x1B164, 0x1B167}, {0x1B170, 0x1B2FB}, {0x1F004, 0x1F004},
- {0x1F0CF, 0x1F0CF}, {0x1F18E, 0x1F18E}, {0x1F191, 0x1F19A},
- {0x1F200, 0x1F202}, {0x1F210, 0x1F23B}, {0x1F240, 0x1F248},
- {0x1F250, 0x1F251}, {0x1F260, 0x1F265}, {0x1F300, 0x1F320},
- {0x1F32D, 0x1F335}, {0x1F337, 0x1F37C}, {0x1F37E, 0x1F393},
- {0x1F3A0, 0x1F3CA}, {0x1F3CF, 0x1F3D3}, {0x1F3E0, 0x1F3F0},
- {0x1F3F4, 0x1F3F4}, {0x1F3F8, 0x1F43E}, {0x1F440, 0x1F440},
- {0x1F442, 0x1F4FC}, {0x1F4FF, 0x1F53D}, {0x1F54B, 0x1F54E},
- {0x1F550, 0x1F567}, {0x1F57A, 0x1F57A}, {0x1F595, 0x1F596},
- {0x1F5A4, 0x1F5A4}, {0x1F5FB, 0x1F64F}, {0x1F680, 0x1F6C5},
- {0x1F6CC, 0x1F6CC}, {0x1F6D0, 0x1F6D2}, {0x1F6D5, 0x1F6D7},
- {0x1F6EB, 0x1F6EC}, {0x1F6F4, 0x1F6FC}, {0x1F7E0, 0x1F7EB},
- {0x1F90C, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1F978},
- {0x1F97A, 0x1F9CB}, {0x1F9CD, 0x1F9FF}, {0x1FA70, 0x1FA74},
- {0x1FA78, 0x1FA7A}, {0x1FA80, 0x1FA86}, {0x1FA90, 0x1FAA8},
- {0x1FAB0, 0x1FAB6}, {0x1FAC0, 0x1FAC2}, {0x1FAD0, 0x1FAD6},
- {0x20000, 0x2FFFD}, {0x30000, 0x3FFFD},
+ {0x2E9B, 0x2EF3}, {0x2F00, 0x2FD5}, {0x2FF0, 0x303E},
+ {0x3041, 0x3096}, {0x3099, 0x30FF}, {0x3105, 0x312F},
+ {0x3131, 0x318E}, {0x3190, 0x31E3}, {0x31EF, 0x321E},
+ {0x3220, 0x3247}, {0x3250, 0x4DBF}, {0x4E00, 0xA48C},
+ {0xA490, 0xA4C6}, {0xA960, 0xA97C}, {0xAC00, 0xD7A3},
+ {0xF900, 0xFAFF}, {0xFE10, 0xFE19}, {0xFE30, 0xFE52},
+ {0xFE54, 0xFE66}, {0xFE68, 0xFE6B}, {0xFF01, 0xFF60},
+ {0xFFE0, 0xFFE6}, {0x16FE0, 0x16FE4}, {0x16FF0, 0x16FF1},
+ {0x17000, 0x187F7}, {0x18800, 0x18CD5}, {0x18D00, 0x18D08},
+ {0x1AFF0, 0x1AFF3}, {0x1AFF5, 0x1AFFB}, {0x1AFFD, 0x1AFFE},
+ {0x1B000, 0x1B122}, {0x1B132, 0x1B132}, {0x1B150, 0x1B152},
+ {0x1B155, 0x1B155}, {0x1B164, 0x1B167}, {0x1B170, 0x1B2FB},
+ {0x1F004, 0x1F004}, {0x1F0CF, 0x1F0CF}, {0x1F18E, 0x1F18E},
+ {0x1F191, 0x1F19A}, {0x1F200, 0x1F202}, {0x1F210, 0x1F23B},
+ {0x1F240, 0x1F248}, {0x1F250, 0x1F251}, {0x1F260, 0x1F265},
+ {0x1F300, 0x1F320}, {0x1F32D, 0x1F335}, {0x1F337, 0x1F37C},
+ {0x1F37E, 0x1F393}, {0x1F3A0, 0x1F3CA}, {0x1F3CF, 0x1F3D3},
+ {0x1F3E0, 0x1F3F0}, {0x1F3F4, 0x1F3F4}, {0x1F3F8, 0x1F43E},
+ {0x1F440, 0x1F440}, {0x1F442, 0x1F4FC}, {0x1F4FF, 0x1F53D},
+ {0x1F54B, 0x1F54E}, {0x1F550, 0x1F567}, {0x1F57A, 0x1F57A},
+ {0x1F595, 0x1F596}, {0x1F5A4, 0x1F5A4}, {0x1F5FB, 0x1F64F},
+ {0x1F680, 0x1F6C5}, {0x1F6CC, 0x1F6CC}, {0x1F6D0, 0x1F6D2},
+ {0x1F6D5, 0x1F6D7}, {0x1F6DC, 0x1F6DF}, {0x1F6EB, 0x1F6EC},
+ {0x1F6F4, 0x1F6FC}, {0x1F7E0, 0x1F7EB}, {0x1F7F0, 0x1F7F0},
+ {0x1F90C, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1F9FF},
+ {0x1FA70, 0x1FA7C}, {0x1FA80, 0x1FA88}, {0x1FA90, 0x1FABD},
+ {0x1FABF, 0x1FAC5}, {0x1FACE, 0x1FADB}, {0x1FAE0, 0x1FAE8},
+ {0x1FAF0, 0x1FAF8}, {0x20000, 0x2FFFD}, {0x30000, 0x3FFFD},
}
var ambiguous = table{
@@ -124,8 +126,10 @@ var ambiguous = table{
{0x1F18F, 0x1F190}, {0x1F19B, 0x1F1AC}, {0xE0100, 0xE01EF},
{0xF0000, 0xFFFFD}, {0x100000, 0x10FFFD},
}
-var notassigned = table{
- {0x27E6, 0x27ED}, {0x2985, 0x2986},
+var narrow = table{
+ {0x0020, 0x007E}, {0x00A2, 0x00A3}, {0x00A5, 0x00A6},
+ {0x00AC, 0x00AC}, {0x00AF, 0x00AF}, {0x27E6, 0x27ED},
+ {0x2985, 0x2986},
}
var neutral = table{
@@ -152,43 +156,43 @@ var neutral = table{
{0x0402, 0x040F}, {0x0450, 0x0450}, {0x0452, 0x052F},
{0x0531, 0x0556}, {0x0559, 0x058A}, {0x058D, 0x058F},
{0x0591, 0x05C7}, {0x05D0, 0x05EA}, {0x05EF, 0x05F4},
- {0x0600, 0x061C}, {0x061E, 0x070D}, {0x070F, 0x074A},
- {0x074D, 0x07B1}, {0x07C0, 0x07FA}, {0x07FD, 0x082D},
- {0x0830, 0x083E}, {0x0840, 0x085B}, {0x085E, 0x085E},
- {0x0860, 0x086A}, {0x08A0, 0x08B4}, {0x08B6, 0x08C7},
- {0x08D3, 0x0983}, {0x0985, 0x098C}, {0x098F, 0x0990},
- {0x0993, 0x09A8}, {0x09AA, 0x09B0}, {0x09B2, 0x09B2},
- {0x09B6, 0x09B9}, {0x09BC, 0x09C4}, {0x09C7, 0x09C8},
- {0x09CB, 0x09CE}, {0x09D7, 0x09D7}, {0x09DC, 0x09DD},
- {0x09DF, 0x09E3}, {0x09E6, 0x09FE}, {0x0A01, 0x0A03},
- {0x0A05, 0x0A0A}, {0x0A0F, 0x0A10}, {0x0A13, 0x0A28},
- {0x0A2A, 0x0A30}, {0x0A32, 0x0A33}, {0x0A35, 0x0A36},
- {0x0A38, 0x0A39}, {0x0A3C, 0x0A3C}, {0x0A3E, 0x0A42},
- {0x0A47, 0x0A48}, {0x0A4B, 0x0A4D}, {0x0A51, 0x0A51},
- {0x0A59, 0x0A5C}, {0x0A5E, 0x0A5E}, {0x0A66, 0x0A76},
- {0x0A81, 0x0A83}, {0x0A85, 0x0A8D}, {0x0A8F, 0x0A91},
- {0x0A93, 0x0AA8}, {0x0AAA, 0x0AB0}, {0x0AB2, 0x0AB3},
- {0x0AB5, 0x0AB9}, {0x0ABC, 0x0AC5}, {0x0AC7, 0x0AC9},
- {0x0ACB, 0x0ACD}, {0x0AD0, 0x0AD0}, {0x0AE0, 0x0AE3},
- {0x0AE6, 0x0AF1}, {0x0AF9, 0x0AFF}, {0x0B01, 0x0B03},
- {0x0B05, 0x0B0C}, {0x0B0F, 0x0B10}, {0x0B13, 0x0B28},
- {0x0B2A, 0x0B30}, {0x0B32, 0x0B33}, {0x0B35, 0x0B39},
- {0x0B3C, 0x0B44}, {0x0B47, 0x0B48}, {0x0B4B, 0x0B4D},
- {0x0B55, 0x0B57}, {0x0B5C, 0x0B5D}, {0x0B5F, 0x0B63},
- {0x0B66, 0x0B77}, {0x0B82, 0x0B83}, {0x0B85, 0x0B8A},
- {0x0B8E, 0x0B90}, {0x0B92, 0x0B95}, {0x0B99, 0x0B9A},
- {0x0B9C, 0x0B9C}, {0x0B9E, 0x0B9F}, {0x0BA3, 0x0BA4},
- {0x0BA8, 0x0BAA}, {0x0BAE, 0x0BB9}, {0x0BBE, 0x0BC2},
- {0x0BC6, 0x0BC8}, {0x0BCA, 0x0BCD}, {0x0BD0, 0x0BD0},
- {0x0BD7, 0x0BD7}, {0x0BE6, 0x0BFA}, {0x0C00, 0x0C0C},
- {0x0C0E, 0x0C10}, {0x0C12, 0x0C28}, {0x0C2A, 0x0C39},
- {0x0C3D, 0x0C44}, {0x0C46, 0x0C48}, {0x0C4A, 0x0C4D},
- {0x0C55, 0x0C56}, {0x0C58, 0x0C5A}, {0x0C60, 0x0C63},
+ {0x0600, 0x070D}, {0x070F, 0x074A}, {0x074D, 0x07B1},
+ {0x07C0, 0x07FA}, {0x07FD, 0x082D}, {0x0830, 0x083E},
+ {0x0840, 0x085B}, {0x085E, 0x085E}, {0x0860, 0x086A},
+ {0x0870, 0x088E}, {0x0890, 0x0891}, {0x0898, 0x0983},
+ {0x0985, 0x098C}, {0x098F, 0x0990}, {0x0993, 0x09A8},
+ {0x09AA, 0x09B0}, {0x09B2, 0x09B2}, {0x09B6, 0x09B9},
+ {0x09BC, 0x09C4}, {0x09C7, 0x09C8}, {0x09CB, 0x09CE},
+ {0x09D7, 0x09D7}, {0x09DC, 0x09DD}, {0x09DF, 0x09E3},
+ {0x09E6, 0x09FE}, {0x0A01, 0x0A03}, {0x0A05, 0x0A0A},
+ {0x0A0F, 0x0A10}, {0x0A13, 0x0A28}, {0x0A2A, 0x0A30},
+ {0x0A32, 0x0A33}, {0x0A35, 0x0A36}, {0x0A38, 0x0A39},
+ {0x0A3C, 0x0A3C}, {0x0A3E, 0x0A42}, {0x0A47, 0x0A48},
+ {0x0A4B, 0x0A4D}, {0x0A51, 0x0A51}, {0x0A59, 0x0A5C},
+ {0x0A5E, 0x0A5E}, {0x0A66, 0x0A76}, {0x0A81, 0x0A83},
+ {0x0A85, 0x0A8D}, {0x0A8F, 0x0A91}, {0x0A93, 0x0AA8},
+ {0x0AAA, 0x0AB0}, {0x0AB2, 0x0AB3}, {0x0AB5, 0x0AB9},
+ {0x0ABC, 0x0AC5}, {0x0AC7, 0x0AC9}, {0x0ACB, 0x0ACD},
+ {0x0AD0, 0x0AD0}, {0x0AE0, 0x0AE3}, {0x0AE6, 0x0AF1},
+ {0x0AF9, 0x0AFF}, {0x0B01, 0x0B03}, {0x0B05, 0x0B0C},
+ {0x0B0F, 0x0B10}, {0x0B13, 0x0B28}, {0x0B2A, 0x0B30},
+ {0x0B32, 0x0B33}, {0x0B35, 0x0B39}, {0x0B3C, 0x0B44},
+ {0x0B47, 0x0B48}, {0x0B4B, 0x0B4D}, {0x0B55, 0x0B57},
+ {0x0B5C, 0x0B5D}, {0x0B5F, 0x0B63}, {0x0B66, 0x0B77},
+ {0x0B82, 0x0B83}, {0x0B85, 0x0B8A}, {0x0B8E, 0x0B90},
+ {0x0B92, 0x0B95}, {0x0B99, 0x0B9A}, {0x0B9C, 0x0B9C},
+ {0x0B9E, 0x0B9F}, {0x0BA3, 0x0BA4}, {0x0BA8, 0x0BAA},
+ {0x0BAE, 0x0BB9}, {0x0BBE, 0x0BC2}, {0x0BC6, 0x0BC8},
+ {0x0BCA, 0x0BCD}, {0x0BD0, 0x0BD0}, {0x0BD7, 0x0BD7},
+ {0x0BE6, 0x0BFA}, {0x0C00, 0x0C0C}, {0x0C0E, 0x0C10},
+ {0x0C12, 0x0C28}, {0x0C2A, 0x0C39}, {0x0C3C, 0x0C44},
+ {0x0C46, 0x0C48}, {0x0C4A, 0x0C4D}, {0x0C55, 0x0C56},
+ {0x0C58, 0x0C5A}, {0x0C5D, 0x0C5D}, {0x0C60, 0x0C63},
{0x0C66, 0x0C6F}, {0x0C77, 0x0C8C}, {0x0C8E, 0x0C90},
{0x0C92, 0x0CA8}, {0x0CAA, 0x0CB3}, {0x0CB5, 0x0CB9},
{0x0CBC, 0x0CC4}, {0x0CC6, 0x0CC8}, {0x0CCA, 0x0CCD},
- {0x0CD5, 0x0CD6}, {0x0CDE, 0x0CDE}, {0x0CE0, 0x0CE3},
- {0x0CE6, 0x0CEF}, {0x0CF1, 0x0CF2}, {0x0D00, 0x0D0C},
+ {0x0CD5, 0x0CD6}, {0x0CDD, 0x0CDE}, {0x0CE0, 0x0CE3},
+ {0x0CE6, 0x0CEF}, {0x0CF1, 0x0CF3}, {0x0D00, 0x0D0C},
{0x0D0E, 0x0D10}, {0x0D12, 0x0D44}, {0x0D46, 0x0D48},
{0x0D4A, 0x0D4F}, {0x0D54, 0x0D63}, {0x0D66, 0x0D7F},
{0x0D81, 0x0D83}, {0x0D85, 0x0D96}, {0x0D9A, 0x0DB1},
@@ -198,7 +202,7 @@ var neutral = table{
{0x0E01, 0x0E3A}, {0x0E3F, 0x0E5B}, {0x0E81, 0x0E82},
{0x0E84, 0x0E84}, {0x0E86, 0x0E8A}, {0x0E8C, 0x0EA3},
{0x0EA5, 0x0EA5}, {0x0EA7, 0x0EBD}, {0x0EC0, 0x0EC4},
- {0x0EC6, 0x0EC6}, {0x0EC8, 0x0ECD}, {0x0ED0, 0x0ED9},
+ {0x0EC6, 0x0EC6}, {0x0EC8, 0x0ECE}, {0x0ED0, 0x0ED9},
{0x0EDC, 0x0EDF}, {0x0F00, 0x0F47}, {0x0F49, 0x0F6C},
{0x0F71, 0x0F97}, {0x0F99, 0x0FBC}, {0x0FBE, 0x0FCC},
{0x0FCE, 0x0FDA}, {0x1000, 0x10C5}, {0x10C7, 0x10C7},
@@ -210,20 +214,19 @@ var neutral = table{
{0x12D8, 0x1310}, {0x1312, 0x1315}, {0x1318, 0x135A},
{0x135D, 0x137C}, {0x1380, 0x1399}, {0x13A0, 0x13F5},
{0x13F8, 0x13FD}, {0x1400, 0x169C}, {0x16A0, 0x16F8},
- {0x1700, 0x170C}, {0x170E, 0x1714}, {0x1720, 0x1736},
- {0x1740, 0x1753}, {0x1760, 0x176C}, {0x176E, 0x1770},
- {0x1772, 0x1773}, {0x1780, 0x17DD}, {0x17E0, 0x17E9},
- {0x17F0, 0x17F9}, {0x1800, 0x180E}, {0x1810, 0x1819},
- {0x1820, 0x1878}, {0x1880, 0x18AA}, {0x18B0, 0x18F5},
- {0x1900, 0x191E}, {0x1920, 0x192B}, {0x1930, 0x193B},
- {0x1940, 0x1940}, {0x1944, 0x196D}, {0x1970, 0x1974},
- {0x1980, 0x19AB}, {0x19B0, 0x19C9}, {0x19D0, 0x19DA},
- {0x19DE, 0x1A1B}, {0x1A1E, 0x1A5E}, {0x1A60, 0x1A7C},
- {0x1A7F, 0x1A89}, {0x1A90, 0x1A99}, {0x1AA0, 0x1AAD},
- {0x1AB0, 0x1AC0}, {0x1B00, 0x1B4B}, {0x1B50, 0x1B7C},
- {0x1B80, 0x1BF3}, {0x1BFC, 0x1C37}, {0x1C3B, 0x1C49},
- {0x1C4D, 0x1C88}, {0x1C90, 0x1CBA}, {0x1CBD, 0x1CC7},
- {0x1CD0, 0x1CFA}, {0x1D00, 0x1DF9}, {0x1DFB, 0x1F15},
+ {0x1700, 0x1715}, {0x171F, 0x1736}, {0x1740, 0x1753},
+ {0x1760, 0x176C}, {0x176E, 0x1770}, {0x1772, 0x1773},
+ {0x1780, 0x17DD}, {0x17E0, 0x17E9}, {0x17F0, 0x17F9},
+ {0x1800, 0x1819}, {0x1820, 0x1878}, {0x1880, 0x18AA},
+ {0x18B0, 0x18F5}, {0x1900, 0x191E}, {0x1920, 0x192B},
+ {0x1930, 0x193B}, {0x1940, 0x1940}, {0x1944, 0x196D},
+ {0x1970, 0x1974}, {0x1980, 0x19AB}, {0x19B0, 0x19C9},
+ {0x19D0, 0x19DA}, {0x19DE, 0x1A1B}, {0x1A1E, 0x1A5E},
+ {0x1A60, 0x1A7C}, {0x1A7F, 0x1A89}, {0x1A90, 0x1A99},
+ {0x1AA0, 0x1AAD}, {0x1AB0, 0x1ACE}, {0x1B00, 0x1B4C},
+ {0x1B50, 0x1B7E}, {0x1B80, 0x1BF3}, {0x1BFC, 0x1C37},
+ {0x1C3B, 0x1C49}, {0x1C4D, 0x1C88}, {0x1C90, 0x1CBA},
+ {0x1CBD, 0x1CC7}, {0x1CD0, 0x1CFA}, {0x1D00, 0x1F15},
{0x1F18, 0x1F1D}, {0x1F20, 0x1F45}, {0x1F48, 0x1F4D},
{0x1F50, 0x1F57}, {0x1F59, 0x1F59}, {0x1F5B, 0x1F5B},
{0x1F5D, 0x1F5D}, {0x1F5F, 0x1F7D}, {0x1F80, 0x1FB4},
@@ -235,7 +238,7 @@ var neutral = table{
{0x2036, 0x203A}, {0x203C, 0x203D}, {0x203F, 0x2064},
{0x2066, 0x2071}, {0x2075, 0x207E}, {0x2080, 0x2080},
{0x2085, 0x208E}, {0x2090, 0x209C}, {0x20A0, 0x20A8},
- {0x20AA, 0x20AB}, {0x20AD, 0x20BF}, {0x20D0, 0x20F0},
+ {0x20AA, 0x20AB}, {0x20AD, 0x20C0}, {0x20D0, 0x20F0},
{0x2100, 0x2102}, {0x2104, 0x2104}, {0x2106, 0x2108},
{0x210A, 0x2112}, {0x2114, 0x2115}, {0x2117, 0x2120},
{0x2123, 0x2125}, {0x2127, 0x212A}, {0x212C, 0x2152},
@@ -273,15 +276,15 @@ var neutral = table{
{0x2780, 0x2794}, {0x2798, 0x27AF}, {0x27B1, 0x27BE},
{0x27C0, 0x27E5}, {0x27EE, 0x2984}, {0x2987, 0x2B1A},
{0x2B1D, 0x2B4F}, {0x2B51, 0x2B54}, {0x2B5A, 0x2B73},
- {0x2B76, 0x2B95}, {0x2B97, 0x2C2E}, {0x2C30, 0x2C5E},
- {0x2C60, 0x2CF3}, {0x2CF9, 0x2D25}, {0x2D27, 0x2D27},
- {0x2D2D, 0x2D2D}, {0x2D30, 0x2D67}, {0x2D6F, 0x2D70},
- {0x2D7F, 0x2D96}, {0x2DA0, 0x2DA6}, {0x2DA8, 0x2DAE},
- {0x2DB0, 0x2DB6}, {0x2DB8, 0x2DBE}, {0x2DC0, 0x2DC6},
- {0x2DC8, 0x2DCE}, {0x2DD0, 0x2DD6}, {0x2DD8, 0x2DDE},
- {0x2DE0, 0x2E52}, {0x303F, 0x303F}, {0x4DC0, 0x4DFF},
- {0xA4D0, 0xA62B}, {0xA640, 0xA6F7}, {0xA700, 0xA7BF},
- {0xA7C2, 0xA7CA}, {0xA7F5, 0xA82C}, {0xA830, 0xA839},
+ {0x2B76, 0x2B95}, {0x2B97, 0x2CF3}, {0x2CF9, 0x2D25},
+ {0x2D27, 0x2D27}, {0x2D2D, 0x2D2D}, {0x2D30, 0x2D67},
+ {0x2D6F, 0x2D70}, {0x2D7F, 0x2D96}, {0x2DA0, 0x2DA6},
+ {0x2DA8, 0x2DAE}, {0x2DB0, 0x2DB6}, {0x2DB8, 0x2DBE},
+ {0x2DC0, 0x2DC6}, {0x2DC8, 0x2DCE}, {0x2DD0, 0x2DD6},
+ {0x2DD8, 0x2DDE}, {0x2DE0, 0x2E5D}, {0x303F, 0x303F},
+ {0x4DC0, 0x4DFF}, {0xA4D0, 0xA62B}, {0xA640, 0xA6F7},
+ {0xA700, 0xA7CA}, {0xA7D0, 0xA7D1}, {0xA7D3, 0xA7D3},
+ {0xA7D5, 0xA7D9}, {0xA7F2, 0xA82C}, {0xA830, 0xA839},
{0xA840, 0xA877}, {0xA880, 0xA8C5}, {0xA8CE, 0xA8D9},
{0xA8E0, 0xA953}, {0xA95F, 0xA95F}, {0xA980, 0xA9CD},
{0xA9CF, 0xA9D9}, {0xA9DE, 0xA9FE}, {0xAA00, 0xAA36},
@@ -292,8 +295,8 @@ var neutral = table{
{0xD7B0, 0xD7C6}, {0xD7CB, 0xD7FB}, {0xD800, 0xDFFF},
{0xFB00, 0xFB06}, {0xFB13, 0xFB17}, {0xFB1D, 0xFB36},
{0xFB38, 0xFB3C}, {0xFB3E, 0xFB3E}, {0xFB40, 0xFB41},
- {0xFB43, 0xFB44}, {0xFB46, 0xFBC1}, {0xFBD3, 0xFD3F},
- {0xFD50, 0xFD8F}, {0xFD92, 0xFDC7}, {0xFDF0, 0xFDFD},
+ {0xFB43, 0xFB44}, {0xFB46, 0xFBC2}, {0xFBD3, 0xFD8F},
+ {0xFD92, 0xFDC7}, {0xFDCF, 0xFDCF}, {0xFDF0, 0xFDFF},
{0xFE20, 0xFE2F}, {0xFE70, 0xFE74}, {0xFE76, 0xFEFC},
{0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFC}, {0x10000, 0x1000B},
{0x1000D, 0x10026}, {0x10028, 0x1003A}, {0x1003C, 0x1003D},
@@ -305,44 +308,48 @@ var neutral = table{
{0x10380, 0x1039D}, {0x1039F, 0x103C3}, {0x103C8, 0x103D5},
{0x10400, 0x1049D}, {0x104A0, 0x104A9}, {0x104B0, 0x104D3},
{0x104D8, 0x104FB}, {0x10500, 0x10527}, {0x10530, 0x10563},
- {0x1056F, 0x1056F}, {0x10600, 0x10736}, {0x10740, 0x10755},
- {0x10760, 0x10767}, {0x10800, 0x10805}, {0x10808, 0x10808},
- {0x1080A, 0x10835}, {0x10837, 0x10838}, {0x1083C, 0x1083C},
- {0x1083F, 0x10855}, {0x10857, 0x1089E}, {0x108A7, 0x108AF},
- {0x108E0, 0x108F2}, {0x108F4, 0x108F5}, {0x108FB, 0x1091B},
- {0x1091F, 0x10939}, {0x1093F, 0x1093F}, {0x10980, 0x109B7},
- {0x109BC, 0x109CF}, {0x109D2, 0x10A03}, {0x10A05, 0x10A06},
- {0x10A0C, 0x10A13}, {0x10A15, 0x10A17}, {0x10A19, 0x10A35},
- {0x10A38, 0x10A3A}, {0x10A3F, 0x10A48}, {0x10A50, 0x10A58},
- {0x10A60, 0x10A9F}, {0x10AC0, 0x10AE6}, {0x10AEB, 0x10AF6},
- {0x10B00, 0x10B35}, {0x10B39, 0x10B55}, {0x10B58, 0x10B72},
- {0x10B78, 0x10B91}, {0x10B99, 0x10B9C}, {0x10BA9, 0x10BAF},
- {0x10C00, 0x10C48}, {0x10C80, 0x10CB2}, {0x10CC0, 0x10CF2},
- {0x10CFA, 0x10D27}, {0x10D30, 0x10D39}, {0x10E60, 0x10E7E},
- {0x10E80, 0x10EA9}, {0x10EAB, 0x10EAD}, {0x10EB0, 0x10EB1},
- {0x10F00, 0x10F27}, {0x10F30, 0x10F59}, {0x10FB0, 0x10FCB},
- {0x10FE0, 0x10FF6}, {0x11000, 0x1104D}, {0x11052, 0x1106F},
- {0x1107F, 0x110C1}, {0x110CD, 0x110CD}, {0x110D0, 0x110E8},
- {0x110F0, 0x110F9}, {0x11100, 0x11134}, {0x11136, 0x11147},
- {0x11150, 0x11176}, {0x11180, 0x111DF}, {0x111E1, 0x111F4},
- {0x11200, 0x11211}, {0x11213, 0x1123E}, {0x11280, 0x11286},
- {0x11288, 0x11288}, {0x1128A, 0x1128D}, {0x1128F, 0x1129D},
- {0x1129F, 0x112A9}, {0x112B0, 0x112EA}, {0x112F0, 0x112F9},
- {0x11300, 0x11303}, {0x11305, 0x1130C}, {0x1130F, 0x11310},
- {0x11313, 0x11328}, {0x1132A, 0x11330}, {0x11332, 0x11333},
- {0x11335, 0x11339}, {0x1133B, 0x11344}, {0x11347, 0x11348},
- {0x1134B, 0x1134D}, {0x11350, 0x11350}, {0x11357, 0x11357},
- {0x1135D, 0x11363}, {0x11366, 0x1136C}, {0x11370, 0x11374},
- {0x11400, 0x1145B}, {0x1145D, 0x11461}, {0x11480, 0x114C7},
- {0x114D0, 0x114D9}, {0x11580, 0x115B5}, {0x115B8, 0x115DD},
- {0x11600, 0x11644}, {0x11650, 0x11659}, {0x11660, 0x1166C},
- {0x11680, 0x116B8}, {0x116C0, 0x116C9}, {0x11700, 0x1171A},
- {0x1171D, 0x1172B}, {0x11730, 0x1173F}, {0x11800, 0x1183B},
- {0x118A0, 0x118F2}, {0x118FF, 0x11906}, {0x11909, 0x11909},
- {0x1190C, 0x11913}, {0x11915, 0x11916}, {0x11918, 0x11935},
- {0x11937, 0x11938}, {0x1193B, 0x11946}, {0x11950, 0x11959},
- {0x119A0, 0x119A7}, {0x119AA, 0x119D7}, {0x119DA, 0x119E4},
- {0x11A00, 0x11A47}, {0x11A50, 0x11AA2}, {0x11AC0, 0x11AF8},
+ {0x1056F, 0x1057A}, {0x1057C, 0x1058A}, {0x1058C, 0x10592},
+ {0x10594, 0x10595}, {0x10597, 0x105A1}, {0x105A3, 0x105B1},
+ {0x105B3, 0x105B9}, {0x105BB, 0x105BC}, {0x10600, 0x10736},
+ {0x10740, 0x10755}, {0x10760, 0x10767}, {0x10780, 0x10785},
+ {0x10787, 0x107B0}, {0x107B2, 0x107BA}, {0x10800, 0x10805},
+ {0x10808, 0x10808}, {0x1080A, 0x10835}, {0x10837, 0x10838},
+ {0x1083C, 0x1083C}, {0x1083F, 0x10855}, {0x10857, 0x1089E},
+ {0x108A7, 0x108AF}, {0x108E0, 0x108F2}, {0x108F4, 0x108F5},
+ {0x108FB, 0x1091B}, {0x1091F, 0x10939}, {0x1093F, 0x1093F},
+ {0x10980, 0x109B7}, {0x109BC, 0x109CF}, {0x109D2, 0x10A03},
+ {0x10A05, 0x10A06}, {0x10A0C, 0x10A13}, {0x10A15, 0x10A17},
+ {0x10A19, 0x10A35}, {0x10A38, 0x10A3A}, {0x10A3F, 0x10A48},
+ {0x10A50, 0x10A58}, {0x10A60, 0x10A9F}, {0x10AC0, 0x10AE6},
+ {0x10AEB, 0x10AF6}, {0x10B00, 0x10B35}, {0x10B39, 0x10B55},
+ {0x10B58, 0x10B72}, {0x10B78, 0x10B91}, {0x10B99, 0x10B9C},
+ {0x10BA9, 0x10BAF}, {0x10C00, 0x10C48}, {0x10C80, 0x10CB2},
+ {0x10CC0, 0x10CF2}, {0x10CFA, 0x10D27}, {0x10D30, 0x10D39},
+ {0x10E60, 0x10E7E}, {0x10E80, 0x10EA9}, {0x10EAB, 0x10EAD},
+ {0x10EB0, 0x10EB1}, {0x10EFD, 0x10F27}, {0x10F30, 0x10F59},
+ {0x10F70, 0x10F89}, {0x10FB0, 0x10FCB}, {0x10FE0, 0x10FF6},
+ {0x11000, 0x1104D}, {0x11052, 0x11075}, {0x1107F, 0x110C2},
+ {0x110CD, 0x110CD}, {0x110D0, 0x110E8}, {0x110F0, 0x110F9},
+ {0x11100, 0x11134}, {0x11136, 0x11147}, {0x11150, 0x11176},
+ {0x11180, 0x111DF}, {0x111E1, 0x111F4}, {0x11200, 0x11211},
+ {0x11213, 0x11241}, {0x11280, 0x11286}, {0x11288, 0x11288},
+ {0x1128A, 0x1128D}, {0x1128F, 0x1129D}, {0x1129F, 0x112A9},
+ {0x112B0, 0x112EA}, {0x112F0, 0x112F9}, {0x11300, 0x11303},
+ {0x11305, 0x1130C}, {0x1130F, 0x11310}, {0x11313, 0x11328},
+ {0x1132A, 0x11330}, {0x11332, 0x11333}, {0x11335, 0x11339},
+ {0x1133B, 0x11344}, {0x11347, 0x11348}, {0x1134B, 0x1134D},
+ {0x11350, 0x11350}, {0x11357, 0x11357}, {0x1135D, 0x11363},
+ {0x11366, 0x1136C}, {0x11370, 0x11374}, {0x11400, 0x1145B},
+ {0x1145D, 0x11461}, {0x11480, 0x114C7}, {0x114D0, 0x114D9},
+ {0x11580, 0x115B5}, {0x115B8, 0x115DD}, {0x11600, 0x11644},
+ {0x11650, 0x11659}, {0x11660, 0x1166C}, {0x11680, 0x116B9},
+ {0x116C0, 0x116C9}, {0x11700, 0x1171A}, {0x1171D, 0x1172B},
+ {0x11730, 0x11746}, {0x11800, 0x1183B}, {0x118A0, 0x118F2},
+ {0x118FF, 0x11906}, {0x11909, 0x11909}, {0x1190C, 0x11913},
+ {0x11915, 0x11916}, {0x11918, 0x11935}, {0x11937, 0x11938},
+ {0x1193B, 0x11946}, {0x11950, 0x11959}, {0x119A0, 0x119A7},
+ {0x119AA, 0x119D7}, {0x119DA, 0x119E4}, {0x11A00, 0x11A47},
+ {0x11A50, 0x11AA2}, {0x11AB0, 0x11AF8}, {0x11B00, 0x11B09},
{0x11C00, 0x11C08}, {0x11C0A, 0x11C36}, {0x11C38, 0x11C45},
{0x11C50, 0x11C6C}, {0x11C70, 0x11C8F}, {0x11C92, 0x11CA7},
{0x11CA9, 0x11CB6}, {0x11D00, 0x11D06}, {0x11D08, 0x11D09},
@@ -350,30 +357,36 @@ var neutral = table{
{0x11D3F, 0x11D47}, {0x11D50, 0x11D59}, {0x11D60, 0x11D65},
{0x11D67, 0x11D68}, {0x11D6A, 0x11D8E}, {0x11D90, 0x11D91},
{0x11D93, 0x11D98}, {0x11DA0, 0x11DA9}, {0x11EE0, 0x11EF8},
+ {0x11F00, 0x11F10}, {0x11F12, 0x11F3A}, {0x11F3E, 0x11F59},
{0x11FB0, 0x11FB0}, {0x11FC0, 0x11FF1}, {0x11FFF, 0x12399},
{0x12400, 0x1246E}, {0x12470, 0x12474}, {0x12480, 0x12543},
- {0x13000, 0x1342E}, {0x13430, 0x13438}, {0x14400, 0x14646},
+ {0x12F90, 0x12FF2}, {0x13000, 0x13455}, {0x14400, 0x14646},
{0x16800, 0x16A38}, {0x16A40, 0x16A5E}, {0x16A60, 0x16A69},
- {0x16A6E, 0x16A6F}, {0x16AD0, 0x16AED}, {0x16AF0, 0x16AF5},
- {0x16B00, 0x16B45}, {0x16B50, 0x16B59}, {0x16B5B, 0x16B61},
- {0x16B63, 0x16B77}, {0x16B7D, 0x16B8F}, {0x16E40, 0x16E9A},
- {0x16F00, 0x16F4A}, {0x16F4F, 0x16F87}, {0x16F8F, 0x16F9F},
- {0x1BC00, 0x1BC6A}, {0x1BC70, 0x1BC7C}, {0x1BC80, 0x1BC88},
- {0x1BC90, 0x1BC99}, {0x1BC9C, 0x1BCA3}, {0x1D000, 0x1D0F5},
- {0x1D100, 0x1D126}, {0x1D129, 0x1D1E8}, {0x1D200, 0x1D245},
- {0x1D2E0, 0x1D2F3}, {0x1D300, 0x1D356}, {0x1D360, 0x1D378},
- {0x1D400, 0x1D454}, {0x1D456, 0x1D49C}, {0x1D49E, 0x1D49F},
- {0x1D4A2, 0x1D4A2}, {0x1D4A5, 0x1D4A6}, {0x1D4A9, 0x1D4AC},
- {0x1D4AE, 0x1D4B9}, {0x1D4BB, 0x1D4BB}, {0x1D4BD, 0x1D4C3},
- {0x1D4C5, 0x1D505}, {0x1D507, 0x1D50A}, {0x1D50D, 0x1D514},
- {0x1D516, 0x1D51C}, {0x1D51E, 0x1D539}, {0x1D53B, 0x1D53E},
- {0x1D540, 0x1D544}, {0x1D546, 0x1D546}, {0x1D54A, 0x1D550},
- {0x1D552, 0x1D6A5}, {0x1D6A8, 0x1D7CB}, {0x1D7CE, 0x1DA8B},
- {0x1DA9B, 0x1DA9F}, {0x1DAA1, 0x1DAAF}, {0x1E000, 0x1E006},
- {0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, {0x1E023, 0x1E024},
- {0x1E026, 0x1E02A}, {0x1E100, 0x1E12C}, {0x1E130, 0x1E13D},
- {0x1E140, 0x1E149}, {0x1E14E, 0x1E14F}, {0x1E2C0, 0x1E2F9},
- {0x1E2FF, 0x1E2FF}, {0x1E800, 0x1E8C4}, {0x1E8C7, 0x1E8D6},
+ {0x16A6E, 0x16ABE}, {0x16AC0, 0x16AC9}, {0x16AD0, 0x16AED},
+ {0x16AF0, 0x16AF5}, {0x16B00, 0x16B45}, {0x16B50, 0x16B59},
+ {0x16B5B, 0x16B61}, {0x16B63, 0x16B77}, {0x16B7D, 0x16B8F},
+ {0x16E40, 0x16E9A}, {0x16F00, 0x16F4A}, {0x16F4F, 0x16F87},
+ {0x16F8F, 0x16F9F}, {0x1BC00, 0x1BC6A}, {0x1BC70, 0x1BC7C},
+ {0x1BC80, 0x1BC88}, {0x1BC90, 0x1BC99}, {0x1BC9C, 0x1BCA3},
+ {0x1CF00, 0x1CF2D}, {0x1CF30, 0x1CF46}, {0x1CF50, 0x1CFC3},
+ {0x1D000, 0x1D0F5}, {0x1D100, 0x1D126}, {0x1D129, 0x1D1EA},
+ {0x1D200, 0x1D245}, {0x1D2C0, 0x1D2D3}, {0x1D2E0, 0x1D2F3},
+ {0x1D300, 0x1D356}, {0x1D360, 0x1D378}, {0x1D400, 0x1D454},
+ {0x1D456, 0x1D49C}, {0x1D49E, 0x1D49F}, {0x1D4A2, 0x1D4A2},
+ {0x1D4A5, 0x1D4A6}, {0x1D4A9, 0x1D4AC}, {0x1D4AE, 0x1D4B9},
+ {0x1D4BB, 0x1D4BB}, {0x1D4BD, 0x1D4C3}, {0x1D4C5, 0x1D505},
+ {0x1D507, 0x1D50A}, {0x1D50D, 0x1D514}, {0x1D516, 0x1D51C},
+ {0x1D51E, 0x1D539}, {0x1D53B, 0x1D53E}, {0x1D540, 0x1D544},
+ {0x1D546, 0x1D546}, {0x1D54A, 0x1D550}, {0x1D552, 0x1D6A5},
+ {0x1D6A8, 0x1D7CB}, {0x1D7CE, 0x1DA8B}, {0x1DA9B, 0x1DA9F},
+ {0x1DAA1, 0x1DAAF}, {0x1DF00, 0x1DF1E}, {0x1DF25, 0x1DF2A},
+ {0x1E000, 0x1E006}, {0x1E008, 0x1E018}, {0x1E01B, 0x1E021},
+ {0x1E023, 0x1E024}, {0x1E026, 0x1E02A}, {0x1E030, 0x1E06D},
+ {0x1E08F, 0x1E08F}, {0x1E100, 0x1E12C}, {0x1E130, 0x1E13D},
+ {0x1E140, 0x1E149}, {0x1E14E, 0x1E14F}, {0x1E290, 0x1E2AE},
+ {0x1E2C0, 0x1E2F9}, {0x1E2FF, 0x1E2FF}, {0x1E4D0, 0x1E4F9},
+ {0x1E7E0, 0x1E7E6}, {0x1E7E8, 0x1E7EB}, {0x1E7ED, 0x1E7EE},
+ {0x1E7F0, 0x1E7FE}, {0x1E800, 0x1E8C4}, {0x1E8C7, 0x1E8D6},
{0x1E900, 0x1E94B}, {0x1E950, 0x1E959}, {0x1E95E, 0x1E95F},
{0x1EC71, 0x1ECB4}, {0x1ED01, 0x1ED3D}, {0x1EE00, 0x1EE03},
{0x1EE05, 0x1EE1F}, {0x1EE21, 0x1EE22}, {0x1EE24, 0x1EE24},
@@ -398,8 +411,8 @@ var neutral = table{
{0x1F54F, 0x1F54F}, {0x1F568, 0x1F579}, {0x1F57B, 0x1F594},
{0x1F597, 0x1F5A3}, {0x1F5A5, 0x1F5FA}, {0x1F650, 0x1F67F},
{0x1F6C6, 0x1F6CB}, {0x1F6CD, 0x1F6CF}, {0x1F6D3, 0x1F6D4},
- {0x1F6E0, 0x1F6EA}, {0x1F6F0, 0x1F6F3}, {0x1F700, 0x1F773},
- {0x1F780, 0x1F7D8}, {0x1F800, 0x1F80B}, {0x1F810, 0x1F847},
+ {0x1F6E0, 0x1F6EA}, {0x1F6F0, 0x1F6F3}, {0x1F700, 0x1F776},
+ {0x1F77B, 0x1F7D9}, {0x1F800, 0x1F80B}, {0x1F810, 0x1F847},
{0x1F850, 0x1F859}, {0x1F860, 0x1F887}, {0x1F890, 0x1F8AD},
{0x1F8B0, 0x1F8B1}, {0x1F900, 0x1F90B}, {0x1F93B, 0x1F93B},
{0x1F946, 0x1F946}, {0x1FA00, 0x1FA53}, {0x1FA60, 0x1FA6D},
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_windows.go b/vendor/github.com/mattn/go-runewidth/runewidth_windows.go
index d6a61777d7..5f987a310f 100644
--- a/vendor/github.com/mattn/go-runewidth/runewidth_windows.go
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_windows.go
@@ -1,5 +1,5 @@
-// +build windows
-// +build !appengine
+//go:build windows && !appengine
+// +build windows,!appengine
package runewidth
diff --git a/vendor/github.com/mgechev/revive/config/config.go b/vendor/github.com/mgechev/revive/config/config.go
index 50a2b8966f..34340b71c8 100644
--- a/vendor/github.com/mgechev/revive/config/config.go
+++ b/vendor/github.com/mgechev/revive/config/config.go
@@ -95,18 +95,26 @@ var allRules = append([]lint.Rule{
&rule.EnforceRepeatedArgTypeStyleRule{},
&rule.EnforceSliceStyleRule{},
&rule.MaxControlNestingRule{},
+ &rule.CommentsDensityRule{},
+ &rule.FileLengthLimitRule{},
+ &rule.FilenameFormatRule{},
+ &rule.RedundantBuildTagRule{},
+ &rule.UseErrorsNewRule{},
+ &rule.RedundantTestMainExitRule{},
}, defaultRules...)
+// allFormatters is a list of all available formatters to output the linting results.
+// Keep the list sorted and in sync with available formatters in README.md.
var allFormatters = []lint.Formatter{
- &formatter.Stylish{},
+ &formatter.Checkstyle{},
+ &formatter.Default{},
&formatter.Friendly{},
&formatter.JSON{},
&formatter.NDJSON{},
- &formatter.Default{},
- &formatter.Unix{},
- &formatter.Checkstyle{},
&formatter.Plain{},
&formatter.Sarif{},
+ &formatter.Stylish{},
+ &formatter.Unix{},
}
func getFormatters() map[string]lint.Formatter {
@@ -142,6 +150,12 @@ func GetLintingRules(config *lint.Config, extraRules []lint.Rule) ([]lint.Rule,
continue // skip disabled rules
}
+ if r, ok := r.(lint.ConfigurableRule); ok {
+ if err := r.Configure(ruleConfig.Arguments); err != nil {
+ return nil, fmt.Errorf("cannot configure rule: %q: %w", name, err)
+ }
+ }
+
lintingRules = append(lintingRules, r)
}
@@ -162,14 +176,14 @@ func parseConfig(path string, config *lint.Config) error {
if err != nil {
return errors.New("cannot read the config file")
}
- _, err = toml.Decode(string(file), config)
+ err = toml.Unmarshal(file, config)
if err != nil {
- return fmt.Errorf("cannot parse the config file: %v", err)
+ return fmt.Errorf("cannot parse the config file: %w", err)
}
for k, r := range config.Rules {
err := r.Initialize()
if err != nil {
- return fmt.Errorf("error in config of rule [%s] : [%v]", k, err)
+ return fmt.Errorf("error in config of rule [%s] : [%w]", k, err)
}
config.Rules[k] = r
}
@@ -235,15 +249,14 @@ func GetConfig(configPath string) (*lint.Config, error) {
// GetFormatter yields the formatter for lint failures
func GetFormatter(formatterName string) (lint.Formatter, error) {
formatters := getFormatters()
- fmtr := formatters["default"]
- if formatterName != "" {
- f, ok := formatters[formatterName]
- if !ok {
- return nil, fmt.Errorf("unknown formatter %v", formatterName)
- }
- fmtr = f
+ if formatterName == "" {
+ return formatters["default"], nil
+ }
+ f, ok := formatters[formatterName]
+ if !ok {
+ return nil, fmt.Errorf("unknown formatter %v", formatterName)
}
- return fmtr, nil
+ return f, nil
}
func defaultConfig() *lint.Config {
diff --git a/vendor/github.com/mgechev/revive/formatter/checkstyle.go b/vendor/github.com/mgechev/revive/formatter/checkstyle.go
index f45b63c925..8fe85fae52 100644
--- a/vendor/github.com/mgechev/revive/formatter/checkstyle.go
+++ b/vendor/github.com/mgechev/revive/formatter/checkstyle.go
@@ -45,7 +45,7 @@ func (*Checkstyle) Format(failures <-chan lint.Failure, config lint.Config) (str
}
fn := failure.GetFilename()
if issues[fn] == nil {
- issues[fn] = make([]issue, 0)
+ issues[fn] = []issue{}
}
issues[fn] = append(issues[fn], iss)
}
diff --git a/vendor/github.com/mgechev/revive/formatter/default.go b/vendor/github.com/mgechev/revive/formatter/default.go
index 2d5a04434f..7af4aad066 100644
--- a/vendor/github.com/mgechev/revive/formatter/default.go
+++ b/vendor/github.com/mgechev/revive/formatter/default.go
@@ -26,3 +26,7 @@ func (*Default) Format(failures <-chan lint.Failure, _ lint.Config) (string, err
}
return buf.String(), nil
}
+
+func ruleDescriptionURL(ruleName string) string {
+ return "https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#" + ruleName
+}
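
ruleDescriptionURL replaces the revive.run links used across the formatters: it appends the rule name as a Markdown anchor to the RULES_DESCRIPTIONS.md URL. The helper is unexported, but the computation is plain concatenation; for the rule name "early-return" the formatters now print:

    https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#early-return
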
diff --git a/vendor/github.com/mgechev/revive/formatter/friendly.go b/vendor/github.com/mgechev/revive/formatter/friendly.go
index 5ff329a23c..9c1a0f6171 100644
--- a/vendor/github.com/mgechev/revive/formatter/friendly.go
+++ b/vendor/github.com/mgechev/revive/formatter/friendly.go
@@ -2,9 +2,11 @@ package formatter
import (
"bytes"
+ "cmp"
"fmt"
"io"
- "sort"
+ "slices"
+ "strings"
"github.com/fatih/color"
"github.com/mgechev/revive/lint"
@@ -32,7 +34,7 @@ func (*Friendly) Name() string {
// Format formats the failures gotten from the lint.
func (f *Friendly) Format(failures <-chan lint.Failure, config lint.Config) (string, error) {
- var buf bytes.Buffer
+ var buf strings.Builder
errorMap := map[string]int{}
warningMap := map[string]int{}
totalErrors := 0
@@ -40,38 +42,38 @@ func (f *Friendly) Format(failures <-chan lint.Failure, config lint.Config) (str
for failure := range failures {
sev := severity(config, failure)
f.printFriendlyFailure(&buf, failure, sev)
- if sev == lint.SeverityWarning {
+ switch sev {
+ case lint.SeverityWarning:
warningMap[failure.RuleName]++
totalWarnings++
- }
- if sev == lint.SeverityError {
+ case lint.SeverityError:
errorMap[failure.RuleName]++
totalErrors++
}
}
+
f.printSummary(&buf, totalErrors, totalWarnings)
f.printStatistics(&buf, color.RedString("Errors:"), errorMap)
f.printStatistics(&buf, color.YellowString("Warnings:"), warningMap)
return buf.String(), nil
}
-func (f *Friendly) printFriendlyFailure(w io.Writer, failure lint.Failure, severity lint.Severity) {
- f.printHeaderRow(w, failure, severity)
- f.printFilePosition(w, failure)
- fmt.Fprintln(w)
- fmt.Fprintln(w)
+func (f *Friendly) printFriendlyFailure(sb *strings.Builder, failure lint.Failure, severity lint.Severity) {
+ f.printHeaderRow(sb, failure, severity)
+ f.printFilePosition(sb, failure)
+ sb.WriteString("\n\n")
}
-func (f *Friendly) printHeaderRow(w io.Writer, failure lint.Failure, severity lint.Severity) {
+func (f *Friendly) printHeaderRow(sb *strings.Builder, failure lint.Failure, severity lint.Severity) {
emoji := getWarningEmoji()
if severity == lint.SeverityError {
emoji = getErrorEmoji()
}
- fmt.Fprint(w, f.table([][]string{{emoji, "https://revive.run/r#" + failure.RuleName, color.GreenString(failure.Failure)}}))
+ sb.WriteString(f.table([][]string{{emoji, ruleDescriptionURL(failure.RuleName), color.GreenString(failure.Failure)}}))
}
-func (*Friendly) printFilePosition(w io.Writer, failure lint.Failure) {
- fmt.Fprintf(w, " %s:%d:%d", failure.GetFilename(), failure.Position.Start.Line, failure.Position.Start.Column)
+func (*Friendly) printFilePosition(sb *strings.Builder, failure lint.Failure) {
+ sb.WriteString(fmt.Sprintf(" %s:%d:%d", failure.GetFilename(), failure.Position.Start.Line, failure.Position.Start.Column))
}
type statEntry struct {
@@ -98,13 +100,11 @@ func (*Friendly) printSummary(w io.Writer, errors, warnings int) {
}
str := fmt.Sprintf("%d %s (%d %s, %d %s)", errors+warnings, problemsLabel, errors, errorsLabel, warnings, warningsLabel)
if errors > 0 {
- fmt.Fprintf(w, "%s %s\n", emoji, color.RedString(str))
- fmt.Fprintln(w)
+ fmt.Fprintf(w, "%s %s\n\n", emoji, color.RedString(str))
return
}
if warnings > 0 {
- fmt.Fprintf(w, "%s %s\n", emoji, color.YellowString(str))
- fmt.Fprintln(w)
+ fmt.Fprintf(w, "%s %s\n\n", emoji, color.YellowString(str))
return
}
}
@@ -113,12 +113,12 @@ func (f *Friendly) printStatistics(w io.Writer, header string, stats map[string]
if len(stats) == 0 {
return
}
- var data []statEntry
+ data := make([]statEntry, 0, len(stats))
for name, total := range stats {
data = append(data, statEntry{name, total})
}
- sort.Slice(data, func(i, j int) bool {
- return data[i].failures > data[j].failures
+ slices.SortFunc(data, func(a, b statEntry) int {
+ return -cmp.Compare(a.failures, b.failures)
})
formatted := [][]string{}
for _, entry := range data {
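
The friendly formatter now sorts its per-rule statistics with slices.SortFunc from the standard library instead of sort.Slice, negating cmp.Compare to get a descending order by failure count. The same pattern in isolation (Go 1.21+, struct and data are illustrative):

    package main

    import (
        "cmp"
        "fmt"
        "slices"
    )

    type statEntry struct {
        name     string
        failures int
    }

    func main() {
        data := []statEntry{{"var-naming", 2}, {"early-return", 7}, {"exported", 4}}

        // Negating cmp.Compare sorts by failure count, highest first.
        slices.SortFunc(data, func(a, b statEntry) int {
            return -cmp.Compare(a.failures, b.failures)
        })

        fmt.Println(data) // [{early-return 7} {exported 4} {var-naming 2}]
    }
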
diff --git a/vendor/github.com/mgechev/revive/formatter/plain.go b/vendor/github.com/mgechev/revive/formatter/plain.go
index 09ebf6cdc8..351248742d 100644
--- a/vendor/github.com/mgechev/revive/formatter/plain.go
+++ b/vendor/github.com/mgechev/revive/formatter/plain.go
@@ -1,8 +1,8 @@
package formatter
import (
- "bytes"
"fmt"
+ "strings"
"github.com/mgechev/revive/lint"
)
@@ -20,9 +20,9 @@ func (*Plain) Name() string {
// Format formats the failures gotten from the lint.
func (*Plain) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) {
- var buf bytes.Buffer
+ var sb strings.Builder
for failure := range failures {
- fmt.Fprintf(&buf, "%v: %s %s\n", failure.Position.Start, failure.Failure, "https://revive.run/r#"+failure.RuleName)
+ sb.WriteString(fmt.Sprintf("%v: %s %s\n", failure.Position.Start, failure.Failure, ruleDescriptionURL(failure.RuleName)))
}
- return buf.String(), nil
+ return sb.String(), nil
}
diff --git a/vendor/github.com/mgechev/revive/formatter/sarif.go b/vendor/github.com/mgechev/revive/formatter/sarif.go
index c42da73eb0..72da160713 100644
--- a/vendor/github.com/mgechev/revive/formatter/sarif.go
+++ b/vendor/github.com/mgechev/revive/formatter/sarif.go
@@ -20,7 +20,7 @@ func (*Sarif) Name() string {
return "sarif"
}
-const reviveSite = "https://revive.run"
+const reviveSite = "https://github.com/mgechev/revive"
// Format formats the failures gotten from the lint.
func (*Sarif) Format(failures <-chan lint.Failure, cfg lint.Config) (string, error) {
diff --git a/vendor/github.com/mgechev/revive/formatter/stylish.go b/vendor/github.com/mgechev/revive/formatter/stylish.go
index 828228c72e..bb3d7cd18c 100644
--- a/vendor/github.com/mgechev/revive/formatter/stylish.go
+++ b/vendor/github.com/mgechev/revive/formatter/stylish.go
@@ -22,11 +22,12 @@ func (*Stylish) Name() string {
func formatFailure(failure lint.Failure, severity lint.Severity) []string {
fString := color.CyanString(failure.Failure)
- fName := color.RedString("https://revive.run/r#" + failure.RuleName)
+ fURL := ruleDescriptionURL(failure.RuleName)
+ fName := color.RedString(fURL)
lineColumn := failure.Position
pos := fmt.Sprintf("(%d, %d)", lineColumn.Start.Line, lineColumn.Start.Column)
if severity == lint.SeverityWarning {
- fName = color.YellowString("https://revive.run/r#" + failure.RuleName)
+ fName = color.YellowString(fURL)
}
return []string{failure.GetFilename(), pos, fName, fString}
}
@@ -50,7 +51,7 @@ func (*Stylish) Format(failures <-chan lint.Failure, config lint.Config) (string
ps = "problem"
}
- fileReport := make(map[string][][]string)
+ fileReport := map[string][][]string{}
for _, row := range result {
if _, ok := fileReport[row[0]]; !ok {
@@ -77,11 +78,12 @@ func (*Stylish) Format(failures <-chan lint.Failure, config lint.Config) (string
suffix := fmt.Sprintf(" %d %s (%d errors) (%d warnings)", total, ps, totalErrors, total-totalErrors)
- if total > 0 && totalErrors > 0 {
+ switch {
+ case total > 0 && totalErrors > 0:
suffix = color.RedString("\n ✖" + suffix)
- } else if total > 0 && totalErrors == 0 {
+ case total > 0 && totalErrors == 0:
suffix = color.YellowString("\n ✖" + suffix)
- } else {
+ default:
suffix, output = "", ""
}
diff --git a/vendor/github.com/mgechev/revive/formatter/unix.go b/vendor/github.com/mgechev/revive/formatter/unix.go
index e46f3c275f..9ce8fee4de 100644
--- a/vendor/github.com/mgechev/revive/formatter/unix.go
+++ b/vendor/github.com/mgechev/revive/formatter/unix.go
@@ -1,8 +1,8 @@
package formatter
import (
- "bytes"
"fmt"
+ "strings"
"github.com/mgechev/revive/lint"
)
@@ -22,9 +22,9 @@ func (*Unix) Name() string {
// Format formats the failures gotten from the lint.
func (*Unix) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) {
- var buf bytes.Buffer
+ var sb strings.Builder
for failure := range failures {
- fmt.Fprintf(&buf, "%v: [%s] %s\n", failure.Position.Start, failure.RuleName, failure.Failure)
+ sb.WriteString(fmt.Sprintf("%v: [%s] %s\n", failure.Position.Start, failure.RuleName, failure.Failure))
}
- return buf.String(), nil
+ return sb.String(), nil
}
diff --git a/vendor/github.com/mgechev/revive/internal/astutils/ast_utils.go b/vendor/github.com/mgechev/revive/internal/astutils/ast_utils.go
new file mode 100644
index 0000000000..0a346043a9
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/internal/astutils/ast_utils.go
@@ -0,0 +1,82 @@
+// Package astutils provides utility functions for working with AST nodes
+package astutils
+
+import (
+ "go/ast"
+)
+
+// FuncSignatureIs returns true if the given func decl satisfies a signature characterized
+// by the given name, parameters types and return types; false otherwise.
+//
+// Example: to check if a function declaration has the signature Foo(int, string) (bool,error)
+// call to FuncSignatureIs(funcDecl,"Foo",[]string{"int","string"},[]string{"bool","error"})
+func FuncSignatureIs(funcDecl *ast.FuncDecl, wantName string, wantParametersTypes, wantResultsTypes []string) bool {
+ if wantName != funcDecl.Name.String() {
+ return false // func name doesn't match expected one
+ }
+
+ funcParametersTypes := getTypeNames(funcDecl.Type.Params)
+ if len(wantParametersTypes) != len(funcParametersTypes) {
+ return false // func has not the expected number of parameters
+ }
+
+ funcResultsTypes := getTypeNames(funcDecl.Type.Results)
+ if len(wantResultsTypes) != len(funcResultsTypes) {
+ return false // func has not the expected number of return values
+ }
+
+ for i, wantType := range wantParametersTypes {
+ if wantType != funcParametersTypes[i] {
+ return false // type of a func's parameter does not match the type of the corresponding expected parameter
+ }
+ }
+
+ for i, wantType := range wantResultsTypes {
+ if wantType != funcResultsTypes[i] {
+ return false // type of a func's return value does not match the type of the corresponding expected return value
+ }
+ }
+
+ return true
+}
+
+func getTypeNames(fields *ast.FieldList) []string {
+ result := []string{}
+
+ if fields == nil {
+ return result
+ }
+
+ for _, field := range fields.List {
+ typeName := getFieldTypeName(field.Type)
+ if field.Names == nil { // unnamed field
+ result = append(result, typeName)
+ continue
+ }
+
+ for range field.Names { // add one type name for each field name
+ result = append(result, typeName)
+ }
+ }
+
+ return result
+}
+
+func getFieldTypeName(typ ast.Expr) string {
+ switch f := typ.(type) {
+ case *ast.Ident:
+ return f.Name
+ case *ast.SelectorExpr:
+ return f.Sel.Name + "." + getFieldTypeName(f.X)
+ case *ast.StarExpr:
+ return "*" + getFieldTypeName(f.X)
+ case *ast.IndexExpr:
+ return getFieldTypeName(f.X) + "[" + getFieldTypeName(f.Index) + "]"
+ case *ast.ArrayType:
+ return "[]" + getFieldTypeName(f.Elt)
+ case *ast.InterfaceType:
+ return "interface{}"
+ default:
+ return "UNHANDLED_TYPE"
+ }
+}
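
FuncSignatureIs compares a function declaration's name, parameter types and result types against their string forms as produced by getFieldTypeName, expanding one entry per named parameter. A usage sketch; since internal packages are only importable from within the revive module, this only compiles when placed inside that module tree, and the parsed source is illustrative:

    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/token"

        "github.com/mgechev/revive/internal/astutils"
    )

    func main() {
        src := "package p\nfunc Less(i, j int) bool { return i < j }"

        f, err := parser.ParseFile(token.NewFileSet(), "p.go", src, 0)
        if err != nil {
            panic(err)
        }

        fn := f.Decls[0].(*ast.FuncDecl)
        // Name, parameter types and result types all match, so this prints true.
        fmt.Println(astutils.FuncSignatureIs(fn, "Less", []string{"int", "int"}, []string{"bool"}))
    }
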
diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/args.go b/vendor/github.com/mgechev/revive/internal/ifelse/args.go
index c6e647e697..fc65b70a30 100644
--- a/vendor/github.com/mgechev/revive/internal/ifelse/args.go
+++ b/vendor/github.com/mgechev/revive/internal/ifelse/args.go
@@ -4,8 +4,15 @@ package ifelse
// that would enlarge variable scope
const PreserveScope = "preserveScope"
+// AllowJump is a configuration argument that permits early-return to
+// suggest introducing a new jump (return, continue, etc) statement
+// to reduce nesting. By default, suggestions only bring existing jumps
+// earlier.
+const AllowJump = "allowJump"
+
// Args contains arguments common to the early-return, indent-error-flow
-// and superfluous-else rules (currently just preserveScope)
+// and superfluous-else rules
type Args struct {
PreserveScope bool
+ AllowJump bool
}
diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/branch.go b/vendor/github.com/mgechev/revive/internal/ifelse/branch.go
index 6e6036b899..dfa744e354 100644
--- a/vendor/github.com/mgechev/revive/internal/ifelse/branch.go
+++ b/vendor/github.com/mgechev/revive/internal/ifelse/branch.go
@@ -9,8 +9,8 @@ import (
// Branch contains information about a branch within an if-else chain.
type Branch struct {
BranchKind
- Call // The function called at the end for kind Panic or Exit.
- HasDecls bool // The branch has one or more declarations (at the top level block)
+ Call // The function called at the end for kind Panic or Exit.
+ block []ast.Stmt
}
// BlockBranch gets the Branch of an ast.BlockStmt.
@@ -21,7 +21,7 @@ func BlockBranch(block *ast.BlockStmt) Branch {
}
branch := StmtBranch(block.List[blockLen-1])
- branch.HasDecls = hasDecls(block)
+ branch.block = block.List
return branch
}
@@ -61,11 +61,14 @@ func StmtBranch(stmt ast.Stmt) Branch {
// String returns a brief string representation
func (b Branch) String() string {
switch b.BranchKind {
+ case Empty:
+ return "{ }"
+ case Regular:
+ return "{ ... }"
case Panic, Exit:
- return fmt.Sprintf("... %v()", b.Call)
- default:
- return b.BranchKind.String()
+ return fmt.Sprintf("{ ... %v() }", b.Call)
}
+ return fmt.Sprintf("{ ... %v }", b.BranchKind)
}
// LongString returns a longer form string representation
@@ -73,13 +76,13 @@ func (b Branch) LongString() string {
switch b.BranchKind {
case Panic, Exit:
return fmt.Sprintf("call to %v function", b.Call)
- default:
- return b.BranchKind.LongString()
}
+ return b.BranchKind.LongString()
}
-func hasDecls(block *ast.BlockStmt) bool {
- for _, stmt := range block.List {
+// HasDecls returns whether the branch has any top-level declarations
+func (b Branch) HasDecls() bool {
+ for _, stmt := range b.block {
switch stmt := stmt.(type) {
case *ast.DeclStmt:
return true
@@ -91,3 +94,22 @@ func hasDecls(block *ast.BlockStmt) bool {
}
return false
}
+
+// IsShort returns whether the branch is empty or consists of a single statement
+func (b Branch) IsShort() bool {
+ switch len(b.block) {
+ case 0:
+ return true
+ case 1:
+ return isShortStmt(b.block[0])
+ }
+ return false
+}
+
+func isShortStmt(stmt ast.Stmt) bool {
+ switch stmt.(type) {
+ case *ast.BlockStmt, *ast.IfStmt, *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt, *ast.ForStmt, *ast.RangeStmt:
+ return false
+ }
+ return true
+}
diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/branch_kind.go b/vendor/github.com/mgechev/revive/internal/ifelse/branch_kind.go
index 41601d1e1d..75d3b0cfed 100644
--- a/vendor/github.com/mgechev/revive/internal/ifelse/branch_kind.go
+++ b/vendor/github.com/mgechev/revive/internal/ifelse/branch_kind.go
@@ -44,9 +44,8 @@ func (k BranchKind) Deviates() bool {
return false
case Return, Continue, Break, Goto, Panic, Exit:
return true
- default:
- panic("invalid kind")
}
+ panic("invalid kind")
}
// Branch returns a Branch with the given kind
@@ -58,22 +57,21 @@ func (k BranchKind) String() string {
case Empty:
return ""
case Regular:
- return "..."
+ return ""
case Return:
- return "... return"
+ return "return"
case Continue:
- return "... continue"
+ return "continue"
case Break:
- return "... break"
+ return "break"
case Goto:
- return "... goto"
+ return "goto"
case Panic:
- return "... panic()"
+ return "panic()"
case Exit:
- return "... os.Exit()"
- default:
- panic("invalid kind")
+ return "os.Exit()"
}
+ panic("invalid kind")
}
// LongString returns a longer form string representation
@@ -95,7 +93,6 @@ func (k BranchKind) LongString() string {
return "a function call that panics"
case Exit:
return "a function call that exits the program"
- default:
- panic("invalid kind")
}
+ panic("invalid kind")
}
diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/chain.go b/vendor/github.com/mgechev/revive/internal/ifelse/chain.go
index 9891635ee1..e3c8898ceb 100644
--- a/vendor/github.com/mgechev/revive/internal/ifelse/chain.go
+++ b/vendor/github.com/mgechev/revive/internal/ifelse/chain.go
@@ -2,9 +2,11 @@ package ifelse
// Chain contains information about an if-else chain.
type Chain struct {
- If Branch // what happens at the end of the "if" block
- Else Branch // what happens at the end of the "else" block
- HasInitializer bool // is there an "if"-initializer somewhere in the chain?
- HasPriorNonDeviating bool // is there a prior "if" block that does NOT deviate control flow?
- AtBlockEnd bool // whether the chain is placed at the end of the surrounding block
+ If Branch // what happens at the end of the "if" block
+ HasElse bool // is there an "else" block?
+ Else Branch // what happens at the end of the "else" block
+ HasInitializer bool // is there an "if"-initializer somewhere in the chain?
+ HasPriorNonDeviating bool // is there a prior "if" block that does NOT deviate control flow?
+ AtBlockEnd bool // whether the chain is placed at the end of the surrounding block
+ BlockEndKind BranchKind // control flow at end of surrounding block (e.g. "return" for function body)
}
diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/doc.go b/vendor/github.com/mgechev/revive/internal/ifelse/doc.go
index 0aa2c98175..7461b12aa1 100644
--- a/vendor/github.com/mgechev/revive/internal/ifelse/doc.go
+++ b/vendor/github.com/mgechev/revive/internal/ifelse/doc.go
@@ -1,4 +1,4 @@
-// Package ifelse provides helpers for analysing the control flow in if-else chains,
+// Package ifelse provides helpers for analyzing the control flow in if-else chains,
// presently used by the following rules:
// - early-return
// - indent-error-flow
diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/func.go b/vendor/github.com/mgechev/revive/internal/ifelse/func.go
index 7ba3519184..45c78f0798 100644
--- a/vendor/github.com/mgechev/revive/internal/ifelse/func.go
+++ b/vendor/github.com/mgechev/revive/internal/ifelse/func.go
@@ -42,10 +42,8 @@ func ExprCall(expr *ast.ExprStmt) (Call, bool) {
// String returns the function name with package qualifier (if any)
func (f Call) String() string {
- switch {
- case f.Pkg != "":
+ if f.Pkg != "" {
return fmt.Sprintf("%s.%s", f.Pkg, f.Name)
- default:
- return f.Name
}
+ return f.Name
}
diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/rule.go b/vendor/github.com/mgechev/revive/internal/ifelse/rule.go
index 07ad456b65..94f022180d 100644
--- a/vendor/github.com/mgechev/revive/internal/ifelse/rule.go
+++ b/vendor/github.com/mgechev/revive/internal/ifelse/rule.go
@@ -7,10 +7,10 @@ import (
"github.com/mgechev/revive/lint"
)
-// Rule is an interface for linters operating on if-else chains
-type Rule interface {
- CheckIfElse(chain Chain, args Args) (failMsg string)
-}
+// CheckFunc evaluates a rule against the given if-else chain and returns a message
+// describing the proposed refactor, along with a indicator of whether such a refactor
+// could be found.
+type CheckFunc func(Chain, Args) (string, bool)
// Apply evaluates the given Rule on if-else chains found within the given AST,
// and returns the failures.
@@ -28,11 +28,14 @@ type Rule interface {
//
// Only the block following "bar" is linted. This is because the rules that use this function
// do not presently have anything to say about earlier blocks in the chain.
-func Apply(rule Rule, node ast.Node, target Target, args lint.Arguments) []lint.Failure {
- v := &visitor{rule: rule, target: target}
+func Apply(check CheckFunc, node ast.Node, target Target, args lint.Arguments) []lint.Failure {
+ v := &visitor{check: check, target: target}
for _, arg := range args {
- if arg == PreserveScope {
+ switch arg {
+ case PreserveScope:
v.args.PreserveScope = true
+ case AllowJump:
+ v.args.AllowJump = true
}
}
ast.Walk(v, node)
@@ -42,64 +45,99 @@ func Apply(rule Rule, node ast.Node, target Target, args lint.Arguments) []lint.
type visitor struct {
failures []lint.Failure
target Target
- rule Rule
+ check CheckFunc
args Args
}
func (v *visitor) Visit(node ast.Node) ast.Visitor {
- block, ok := node.(*ast.BlockStmt)
- if !ok {
+ switch stmt := node.(type) {
+ case *ast.FuncDecl:
+ v.visitBody(stmt.Body, Return)
+ case *ast.FuncLit:
+ v.visitBody(stmt.Body, Return)
+ case *ast.ForStmt:
+ v.visitBody(stmt.Body, Continue)
+ case *ast.RangeStmt:
+ v.visitBody(stmt.Body, Continue)
+ case *ast.CaseClause:
+ v.visitBlock(stmt.Body, Break)
+ case *ast.BlockStmt:
+ v.visitBlock(stmt.List, Regular)
+ default:
return v
}
+ return nil
+}
+
+func (v *visitor) visitBody(body *ast.BlockStmt, endKind BranchKind) {
+ if body != nil {
+ v.visitBlock(body.List, endKind)
+ }
+}
- for i, stmt := range block.List {
- if ifStmt, ok := stmt.(*ast.IfStmt); ok {
- v.visitChain(ifStmt, Chain{AtBlockEnd: i == len(block.List)-1})
+func (v *visitor) visitBlock(stmts []ast.Stmt, endKind BranchKind) {
+ for i, stmt := range stmts {
+ ifStmt, ok := stmt.(*ast.IfStmt)
+ if !ok {
+ ast.Walk(v, stmt)
continue
}
- ast.Walk(v, stmt)
+ var chain Chain
+ if i == len(stmts)-1 {
+ chain.AtBlockEnd = true
+ chain.BlockEndKind = endKind
+ }
+ v.visitIf(ifStmt, chain)
}
- return nil
}
-func (v *visitor) visitChain(ifStmt *ast.IfStmt, chain Chain) {
+func (v *visitor) visitIf(ifStmt *ast.IfStmt, chain Chain) {
// look for other if-else chains nested inside this if { } block
- ast.Walk(v, ifStmt.Body)
-
- if ifStmt.Else == nil {
- // no else branch
- return
- }
+ v.visitBlock(ifStmt.Body.List, chain.BlockEndKind)
if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE {
chain.HasInitializer = true
}
chain.If = BlockBranch(ifStmt.Body)
+ if ifStmt.Else == nil {
+ if v.args.AllowJump {
+ v.checkRule(ifStmt, chain)
+ }
+ return
+ }
+
switch elseBlock := ifStmt.Else.(type) {
case *ast.IfStmt:
if !chain.If.Deviates() {
chain.HasPriorNonDeviating = true
}
- v.visitChain(elseBlock, chain)
+ v.visitIf(elseBlock, chain)
case *ast.BlockStmt:
// look for other if-else chains nested inside this else { } block
- ast.Walk(v, elseBlock)
+ v.visitBlock(elseBlock.List, chain.BlockEndKind)
+ chain.HasElse = true
chain.Else = BlockBranch(elseBlock)
- if failMsg := v.rule.CheckIfElse(chain, v.args); failMsg != "" {
- if chain.HasInitializer {
- // if statement has a := initializer, so we might need to move the assignment
- // onto its own line in case the body references it
- failMsg += " (move short variable declaration to its own line if necessary)"
- }
- v.failures = append(v.failures, lint.Failure{
- Confidence: 1,
- Node: v.target.node(ifStmt),
- Failure: failMsg,
- })
- }
+ v.checkRule(ifStmt, chain)
default:
- panic("invalid node type for else")
+ panic("unexpected node type for else")
+ }
+}
+
+func (v *visitor) checkRule(ifStmt *ast.IfStmt, chain Chain) {
+ msg, found := v.check(chain, v.args)
+ if !found {
+ return // passed the check
+ }
+ if chain.HasInitializer {
+ // if statement has a := initializer, so we might need to move the assignment
+ // onto its own line in case the body references it
+ msg += " (move short variable declaration to its own line if necessary)"
}
+ v.failures = append(v.failures, lint.Failure{
+ Confidence: 1,
+ Node: v.target.node(ifStmt),
+ Failure: msg,
+ })
}
diff --git a/vendor/github.com/mgechev/revive/internal/ifelse/target.go b/vendor/github.com/mgechev/revive/internal/ifelse/target.go
index 81ff1c3037..63755acf11 100644
--- a/vendor/github.com/mgechev/revive/internal/ifelse/target.go
+++ b/vendor/github.com/mgechev/revive/internal/ifelse/target.go
@@ -19,7 +19,6 @@ func (t Target) node(ifStmt *ast.IfStmt) ast.Node {
return ifStmt
case TargetElse:
return ifStmt.Else
- default:
- panic("bad target")
}
+ panic("bad target")
}
diff --git a/vendor/github.com/mgechev/revive/lint/config.go b/vendor/github.com/mgechev/revive/lint/config.go
index 7e51a93c28..485f618337 100644
--- a/vendor/github.com/mgechev/revive/lint/config.go
+++ b/vendor/github.com/mgechev/revive/lint/config.go
@@ -1,7 +1,11 @@
package lint
+import (
+ goversion "github.com/hashicorp/go-version"
+)
+
// Arguments is type used for the arguments of a rule.
-type Arguments = []interface{}
+type Arguments = []any
// FileFilters is type used for modeling file filters to apply to rules.
type FileFilters = []*FileFilter
@@ -61,4 +65,7 @@ type Config struct {
WarningCode int `toml:"warningCode"`
Directives DirectivesConfig `toml:"directive"`
Exclude []string `toml:"exclude"`
+ // If set, overrides the go language version specified in go.mod of
+ // packages being linted, and assumes this specific language version.
+ GoVersion *goversion.Version
}
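
The new GoVersion field lets a caller pin the language version revive assumes for every linted package, bypassing the per-package go.mod detection added in linter.go below. A minimal sketch using the hashicorp/go-version API shown in these hunks:

    package main

    import (
        "fmt"

        goversion "github.com/hashicorp/go-version"
        "github.com/mgechev/revive/lint"
    )

    func main() {
        cfg := &lint.Config{
            // Assume Go 1.22 for all packages, regardless of their go.mod.
            GoVersion: goversion.Must(goversion.NewVersion("1.22")),
        }

        fmt.Println(cfg.GoVersion.GreaterThanOrEqual(goversion.Must(goversion.NewVersion("1.21")))) // true
    }
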
diff --git a/vendor/github.com/mgechev/revive/lint/failure.go b/vendor/github.com/mgechev/revive/lint/failure.go
index 479b0cb48b..48095f9d72 100644
--- a/vendor/github.com/mgechev/revive/lint/failure.go
+++ b/vendor/github.com/mgechev/revive/lint/failure.go
@@ -5,6 +5,53 @@ import (
"go/token"
)
+const (
+ // FailureCategoryArgOrder indicates argument order issues.
+ FailureCategoryArgOrder FailureCategory = "arg-order"
+ // FailureCategoryBadPractice indicates bad practice issues.
+ FailureCategoryBadPractice FailureCategory = "bad practice"
+ // FailureCategoryCodeStyle indicates code style issues.
+ FailureCategoryCodeStyle FailureCategory = "code-style"
+ // FailureCategoryComments indicates comment issues.
+ FailureCategoryComments FailureCategory = "comments"
+ // FailureCategoryComplexity indicates complexity issues.
+ FailureCategoryComplexity FailureCategory = "complexity"
+ // FailureCategoryContent indicates content issues.
+ FailureCategoryContent FailureCategory = "content"
+ // FailureCategoryErrors indicates error handling issues.
+ FailureCategoryErrors FailureCategory = "errors"
+ // FailureCategoryImports indicates import issues.
+ FailureCategoryImports FailureCategory = "imports"
+ // FailureCategoryLogic indicates logic issues.
+ FailureCategoryLogic FailureCategory = "logic"
+ // FailureCategoryMaintenance indicates maintenance issues.
+ FailureCategoryMaintenance FailureCategory = "maintenance"
+ // FailureCategoryNaming indicates naming issues.
+ FailureCategoryNaming FailureCategory = "naming"
+ // FailureCategoryOptimization indicates optimization issues.
+ FailureCategoryOptimization FailureCategory = "optimization"
+ // FailureCategoryStyle indicates style issues.
+ FailureCategoryStyle FailureCategory = "style"
+ // FailureCategoryTime indicates time-related issues.
+ FailureCategoryTime FailureCategory = "time"
+ // FailureCategoryTypeInference indicates type inference issues.
+ FailureCategoryTypeInference FailureCategory = "type-inference"
+ // FailureCategoryUnaryOp indicates unary operation issues.
+ FailureCategoryUnaryOp FailureCategory = "unary-op"
+ // FailureCategoryUnexportedTypeInAPI indicates unexported type in API issues.
+ FailureCategoryUnexportedTypeInAPI FailureCategory = "unexported-type-in-api"
+ // FailureCategoryZeroValue indicates zero value issues.
+ FailureCategoryZeroValue FailureCategory = "zero-value"
+
+ // failureCategoryInternal indicates internal failures.
+ failureCategoryInternal FailureCategory = "REVIVE_INTERNAL"
+ // failureCategoryValidity indicates validity issues.
+ failureCategoryValidity FailureCategory = "validity"
+)
+
+// FailureCategory is the type for the failure categories.
+type FailureCategory string
+
const (
// SeverityWarning declares failures of type warning
SeverityWarning = "warning"
@@ -25,7 +72,7 @@ type FailurePosition struct {
type Failure struct {
Failure string
RuleName string
- Category string
+ Category FailureCategory
Position FailurePosition
Node ast.Node `json:"-"`
Confidence float64
@@ -37,3 +84,16 @@ type Failure struct {
func (f *Failure) GetFilename() string {
return f.Position.Start.Filename
}
+
+// IsInternal returns true if this failure is internal, false otherwise.
+func (f *Failure) IsInternal() bool {
+ return f.Category == failureCategoryInternal
+}
+
+// NewInternalFailure yields an internal failure with the given message as failure message.
+func NewInternalFailure(message string) Failure {
+ return Failure{
+ Category: failureCategoryInternal,
+ Failure: message,
+ }
+}
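
Internal failures are ordinary Failure values tagged with the private REVIVE_INTERNAL category; the linter uses them to pass a fatal error through the failures channel (see the Lint change in linter.go below), and IsInternal lets a consumer tell them apart from real findings. A small sketch:

    package main

    import (
        "fmt"

        "github.com/mgechev/revive/lint"
    )

    func main() {
        f := lint.NewInternalFailure("error during linting: something went wrong")

        // An internal failure signals a hard error, not a lint finding.
        fmt.Println(f.IsInternal()) // true
    }
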
diff --git a/vendor/github.com/mgechev/revive/lint/file.go b/vendor/github.com/mgechev/revive/lint/file.go
index 23255304c5..0311210e5a 100644
--- a/vendor/github.com/mgechev/revive/lint/file.go
+++ b/vendor/github.com/mgechev/revive/lint/file.go
@@ -2,6 +2,7 @@ package lint
import (
"bytes"
+ "errors"
"go/ast"
"go/parser"
"go/printer"
@@ -48,7 +49,7 @@ func (f *File) ToPosition(pos token.Pos) token.Position {
}
// Render renders a node.
-func (f *File) Render(x interface{}) string {
+func (f *File) Render(x any) string {
var buf bytes.Buffer
if err := printer.Fprint(&buf, f.Pkg.fset, x); err != nil {
panic(err)
@@ -96,7 +97,7 @@ func (f *File) isMain() bool {
const directiveSpecifyDisableReason = "specify-disable-reason"
-func (f *File) lint(rules []Rule, config Config, failures chan Failure) {
+func (f *File) lint(rules []Rule, config Config, failures chan Failure) error {
rulesConfig := config.Rules
_, mustSpecifyDisableReason := config.Directives[directiveSpecifyDisableReason]
disabledIntervals := f.disabledIntervals(rules, mustSpecifyDisableReason, failures)
@@ -107,6 +108,10 @@ func (f *File) lint(rules []Rule, config Config, failures chan Failure) {
}
currentFailures := currentRule.Apply(f, ruleConfig.Arguments)
for idx, failure := range currentFailures {
+ if failure.IsInternal() {
+ return errors.New(failure.Failure)
+ }
+
if failure.RuleName == "" {
failure.RuleName = currentRule.Name()
}
@@ -122,6 +127,7 @@ func (f *File) lint(rules []Rule, config Config, failures chan Failure) {
}
}
}
+ return nil
}
type enableDisableConfig struct {
@@ -140,10 +146,10 @@ const (
var re = regexp.MustCompile(directiveRE)
func (f *File) disabledIntervals(rules []Rule, mustSpecifyDisableReason bool, failures chan Failure) disabledIntervalsMap {
- enabledDisabledRulesMap := make(map[string][]enableDisableConfig)
+ enabledDisabledRulesMap := map[string][]enableDisableConfig{}
getEnabledDisabledIntervals := func() disabledIntervalsMap {
- result := make(disabledIntervalsMap)
+ result := disabledIntervalsMap{}
for ruleName, disabledArr := range enabledDisabledRulesMap {
ruleResult := []DisabledInterval{}
@@ -188,16 +194,17 @@ func (f *File) disabledIntervals(rules []Rule, mustSpecifyDisableReason bool, fa
enabledDisabledRulesMap[name] = existing
}
- handleRules := func(filename, modifier string, isEnabled bool, line int, ruleNames []string) []DisabledInterval {
+ handleRules := func(_, modifier string, isEnabled bool, line int, ruleNames []string) []DisabledInterval {
var result []DisabledInterval
for _, name := range ruleNames {
- if modifier == "line" {
+ switch modifier {
+ case "line":
handleConfig(isEnabled, line, name)
handleConfig(!isEnabled, line, name)
- } else if modifier == "next-line" {
+ case "next-line":
handleConfig(isEnabled, line+1, name)
handleConfig(!isEnabled, line+1, name)
- } else {
+ default:
handleConfig(isEnabled, line, name)
}
}
@@ -260,21 +267,22 @@ func (File) filterFailures(failures []Failure, disabledIntervals disabledInterva
intervals, ok := disabledIntervals[failure.RuleName]
if !ok {
result = append(result, failure)
- } else {
- include := true
- for _, interval := range intervals {
- intStart := interval.From.Line
- intEnd := interval.To.Line
- if (fStart >= intStart && fStart <= intEnd) ||
- (fEnd >= intStart && fEnd <= intEnd) {
- include = false
- break
- }
- }
- if include {
- result = append(result, failure)
+ continue
+ }
+
+ include := true
+ for _, interval := range intervals {
+ intStart := interval.From.Line
+ intEnd := interval.To.Line
+ if (fStart >= intStart && fStart <= intEnd) ||
+ (fEnd >= intStart && fEnd <= intEnd) {
+ include = false
+ break
}
}
+ if include {
+ result = append(result, failure)
+ }
}
return result
}
diff --git a/vendor/github.com/mgechev/revive/lint/filefilter.go b/vendor/github.com/mgechev/revive/lint/filefilter.go
index 8da090b9cc..fb2c9bbac2 100644
--- a/vendor/github.com/mgechev/revive/lint/filefilter.go
+++ b/vendor/github.com/mgechev/revive/lint/filefilter.go
@@ -55,19 +55,21 @@ func (ff *FileFilter) MatchFileName(name string) bool {
return ff.rx.MatchString(name)
}
-var fileFilterInvalidGlobRegexp = regexp.MustCompile(`[^/]\*\*[^/]`)
-var escapeRegexSymbols = ".+{}()[]^$"
+var (
+ fileFilterInvalidGlobRegexp = regexp.MustCompile(`[^/]\*\*[^/]`)
+ escapeRegexSymbols = ".+{}()[]^$"
+)
func (ff *FileFilter) prepareRegexp() error {
var err error
- var src = ff.raw
+ src := ff.raw
if src == "TEST" {
src = "~_test\\.go"
}
if strings.HasPrefix(src, "~") {
ff.rx, err = regexp.Compile(src[1:])
if err != nil {
- return fmt.Errorf("invalid file filter [%s], regexp compile error: [%v]", ff.raw, err)
+ return fmt.Errorf("invalid file filter [%s], regexp compile error: [%w]", ff.raw, err)
}
return nil
}
@@ -110,7 +112,7 @@ func (ff *FileFilter) prepareRegexp() error {
rxBuild.WriteByte('$')
ff.rx, err = regexp.Compile(rxBuild.String())
if err != nil {
- return fmt.Errorf("invalid file filter [%s], regexp compile error after glob expand: [%v]", ff.raw, err)
+ return fmt.Errorf("invalid file filter [%s], regexp compile error after glob expand: [%w]", ff.raw, err)
}
return nil
}
@@ -122,7 +124,7 @@ func (ff *FileFilter) prepareRegexp() error {
fillRx = "^" + fillRx + "$"
ff.rx, err = regexp.Compile(fillRx)
if err != nil {
- return fmt.Errorf("invalid file filter [%s], regexp compile full path: [%v]", ff.raw, err)
+ return fmt.Errorf("invalid file filter [%s], regexp compile full path: [%w]", ff.raw, err)
}
return nil
}
diff --git a/vendor/github.com/mgechev/revive/lint/linter.go b/vendor/github.com/mgechev/revive/lint/linter.go
index fb1ab6f28e..73b5341bdc 100644
--- a/vendor/github.com/mgechev/revive/lint/linter.go
+++ b/vendor/github.com/mgechev/revive/lint/linter.go
@@ -6,9 +6,14 @@ import (
"fmt"
"go/token"
"os"
+ "path/filepath"
"regexp"
"strconv"
- "sync"
+ "strings"
+
+ goversion "github.com/hashicorp/go-version"
+ "golang.org/x/mod/modfile"
+ "golang.org/x/sync/errgroup"
)
// ReadFile defines an abstraction for reading files.
@@ -49,38 +54,85 @@ func (l Linter) readFile(path string) (result []byte, err error) {
}
var (
- genHdr = []byte("// Code generated ")
- genFtr = []byte(" DO NOT EDIT.")
+ generatedPrefix = []byte("// Code generated ")
+ generatedSuffix = []byte(" DO NOT EDIT.")
+ defaultGoVersion = goversion.Must(goversion.NewVersion("1.0"))
)
// Lint lints a set of files with the specified rule.
func (l *Linter) Lint(packages [][]string, ruleSet []Rule, config Config) (<-chan Failure, error) {
failures := make(chan Failure)
- var wg sync.WaitGroup
- for _, pkg := range packages {
- wg.Add(1)
- go func(pkg []string) {
- if err := l.lintPackage(pkg, ruleSet, config, failures); err != nil {
- fmt.Fprintln(os.Stderr, err)
- os.Exit(1)
+ perModVersions := map[string]*goversion.Version{}
+ perPkgVersions := make([]*goversion.Version, len(packages))
+ for n, files := range packages {
+ if len(files) == 0 {
+ continue
+ }
+ if config.GoVersion != nil {
+ perPkgVersions[n] = config.GoVersion
+ continue
+ }
+
+ dir, err := filepath.Abs(filepath.Dir(files[0]))
+ if err != nil {
+ return nil, err
+ }
+
+ alreadyKnownMod := false
+ for d, v := range perModVersions {
+ if strings.HasPrefix(dir, d) {
+ perPkgVersions[n] = v
+ alreadyKnownMod = true
+ break
+ }
+ }
+ if alreadyKnownMod {
+ continue
+ }
+
+ d, v, err := detectGoMod(dir)
+ if err != nil {
+ // No luck finding the go.mod file thus set the default Go version
+ v = defaultGoVersion
+ d = dir
+ }
+ perModVersions[d] = v
+ perPkgVersions[n] = v
+ }
+
+ var wg errgroup.Group
+ for n := range packages {
+ wg.Go(func() error {
+ pkg := packages[n]
+ gover := perPkgVersions[n]
+ if err := l.lintPackage(pkg, gover, ruleSet, config, failures); err != nil {
+ return fmt.Errorf("error during linting: %w", err)
}
- defer wg.Done()
- }(pkg)
+ return nil
+ })
}
go func() {
- wg.Wait()
+ err := wg.Wait()
+ if err != nil {
+ failures <- NewInternalFailure(err.Error())
+ }
close(failures)
}()
return failures, nil
}
-func (l *Linter) lintPackage(filenames []string, ruleSet []Rule, config Config, failures chan Failure) error {
+func (l *Linter) lintPackage(filenames []string, gover *goversion.Version, ruleSet []Rule, config Config, failures chan Failure) error {
+ if len(filenames) == 0 {
+ return nil
+ }
+
pkg := &Package{
- fset: token.NewFileSet(),
- files: map[string]*File{},
+ fset: token.NewFileSet(),
+ files: map[string]*File{},
+ goVersion: gover,
}
for _, filename := range filenames {
content, err := l.readFile(filename)
@@ -103,9 +155,52 @@ func (l *Linter) lintPackage(filenames []string, ruleSet []Rule, config Config,
return nil
}
- pkg.lint(ruleSet, config, failures)
+ return pkg.lint(ruleSet, config, failures)
+}
+
+func detectGoMod(dir string) (rootDir string, ver *goversion.Version, err error) {
+ modFileName, err := retrieveModFile(dir)
+ if err != nil {
+ return "", nil, fmt.Errorf("%q doesn't seem to be part of a Go module", dir)
+ }
+
+ mod, err := os.ReadFile(modFileName)
+ if err != nil {
+ return "", nil, fmt.Errorf("failed to read %q, got %w", modFileName, err)
+ }
+
+ modAst, err := modfile.ParseLax(modFileName, mod, nil)
+ if err != nil {
+ return "", nil, fmt.Errorf("failed to parse %q, got %w", modFileName, err)
+ }
- return nil
+ if modAst.Go == nil {
+ return "", nil, fmt.Errorf("%q does not specify a Go version", modFileName)
+ }
+
+ ver, err = goversion.NewVersion(modAst.Go.Version)
+ return filepath.Dir(modFileName), ver, err
+}
+
+func retrieveModFile(dir string) (string, error) {
+ const lookingForFile = "go.mod"
+ for {
+ // filepath.Dir returns 'C:\' on Windows, and '/' on Unix
+ isRootDir := (dir == filepath.VolumeName(dir)+string(filepath.Separator))
+ if dir == "." || isRootDir {
+ return "", fmt.Errorf("did not found %q file", lookingForFile)
+ }
+
+ lookingForFilePath := filepath.Join(dir, lookingForFile)
+ info, err := os.Stat(lookingForFilePath)
+ if err != nil || info.IsDir() {
+ // lets check the parent dir
+ dir = filepath.Dir(dir)
+ continue
+ }
+
+ return lookingForFilePath, nil
+ }
}
// isGenerated reports whether the source file is generated code
@@ -115,7 +210,7 @@ func isGenerated(src []byte) bool {
sc := bufio.NewScanner(bytes.NewReader(src))
for sc.Scan() {
b := sc.Bytes()
- if bytes.HasPrefix(b, genHdr) && bytes.HasSuffix(b, genFtr) && len(b) >= len(genHdr)+len(genFtr) {
+ if bytes.HasPrefix(b, generatedPrefix) && bytes.HasSuffix(b, generatedSuffix) && len(b) >= len(generatedPrefix)+len(generatedSuffix) {
return true
}
}
@@ -128,7 +223,7 @@ func addInvalidFileFailure(filename, errStr string, failures chan Failure) {
failures <- Failure{
Confidence: 1,
Failure: fmt.Sprintf("invalid file %s: %v", filename, errStr),
- Category: "validity",
+ Category: failureCategoryValidity,
Position: position,
}
}
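
Lint now resolves a Go language version per package: retrieveModFile walks from the package directory up to the filesystem root looking for go.mod, detectGoMod parses it laxly with golang.org/x/mod/modfile to read the go directive, and results are cached per module; when no go.mod is found the package falls back to defaultGoVersion (1.0). Both helpers are unexported, so the sketch below reimplements the same walk for illustration:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"

        "golang.org/x/mod/modfile"
    )

    // findGoVersion mirrors revive's retrieveModFile/detectGoMod pair: walk up
    // from dir until a go.mod is found, then return its go directive.
    func findGoVersion(dir string) (string, error) {
        for {
            isRoot := dir == filepath.VolumeName(dir)+string(filepath.Separator)
            if dir == "." || isRoot {
                return "", fmt.Errorf("no go.mod found above %q", dir)
            }

            modPath := filepath.Join(dir, "go.mod")
            if info, err := os.Stat(modPath); err == nil && !info.IsDir() {
                data, err := os.ReadFile(modPath)
                if err != nil {
                    return "", err
                }
                mod, err := modfile.ParseLax(modPath, data, nil)
                if err != nil {
                    return "", err
                }
                if mod.Go == nil {
                    return "", fmt.Errorf("%q does not specify a Go version", modPath)
                }
                return mod.Go.Version, nil
            }

            dir = filepath.Dir(dir) // not found here, try the parent directory
        }
    }

    func main() {
        wd, err := os.Getwd()
        if err != nil {
            panic(err)
        }
        fmt.Println(findGoVersion(wd))
    }
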
diff --git a/vendor/github.com/mgechev/revive/lint/utils.go b/vendor/github.com/mgechev/revive/lint/name.go
similarity index 100%
rename from vendor/github.com/mgechev/revive/lint/utils.go
rename to vendor/github.com/mgechev/revive/lint/name.go
diff --git a/vendor/github.com/mgechev/revive/lint/package.go b/vendor/github.com/mgechev/revive/lint/package.go
index 5976acf99c..dfc701f7e1 100644
--- a/vendor/github.com/mgechev/revive/lint/package.go
+++ b/vendor/github.com/mgechev/revive/lint/package.go
@@ -1,19 +1,25 @@
package lint
import (
+ "errors"
"go/ast"
"go/importer"
"go/token"
"go/types"
"sync"
+ goversion "github.com/hashicorp/go-version"
+ "golang.org/x/sync/errgroup"
+
+ "github.com/mgechev/revive/internal/astutils"
"github.com/mgechev/revive/internal/typeparams"
)
// Package represents a package in the project.
type Package struct {
- fset *token.FileSet
- files map[string]*File
+ fset *token.FileSet
+ files map[string]*File
+ goVersion *goversion.Version
typesPkg *types.Package
typesInfo *types.Info
@@ -28,7 +34,11 @@ type Package struct {
var (
trueValue = 1
falseValue = 2
- notSet = 3
+
+ go115 = goversion.Must(goversion.NewVersion("1.15"))
+ go121 = goversion.Must(goversion.NewVersion("1.21"))
+ go122 = goversion.Must(goversion.NewVersion("1.22"))
+ go124 = goversion.Must(goversion.NewVersion("1.24"))
)
// Files return package's files.
@@ -82,21 +92,21 @@ func (p *Package) TypeCheck() error {
p.Lock()
defer p.Unlock()
- // If type checking has already been performed
- // skip it.
- if p.typesInfo != nil || p.typesPkg != nil {
+ alreadyTypeChecked := p.typesInfo != nil || p.typesPkg != nil
+ if alreadyTypeChecked {
return nil
}
+
config := &types.Config{
// By setting a no-op error reporter, the type checker does as much work as possible.
Error: func(error) {},
Importer: importer.Default(),
}
info := &types.Info{
- Types: make(map[ast.Expr]types.TypeAndValue),
- Defs: make(map[*ast.Ident]types.Object),
- Uses: make(map[*ast.Ident]types.Object),
- Scopes: make(map[ast.Node]*types.Scope),
+ Types: map[ast.Expr]types.TypeAndValue{},
+ Defs: map[*ast.Ident]types.Object{},
+ Uses: map[*ast.Ident]types.Object{},
+ Scopes: map[ast.Node]*types.Scope{},
}
var anyFile *File
var astFiles []*ast.File
@@ -105,6 +115,11 @@ func (p *Package) TypeCheck() error {
astFiles = append(astFiles, f.AST)
}
+ if anyFile == nil {
+ // this is unlikely to happen, but technically guarantees anyFile to not be nil
+ return errors.New("no ast.File found")
+ }
+
typesPkg, err := check(config, anyFile.AST.Name.Name, p.fset, astFiles, info)
// Remember the typechecking info, even if config.Check failed,
@@ -129,7 +144,7 @@ func check(config *types.Config, n string, fset *token.FileSet, astFiles []*ast.
return config.Check(n, fset, astFiles, info)
}
-// TypeOf returns the type of an expression.
+// TypeOf returns the type of expression.
func (p *Package) TypeOf(expr ast.Expr) types.Type {
if p.typesInfo == nil {
return nil
@@ -137,54 +152,79 @@ func (p *Package) TypeOf(expr ast.Expr) types.Type {
return p.typesInfo.TypeOf(expr)
}
-type walker struct {
- nmap map[string]int
- has map[string]int
-}
+type sortableMethodsFlags int
-func (w *walker) Visit(n ast.Node) ast.Visitor {
- fn, ok := n.(*ast.FuncDecl)
- if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 {
- return w
- }
- // TODO(dsymonds): We could check the signature to be more precise.
- recv := typeparams.ReceiverType(fn)
- if i, ok := w.nmap[fn.Name.Name]; ok {
- w.has[recv] |= i
- }
- return w
-}
+// flags for sortable interface methods.
+const (
+ bfLen sortableMethodsFlags = 1 << iota
+ bfLess
+ bfSwap
+)
func (p *Package) scanSortable() {
- p.sortable = make(map[string]bool)
-
- // bitfield for which methods exist on each type.
- const (
- Len = 1 << iota
- Less
- Swap
- )
- nmap := map[string]int{"Len": Len, "Less": Less, "Swap": Swap}
- has := make(map[string]int)
+ sortableFlags := map[string]sortableMethodsFlags{}
for _, f := range p.files {
- ast.Walk(&walker{nmap, has}, f.AST)
+ for _, decl := range f.AST.Decls {
+ fn, ok := decl.(*ast.FuncDecl)
+ isAMethodDeclaration := ok && fn.Recv != nil && len(fn.Recv.List) != 0
+ if !isAMethodDeclaration {
+ continue
+ }
+
+ recvType := typeparams.ReceiverType(fn)
+ sortableFlags[recvType] |= getSortableMethodFlagForFunction(fn)
+ }
}
- for typ, ms := range has {
- if ms == Len|Less|Swap {
+
+ p.sortable = make(map[string]bool, len(sortableFlags))
+ for typ, ms := range sortableFlags {
+ if ms == bfLen|bfLess|bfSwap {
p.sortable[typ] = true
}
}
}
-func (p *Package) lint(rules []Rule, config Config, failures chan Failure) {
+func (p *Package) lint(rules []Rule, config Config, failures chan Failure) error {
p.scanSortable()
- var wg sync.WaitGroup
+ var eg errgroup.Group
for _, file := range p.files {
- wg.Add(1)
- go (func(file *File) {
- file.lint(rules, config, failures)
- defer wg.Done()
- })(file)
+ eg.Go(func() error {
+ return file.lint(rules, config, failures)
+ })
+ }
+
+ return eg.Wait()
+}
+
+// IsAtLeastGo115 returns true if the Go version for this package is 1.15 or higher, false otherwise
+func (p *Package) IsAtLeastGo115() bool {
+ return p.goVersion.GreaterThanOrEqual(go115)
+}
+
+// IsAtLeastGo121 returns true if the Go version for this package is 1.21 or higher, false otherwise
+func (p *Package) IsAtLeastGo121() bool {
+ return p.goVersion.GreaterThanOrEqual(go121)
+}
+
+// IsAtLeastGo122 returns true if the Go version for this package is 1.22 or higher, false otherwise
+func (p *Package) IsAtLeastGo122() bool {
+ return p.goVersion.GreaterThanOrEqual(go122)
+}
+
+// IsAtLeastGo124 returns true if the Go version for this package is 1.24 or higher, false otherwise
+func (p *Package) IsAtLeastGo124() bool {
+ return p.goVersion.GreaterThanOrEqual(go124)
+}
+
+func getSortableMethodFlagForFunction(fn *ast.FuncDecl) sortableMethodsFlags {
+ switch {
+ case astutils.FuncSignatureIs(fn, "Len", []string{}, []string{"int"}):
+ return bfLen
+ case astutils.FuncSignatureIs(fn, "Less", []string{"int", "int"}, []string{"bool"}):
+ return bfLess
+ case astutils.FuncSignatureIs(fn, "Swap", []string{"int", "int"}, []string{}):
+ return bfSwap
+ default:
+ return 0
}
- wg.Wait()
}
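
Package.lint (above) and Linter.Lint (in linter.go) replace sync.WaitGroup plus os.Exit with golang.org/x/sync/errgroup, so the first error returned by any per-file goroutine is propagated to the caller instead of killing the process. The pattern in isolation, with illustrative file names:

    package main

    import (
        "fmt"

        "golang.org/x/sync/errgroup"
    )

    func main() {
        files := []string{"a.go", "b.go", "c.go"}

        var eg errgroup.Group
        for _, f := range files {
            f := f // explicit capture, safe on any Go version
            eg.Go(func() error {
                // Lint one file; returning an error aborts the whole batch.
                fmt.Println("linting", f)
                return nil
            })
        }

        if err := eg.Wait(); err != nil {
            fmt.Println("error during linting:", err)
        }
    }
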
diff --git a/vendor/github.com/mgechev/revive/lint/rule.go b/vendor/github.com/mgechev/revive/lint/rule.go
index ccc66691c6..cc424e96a7 100644
--- a/vendor/github.com/mgechev/revive/lint/rule.go
+++ b/vendor/github.com/mgechev/revive/lint/rule.go
@@ -17,9 +17,9 @@ type Rule interface {
Apply(*File, Arguments) []Failure
}
-// AbstractRule defines an abstract rule.
-type AbstractRule struct {
- Failures []Failure
+// ConfigurableRule defines an abstract configurable rule interface.
+type ConfigurableRule interface {
+ Configure(Arguments) error
}
// ToFailurePosition returns the failure position.
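
The exported AbstractRule helper is gone; rules that take configuration now implement ConfigurableRule, and GetLintingRules (config.go above) calls Configure exactly once before linting, so Apply no longer has to parse its arguments on every file. A sketch of a hypothetical rule, not part of revive, wired against the two interfaces:

    package main

    import (
        "errors"
        "fmt"

        "github.com/mgechev/revive/lint"
    )

    // maxTodosRule is illustrative only; it shows the Rule + ConfigurableRule pairing.
    type maxTodosRule struct {
        max int
    }

    func (*maxTodosRule) Name() string { return "max-todos" }

    // Configure implements lint.ConfigurableRule and runs once, before linting.
    func (r *maxTodosRule) Configure(args lint.Arguments) error {
        if len(args) == 0 {
            r.max = 10
            return nil
        }
        n, ok := args[0].(int64) // TOML integers arrive as int64
        if !ok {
            return errors.New("max-todos expects an integer argument")
        }
        r.max = int(n)
        return nil
    }

    // Apply implements lint.Rule; its Arguments are ignored because Configure already ran.
    func (r *maxTodosRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
        // ...walk file.AST and collect failures here...
        return nil
    }

    func main() {
        var r lint.Rule = &maxTodosRule{}
        if cr, ok := r.(lint.ConfigurableRule); ok {
            if err := cr.Configure(lint.Arguments{int64(3)}); err != nil {
                fmt.Println("cannot configure rule:", err)
                return
            }
        }
        fmt.Println(r.Name()) // max-todos
    }
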
diff --git a/vendor/github.com/mgechev/revive/rule/add-constant.go b/vendor/github.com/mgechev/revive/rule/add_constant.go
similarity index 57%
rename from vendor/github.com/mgechev/revive/rule/add-constant.go
rename to vendor/github.com/mgechev/revive/rule/add_constant.go
index 86182623a9..c58c369e62 100644
--- a/vendor/github.com/mgechev/revive/rule/add-constant.go
+++ b/vendor/github.com/mgechev/revive/rule/add_constant.go
@@ -1,12 +1,12 @@
package rule
import (
+ "errors"
"fmt"
"go/ast"
"regexp"
"strconv"
"strings"
- "sync"
"github.com/mgechev/revive/lint"
)
@@ -31,18 +31,15 @@ func (wl allowList) add(kind, list string) {
}
}
-// AddConstantRule lints unused params in functions.
+// AddConstantRule suggests using constants instead of magic numbers and string literals.
type AddConstantRule struct {
allowList allowList
ignoreFunctions []*regexp.Regexp
strLitLimit int
- sync.Mutex
}
// Apply applies the rule to given file.
-func (r *AddConstantRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
+func (r *AddConstantRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
onFailure := func(failure lint.Failure) {
@@ -51,11 +48,11 @@ func (r *AddConstantRule) Apply(file *lint.File, arguments lint.Arguments) []lin
w := &lintAddConstantRule{
onFailure: onFailure,
- strLits: make(map[string]int),
+ strLits: map[string]int{},
strLitLimit: r.strLitLimit,
- allowList: r.allowList,
+ allowList: r.allowList,
ignoreFunctions: r.ignoreFunctions,
- structTags: make(map[*ast.BasicLit]struct{}),
+ structTags: map[*ast.BasicLit]struct{}{},
}
ast.Walk(w, file.AST)
@@ -72,7 +69,7 @@ type lintAddConstantRule struct {
onFailure func(lint.Failure)
strLits map[string]int
strLitLimit int
- allowList allowList
+ allowList allowList
ignoreFunctions []*regexp.Regexp
structTags map[*ast.BasicLit]struct{}
}
@@ -127,6 +124,11 @@ func (*lintAddConstantRule) getFuncName(expr *ast.CallExpr) string {
switch prefix := f.X.(type) {
case *ast.Ident:
return prefix.Name + "." + f.Sel.Name
+ case *ast.CallExpr:
+ // If the selector is an CallExpr, like `fn().Info`, we return `.Info` as function name
+ if f.Sel != nil {
+ return "." + f.Sel.Name
+ }
}
case *ast.Ident:
return f.Name
@@ -155,18 +157,21 @@ func (w *lintAddConstantRule) isIgnoredFunc(fName string) bool {
}
func (w *lintAddConstantRule) checkStrLit(n *ast.BasicLit) {
+ const ignoreMarker = -1
+
if w.allowList[kindSTRING][n.Value] {
return
}
count := w.strLits[n.Value]
- if count >= 0 {
+ mustCheck := count > ignoreMarker
+ if mustCheck {
w.strLits[n.Value] = count + 1
if w.strLits[n.Value] > w.strLitLimit {
w.onFailure(lint.Failure{
Confidence: 1,
Node: n,
- Category: "style",
+ Category: lint.FailureCategoryStyle,
Failure: fmt.Sprintf("string literal %s appears, at least, %d times, create a named constant for it", n.Value, w.strLits[n.Value]),
})
w.strLits[n.Value] = -1 // mark it to avoid failing again on the same literal
@@ -182,7 +187,7 @@ func (w *lintAddConstantRule) checkNumLit(kind string, n *ast.BasicLit) {
w.onFailure(lint.Failure{
Confidence: 1,
Node: n,
- Category: "style",
+ Category: lint.FailureCategoryStyle,
Failure: fmt.Sprintf("avoid magic numbers like '%s', create a named constant for it", n.Value),
})
}
@@ -192,70 +197,71 @@ func (w *lintAddConstantRule) isStructTag(n *ast.BasicLit) bool {
return ok
}
-func (r *AddConstantRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *AddConstantRule) Configure(arguments lint.Arguments) error {
+ r.strLitLimit = defaultStrLitLimit
+ r.allowList = newAllowList()
+ if len(arguments) == 0 {
+ return nil
+ }
+ args, ok := arguments[0].(map[string]any)
+ if !ok {
+ return fmt.Errorf("invalid argument to the add-constant rule, expecting a k,v map. Got %T", arguments[0])
+ }
+ for k, v := range args {
+ kind := ""
+ switch k {
+ case "allowFloats":
+ kind = kindFLOAT
+ fallthrough
+ case "allowInts":
+ if kind == "" {
+ kind = kindINT
+ }
+ fallthrough
+ case "allowStrs":
+ if kind == "" {
+ kind = kindSTRING
+ }
+ list, ok := v.(string)
+ if !ok {
+ return fmt.Errorf("invalid argument to the add-constant rule, string expected. Got '%v' (%T)", v, v)
+ }
+ r.allowList.add(kind, list)
+ case "maxLitCount":
+ sl, ok := v.(string)
+ if !ok {
+ return fmt.Errorf("invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v' (%T)", v, v)
+ }
- if r.allowList == nil {
- r.strLitLimit = defaultStrLitLimit
- r.allowList = newAllowList()
- if len(arguments) > 0 {
- args, ok := arguments[0].(map[string]any)
+ limit, err := strconv.Atoi(sl)
+ if err != nil {
+ return fmt.Errorf("invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v'", v)
+ }
+ r.strLitLimit = limit
+ case "ignoreFuncs":
+ excludes, ok := v.(string)
if !ok {
- panic(fmt.Sprintf("Invalid argument to the add-constant rule. Expecting a k,v map, got %T", arguments[0]))
+ return fmt.Errorf("invalid argument to the ignoreFuncs parameter of add-constant rule, string expected. Got '%v' (%T)", v, v)
}
- for k, v := range args {
- kind := ""
- switch k {
- case "allowFloats":
- kind = kindFLOAT
- fallthrough
- case "allowInts":
- if kind == "" {
- kind = kindINT
- }
- fallthrough
- case "allowStrs":
- if kind == "" {
- kind = kindSTRING
- }
- list, ok := v.(string)
- if !ok {
- panic(fmt.Sprintf("Invalid argument to the add-constant rule, string expected. Got '%v' (%T)", v, v))
- }
- r.allowList.add(kind, list)
- case "maxLitCount":
- sl, ok := v.(string)
- if !ok {
- panic(fmt.Sprintf("Invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v' (%T)", v, v))
- }
-
- limit, err := strconv.Atoi(sl)
- if err != nil {
- panic(fmt.Sprintf("Invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v'", v))
- }
- r.strLitLimit = limit
- case "ignoreFuncs":
- excludes, ok := v.(string)
- if !ok {
- panic(fmt.Sprintf("Invalid argument to the ignoreFuncs parameter of add-constant rule, string expected. Got '%v' (%T)", v, v))
- }
-
- for _, exclude := range strings.Split(excludes, ",") {
- exclude = strings.Trim(exclude, " ")
- if exclude == "" {
- panic("Invalid argument to the ignoreFuncs parameter of add-constant rule, expected regular expression must not be empty.")
- }
-
- exp, err := regexp.Compile(exclude)
- if err != nil {
- panic(fmt.Sprintf("Invalid argument to the ignoreFuncs parameter of add-constant rule: regexp %q does not compile: %v", exclude, err))
- }
-
- r.ignoreFunctions = append(r.ignoreFunctions, exp)
- }
+
+ for _, exclude := range strings.Split(excludes, ",") {
+ exclude = strings.Trim(exclude, " ")
+ if exclude == "" {
+ return errors.New("invalid argument to the ignoreFuncs parameter of add-constant rule, expected regular expression must not be empty")
+ }
+
+ exp, err := regexp.Compile(exclude)
+ if err != nil {
+ return fmt.Errorf("invalid argument to the ignoreFuncs parameter of add-constant rule: regexp %q does not compile: %w", exclude, err)
}
+
+ r.ignoreFunctions = append(r.ignoreFunctions, exp)
}
}
}
+
+ return nil
}
diff --git a/vendor/github.com/mgechev/revive/rule/argument-limit.go b/vendor/github.com/mgechev/revive/rule/argument-limit.go
deleted file mode 100644
index 8120288fd5..0000000000
--- a/vendor/github.com/mgechev/revive/rule/argument-limit.go
+++ /dev/null
@@ -1,84 +0,0 @@
-package rule
-
-import (
- "fmt"
- "go/ast"
- "sync"
-
- "github.com/mgechev/revive/lint"
-)
-
-// ArgumentsLimitRule lints given else constructs.
-type ArgumentsLimitRule struct {
- total int
- sync.Mutex
-}
-
-const defaultArgumentsLimit = 8
-
-func (r *ArgumentsLimitRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
- if r.total == 0 {
- if len(arguments) < 1 {
- r.total = defaultArgumentsLimit
- return
- }
-
- total, ok := arguments[0].(int64) // Alt. non panicking version
- if !ok {
- panic(`invalid value passed as argument number to the "argument-limit" rule`)
- }
- r.total = int(total)
- }
-}
-
-// Apply applies the rule to given file.
-func (r *ArgumentsLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
- var failures []lint.Failure
- onFailure := func(failure lint.Failure) {
- failures = append(failures, failure)
- }
-
- walker := lintArgsNum{
- total: r.total,
- onFailure: onFailure,
- }
-
- ast.Walk(walker, file.AST)
-
- return failures
-}
-
-// Name returns the rule name.
-func (*ArgumentsLimitRule) Name() string {
- return "argument-limit"
-}
-
-type lintArgsNum struct {
- total int
- onFailure func(lint.Failure)
-}
-
-func (w lintArgsNum) Visit(n ast.Node) ast.Visitor {
- node, ok := n.(*ast.FuncDecl)
- if ok {
- num := 0
- for _, l := range node.Type.Params.List {
- for range l.Names {
- num++
- }
- }
- if num > w.total {
- w.onFailure(lint.Failure{
- Confidence: 1,
- Failure: fmt.Sprintf("maximum number of arguments per function exceeded; max %d but got %d", w.total, num),
- Node: node.Type,
- })
- return w
- }
- }
- return w
-}
diff --git a/vendor/github.com/mgechev/revive/rule/argument_limit.go b/vendor/github.com/mgechev/revive/rule/argument_limit.go
new file mode 100644
index 0000000000..7fd6a382d0
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/argument_limit.go
@@ -0,0 +1,67 @@
+package rule
+
+import (
+ "errors"
+ "fmt"
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ArgumentsLimitRule lints the number of arguments a function can receive.
+type ArgumentsLimitRule struct {
+ max int
+}
+
+const defaultArgumentsLimit = 8
+
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *ArgumentsLimitRule) Configure(arguments lint.Arguments) error {
+ if len(arguments) < 1 {
+ r.max = defaultArgumentsLimit
+ return nil
+ }
+
+ maxArguments, ok := arguments[0].(int64) // Alt. non panicking version
+ if !ok {
+ return errors.New(`invalid value passed as argument number to the "argument-limit" rule`)
+ }
+ r.max = int(maxArguments)
+ return nil
+}
+
+// Apply applies the rule to given file.
+func (r *ArgumentsLimitRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ for _, decl := range file.AST.Decls {
+ funcDecl, ok := decl.(*ast.FuncDecl)
+ if !ok {
+ continue
+ }
+
+ numParams := 0
+ for _, l := range funcDecl.Type.Params.List {
+ numParams += len(l.Names)
+ }
+
+ if numParams <= r.max {
+ continue
+ }
+
+ failures = append(failures, lint.Failure{
+ Confidence: 1,
+ Failure: fmt.Sprintf("maximum number of arguments per function exceeded; max %d but got %d", r.max, numParams),
+ Node: funcDecl.Type,
+ })
+ }
+
+ return failures
+}
+
+// Name returns the rule name.
+func (*ArgumentsLimitRule) Name() string {
+ return "argument-limit"
+}
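
The rewritten argument-limit keeps the default maximum of 8 and reports a misconfigured argument as an error instead of panicking; Apply now ranges over the file's top-level declarations rather than walking the whole AST. A function it would flag (illustration only, names made up):

    package demo

    // Nine parameters, one over the default limit of eight.
    func connect(host string, port int, user, password, database, schema, table, column, hint string) error {
        // ...
        return nil
    }
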
diff --git a/vendor/github.com/mgechev/revive/rule/atomic.go b/vendor/github.com/mgechev/revive/rule/atomic.go
index 287b28c213..61219765f5 100644
--- a/vendor/github.com/mgechev/revive/rule/atomic.go
+++ b/vendor/github.com/mgechev/revive/rule/atomic.go
@@ -8,7 +8,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// AtomicRule lints given else constructs.
+// AtomicRule lints usages of the `sync/atomic` package.
type AtomicRule struct{}
// Apply applies the rule to given file.
diff --git a/vendor/github.com/mgechev/revive/rule/banned-characters.go b/vendor/github.com/mgechev/revive/rule/banned_characters.go
similarity index 67%
rename from vendor/github.com/mgechev/revive/rule/banned-characters.go
rename to vendor/github.com/mgechev/revive/rule/banned_characters.go
index 12997bae11..7eb026b03f 100644
--- a/vendor/github.com/mgechev/revive/rule/banned-characters.go
+++ b/vendor/github.com/mgechev/revive/rule/banned_characters.go
@@ -4,7 +4,6 @@ import (
"fmt"
"go/ast"
"strings"
- "sync"
"github.com/mgechev/revive/lint"
)
@@ -12,24 +11,31 @@ import (
// BannedCharsRule checks if a file contains banned characters.
type BannedCharsRule struct {
bannedCharList []string
- sync.Mutex
}
const bannedCharsRuleName = "banned-characters"
-func (r *BannedCharsRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
- if r.bannedCharList == nil && len(arguments) > 0 {
- checkNumberOfArguments(1, arguments, bannedCharsRuleName)
- r.bannedCharList = r.getBannedCharsList(arguments)
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *BannedCharsRule) Configure(arguments lint.Arguments) error {
+ if len(arguments) > 0 {
+ err := checkNumberOfArguments(1, arguments, bannedCharsRuleName)
+ if err != nil {
+ return err
+ }
+ list, err := r.getBannedCharsList(arguments)
+ if err != nil {
+ return err
+ }
+
+ r.bannedCharList = list
}
+ return nil
}
// Apply applied the rule to the given file.
-func (r *BannedCharsRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
+func (r *BannedCharsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
onFailure := func(failure lint.Failure) {
failures = append(failures, failure)
@@ -50,17 +56,17 @@ func (*BannedCharsRule) Name() string {
}
// getBannedCharsList converts arguments into the banned characters list
-func (r *BannedCharsRule) getBannedCharsList(args lint.Arguments) []string {
+func (r *BannedCharsRule) getBannedCharsList(args lint.Arguments) ([]string, error) {
var bannedChars []string
for _, char := range args {
charStr, ok := char.(string)
if !ok {
- panic(fmt.Sprintf("Invalid argument for the %s rule: expecting a string, got %T", r.Name(), char))
+ return nil, fmt.Errorf("invalid argument for the %s rule: expecting a string, got %T", r.Name(), char)
}
bannedChars = append(bannedChars, charStr)
}
- return bannedChars
+ return bannedChars, nil
}
type lintBannedCharsRule struct {
diff --git a/vendor/github.com/mgechev/revive/rule/bare-return.go b/vendor/github.com/mgechev/revive/rule/bare_return.go
similarity index 89%
rename from vendor/github.com/mgechev/revive/rule/bare-return.go
rename to vendor/github.com/mgechev/revive/rule/bare_return.go
index 147fa84db6..c5a9441f67 100644
--- a/vendor/github.com/mgechev/revive/rule/bare-return.go
+++ b/vendor/github.com/mgechev/revive/rule/bare_return.go
@@ -6,7 +6,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// BareReturnRule lints given else constructs.
+// BareReturnRule lints bare returns.
type BareReturnRule struct{}
// Apply applies the rule to given file.
@@ -49,7 +49,7 @@ func (w lintBareReturnRule) checkFunc(results *ast.FieldList, body *ast.BlockStm
return // nothing to do
}
- brf := bareReturnFinder{w.onFailure}
+ brf := bareReturnFinder(w)
ast.Walk(brf, body)
}
@@ -60,8 +60,8 @@ type bareReturnFinder struct {
func (w bareReturnFinder) Visit(node ast.Node) ast.Visitor {
_, ok := node.(*ast.FuncLit)
if ok {
- // skip analysing function literals
- // they will be analysed by the lintBareReturnRule.Visit method
+ // skip analyzing function literals
+ // they will be analyzed by the lintBareReturnRule.Visit method
return nil
}
diff --git a/vendor/github.com/mgechev/revive/rule/blank-imports.go b/vendor/github.com/mgechev/revive/rule/blank_imports.go
similarity index 77%
rename from vendor/github.com/mgechev/revive/rule/blank-imports.go
rename to vendor/github.com/mgechev/revive/rule/blank_imports.go
index a3d50b4f7e..b3f7a3cdc2 100644
--- a/vendor/github.com/mgechev/revive/rule/blank-imports.go
+++ b/vendor/github.com/mgechev/revive/rule/blank_imports.go
@@ -7,7 +7,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// BlankImportsRule lints given else constructs.
+// BlankImportsRule lints blank imports.
type BlankImportsRule struct{}
// Name returns the rule name.
@@ -22,9 +22,7 @@ func (r *BlankImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failu
}
const (
- message = "a blank import should be only in a main or test package, or have a comment justifying it"
- category = "imports"
-
+ message = "a blank import should be only in a main or test package, or have a comment justifying it"
embedImportPath = `"embed"`
)
@@ -39,7 +37,8 @@ func (r *BlankImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failu
continue // Ignore non-blank imports.
}
- if i > 0 {
+ isNotFirstElement := i > 0
+ if isNotFirstElement {
prev := file.AST.Imports[i-1]
prevPos := file.ToPosition(prev.Pos())
@@ -55,7 +54,7 @@ func (r *BlankImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failu
// This is the first blank import of a group.
if imp.Doc == nil && imp.Comment == nil {
- failures = append(failures, lint.Failure{Failure: message, Category: category, Node: imp, Confidence: 1})
+ failures = append(failures, lint.Failure{Failure: message, Category: lint.FailureCategoryImports, Node: imp, Confidence: 1})
}
}
@@ -73,3 +72,7 @@ func (*BlankImportsRule) fileHasValidEmbedComment(fileAst *ast.File) bool {
return false
}
+
+// isBlank returns whether id is the blank identifier "_".
+// If id == nil, the answer is false.
+func isBlank(id *ast.Ident) bool { return id != nil && id.Name == "_" }
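
Apart from the typed lint.FailureCategoryImports and the isBlank helper landing in this file, blank-imports behaves as before: outside main and test packages a blank import needs a doc or inline comment justifying it. In the illustrative file below (not from the patch), the png import carries a doc comment and is accepted, while the comment-less jpeg import would be reported.

    package demo

    import (
        // Registers the PNG decoder for image.Decode.
        _ "image/png"

        _ "image/jpeg"
    )
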
diff --git a/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go b/vendor/github.com/mgechev/revive/rule/bool_literal_in_expr.go
similarity index 68%
rename from vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go
rename to vendor/github.com/mgechev/revive/rule/bool_literal_in_expr.go
index d6150339b9..dd1e9be875 100644
--- a/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go
+++ b/vendor/github.com/mgechev/revive/rule/bool_literal_in_expr.go
@@ -45,7 +45,6 @@ func (w *lintBoolLiteral) Visit(node ast.Node) ast.Visitor {
lexeme, ok := isExprABooleanLit(n.X)
if !ok {
lexeme, ok = isExprABooleanLit(n.Y)
-
if !ok {
return w
}
@@ -54,16 +53,16 @@ func (w *lintBoolLiteral) Visit(node ast.Node) ast.Visitor {
isConstant := (n.Op == token.LAND && lexeme == "false") || (n.Op == token.LOR && lexeme == "true")
if isConstant {
- w.addFailure(n, "Boolean expression seems to always evaluate to "+lexeme, "logic")
+ w.addFailure(n, "Boolean expression seems to always evaluate to "+lexeme, lint.FailureCategoryLogic)
} else {
- w.addFailure(n, "omit Boolean literal in expression", "style")
+ w.addFailure(n, "omit Boolean literal in expression", lint.FailureCategoryStyle)
}
}
return w
}
-func (w lintBoolLiteral) addFailure(node ast.Node, msg, cat string) {
+func (w lintBoolLiteral) addFailure(node ast.Node, msg string, cat lint.FailureCategory) {
w.onFailure(lint.Failure{
Confidence: 1,
Node: node,
@@ -71,3 +70,23 @@ func (w lintBoolLiteral) addFailure(node ast.Node, msg, cat string) {
Failure: msg,
})
}
+
+// isBoolOp returns true if the given token corresponds to a bool operator.
+func isBoolOp(t token.Token) bool {
+ switch t {
+ case token.LAND, token.LOR, token.EQL, token.NEQ:
+ return true
+ }
+
+ return false
+}
+
+func isExprABooleanLit(n ast.Node) (lexeme string, ok bool) {
+ oper, ok := n.(*ast.Ident)
+
+ if !ok {
+ return "", false
+ }
+
+ return oper.Name, oper.Name == "true" || oper.Name == "false"
+}
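
isBoolOp and isExprABooleanLit move into this file and the categories become typed constants; the two failure kinds themselves are unchanged. A sketch of both:

    package demo

    func choose(ok, ready bool) bool {
        if ok == true { // style: omit the Boolean literal (write "if ok")
            return ready
        }
        return ready && false // logic: expression seems to always evaluate to false
    }
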
diff --git a/vendor/github.com/mgechev/revive/rule/call-to-gc.go b/vendor/github.com/mgechev/revive/rule/call_to_gc.go
similarity index 96%
rename from vendor/github.com/mgechev/revive/rule/call-to-gc.go
rename to vendor/github.com/mgechev/revive/rule/call_to_gc.go
index 9c68380a43..c3eb1bb71d 100644
--- a/vendor/github.com/mgechev/revive/rule/call-to-gc.go
+++ b/vendor/github.com/mgechev/revive/rule/call_to_gc.go
@@ -62,7 +62,7 @@ func (w lintCallToGC) Visit(node ast.Node) ast.Visitor {
w.onFailure(lint.Failure{
Confidence: 1,
Node: node,
- Category: "bad practice",
+ Category: lint.FailureCategoryBadPractice,
Failure: "explicit call to the garbage collector",
})
diff --git a/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go b/vendor/github.com/mgechev/revive/rule/cognitive_complexity.go
similarity index 74%
rename from vendor/github.com/mgechev/revive/rule/cognitive-complexity.go
rename to vendor/github.com/mgechev/revive/rule/cognitive_complexity.go
index 1973faef87..0c95c544fc 100644
--- a/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go
+++ b/vendor/github.com/mgechev/revive/rule/cognitive_complexity.go
@@ -4,42 +4,38 @@ import (
"fmt"
"go/ast"
"go/token"
- "sync"
"github.com/mgechev/revive/lint"
"golang.org/x/tools/go/ast/astutil"
)
-// CognitiveComplexityRule lints given else constructs.
+// CognitiveComplexityRule sets restriction for maximum cognitive complexity.
type CognitiveComplexityRule struct {
maxComplexity int
- sync.Mutex
}
const defaultMaxCognitiveComplexity = 7
-func (r *CognitiveComplexityRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
- if r.maxComplexity == 0 {
-
- if len(arguments) < 1 {
- r.maxComplexity = defaultMaxCognitiveComplexity
- return
- }
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *CognitiveComplexityRule) Configure(arguments lint.Arguments) error {
+ if len(arguments) < 1 {
+ r.maxComplexity = defaultMaxCognitiveComplexity
+ return nil
+ }
- complexity, ok := arguments[0].(int64)
- if !ok {
- panic(fmt.Sprintf("invalid argument type for cognitive-complexity, expected int64, got %T", arguments[0]))
- }
- r.maxComplexity = int(complexity)
+ complexity, ok := arguments[0].(int64)
+ if !ok {
+ return fmt.Errorf("invalid argument type for cognitive-complexity, expected int64, got %T", arguments[0])
}
+
+ r.maxComplexity = int(complexity)
+ return nil
}
// Apply applies the rule to given file.
-func (r *CognitiveComplexityRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
+func (r *CognitiveComplexityRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
linter := cognitiveComplexityLinter{
@@ -70,12 +66,14 @@ func (w cognitiveComplexityLinter) lintCognitiveComplexity() {
f := w.file
for _, decl := range f.AST.Decls {
if fn, ok := decl.(*ast.FuncDecl); ok && fn.Body != nil {
- v := cognitiveComplexityVisitor{}
+ v := cognitiveComplexityVisitor{
+ name: fn.Name,
+ }
c := v.subTreeComplexity(fn.Body)
if c > w.maxComplexity {
w.onFailure(lint.Failure{
Confidence: 1,
- Category: "maintenance",
+ Category: lint.FailureCategoryMaintenance,
Failure: fmt.Sprintf("function %s has cognitive complexity %d (> max enabled %d)", funcName(fn), c, w.maxComplexity),
Node: fn,
})
@@ -85,13 +83,14 @@ func (w cognitiveComplexityLinter) lintCognitiveComplexity() {
}
type cognitiveComplexityVisitor struct {
+ name *ast.Ident
complexity int
nestingLevel int
}
// subTreeComplexity calculates the cognitive complexity of an AST-subtree.
-func (v cognitiveComplexityVisitor) subTreeComplexity(n ast.Node) int {
- ast.Walk(&v, n)
+func (v *cognitiveComplexityVisitor) subTreeComplexity(n ast.Node) int {
+ ast.Walk(v, n)
return v.complexity
}
@@ -123,13 +122,20 @@ func (v *cognitiveComplexityVisitor) Visit(n ast.Node) ast.Visitor {
return nil
case *ast.BinaryExpr:
v.complexity += v.binExpComplexity(n)
- return nil // skip visiting binexp sub-tree (already visited by binExpComplexity)
+ return nil // skip visiting binexp subtree (already visited by binExpComplexity)
case *ast.BranchStmt:
if n.Label != nil {
v.complexity++
}
+ case *ast.CallExpr:
+ if ident, ok := n.Fun.(*ast.Ident); ok {
+ if ident.Obj == v.name.Obj && ident.Name == v.name.Name {
+ // called by same function directly (direct recursion)
+ v.complexity++
+ return nil
+ }
+ }
}
- // TODO handle (at least) direct recursion
return v
}
@@ -150,7 +156,7 @@ func (v *cognitiveComplexityVisitor) walk(complexityIncrement int, targets ...as
v.nestingLevel = nesting
}
-func (cognitiveComplexityVisitor) binExpComplexity(n *ast.BinaryExpr) int {
+func (*cognitiveComplexityVisitor) binExpComplexity(n *ast.BinaryExpr) int {
calculator := binExprComplexityCalculator{opsStack: []token.Token{}}
astutil.Apply(n, calculator.pre, calculator.post)
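
The visitor now carries the enclosing function's identifier, so direct recursion contributes to the score and the old TODO is resolved. A minimal sketch of the new accounting:

    package demo

    // Scores 2 under cognitive-complexity: +1 for the if statement and
    // +1 for the direct recursive call, which is now counted.
    func countdown(n int) {
        if n <= 0 {
            return
        }
        countdown(n - 1)
    }
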
diff --git a/vendor/github.com/mgechev/revive/rule/comment-spacings.go b/vendor/github.com/mgechev/revive/rule/comment_spacings.go
similarity index 61%
rename from vendor/github.com/mgechev/revive/rule/comment-spacings.go
rename to vendor/github.com/mgechev/revive/rule/comment_spacings.go
index 2b8240ca58..5187bb2183 100644
--- a/vendor/github.com/mgechev/revive/rule/comment-spacings.go
+++ b/vendor/github.com/mgechev/revive/rule/comment_spacings.go
@@ -3,43 +3,33 @@ package rule
import (
"fmt"
"strings"
- "sync"
"github.com/mgechev/revive/lint"
)
-// CommentSpacingsRule check the whether there is a space between
+// CommentSpacingsRule check whether there is a space between
// the comment symbol( // ) and the start of the comment text
type CommentSpacingsRule struct {
allowList []string
- sync.Mutex
}
-func (r *CommentSpacingsRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
-
- if r.allowList == nil {
- r.allowList = []string{
- "//go:",
- "//revive:",
- "//nolint:",
- }
-
- for _, arg := range arguments {
- allow, ok := arg.(string) // Alt. non panicking version
- if !ok {
- panic(fmt.Sprintf("invalid argument %v for %s; expected string but got %T", arg, r.Name(), arg))
- }
- r.allowList = append(r.allowList, `//`+allow+`:`)
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *CommentSpacingsRule) Configure(arguments lint.Arguments) error {
+ r.allowList = []string{}
+ for _, arg := range arguments {
+ allow, ok := arg.(string) // Alt. non panicking version
+ if !ok {
+ return fmt.Errorf("invalid argument %v for %s; expected string but got %T", arg, r.Name(), arg)
}
+ r.allowList = append(r.allowList, `//`+allow)
}
+ return nil
}
// Apply the rule.
-func (r *CommentSpacingsRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
- r.configure(args)
-
+func (r *CommentSpacingsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
for _, cg := range file.AST.Comments {
@@ -67,7 +57,7 @@ func (r *CommentSpacingsRule) Apply(file *lint.File, args lint.Arguments) []lint
failures = append(failures, lint.Failure{
Node: comment,
Confidence: 1,
- Category: "style",
+ Category: lint.FailureCategoryStyle,
Failure: "no space between comment delimiter and comment text",
})
}
@@ -87,5 +77,5 @@ func (r *CommentSpacingsRule) isAllowed(line string) bool {
}
}
- return false
+ return isDirectiveComment(line)
}
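
Configure no longer seeds the allow list with //go:, //revive: and //nolint:, configured entries are prefixed with // only (the trailing colon is gone), and isAllowed falls back to isDirectiveComment, which presumably keeps directives like the //go:generate line below out of the failures. Roughly, as an illustration:

    package demo

    //mind the gap: reported, no space after the comment markers
    // accepted: space after the markers
    //go:generate echo ok
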
diff --git a/vendor/github.com/mgechev/revive/rule/comments_density.go b/vendor/github.com/mgechev/revive/rule/comments_density.go
new file mode 100644
index 0000000000..e83c20add9
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/comments_density.go
@@ -0,0 +1,86 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// CommentsDensityRule enforces a minimum comment / code relation.
+type CommentsDensityRule struct {
+ minimumCommentsDensity int64
+}
+
+const defaultMinimumCommentsPercentage = 0
+
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *CommentsDensityRule) Configure(arguments lint.Arguments) error {
+ if len(arguments) < 1 {
+ r.minimumCommentsDensity = defaultMinimumCommentsPercentage
+ return nil
+ }
+
+ var ok bool
+ r.minimumCommentsDensity, ok = arguments[0].(int64)
+ if !ok {
+ return fmt.Errorf("invalid argument for %q rule: argument should be an int, got %T", r.Name(), arguments[0])
+ }
+ return nil
+}
+
+// Apply applies the rule to given file.
+func (r *CommentsDensityRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ commentsLines := countDocLines(file.AST.Comments)
+ statementsCount := countStatements(file.AST)
+ density := (float32(commentsLines) / float32(statementsCount+commentsLines)) * 100
+
+ if density < float32(r.minimumCommentsDensity) {
+ return []lint.Failure{
+ {
+ Node: file.AST,
+ Confidence: 1,
+ Failure: fmt.Sprintf("the file has a comment density of %2.f%% (%d comment lines for %d code lines) but expected a minimum of %d%%",
+ density, commentsLines, statementsCount, r.minimumCommentsDensity),
+ },
+ }
+ }
+
+ return nil
+}
+
+// Name returns the rule name.
+func (*CommentsDensityRule) Name() string {
+ return "comments-density"
+}
+
+// countStatements counts the number of program statements in the given AST.
+func countStatements(node ast.Node) int {
+ counter := 0
+
+ ast.Inspect(node, func(n ast.Node) bool {
+ switch n.(type) {
+ case *ast.ExprStmt, *ast.AssignStmt, *ast.ReturnStmt, *ast.GoStmt, *ast.DeferStmt,
+ *ast.BranchStmt, *ast.IfStmt, *ast.SwitchStmt, *ast.TypeSwitchStmt,
+ *ast.SelectStmt, *ast.ForStmt, *ast.RangeStmt, *ast.CaseClause, *ast.CommClause,
+ *ast.DeclStmt, *ast.FuncDecl:
+ counter++
+ }
+ return true
+ })
+
+ return counter
+}
+
+func countDocLines(comments []*ast.CommentGroup) int {
+ acc := 0
+ for _, c := range comments {
+ lines := strings.Split(c.Text(), "\n")
+ acc += len(lines) - 1
+ }
+
+ return acc
+}
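
The new comments-density rule compares comment lines against a count of statements and declarations; the helper below mirrors the arithmetic in Apply (a sketch, same formula as above). For example, 5 comment lines against 20 counted statements gives 20%, which fails a configured minimum of 25.

    package demo

    // density returns comment lines as a percentage of comment lines plus
    // counted statements, matching CommentsDensityRule.Apply above.
    func density(commentLines, statements int) float32 {
        return float32(commentLines) / float32(statements+commentLines) * 100
    }
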
diff --git a/vendor/github.com/mgechev/revive/rule/confusing-results.go b/vendor/github.com/mgechev/revive/rule/confusing-results.go
deleted file mode 100644
index 1b79ada9c4..0000000000
--- a/vendor/github.com/mgechev/revive/rule/confusing-results.go
+++ /dev/null
@@ -1,66 +0,0 @@
-package rule
-
-import (
- "go/ast"
-
- "github.com/mgechev/revive/lint"
-)
-
-// ConfusingResultsRule lints given function declarations
-type ConfusingResultsRule struct{}
-
-// Apply applies the rule to given file.
-func (*ConfusingResultsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
- var failures []lint.Failure
-
- fileAst := file.AST
- walker := lintConfusingResults{
- onFailure: func(failure lint.Failure) {
- failures = append(failures, failure)
- },
- }
-
- ast.Walk(walker, fileAst)
-
- return failures
-}
-
-// Name returns the rule name.
-func (*ConfusingResultsRule) Name() string {
- return "confusing-results"
-}
-
-type lintConfusingResults struct {
- onFailure func(lint.Failure)
-}
-
-func (w lintConfusingResults) Visit(n ast.Node) ast.Visitor {
- fn, ok := n.(*ast.FuncDecl)
- if !ok || fn.Type.Results == nil || len(fn.Type.Results.List) < 2 {
- return w
- }
- lastType := ""
- for _, result := range fn.Type.Results.List {
- if len(result.Names) > 0 {
- return w
- }
-
- t, ok := result.Type.(*ast.Ident)
- if !ok {
- return w
- }
-
- if t.Name == lastType {
- w.onFailure(lint.Failure{
- Node: n,
- Confidence: 1,
- Category: "naming",
- Failure: "unnamed results of the same type may be confusing, consider using named results",
- })
- break
- }
- lastType = t.Name
- }
-
- return w
-}
diff --git a/vendor/github.com/mgechev/revive/rule/confusing-naming.go b/vendor/github.com/mgechev/revive/rule/confusing_naming.go
similarity index 86%
rename from vendor/github.com/mgechev/revive/rule/confusing-naming.go
rename to vendor/github.com/mgechev/revive/rule/confusing_naming.go
index febfd88245..8a8ea13f8e 100644
--- a/vendor/github.com/mgechev/revive/rule/confusing-naming.go
+++ b/vendor/github.com/mgechev/revive/rule/confusing_naming.go
@@ -35,7 +35,7 @@ func (ps *packages) methodNames(lp *lint.Package) pkgMethods {
}
}
- pkgm := pkgMethods{pkg: lp, methods: make(map[string]map[string]*referenceMethod), mu: &sync.Mutex{}}
+ pkgm := pkgMethods{pkg: lp, methods: map[string]map[string]*referenceMethod{}, mu: &sync.Mutex{}}
ps.pkgs = append(ps.pkgs, pkgm)
return pkgm
@@ -102,7 +102,7 @@ func checkMethodName(holder string, id *ast.Ident, w *lintConfusingNames) {
Failure: fmt.Sprintf("Method '%s' differs only by capitalization to %s '%s' in %s", id.Name, kind, refMethod.id.Name, fileName),
Confidence: 1,
Node: id,
- Category: "naming",
+ Category: lint.FailureCategoryNaming,
})
return
@@ -138,16 +138,32 @@ func getStructName(r *ast.FieldList) string {
switch v := t.(type) {
case *ast.StarExpr:
- t = v.X
+ return extractFromStarExpr(v)
case *ast.IndexExpr:
- t = v.X
+ return extractFromIndexExpr(v)
+ case *ast.Ident:
+ return v.Name
}
- if p, _ := t.(*ast.Ident); p != nil {
- result = p.Name
+ return defaultStructName
+}
+
+func extractFromStarExpr(expr *ast.StarExpr) string {
+ switch v := expr.X.(type) {
+ case *ast.IndexExpr:
+ return extractFromIndexExpr(v)
+ case *ast.Ident:
+ return v.Name
}
+ return defaultStructName
+}
- return result
+func extractFromIndexExpr(expr *ast.IndexExpr) string {
+ switch v := expr.X.(type) {
+ case *ast.Ident:
+ return v.Name
+ }
+ return defaultStructName
}
func checkStructFields(fields *ast.FieldList, structName string, w *lintConfusingNames) {
@@ -160,7 +176,7 @@ func checkStructFields(fields *ast.FieldList, structName string, w *lintConfusin
Failure: fmt.Sprintf("Field '%s' differs only by capitalization to other field in the struct type %s", id.Name, structName),
Confidence: 1,
Node: id,
- Category: "naming",
+ Category: lint.FailureCategoryNaming,
})
} else {
bl[normName] = true
diff --git a/vendor/github.com/mgechev/revive/rule/confusing_results.go b/vendor/github.com/mgechev/revive/rule/confusing_results.go
new file mode 100644
index 0000000000..1be16f399f
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/confusing_results.go
@@ -0,0 +1,54 @@
+package rule
+
+import (
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ConfusingResultsRule lints given function declarations
+type ConfusingResultsRule struct{}
+
+// Apply applies the rule to given file.
+func (*ConfusingResultsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ for _, decl := range file.AST.Decls {
+ funcDecl, ok := decl.(*ast.FuncDecl)
+
+ isFunctionWithMoreThanOneResult := ok && funcDecl.Type.Results != nil && len(funcDecl.Type.Results.List) > 1
+ if !isFunctionWithMoreThanOneResult {
+ continue
+ }
+
+ resultsAreNamed := len(funcDecl.Type.Results.List[0].Names) > 0
+ if resultsAreNamed {
+ continue
+ }
+
+ lastType := ""
+ for _, result := range funcDecl.Type.Results.List {
+ resultTypeName := gofmt(result.Type)
+
+ if resultTypeName == lastType {
+ failures = append(failures, lint.Failure{
+ Node: result,
+ Confidence: 1,
+ Category: lint.FailureCategoryNaming,
+ Failure: "unnamed results of the same type may be confusing, consider using named results",
+ })
+
+ break
+ }
+
+ lastType = resultTypeName
+ }
+ }
+
+ return failures
+}
+
+// Name returns the rule name.
+func (*ConfusingResultsRule) Name() string {
+ return "confusing-results"
+}
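
The rewrite drops the AST walker and compares result types through gofmt, so identical unnamed results of any type are caught, not only plain identifiers as before. A declaration it reports:

    package demo

    // Two unnamed results of the same type; the rule suggests naming them,
    // e.g. (first, last string).
    func splitName(full string) (string, string) {
        return full, ""
    }
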
diff --git a/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go b/vendor/github.com/mgechev/revive/rule/constant_logical_expr.go
similarity index 89%
rename from vendor/github.com/mgechev/revive/rule/constant-logical-expr.go
rename to vendor/github.com/mgechev/revive/rule/constant_logical_expr.go
index 36cd641f74..cb5dd746dd 100644
--- a/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go
+++ b/vendor/github.com/mgechev/revive/rule/constant_logical_expr.go
@@ -41,8 +41,9 @@ func (w *lintConstantLogicalExpr) Visit(node ast.Node) ast.Visitor {
return w
}
- if gofmt(n.X) != gofmt(n.Y) { // check if subexpressions are the same
- return w
+ subExpressionsAreNotEqual := gofmt(n.X) != gofmt(n.Y)
+ if subExpressionsAreNotEqual {
+ return w // nothing to say
}
// Handles cases like: a <= a, a == a, a >= a
@@ -90,11 +91,11 @@ func (*lintConstantLogicalExpr) isInequalityOperator(t token.Token) bool {
return false
}
-func (w lintConstantLogicalExpr) newFailure(node ast.Node, msg string) {
+func (w *lintConstantLogicalExpr) newFailure(node ast.Node, msg string) {
w.onFailure(lint.Failure{
Confidence: 1,
Node: node,
- Category: "logic",
+ Category: lint.FailureCategoryLogic,
Failure: msg,
})
}
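
Only the receiver and failure category change here; the rule still targets comparisons whose operands are textually identical, per the a <= a, a == a, a >= a cases handled above. For instance:

    package demo

    // Both operands are the same expression, so the comparison is constant
    // and constant-logical-expr reports it.
    func alwaysTrue(x int) bool {
        return x == x
    }
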
diff --git a/vendor/github.com/mgechev/revive/rule/context-as-argument.go b/vendor/github.com/mgechev/revive/rule/context-as-argument.go
deleted file mode 100644
index e0c8cfa5e9..0000000000
--- a/vendor/github.com/mgechev/revive/rule/context-as-argument.go
+++ /dev/null
@@ -1,110 +0,0 @@
-package rule
-
-import (
- "fmt"
- "go/ast"
- "strings"
- "sync"
-
- "github.com/mgechev/revive/lint"
-)
-
-// ContextAsArgumentRule lints given else constructs.
-type ContextAsArgumentRule struct {
- allowTypesLUT map[string]struct{}
- sync.Mutex
-}
-
-// Apply applies the rule to given file.
-func (r *ContextAsArgumentRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
- r.Lock()
- if r.allowTypesLUT == nil {
- r.allowTypesLUT = getAllowTypesFromArguments(args)
- }
- r.Unlock()
-
- var failures []lint.Failure
- r.Lock()
- walker := lintContextArguments{
- allowTypesLUT: r.allowTypesLUT,
- onFailure: func(failure lint.Failure) {
- failures = append(failures, failure)
- },
- }
- r.Unlock()
-
- ast.Walk(walker, file.AST)
-
- return failures
-}
-
-// Name returns the rule name.
-func (*ContextAsArgumentRule) Name() string {
- return "context-as-argument"
-}
-
-type lintContextArguments struct {
- allowTypesLUT map[string]struct{}
- onFailure func(lint.Failure)
-}
-
-func (w lintContextArguments) Visit(n ast.Node) ast.Visitor {
- fn, ok := n.(*ast.FuncDecl)
- if !ok || len(fn.Type.Params.List) <= 1 {
- return w
- }
-
- fnArgs := fn.Type.Params.List
-
- // A context.Context should be the first parameter of a function.
- // Flag any that show up after the first.
- isCtxStillAllowed := true
- for _, arg := range fnArgs {
- argIsCtx := isPkgDot(arg.Type, "context", "Context")
- if argIsCtx && !isCtxStillAllowed {
- w.onFailure(lint.Failure{
- Node: arg,
- Category: "arg-order",
- Failure: "context.Context should be the first parameter of a function",
- Confidence: 0.9,
- })
- break // only flag one
- }
-
- typeName := gofmt(arg.Type)
- // a parameter of type context.Context is still allowed if the current arg type is in the LUT
- _, isCtxStillAllowed = w.allowTypesLUT[typeName]
- }
-
- return nil // avoid visiting the function body
-}
-
-func getAllowTypesFromArguments(args lint.Arguments) map[string]struct{} {
- allowTypesBefore := []string{}
- if len(args) >= 1 {
- argKV, ok := args[0].(map[string]any)
- if !ok {
- panic(fmt.Sprintf("Invalid argument to the context-as-argument rule. Expecting a k,v map, got %T", args[0]))
- }
- for k, v := range argKV {
- switch k {
- case "allowTypesBefore":
- typesBefore, ok := v.(string)
- if !ok {
- panic(fmt.Sprintf("Invalid argument to the context-as-argument.allowTypesBefore rule. Expecting a string, got %T", v))
- }
- allowTypesBefore = append(allowTypesBefore, strings.Split(typesBefore, ",")...)
- default:
- panic(fmt.Sprintf("Invalid argument to the context-as-argument rule. Unrecognized key %s", k))
- }
- }
- }
-
- result := make(map[string]struct{}, len(allowTypesBefore))
- for _, v := range allowTypesBefore {
- result[v] = struct{}{}
- }
-
- result["context.Context"] = struct{}{} // context.Context is always allowed before another context.Context
- return result
-}
diff --git a/vendor/github.com/mgechev/revive/rule/context_as_argument.go b/vendor/github.com/mgechev/revive/rule/context_as_argument.go
new file mode 100644
index 0000000000..f6a1d8ca45
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/context_as_argument.go
@@ -0,0 +1,97 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ContextAsArgumentRule suggests that `context.Context` should be the first argument of a function.
+type ContextAsArgumentRule struct {
+ allowTypes map[string]struct{}
+}
+
+// Apply applies the rule to given file.
+func (r *ContextAsArgumentRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+ for _, decl := range file.AST.Decls {
+ fn, ok := decl.(*ast.FuncDecl)
+ if !ok || len(fn.Type.Params.List) <= 1 {
+ continue // not a function or a function with less than 2 parameters
+ }
+
+ fnArgs := fn.Type.Params.List
+
+ // A context.Context should be the first parameter of a function.
+ // Flag any that show up after the first.
+ isCtxStillAllowed := true
+ for _, arg := range fnArgs {
+ argIsCtx := isPkgDot(arg.Type, "context", "Context")
+ if argIsCtx && !isCtxStillAllowed {
+ failures = append(failures, lint.Failure{
+ Node: arg,
+ Category: lint.FailureCategoryArgOrder,
+ Failure: "context.Context should be the first parameter of a function",
+ Confidence: 0.9,
+ })
+
+ break // only flag one
+ }
+
+ typeName := gofmt(arg.Type)
+ // a parameter of type context.Context is still allowed if the current arg type is in the allow types LookUpTable
+ _, isCtxStillAllowed = r.allowTypes[typeName]
+ }
+ }
+
+ return failures
+}
+
+// Name returns the rule name.
+func (*ContextAsArgumentRule) Name() string {
+ return "context-as-argument"
+}
+
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *ContextAsArgumentRule) Configure(arguments lint.Arguments) error {
+ types, err := r.getAllowTypesFromArguments(arguments)
+ if err != nil {
+ return err
+ }
+ r.allowTypes = types
+ return nil
+}
+
+func (*ContextAsArgumentRule) getAllowTypesFromArguments(args lint.Arguments) (map[string]struct{}, error) {
+ allowTypesBefore := []string{}
+ if len(args) >= 1 {
+ argKV, ok := args[0].(map[string]any)
+ if !ok {
+ return nil, fmt.Errorf("invalid argument to the context-as-argument rule. Expecting a k,v map, got %T", args[0])
+ }
+ for k, v := range argKV {
+ switch k {
+ case "allowTypesBefore":
+ typesBefore, ok := v.(string)
+ if !ok {
+ return nil, fmt.Errorf("invalid argument to the context-as-argument.allowTypesBefore rule. Expecting a string, got %T", v)
+ }
+ allowTypesBefore = append(allowTypesBefore, strings.Split(typesBefore, ",")...)
+ default:
+ return nil, fmt.Errorf("invalid argument to the context-as-argument rule. Unrecognized key %s", k)
+ }
+ }
+ }
+
+ result := make(map[string]struct{}, len(allowTypesBefore))
+ for _, v := range allowTypesBefore {
+ result[v] = struct{}{}
+ }
+
+ result["context.Context"] = struct{}{} // context.Context is always allowed before another context.Context
+ return result, nil
+}
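
Same check as before, now iterating the file's top-level declarations and returning configuration errors; allowTypesBefore still whitelists parameter types that may precede the context. A signature it reports:

    package demo

    import "context"

    // context.Context is not the first parameter, so the ctx argument is
    // reported (confidence 0.9).
    func fetch(id string, ctx context.Context) error {
        _ = id
        return ctx.Err()
    }
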
diff --git a/vendor/github.com/mgechev/revive/rule/context-keys-type.go b/vendor/github.com/mgechev/revive/rule/context_keys_type.go
similarity index 92%
rename from vendor/github.com/mgechev/revive/rule/context-keys-type.go
rename to vendor/github.com/mgechev/revive/rule/context_keys_type.go
index 60ccec560a..02e1f9fa8e 100644
--- a/vendor/github.com/mgechev/revive/rule/context-keys-type.go
+++ b/vendor/github.com/mgechev/revive/rule/context_keys_type.go
@@ -8,7 +8,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// ContextKeysType lints given else constructs.
+// ContextKeysType disallows the usage of basic types in `context.WithValue`.
type ContextKeysType struct{}
// Apply applies the rule to given file.
@@ -74,7 +74,7 @@ func checkContextKeyType(w lintContextKeyTypes, x *ast.CallExpr) {
w.onFailure(lint.Failure{
Confidence: 1,
Node: x,
- Category: "content",
+ Category: lint.FailureCategoryContent,
Failure: fmt.Sprintf("should not use basic type %s as key in context.WithValue", key.Type),
})
}
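
Only the failure category changes; context keys of basic types are still rejected. A sketch of the reported and the accepted form:

    package demo

    import "context"

    type requestIDKey struct{} // dedicated, unexported key type

    func stamp(ctx context.Context) context.Context {
        ctx = context.WithValue(ctx, "requestID", 42)     // reported: basic type string used as key
        return context.WithValue(ctx, requestIDKey{}, 42) // accepted
    }
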
diff --git a/vendor/github.com/mgechev/revive/rule/cyclomatic.go b/vendor/github.com/mgechev/revive/rule/cyclomatic.go
index 9f6d50043d..088c45c85b 100644
--- a/vendor/github.com/mgechev/revive/rule/cyclomatic.go
+++ b/vendor/github.com/mgechev/revive/rule/cyclomatic.go
@@ -4,54 +4,56 @@ import (
"fmt"
"go/ast"
"go/token"
- "sync"
"github.com/mgechev/revive/lint"
)
// Based on https://github.com/fzipp/gocyclo
-// CyclomaticRule lints given else constructs.
+// CyclomaticRule sets restriction for maximum cyclomatic complexity.
type CyclomaticRule struct {
maxComplexity int
- sync.Mutex
}
const defaultMaxCyclomaticComplexity = 10
-func (r *CyclomaticRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
- if r.maxComplexity == 0 {
- if len(arguments) < 1 {
- r.maxComplexity = defaultMaxCyclomaticComplexity
- return
- }
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *CyclomaticRule) Configure(arguments lint.Arguments) error {
+ if len(arguments) < 1 {
+ r.maxComplexity = defaultMaxCyclomaticComplexity
+ return nil
+ }
- complexity, ok := arguments[0].(int64) // Alt. non panicking version
- if !ok {
- panic(fmt.Sprintf("invalid argument for cyclomatic complexity; expected int but got %T", arguments[0]))
- }
- r.maxComplexity = int(complexity)
+ complexity, ok := arguments[0].(int64) // Alt. non panicking version
+ if !ok {
+ return fmt.Errorf("invalid argument for cyclomatic complexity; expected int but got %T", arguments[0])
}
+ r.maxComplexity = int(complexity)
+ return nil
}
// Apply applies the rule to given file.
-func (r *CyclomaticRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
+func (r *CyclomaticRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
- fileAst := file.AST
-
- walker := lintCyclomatic{
- file: file,
- complexity: r.maxComplexity,
- onFailure: func(failure lint.Failure) {
- failures = append(failures, failure)
- },
- }
+ for _, decl := range file.AST.Decls {
+ fn, ok := decl.(*ast.FuncDecl)
+ if !ok {
+ continue
+ }
- ast.Walk(walker, fileAst)
+ c := complexity(fn)
+ if c > r.maxComplexity {
+ failures = append(failures, lint.Failure{
+ Confidence: 1,
+ Category: lint.FailureCategoryMaintenance,
+ Failure: fmt.Sprintf("function %s has cyclomatic complexity %d (> max enabled %d)",
+ funcName(fn), c, r.maxComplexity),
+ Node: fn,
+ })
+ }
+ }
return failures
}
@@ -61,40 +63,15 @@ func (*CyclomaticRule) Name() string {
return "cyclomatic"
}
-type lintCyclomatic struct {
- file *lint.File
- complexity int
- onFailure func(lint.Failure)
-}
-
-func (w lintCyclomatic) Visit(_ ast.Node) ast.Visitor {
- f := w.file
- for _, decl := range f.AST.Decls {
- if fn, ok := decl.(*ast.FuncDecl); ok {
- c := complexity(fn)
- if c > w.complexity {
- w.onFailure(lint.Failure{
- Confidence: 1,
- Category: "maintenance",
- Failure: fmt.Sprintf("function %s has cyclomatic complexity %d (> max enabled %d)",
- funcName(fn), c, w.complexity),
- Node: fn,
- })
- }
- }
- }
- return nil
-}
-
// funcName returns the name representation of a function or method:
// "(Type).Name" for methods or simply "Name" for functions.
func funcName(fn *ast.FuncDecl) string {
- if fn.Recv != nil {
- if fn.Recv.NumFields() > 0 {
- typ := fn.Recv.List[0].Type
- return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name)
- }
+ declarationHasReceiver := fn.Recv != nil && fn.Recv.NumFields() > 0
+ if declarationHasReceiver {
+ typ := fn.Recv.List[0].Type
+ return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name)
}
+
return fn.Name.Name
}
diff --git a/vendor/github.com/mgechev/revive/rule/datarace.go b/vendor/github.com/mgechev/revive/rule/datarace.go
index 39e96696ad..a189bc3a35 100644
--- a/vendor/github.com/mgechev/revive/rule/datarace.go
+++ b/vendor/github.com/mgechev/revive/rule/datarace.go
@@ -11,49 +11,47 @@ import (
type DataRaceRule struct{}
// Apply applies the rule to given file.
-func (*DataRaceRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+func (r *DataRaceRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ isGo122 := file.Pkg.IsAtLeastGo122()
var failures []lint.Failure
- onFailure := func(failure lint.Failure) {
- failures = append(failures, failure)
- }
- w := lintDataRaces{onFailure: onFailure}
+ for _, decl := range file.AST.Decls {
+ funcDecl, ok := decl.(*ast.FuncDecl)
+ if !ok || funcDecl.Body == nil {
+ continue // not function declaration or empty function
+ }
- ast.Walk(w, file.AST)
+ funcResults := funcDecl.Type.Results
- return failures
-}
+ // TODO: ast.Object is deprecated
+ returnIDs := map[*ast.Object]struct{}{}
+ if funcResults != nil {
+ returnIDs = r.extractReturnIDs(funcResults.List)
+ }
-// Name returns the rule name.
-func (*DataRaceRule) Name() string {
- return "datarace"
-}
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
-type lintDataRaces struct {
- onFailure func(failure lint.Failure)
-}
+ fl := &lintFunctionForDataRaces{
+ onFailure: onFailure,
+ returnIDs: returnIDs,
+ rangeIDs: map[*ast.Object]struct{}{}, // TODO: ast.Object is deprecated
+ go122for: isGo122,
+ }
-func (w lintDataRaces) Visit(n ast.Node) ast.Visitor {
- node, ok := n.(*ast.FuncDecl)
- if !ok {
- return w // not function declaration
- }
- if node.Body == nil {
- return nil // empty body
+ ast.Walk(fl, funcDecl.Body)
}
- results := node.Type.Results
-
- returnIDs := map[*ast.Object]struct{}{}
- if results != nil {
- returnIDs = w.ExtractReturnIDs(results.List)
- }
- fl := &lintFunctionForDataRaces{onFailure: w.onFailure, returnIDs: returnIDs, rangeIDs: map[*ast.Object]struct{}{}}
- ast.Walk(fl, node.Body)
+ return failures
+}
- return nil
+// Name returns the rule name.
+func (*DataRaceRule) Name() string {
+ return "datarace"
}
-func (lintDataRaces) ExtractReturnIDs(fields []*ast.Field) map[*ast.Object]struct{} {
+// TODO: ast.Object is deprecated
+func (*DataRaceRule) extractReturnIDs(fields []*ast.Field) map[*ast.Object]struct{} {
r := map[*ast.Object]struct{}{}
for _, f := range fields {
for _, id := range f.Names {
@@ -67,8 +65,10 @@ func (lintDataRaces) ExtractReturnIDs(fields []*ast.Field) map[*ast.Object]struc
type lintFunctionForDataRaces struct {
_ struct{}
onFailure func(failure lint.Failure)
- returnIDs map[*ast.Object]struct{}
- rangeIDs map[*ast.Object]struct{}
+ returnIDs map[*ast.Object]struct{} // TODO: ast.Object is deprecated
+ rangeIDs map[*ast.Object]struct{} // TODO: ast.Object is deprecated
+
+ go122for bool
}
func (w lintFunctionForDataRaces) Visit(node ast.Node) ast.Visitor {
@@ -78,7 +78,7 @@ func (w lintFunctionForDataRaces) Visit(node ast.Node) ast.Visitor {
return nil
}
- getIds := func(exprs ...ast.Expr) []*ast.Ident {
+ getIDs := func(exprs ...ast.Expr) []*ast.Ident {
r := []*ast.Ident{}
for _, expr := range exprs {
if id, ok := expr.(*ast.Ident); ok {
@@ -88,7 +88,7 @@ func (w lintFunctionForDataRaces) Visit(node ast.Node) ast.Visitor {
return r
}
- ids := getIds(n.Key, n.Value)
+ ids := getIDs(n.Key, n.Value)
for _, id := range ids {
w.rangeIDs[id.Obj] = struct{}{}
}
@@ -118,18 +118,18 @@ func (w lintFunctionForDataRaces) Visit(node ast.Node) ast.Visitor {
_, isReturnID := w.returnIDs[id.Obj]
switch {
- case isRangeID:
+ case isRangeID && !w.go122for:
w.onFailure(lint.Failure{
Confidence: 1,
Node: id,
- Category: "logic",
+ Category: lint.FailureCategoryLogic,
Failure: fmt.Sprintf("datarace: range value %s is captured (by-reference) in goroutine", id.Name),
})
case isReturnID:
w.onFailure(lint.Failure{
Confidence: 0.8,
Node: id,
- Category: "logic",
+ Category: lint.FailureCategoryLogic,
Failure: fmt.Sprintf("potential datarace: return value %s is captured (by-reference) in goroutine", id.Name),
})
}
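
Apply now consults file.Pkg.IsAtLeastGo122(): with Go 1.22's per-iteration loop variables, capturing a range variable in a goroutine is no longer reported, while captured named return values still are. An illustration, assumed to match the go122for switch above:

    package demo

    func launch(items []string) (last string) {
        for _, it := range items {
            go func() {
                println(it) // reported only for packages built with Go below 1.22
                last = it   // still reported: named return value captured in a goroutine
            }()
        }
        return last
    }
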
diff --git a/vendor/github.com/mgechev/revive/rule/deep-exit.go b/vendor/github.com/mgechev/revive/rule/deep_exit.go
similarity index 51%
rename from vendor/github.com/mgechev/revive/rule/deep-exit.go
rename to vendor/github.com/mgechev/revive/rule/deep_exit.go
index 918d4294a9..6f7acd305f 100644
--- a/vendor/github.com/mgechev/revive/rule/deep-exit.go
+++ b/vendor/github.com/mgechev/revive/rule/deep_exit.go
@@ -3,6 +3,9 @@ package rule
import (
"fmt"
"go/ast"
+ "strings"
+ "unicode"
+ "unicode/utf8"
"github.com/mgechev/revive/lint"
)
@@ -17,20 +20,7 @@ func (*DeepExitRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
failures = append(failures, failure)
}
- exitFunctions := map[string]map[string]bool{
- "os": {"Exit": true},
- "syscall": {"Exit": true},
- "log": {
- "Fatal": true,
- "Fatalf": true,
- "Fatalln": true,
- "Panic": true,
- "Panicf": true,
- "Panicln": true,
- },
- }
-
- w := lintDeepExit{onFailure, exitFunctions, file.IsTest()}
+ w := &lintDeepExit{onFailure: onFailure, isTestFile: file.IsTest()}
ast.Walk(w, file.AST)
return failures
}
@@ -41,12 +31,11 @@ func (*DeepExitRule) Name() string {
}
type lintDeepExit struct {
- onFailure func(lint.Failure)
- exitFunctions map[string]map[string]bool
- isTestFile bool
+ onFailure func(lint.Failure)
+ isTestFile bool
}
-func (w lintDeepExit) Visit(node ast.Node) ast.Visitor {
+func (w *lintDeepExit) Visit(node ast.Node) ast.Visitor {
if fd, ok := node.(*ast.FuncDecl); ok {
if w.mustIgnore(fd) {
return nil // skip analysis of this function
@@ -73,13 +62,13 @@ func (w lintDeepExit) Visit(node ast.Node) ast.Visitor {
return w
}
- fn := fc.Sel.Name
pkg := id.Name
- if w.exitFunctions[pkg] != nil && w.exitFunctions[pkg][fn] { // it's a call to an exit function
+ fn := fc.Sel.Name
+ if isCallToExitFunction(pkg, fn) {
w.onFailure(lint.Failure{
Confidence: 1,
Node: ce,
- Category: "bad practice",
+ Category: lint.FailureCategoryBadPractice,
Failure: fmt.Sprintf("calls to %s.%s only in main() or init() functions", pkg, fn),
})
}
@@ -90,5 +79,32 @@ func (w lintDeepExit) Visit(node ast.Node) ast.Visitor {
func (w *lintDeepExit) mustIgnore(fd *ast.FuncDecl) bool {
fn := fd.Name.Name
- return fn == "init" || fn == "main" || (w.isTestFile && fn == "TestMain")
+ return fn == "init" || fn == "main" || w.isTestMain(fd) || w.isTestExample(fd)
+}
+
+func (w *lintDeepExit) isTestMain(fd *ast.FuncDecl) bool {
+ return w.isTestFile && fd.Name.Name == "TestMain"
+}
+
+// isTestExample returns true if the function is a testable example function.
+// See https://go.dev/blog/examples#examples-are-tests for more information.
+//
+// Inspired by https://github.com/golang/go/blob/go1.23.0/src/go/doc/example.go#L72-L77
+func (w *lintDeepExit) isTestExample(fd *ast.FuncDecl) bool {
+ if !w.isTestFile {
+ return false
+ }
+ name := fd.Name.Name
+ const prefix = "Example"
+ if !strings.HasPrefix(name, prefix) {
+ return false
+ }
+ if len(name) == len(prefix) { // "Example" is a package level example
+ return len(fd.Type.Params.List) == 0
+ }
+ r, _ := utf8.DecodeRuneInString(name[len(prefix):])
+ if unicode.IsLower(r) {
+ return false
+ }
+ return len(fd.Type.Params.List) == 0
}
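
The per-rule exit-function table is replaced by the shared isCallToExitFunction helper, and testable Example functions join main, init and TestMain as exempt. What the rule keeps reporting:

    package main

    import "os"

    // deep-exit reports the os.Exit call: exit functions belong in main() or init().
    func fail(msg string) {
        println(msg)
        os.Exit(1)
    }

    func main() {
        fail("boom")
    }
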
diff --git a/vendor/github.com/mgechev/revive/rule/defer.go b/vendor/github.com/mgechev/revive/rule/defer.go
index adc6478aee..47771f4f87 100644
--- a/vendor/github.com/mgechev/revive/rule/defer.go
+++ b/vendor/github.com/mgechev/revive/rule/defer.go
@@ -3,29 +3,29 @@ package rule
import (
"fmt"
"go/ast"
- "sync"
"github.com/mgechev/revive/lint"
)
-// DeferRule lints unused params in functions.
+// DeferRule lints gotchas in defer statements.
type DeferRule struct {
allow map[string]bool
- sync.Mutex
}
-func (r *DeferRule) configure(arguments lint.Arguments) {
- r.Lock()
- if r.allow == nil {
- r.allow = r.allowFromArgs(arguments)
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *DeferRule) Configure(arguments lint.Arguments) error {
+ list, err := r.allowFromArgs(arguments)
+ if err != nil {
+ return err
}
- r.Unlock()
+ r.allow = list
+ return nil
}
// Apply applies the rule to given file.
-func (r *DeferRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
+func (r *DeferRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
onFailure := func(failure lint.Failure) {
failures = append(failures, failure)
@@ -42,7 +42,7 @@ func (*DeferRule) Name() string {
return "defer"
}
-func (*DeferRule) allowFromArgs(args lint.Arguments) map[string]bool {
+func (*DeferRule) allowFromArgs(args lint.Arguments) (map[string]bool, error) {
if len(args) < 1 {
allow := map[string]bool{
"loop": true,
@@ -53,24 +53,24 @@ func (*DeferRule) allowFromArgs(args lint.Arguments) map[string]bool {
"immediate-recover": true,
}
- return allow
+ return allow, nil
}
aa, ok := args[0].([]any)
if !ok {
- panic(fmt.Sprintf("Invalid argument '%v' for 'defer' rule. Expecting []string, got %T", args[0], args[0]))
+ return nil, fmt.Errorf("invalid argument '%v' for 'defer' rule. Expecting []string, got %T", args[0], args[0])
}
allow := make(map[string]bool, len(aa))
for _, subcase := range aa {
sc, ok := subcase.(string)
if !ok {
- panic(fmt.Sprintf("Invalid argument '%v' for 'defer' rule. Expecting string, got %T", subcase, subcase))
+ return nil, fmt.Errorf("invalid argument '%v' for 'defer' rule. Expecting string, got %T", subcase, subcase)
}
allow[sc] = true
}
- return allow
+ return allow, nil
}
type lintDeferRule struct {
@@ -94,7 +94,7 @@ func (w lintDeferRule) Visit(node ast.Node) ast.Visitor {
return nil
case *ast.ReturnStmt:
if len(n.Results) != 0 && w.inADefer && w.inAFuncLit {
- w.newFailure("return in a defer function has no effect", n, 1.0, "logic", "return")
+ w.newFailure("return in a defer function has no effect", n, 1.0, lint.FailureCategoryLogic, "return")
}
case *ast.CallExpr:
isCallToRecover := isIdent(n.Fun, "recover")
@@ -103,22 +103,22 @@ func (w lintDeferRule) Visit(node ast.Node) ast.Visitor {
// func fn() { recover() }
//
// confidence is not 1 because recover can be in a function that is deferred elsewhere
- w.newFailure("recover must be called inside a deferred function", n, 0.8, "logic", "recover")
+ w.newFailure("recover must be called inside a deferred function", n, 0.8, lint.FailureCategoryLogic, "recover")
case w.inADefer && !w.inAFuncLit && isCallToRecover:
// defer helper(recover())
//
// confidence is not truly 1 because this could be in a correctly-deferred func,
// but it is very likely to be a misunderstanding of defer's behavior around arguments.
- w.newFailure("recover must be called inside a deferred function, this is executing recover immediately", n, 1, "logic", "immediate-recover")
+ w.newFailure("recover must be called inside a deferred function, this is executing recover immediately", n, 1, lint.FailureCategoryLogic, "immediate-recover")
}
-
+ return nil // no need to analyze the arguments of the function call
case *ast.DeferStmt:
if isIdent(n.Call.Fun, "recover") {
// defer recover()
//
// confidence is not truly 1 because this could be in a correctly-deferred func,
// but normally this doesn't suppress a panic, and even if it did it would silently discard the value.
- w.newFailure("recover must be called inside a deferred function, this is executing recover immediately", n, 1, "logic", "immediate-recover")
+ w.newFailure("recover must be called inside a deferred function, this is executing recover immediately", n, 1, lint.FailureCategoryLogic, "immediate-recover")
}
w.visitSubtree(n.Call.Fun, true, false, false)
for _, a := range n.Call.Args {
@@ -131,17 +131,17 @@ func (w lintDeferRule) Visit(node ast.Node) ast.Visitor {
}
if w.inALoop {
- w.newFailure("prefer not to defer inside loops", n, 1.0, "bad practice", "loop")
+ w.newFailure("prefer not to defer inside loops", n, 1.0, lint.FailureCategoryBadPractice, "loop")
}
switch fn := n.Call.Fun.(type) {
case *ast.CallExpr:
- w.newFailure("prefer not to defer chains of function calls", fn, 1.0, "bad practice", "call-chain")
+ w.newFailure("prefer not to defer chains of function calls", fn, 1.0, lint.FailureCategoryBadPractice, "call-chain")
case *ast.SelectorExpr:
if id, ok := fn.X.(*ast.Ident); ok {
isMethodCall := id != nil && id.Obj != nil && id.Obj.Kind == ast.Typ
if isMethodCall {
- w.newFailure("be careful when deferring calls to methods without pointer receiver", fn, 0.8, "bad practice", "method-call")
+ w.newFailure("be careful when deferring calls to methods without pointer receiver", fn, 0.8, lint.FailureCategoryBadPractice, "method-call")
}
}
}
@@ -163,7 +163,7 @@ func (w lintDeferRule) visitSubtree(n ast.Node, inADefer, inALoop, inAFuncLit bo
ast.Walk(nw, n)
}
-func (w lintDeferRule) newFailure(msg string, node ast.Node, confidence float64, cat, subcase string) {
+func (w lintDeferRule) newFailure(msg string, node ast.Node, confidence float64, cat lint.FailureCategory, subcase string) {
if !w.allow[subcase] {
return
}
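
Categories become typed constants and the visitor stops descending into the arguments of call expressions it has already classified; the configurable subcases are unchanged. Two of them:

    package demo

    func process(items []string) {
        for range items {
            defer println("done") // "loop": prefer not to defer inside loops
        }

        defer helper(recover()) // "immediate-recover": recover() is evaluated now, not when the defer runs
    }

    func helper(v any) { _ = v }
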
diff --git a/vendor/github.com/mgechev/revive/rule/dot-imports.go b/vendor/github.com/mgechev/revive/rule/dot_imports.go
similarity index 55%
rename from vendor/github.com/mgechev/revive/rule/dot-imports.go
rename to vendor/github.com/mgechev/revive/rule/dot_imports.go
index 6b877677db..3ee4999a3a 100644
--- a/vendor/github.com/mgechev/revive/rule/dot-imports.go
+++ b/vendor/github.com/mgechev/revive/rule/dot_imports.go
@@ -3,21 +3,17 @@ package rule
import (
"fmt"
"go/ast"
- "sync"
"github.com/mgechev/revive/lint"
)
-// DotImportsRule lints given else constructs.
+// DotImportsRule forbids . imports.
type DotImportsRule struct {
- sync.Mutex
allowedPackages allowPackages
}
// Apply applies the rule to given file.
-func (r *DotImportsRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
+func (r *DotImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
fileAst := file.AST
@@ -40,37 +36,34 @@ func (*DotImportsRule) Name() string {
return "dot-imports"
}
-func (r *DotImportsRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
-
- if r.allowedPackages != nil {
- return
- }
-
- r.allowedPackages = make(allowPackages)
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *DotImportsRule) Configure(arguments lint.Arguments) error {
+ r.allowedPackages = allowPackages{}
if len(arguments) == 0 {
- return
+ return nil
}
args, ok := arguments[0].(map[string]any)
if !ok {
- panic(fmt.Sprintf("Invalid argument to the dot-imports rule. Expecting a k,v map, got %T", arguments[0]))
+ return fmt.Errorf("invalid argument to the dot-imports rule. Expecting a k,v map, got %T", arguments[0])
}
if allowedPkgArg, ok := args["allowedPackages"]; ok {
- if pkgs, ok := allowedPkgArg.([]any); ok {
- for _, p := range pkgs {
- if pkg, ok := p.(string); ok {
- r.allowedPackages.add(pkg)
- } else {
- panic(fmt.Sprintf("Invalid argument to the dot-imports rule, string expected. Got '%v' (%T)", p, p))
- }
+ pkgs, ok := allowedPkgArg.([]any)
+ if !ok {
+ return fmt.Errorf("invalid argument to the dot-imports rule, []string expected. Got '%v' (%T)", allowedPkgArg, allowedPkgArg)
+ }
+ for _, p := range pkgs {
+ pkg, ok := p.(string)
+ if !ok {
+ return fmt.Errorf("invalid argument to the dot-imports rule, string expected. Got '%v' (%T)", p, p)
}
- } else {
- panic(fmt.Sprintf("Invalid argument to the dot-imports rule, []string expected. Got '%v' (%T)", allowedPkgArg, allowedPkgArg))
+ r.allowedPackages.add(pkg)
}
}
+ return nil
}
type lintImports struct {
@@ -81,13 +74,14 @@ type lintImports struct {
}
func (w lintImports) Visit(_ ast.Node) ast.Visitor {
- for _, is := range w.fileAst.Imports {
- if is.Name != nil && is.Name.Name == "." && !w.allowPackages.isAllowedPackage(is.Path.Value) {
+ for _, importSpec := range w.fileAst.Imports {
+ isDotImport := importSpec.Name != nil && importSpec.Name.Name == "."
+ if isDotImport && !w.allowPackages.isAllowedPackage(importSpec.Path.Value) {
w.onFailure(lint.Failure{
Confidence: 1,
Failure: "should not use dot imports",
- Node: is,
- Category: "imports",
+ Node: importSpec,
+ Category: lint.FailureCategoryImports,
})
}
}
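
Configuration errors replace panics and allowedPackages still whitelists specific import paths; by default any dot import is reported:

    package demo

    import . "math" // reported unless "math" is listed in allowedPackages

    var hypotenuse = Sqrt(3*3 + 4*4)
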
diff --git a/vendor/github.com/mgechev/revive/rule/duplicated-imports.go b/vendor/github.com/mgechev/revive/rule/duplicated_imports.go
similarity index 85%
rename from vendor/github.com/mgechev/revive/rule/duplicated-imports.go
rename to vendor/github.com/mgechev/revive/rule/duplicated_imports.go
index 2b177fac6c..60955c4278 100644
--- a/vendor/github.com/mgechev/revive/rule/duplicated-imports.go
+++ b/vendor/github.com/mgechev/revive/rule/duplicated_imports.go
@@ -6,7 +6,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// DuplicatedImportsRule lints given else constructs.
+// DuplicatedImportsRule looks for packages that are imported two or more times.
type DuplicatedImportsRule struct{}
// Apply applies the rule to given file.
@@ -22,7 +22,7 @@ func (*DuplicatedImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Fa
Confidence: 1,
Failure: fmt.Sprintf("Package %s already imported", path),
Node: imp,
- Category: "imports",
+ Category: lint.FailureCategoryImports,
})
continue
}
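
Only the doc comment and the failure category change; each repeated import of an already-seen path is still reported:

    package demo

    import (
        "fmt"
        format "fmt" // duplicated-imports reports this second import of "fmt"
    )

    var _ = fmt.Sprintln
    var _ = format.Sprintln
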
diff --git a/vendor/github.com/mgechev/revive/rule/early-return.go b/vendor/github.com/mgechev/revive/rule/early-return.go
deleted file mode 100644
index 9c04a1dbe9..0000000000
--- a/vendor/github.com/mgechev/revive/rule/early-return.go
+++ /dev/null
@@ -1,51 +0,0 @@
-package rule
-
-import (
- "fmt"
-
- "github.com/mgechev/revive/internal/ifelse"
- "github.com/mgechev/revive/lint"
-)
-
-// EarlyReturnRule finds opportunities to reduce nesting by inverting
-// the condition of an "if" block.
-type EarlyReturnRule struct{}
-
-// Apply applies the rule to given file.
-func (e *EarlyReturnRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
- return ifelse.Apply(e, file.AST, ifelse.TargetIf, args)
-}
-
-// Name returns the rule name.
-func (*EarlyReturnRule) Name() string {
- return "early-return"
-}
-
-// CheckIfElse evaluates the rule against an ifelse.Chain.
-func (*EarlyReturnRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) (failMsg string) {
- if !chain.Else.Deviates() {
- // this rule only applies if the else-block deviates control flow
- return
- }
-
- if chain.HasPriorNonDeviating && !chain.If.IsEmpty() {
- // if we de-indent this block then a previous branch
- // might flow into it, affecting program behaviour
- return
- }
-
- if chain.If.Deviates() {
- // avoid overlapping with superfluous-else
- return
- }
-
- if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.If.HasDecls) {
- // avoid increasing variable scope
- return
- }
-
- if chain.If.IsEmpty() {
- return fmt.Sprintf("if c { } else { %[1]v } can be simplified to if !c { %[1]v }", chain.Else)
- }
- return fmt.Sprintf("if c { ... } else { %[1]v } can be simplified to if !c { %[1]v } ...", chain.Else)
-}
diff --git a/vendor/github.com/mgechev/revive/rule/early_return.go b/vendor/github.com/mgechev/revive/rule/early_return.go
new file mode 100644
index 0000000000..41557b0199
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/early_return.go
@@ -0,0 +1,60 @@
+package rule
+
+import (
+ "fmt"
+
+ "github.com/mgechev/revive/internal/ifelse"
+ "github.com/mgechev/revive/lint"
+)
+
+// EarlyReturnRule finds opportunities to reduce nesting by inverting
+// the condition of an "if" block.
+type EarlyReturnRule struct{}
+
+// Apply applies the rule to given file.
+func (e *EarlyReturnRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
+ return ifelse.Apply(e.checkIfElse, file.AST, ifelse.TargetIf, args)
+}
+
+// Name returns the rule name.
+func (*EarlyReturnRule) Name() string {
+ return "early-return"
+}
+
+func (*EarlyReturnRule) checkIfElse(chain ifelse.Chain, args ifelse.Args) (string, bool) {
+ if chain.HasElse {
+ if !chain.Else.BranchKind.Deviates() {
+ // this rule only applies if the else-block deviates control flow
+ return "", false
+ }
+ } else if !args.AllowJump || !chain.AtBlockEnd || !chain.BlockEndKind.Deviates() || chain.If.IsShort() {
+ // this kind of refactor requires introducing a new indented "return", "continue" or "break" statement,
+ // so ignore unless we are able to outdent multiple statements in exchange.
+ return "", false
+ }
+
+ if chain.HasPriorNonDeviating && !chain.If.IsEmpty() {
+ // if we de-indent this block then a previous branch
+ // might flow into it, affecting program behavior
+ return "", false
+ }
+
+ if chain.HasElse && chain.If.Deviates() {
+ // avoid overlapping with superfluous-else
+ return "", false
+ }
+
+ if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.If.HasDecls()) {
+ // avoid increasing variable scope
+ return "", false
+ }
+
+ if !chain.HasElse {
+ return fmt.Sprintf("if c { ... } can be rewritten if !c { %v } ... to reduce nesting", chain.BlockEndKind), true
+ }
+
+ if chain.If.IsEmpty() {
+ return fmt.Sprintf("if c { } else %[1]v can be simplified to if !c %[1]v", chain.Else), true
+ }
+ return fmt.Sprintf("if c { ... } else %[1]v can be simplified to if !c %[1]v ...", chain.Else), true
+}
diff --git a/vendor/github.com/mgechev/revive/rule/empty-block.go b/vendor/github.com/mgechev/revive/rule/empty_block.go
similarity index 88%
rename from vendor/github.com/mgechev/revive/rule/empty-block.go
rename to vendor/github.com/mgechev/revive/rule/empty_block.go
index 25a052a0ef..210692c947 100644
--- a/vendor/github.com/mgechev/revive/rule/empty-block.go
+++ b/vendor/github.com/mgechev/revive/rule/empty_block.go
@@ -6,7 +6,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// EmptyBlockRule lints given else constructs.
+// EmptyBlockRule warns on empty code blocks.
type EmptyBlockRule struct{}
// Apply applies the rule to given file.
@@ -17,7 +17,7 @@ func (*EmptyBlockRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
failures = append(failures, failure)
}
- w := lintEmptyBlock{make(map[*ast.BlockStmt]bool), onFailure}
+ w := lintEmptyBlock{map[*ast.BlockStmt]bool{}, onFailure}
ast.Walk(w, file.AST)
return failures
}
@@ -55,7 +55,7 @@ func (w lintEmptyBlock) Visit(node ast.Node) ast.Visitor {
w.onFailure(lint.Failure{
Confidence: 0.9,
Node: n,
- Category: "logic",
+ Category: lint.FailureCategoryLogic,
Failure: "this block is empty, you can remove it",
})
return nil // skip visiting the range subtree (it will produce a duplicated failure)
@@ -65,7 +65,7 @@ func (w lintEmptyBlock) Visit(node ast.Node) ast.Visitor {
w.onFailure(lint.Failure{
Confidence: 1,
Node: n,
- Category: "logic",
+ Category: lint.FailureCategoryLogic,
Failure: "this block is empty, you can remove it",
})
}
diff --git a/vendor/github.com/mgechev/revive/rule/empty-lines.go b/vendor/github.com/mgechev/revive/rule/empty_lines.go
similarity index 96%
rename from vendor/github.com/mgechev/revive/rule/empty-lines.go
rename to vendor/github.com/mgechev/revive/rule/empty_lines.go
index 2710a89797..a2f8dc6fde 100644
--- a/vendor/github.com/mgechev/revive/rule/empty-lines.go
+++ b/vendor/github.com/mgechev/revive/rule/empty_lines.go
@@ -60,7 +60,7 @@ func (w lintEmptyLines) checkStart(block *ast.BlockStmt) {
w.onFailure(lint.Failure{
Confidence: 1,
Node: block,
- Category: "style",
+ Category: lint.FailureCategoryStyle,
Failure: "extra empty line at the start of a block",
})
}
@@ -79,7 +79,7 @@ func (w lintEmptyLines) checkEnd(block *ast.BlockStmt) {
w.onFailure(lint.Failure{
Confidence: 1,
Node: block,
- Category: "style",
+ Category: lint.FailureCategoryStyle,
Failure: "extra empty line at the end of a block",
})
}
diff --git a/vendor/github.com/mgechev/revive/rule/enforce-map-style.go b/vendor/github.com/mgechev/revive/rule/enforce_map_style.go
similarity index 81%
rename from vendor/github.com/mgechev/revive/rule/enforce-map-style.go
rename to vendor/github.com/mgechev/revive/rule/enforce_map_style.go
index 36ac2374c2..df9793bb60 100644
--- a/vendor/github.com/mgechev/revive/rule/enforce-map-style.go
+++ b/vendor/github.com/mgechev/revive/rule/enforce_map_style.go
@@ -3,7 +3,6 @@ package rule
import (
"fmt"
"go/ast"
- "sync"
"github.com/mgechev/revive/lint"
)
@@ -39,49 +38,39 @@ func mapStyleFromString(s string) (enforceMapStyleType, error) {
// EnforceMapStyleRule implements a rule to enforce `make(map[type]type)` over `map[type]type{}`.
type EnforceMapStyleRule struct {
- configured bool
enforceMapStyle enforceMapStyleType
- sync.Mutex
}
-func (r *EnforceMapStyleRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
-
- if r.configured {
- return
- }
- r.configured = true
-
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *EnforceMapStyleRule) Configure(arguments lint.Arguments) error {
if len(arguments) < 1 {
r.enforceMapStyle = enforceMapStyleTypeAny
- return
+ return nil
}
enforceMapStyle, ok := arguments[0].(string)
if !ok {
- panic(fmt.Sprintf("Invalid argument '%v' for 'enforce-map-style' rule. Expecting string, got %T", arguments[0], arguments[0]))
+ return fmt.Errorf("invalid argument '%v' for 'enforce-map-style' rule. Expecting string, got %T", arguments[0], arguments[0])
}
var err error
r.enforceMapStyle, err = mapStyleFromString(enforceMapStyle)
-
if err != nil {
- panic(fmt.Sprintf("Invalid argument to the enforce-map-style rule: %v", err))
+ return fmt.Errorf("invalid argument to the enforce-map-style rule: %w", err)
}
+
+ return nil
}
// Apply applies the rule to given file.
-func (r *EnforceMapStyleRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
+func (r *EnforceMapStyleRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
if r.enforceMapStyle == enforceMapStyleTypeAny {
// this linter is not configured
return nil
}
-
var failures []lint.Failure
-
astFile := file.AST
ast.Inspect(astFile, func(n ast.Node) bool {
switch v := n.(type) {
@@ -94,15 +83,15 @@ func (r *EnforceMapStyleRule) Apply(file *lint.File, arguments lint.Arguments) [
return true
}
- if len(v.Elts) > 0 {
- // not an empty map
+ isNotEmptyMap := len(v.Elts) > 0
+ if isNotEmptyMap {
return true
}
failures = append(failures, lint.Failure{
Confidence: 1,
Node: v,
- Category: "style",
+ Category: lint.FailureCategoryStyle,
Failure: "use make(map[type]type) instead of map[type]type{}",
})
case *ast.CallExpr:
@@ -130,7 +119,7 @@ func (r *EnforceMapStyleRule) Apply(file *lint.File, arguments lint.Arguments) [
failures = append(failures, lint.Failure{
Confidence: 1,
Node: v.Args[0],
- Category: "style",
+ Category: lint.FailureCategoryStyle,
Failure: "use map[type]type{} instead of make(map[type]type)",
})
}
diff --git a/vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go b/vendor/github.com/mgechev/revive/rule/enforce_repeated_arg_type_style.go
similarity index 56%
rename from vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go
rename to vendor/github.com/mgechev/revive/rule/enforce_repeated_arg_type_style.go
index 067082b1b0..ab466f5f82 100644
--- a/vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go
+++ b/vendor/github.com/mgechev/revive/rule/enforce_repeated_arg_type_style.go
@@ -3,8 +3,6 @@ package rule
import (
"fmt"
"go/ast"
- "go/types"
- "sync"
"github.com/mgechev/revive/lint"
)
@@ -17,14 +15,14 @@ const (
enforceRepeatedArgTypeStyleTypeFull enforceRepeatedArgTypeStyleType = "full"
)
-func repeatedArgTypeStyleFromString(s string) enforceRepeatedArgTypeStyleType {
+func repeatedArgTypeStyleFromString(s string) (enforceRepeatedArgTypeStyleType, error) {
switch s {
case string(enforceRepeatedArgTypeStyleTypeAny), "":
- return enforceRepeatedArgTypeStyleTypeAny
+ return enforceRepeatedArgTypeStyleTypeAny, nil
case string(enforceRepeatedArgTypeStyleTypeShort):
- return enforceRepeatedArgTypeStyleTypeShort
+ return enforceRepeatedArgTypeStyleTypeShort, nil
case string(enforceRepeatedArgTypeStyleTypeFull):
- return enforceRepeatedArgTypeStyleTypeFull
+ return enforceRepeatedArgTypeStyleTypeFull, nil
default:
err := fmt.Errorf(
"invalid repeated arg type style: %s (expecting one of %v)",
@@ -36,67 +34,74 @@ func repeatedArgTypeStyleFromString(s string) enforceRepeatedArgTypeStyleType {
},
)
- panic(fmt.Sprintf("Invalid argument to the enforce-repeated-arg-type-style rule: %v", err))
+ return "", fmt.Errorf("invalid argument to the enforce-repeated-arg-type-style rule: %w", err)
}
}
// EnforceRepeatedArgTypeStyleRule implements a rule to enforce repeated argument type style.
type EnforceRepeatedArgTypeStyleRule struct {
- configured bool
funcArgStyle enforceRepeatedArgTypeStyleType
funcRetValStyle enforceRepeatedArgTypeStyleType
-
- sync.Mutex
}
-func (r *EnforceRepeatedArgTypeStyleRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
-
- if r.configured {
- return
- }
- r.configured = true
-
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *EnforceRepeatedArgTypeStyleRule) Configure(arguments lint.Arguments) error {
r.funcArgStyle = enforceRepeatedArgTypeStyleTypeAny
r.funcRetValStyle = enforceRepeatedArgTypeStyleTypeAny
if len(arguments) == 0 {
- return
+ return nil
}
switch funcArgStyle := arguments[0].(type) {
case string:
- r.funcArgStyle = repeatedArgTypeStyleFromString(funcArgStyle)
- r.funcRetValStyle = repeatedArgTypeStyleFromString(funcArgStyle)
+ argstyle, err := repeatedArgTypeStyleFromString(funcArgStyle)
+ if err != nil {
+ return err
+ }
+ r.funcArgStyle = argstyle
+ valstyle, err := repeatedArgTypeStyleFromString(funcArgStyle)
+ if err != nil {
+ return err
+ }
+ r.funcRetValStyle = valstyle
case map[string]any: // expecting map[string]string
for k, v := range funcArgStyle {
switch k {
case "funcArgStyle":
val, ok := v.(string)
if !ok {
- panic(fmt.Sprintf("Invalid map value type for 'enforce-repeated-arg-type-style' rule. Expecting string, got %T", v))
+ return fmt.Errorf("invalid map value type for 'enforce-repeated-arg-type-style' rule. Expecting string, got %T", v)
+ }
+ valstyle, err := repeatedArgTypeStyleFromString(val)
+ if err != nil {
+ return err
}
- r.funcArgStyle = repeatedArgTypeStyleFromString(val)
+ r.funcArgStyle = valstyle
case "funcRetValStyle":
val, ok := v.(string)
if !ok {
- panic(fmt.Sprintf("Invalid map value '%v' for 'enforce-repeated-arg-type-style' rule. Expecting string, got %T", v, v))
+ return fmt.Errorf("invalid map value '%v' for 'enforce-repeated-arg-type-style' rule. Expecting string, got %T", v, v)
}
- r.funcRetValStyle = repeatedArgTypeStyleFromString(val)
+ argstyle, err := repeatedArgTypeStyleFromString(val)
+ if err != nil {
+ return err
+ }
+ r.funcRetValStyle = argstyle
default:
- panic(fmt.Sprintf("Invalid map key for 'enforce-repeated-arg-type-style' rule. Expecting 'funcArgStyle' or 'funcRetValStyle', got %v", k))
+ return fmt.Errorf("invalid map key for 'enforce-repeated-arg-type-style' rule. Expecting 'funcArgStyle' or 'funcRetValStyle', got %v", k)
}
}
default:
- panic(fmt.Sprintf("Invalid argument '%v' for 'import-alias-naming' rule. Expecting string or map[string]string, got %T", arguments[0], arguments[0]))
+ return fmt.Errorf("invalid argument '%v' for 'import-alias-naming' rule. Expecting string or map[string]string, got %T", arguments[0], arguments[0])
}
+ return nil
}
// Apply applies the rule to a given file.
-func (r *EnforceRepeatedArgTypeStyleRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
+func (r *EnforceRepeatedArgTypeStyleRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
if r.funcArgStyle == enforceRepeatedArgTypeStyleTypeAny && r.funcRetValStyle == enforceRepeatedArgTypeStyleTypeAny {
// This linter is not configured, return no failures.
return nil
@@ -104,42 +109,36 @@ func (r *EnforceRepeatedArgTypeStyleRule) Apply(file *lint.File, arguments lint.
var failures []lint.Failure
- err := file.Pkg.TypeCheck()
- if err != nil {
- // the file has other issues
- return nil
- }
- typesInfo := file.Pkg.TypesInfo()
-
astFile := file.AST
ast.Inspect(astFile, func(n ast.Node) bool {
switch fn := n.(type) {
case *ast.FuncDecl:
- if r.funcArgStyle == enforceRepeatedArgTypeStyleTypeFull {
+ switch r.funcArgStyle {
+ case enforceRepeatedArgTypeStyleTypeFull:
if fn.Type.Params != nil {
for _, field := range fn.Type.Params.List {
if len(field.Names) > 1 {
failures = append(failures, lint.Failure{
Confidence: 1,
Node: field,
- Category: "style",
+ Category: lint.FailureCategoryStyle,
Failure: "argument types should not be omitted",
})
}
}
}
- }
-
- if r.funcArgStyle == enforceRepeatedArgTypeStyleTypeShort {
- var prevType ast.Expr
+ case enforceRepeatedArgTypeStyleTypeShort:
if fn.Type.Params != nil {
+ var prevType ast.Expr
for _, field := range fn.Type.Params.List {
- if types.Identical(typesInfo.Types[field.Type].Type, typesInfo.Types[prevType].Type) {
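+ // compare the types as rendered source text; this avoids relying on type-checking information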
+ prevTypeStr := gofmt(prevType)
+ currentTypeStr := gofmt(field.Type)
+ if currentTypeStr == prevTypeStr {
failures = append(failures, lint.Failure{
Confidence: 1,
- Node: field,
- Category: "style",
- Failure: "repeated argument type can be omitted",
+ Node: prevType,
+ Category: lint.FailureCategoryStyle,
+ Failure: fmt.Sprintf("repeated argument type %q can be omitted", prevTypeStr),
})
}
prevType = field.Type
@@ -147,31 +146,32 @@ func (r *EnforceRepeatedArgTypeStyleRule) Apply(file *lint.File, arguments lint.
}
}
- if r.funcRetValStyle == enforceRepeatedArgTypeStyleTypeFull {
+ switch r.funcRetValStyle {
+ case enforceRepeatedArgTypeStyleTypeFull:
if fn.Type.Results != nil {
for _, field := range fn.Type.Results.List {
if len(field.Names) > 1 {
failures = append(failures, lint.Failure{
Confidence: 1,
Node: field,
- Category: "style",
+ Category: lint.FailureCategoryStyle,
Failure: "return types should not be omitted",
})
}
}
}
- }
-
- if r.funcRetValStyle == enforceRepeatedArgTypeStyleTypeShort {
- var prevType ast.Expr
+ case enforceRepeatedArgTypeStyleTypeShort:
if fn.Type.Results != nil {
+ var prevType ast.Expr
for _, field := range fn.Type.Results.List {
- if field.Names != nil && types.Identical(typesInfo.Types[field.Type].Type, typesInfo.Types[prevType].Type) {
+ prevTypeStr := gofmt(prevType)
+ currentTypeStr := gofmt(field.Type)
+ if field.Names != nil && currentTypeStr == prevTypeStr {
failures = append(failures, lint.Failure{
Confidence: 1,
- Node: field,
- Category: "style",
- Failure: "repeated return type can be omitted",
+ Node: prevType,
+ Category: lint.FailureCategoryStyle,
+ Failure: fmt.Sprintf("repeated return type %q can be omitted", prevTypeStr),
})
}
prevType = field.Type
diff --git a/vendor/github.com/mgechev/revive/rule/enforce-slice-style.go b/vendor/github.com/mgechev/revive/rule/enforce_slice_style.go
similarity index 62%
rename from vendor/github.com/mgechev/revive/rule/enforce-slice-style.go
rename to vendor/github.com/mgechev/revive/rule/enforce_slice_style.go
index abaf20be0e..ab503094f5 100644
--- a/vendor/github.com/mgechev/revive/rule/enforce-slice-style.go
+++ b/vendor/github.com/mgechev/revive/rule/enforce_slice_style.go
@@ -3,7 +3,6 @@ package rule
import (
"fmt"
"go/ast"
- "sync"
"github.com/mgechev/revive/lint"
)
@@ -14,6 +13,7 @@ const (
enforceSliceStyleTypeAny enforceSliceStyleType = "any"
enforceSliceStyleTypeMake enforceSliceStyleType = "make"
enforceSliceStyleTypeLiteral enforceSliceStyleType = "literal"
+ enforceSliceStyleTypeNil enforceSliceStyleType = "nil"
)
func sliceStyleFromString(s string) (enforceSliceStyleType, error) {
@@ -24,6 +24,8 @@ func sliceStyleFromString(s string) (enforceSliceStyleType, error) {
return enforceSliceStyleTypeMake, nil
case string(enforceSliceStyleTypeLiteral):
return enforceSliceStyleTypeLiteral, nil
+ case string(enforceSliceStyleTypeNil):
+ return enforceSliceStyleTypeNil, nil
default:
return enforceSliceStyleTypeAny, fmt.Errorf(
"invalid slice style: %s (expecting one of %v)",
@@ -32,6 +34,7 @@ func sliceStyleFromString(s string) (enforceSliceStyleType, error) {
enforceSliceStyleTypeAny,
enforceSliceStyleTypeMake,
enforceSliceStyleTypeLiteral,
+ enforceSliceStyleTypeNil,
},
)
}
@@ -39,42 +42,33 @@ func sliceStyleFromString(s string) (enforceSliceStyleType, error) {
// EnforceSliceStyleRule implements a rule to enforce `make([]type)` over `[]type{}`.
type EnforceSliceStyleRule struct {
- configured bool
enforceSliceStyle enforceSliceStyleType
- sync.Mutex
}
-func (r *EnforceSliceStyleRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
-
- if r.configured {
- return
- }
- r.configured = true
-
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *EnforceSliceStyleRule) Configure(arguments lint.Arguments) error {
if len(arguments) < 1 {
r.enforceSliceStyle = enforceSliceStyleTypeAny
- return
+ return nil
}
enforceSliceStyle, ok := arguments[0].(string)
if !ok {
- panic(fmt.Sprintf("Invalid argument '%v' for 'enforce-slice-style' rule. Expecting string, got %T", arguments[0], arguments[0]))
+ return fmt.Errorf("invalid argument '%v' for 'enforce-slice-style' rule. Expecting string, got %T", arguments[0], arguments[0])
}
var err error
r.enforceSliceStyle, err = sliceStyleFromString(enforceSliceStyle)
-
if err != nil {
- panic(fmt.Sprintf("Invalid argument to the enforce-slice-style rule: %v", err))
+ return fmt.Errorf("invalid argument to the enforce-slice-style rule: %w", err)
}
+ return nil
}
// Apply applies the rule to given file.
-func (r *EnforceSliceStyleRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
+func (r *EnforceSliceStyleRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
if r.enforceSliceStyle == enforceSliceStyleTypeAny {
// this linter is not configured
return nil
@@ -86,7 +80,10 @@ func (r *EnforceSliceStyleRule) Apply(file *lint.File, arguments lint.Arguments)
ast.Inspect(astFile, func(n ast.Node) bool {
switch v := n.(type) {
case *ast.CompositeLit:
- if r.enforceSliceStyle != enforceSliceStyleTypeMake {
+ switch r.enforceSliceStyle {
+ case enforceSliceStyleTypeMake, enforceSliceStyleTypeNil:
+ // continue
+ default:
return true
}
@@ -94,19 +91,27 @@ func (r *EnforceSliceStyleRule) Apply(file *lint.File, arguments lint.Arguments)
return true
}
- if len(v.Elts) > 0 {
- // not an empty slice
+ isNotEmptySlice := len(v.Elts) > 0
+ if isNotEmptySlice {
return true
}
+ var failureMessage string
+ if r.enforceSliceStyle == enforceSliceStyleTypeNil {
+ failureMessage = "use nil slice declaration (e.g. var args []type) instead of []type{}"
+ } else {
+ failureMessage = "use make([]type) instead of []type{} (or declare nil slice)"
+ }
failures = append(failures, lint.Failure{
Confidence: 1,
Node: v,
- Category: "style",
- Failure: "use make([]type) instead of []type{} (or declare nil slice)",
+ Category: lint.FailureCategoryStyle,
+ Failure: failureMessage,
})
case *ast.CallExpr:
- if r.enforceSliceStyle != enforceSliceStyleTypeLiteral {
+ switch r.enforceSliceStyle {
+ case enforceSliceStyleTypeLiteral, enforceSliceStyleTypeNil:
+ default:
// skip any function calls, even if it's make([]type)
// we don't want to report it if literals are not enforced
return true
@@ -117,8 +122,8 @@ func (r *EnforceSliceStyleRule) Apply(file *lint.File, arguments lint.Arguments)
return true
}
- if len(v.Args) < 2 {
- // skip invalid make declarations
+ isInvalidMakeDeclaration := len(v.Args) < 2
+ if isInvalidMakeDeclaration {
return true
}
@@ -133,8 +138,8 @@ func (r *EnforceSliceStyleRule) Apply(file *lint.File, arguments lint.Arguments)
return true
}
- if arg.Value != "0" {
- // skip slice with non-zero size
+ isSliceSizeNotZero := arg.Value != "0"
+ if isSliceSizeNotZero {
return true
}
@@ -145,17 +150,23 @@ func (r *EnforceSliceStyleRule) Apply(file *lint.File, arguments lint.Arguments)
return true
}
- if arg.Value != "0" {
- // skip non-zero capacity slice
+ isNonZeroCapacitySlice := arg.Value != "0"
+ if isNonZeroCapacitySlice {
return true
}
}
+ var failureMessage string
+ if r.enforceSliceStyle == enforceSliceStyleTypeNil {
+ failureMessage = "use nil slice declaration (e.g. var args []type) instead of make([]type, 0)"
+ } else {
+ failureMessage = "use []type{} instead of make([]type, 0) (or declare nil slice)"
+ }
failures = append(failures, lint.Failure{
Confidence: 1,
Node: v.Args[0],
- Category: "style",
- Failure: "use []type{} instead of make([]type, 0) (or declare nil slice)",
+ Category: lint.FailureCategoryStyle,
+ Failure: failureMessage,
})
}
return true
diff --git a/vendor/github.com/mgechev/revive/rule/error-return.go b/vendor/github.com/mgechev/revive/rule/error-return.go
deleted file mode 100644
index a724e001c8..0000000000
--- a/vendor/github.com/mgechev/revive/rule/error-return.go
+++ /dev/null
@@ -1,67 +0,0 @@
-package rule
-
-import (
- "go/ast"
-
- "github.com/mgechev/revive/lint"
-)
-
-// ErrorReturnRule lints given else constructs.
-type ErrorReturnRule struct{}
-
-// Apply applies the rule to given file.
-func (*ErrorReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
- var failures []lint.Failure
-
- fileAst := file.AST
- walker := lintErrorReturn{
- file: file,
- fileAst: fileAst,
- onFailure: func(failure lint.Failure) {
- failures = append(failures, failure)
- },
- }
-
- ast.Walk(walker, fileAst)
-
- return failures
-}
-
-// Name returns the rule name.
-func (*ErrorReturnRule) Name() string {
- return "error-return"
-}
-
-type lintErrorReturn struct {
- file *lint.File
- fileAst *ast.File
- onFailure func(lint.Failure)
-}
-
-func (w lintErrorReturn) Visit(n ast.Node) ast.Visitor {
- fn, ok := n.(*ast.FuncDecl)
- if !ok || fn.Type.Results == nil {
- return w
- }
- ret := fn.Type.Results.List
- if len(ret) <= 1 {
- return w
- }
- if isIdent(ret[len(ret)-1].Type, "error") {
- return nil
- }
- // An error return parameter should be the last parameter.
- // Flag any error parameters found before the last.
- for _, r := range ret[:len(ret)-1] {
- if isIdent(r.Type, "error") {
- w.onFailure(lint.Failure{
- Category: "arg-order",
- Confidence: 0.9,
- Node: fn,
- Failure: "error should be the last type when returning multiple items",
- })
- break // only flag one
- }
- }
- return w
-}
diff --git a/vendor/github.com/mgechev/revive/rule/error-naming.go b/vendor/github.com/mgechev/revive/rule/error_naming.go
similarity index 94%
rename from vendor/github.com/mgechev/revive/rule/error-naming.go
rename to vendor/github.com/mgechev/revive/rule/error_naming.go
index a4f24f3f09..5ae490813a 100644
--- a/vendor/github.com/mgechev/revive/rule/error-naming.go
+++ b/vendor/github.com/mgechev/revive/rule/error_naming.go
@@ -9,7 +9,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// ErrorNamingRule lints given else constructs.
+// ErrorNamingRule lints naming of error variables.
type ErrorNamingRule struct{}
// Apply applies the rule to given file.
@@ -69,7 +69,7 @@ func (w lintErrors) Visit(_ ast.Node) ast.Visitor {
w.onFailure(lint.Failure{
Node: id,
Confidence: 0.9,
- Category: "naming",
+ Category: lint.FailureCategoryNaming,
Failure: fmt.Sprintf("error var %s should have name of the form %sFoo", id.Name, prefix),
})
}
diff --git a/vendor/github.com/mgechev/revive/rule/error_return.go b/vendor/github.com/mgechev/revive/rule/error_return.go
new file mode 100644
index 0000000000..19f10a6617
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/error_return.go
@@ -0,0 +1,51 @@
+package rule
+
+import (
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ErrorReturnRule ensures that the error return parameter is the last parameter.
+type ErrorReturnRule struct{}
+
+// Apply applies the rule to given file.
+func (*ErrorReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ for _, decl := range file.AST.Decls {
+ funcDecl, ok := decl.(*ast.FuncDecl)
+ isFunctionWithMoreThanOneResult := ok && funcDecl.Type.Results != nil && len(funcDecl.Type.Results.List) > 1
+ if !isFunctionWithMoreThanOneResult {
+ continue
+ }
+
+ funcResults := funcDecl.Type.Results.List
+ isLastResultError := isIdent(funcResults[len(funcResults)-1].Type, "error")
+ if isLastResultError {
+ continue
+ }
+
+ // An error return parameter should be the last parameter.
+ // Flag any error parameters found before the last.
+ for _, r := range funcResults[:len(funcResults)-1] {
+ if isIdent(r.Type, "error") {
+ failures = append(failures, lint.Failure{
+ Category: lint.FailureCategoryStyle,
+ Confidence: 0.9,
+ Node: funcDecl,
+ Failure: "error should be the last type when returning multiple items",
+ })
+
+ break // only flag one
+ }
+ }
+ }
+
+ return failures
+}
+
+// Name returns the rule name.
+func (*ErrorReturnRule) Name() string {
+ return "error-return"
+}
diff --git a/vendor/github.com/mgechev/revive/rule/error-strings.go b/vendor/github.com/mgechev/revive/rule/error_strings.go
similarity index 88%
rename from vendor/github.com/mgechev/revive/rule/error-strings.go
rename to vendor/github.com/mgechev/revive/rule/error_strings.go
index 81ebda5401..839a613aa3 100644
--- a/vendor/github.com/mgechev/revive/rule/error-strings.go
+++ b/vendor/github.com/mgechev/revive/rule/error_strings.go
@@ -1,31 +1,26 @@
package rule
import (
+ "fmt"
"go/ast"
"go/token"
"strconv"
"strings"
- "sync"
"unicode"
"unicode/utf8"
"github.com/mgechev/revive/lint"
)
-// ErrorStringsRule lints given else constructs.
+// ErrorStringsRule lints error strings.
type ErrorStringsRule struct {
errorFunctions map[string]map[string]struct{}
- sync.Mutex
}
-func (r *ErrorStringsRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
-
- if r.errorFunctions != nil {
- return
- }
-
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *ErrorStringsRule) Configure(arguments lint.Arguments) error {
r.errorFunctions = map[string]map[string]struct{}{
"fmt": {
"Errorf": {},
@@ -52,16 +47,15 @@ func (r *ErrorStringsRule) configure(arguments lint.Arguments) {
}
}
if len(invalidCustomFunctions) != 0 {
- panic("found invalid custom function: " + strings.Join(invalidCustomFunctions, ","))
+ return fmt.Errorf("found invalid custom function: %s", strings.Join(invalidCustomFunctions, ","))
}
+ return nil
}
// Apply applies the rule to given file.
-func (r *ErrorStringsRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+func (r *ErrorStringsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
- r.configure(arguments)
-
fileAst := file.AST
walker := lintErrorStrings{
file: file,
@@ -121,7 +115,7 @@ func (w lintErrorStrings) Visit(n ast.Node) ast.Visitor {
w.onFailure(lint.Failure{
Node: str,
Confidence: conf,
- Category: "errors",
+ Category: lint.FailureCategoryErrors,
Failure: "error strings should not be capitalized or end with punctuation or a newline",
})
return w
diff --git a/vendor/github.com/mgechev/revive/rule/errorf.go b/vendor/github.com/mgechev/revive/rule/errorf.go
index 1588a745d7..cd56fe29c6 100644
--- a/vendor/github.com/mgechev/revive/rule/errorf.go
+++ b/vendor/github.com/mgechev/revive/rule/errorf.go
@@ -9,7 +9,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// ErrorfRule lints given else constructs.
+// ErrorfRule suggests using `fmt.Errorf` instead of `errors.New(fmt.Sprintf())`.
type ErrorfRule struct{}
// Apply applies the rule to given file.
@@ -69,7 +69,7 @@ func (w lintErrorf) Visit(n ast.Node) ast.Visitor {
}
failure := lint.Failure{
- Category: "errors",
+ Category: lint.FailureCategoryErrors,
Node: n,
Confidence: 1,
Failure: fmt.Sprintf("should replace %s(fmt.Sprintf(...)) with %s.Errorf(...)", w.file.Render(se), errorfPrefix),
diff --git a/vendor/github.com/mgechev/revive/rule/exported.go b/vendor/github.com/mgechev/revive/rule/exported.go
index b8663c48c6..7d59c4de5c 100644
--- a/vendor/github.com/mgechev/revive/rule/exported.go
+++ b/vendor/github.com/mgechev/revive/rule/exported.go
@@ -5,7 +5,6 @@ import (
"go/ast"
"go/token"
"strings"
- "sync"
"unicode"
"unicode/utf8"
@@ -13,34 +12,104 @@ import (
"github.com/mgechev/revive/lint"
)
-// ExportedRule lints given else constructs.
+// disabledChecks stores the kinds of checks that should be ignored
+type disabledChecks struct {
+ Const bool
+ Function bool
+ Method bool
+ PrivateReceivers bool
+ PublicInterfaces bool
+ Stuttering bool
+ Type bool
+ Var bool
+}
+
+const (
+ checkNamePrivateReceivers = "privateReceivers"
+ checkNamePublicInterfaces = "publicInterfaces"
+ checkNameStuttering = "stuttering"
+)
+
+// isDisabled returns true if the given check is disabled, false otherwise
+func (dc *disabledChecks) isDisabled(checkName string) bool {
+ switch checkName {
+ case "var":
+ return dc.Var
+ case "const":
+ return dc.Const
+ case "function":
+ return dc.Function
+ case "method":
+ return dc.Method
+ case checkNamePrivateReceivers:
+ return dc.PrivateReceivers
+ case checkNamePublicInterfaces:
+ return dc.PublicInterfaces
+ case checkNameStuttering:
+ return dc.Stuttering
+ case "type":
+ return dc.Type
+ default:
+ return false
+ }
+}
+
+var commonMethods = map[string]bool{
+ "Error": true,
+ "Read": true,
+ "ServeHTTP": true,
+ "String": true,
+ "Write": true,
+ "Unwrap": true,
+}
+
+// ExportedRule lints naming and commenting conventions on exported symbols.
type ExportedRule struct {
- configured bool
- checkPrivateReceivers bool
- disableStutteringCheck bool
- stuttersMsg string
- sync.Mutex
+ stuttersMsg string
+ disabledChecks disabledChecks
}
-func (r *ExportedRule) configure(arguments lint.Arguments) {
- r.Lock()
- if !r.configured {
- var sayRepetitiveInsteadOfStutters bool
- r.checkPrivateReceivers, r.disableStutteringCheck, sayRepetitiveInsteadOfStutters = r.getConf(arguments)
- r.stuttersMsg = "stutters"
- if sayRepetitiveInsteadOfStutters {
- r.stuttersMsg = "is repetitive"
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *ExportedRule) Configure(arguments lint.Arguments) error {
+ r.disabledChecks = disabledChecks{PrivateReceivers: true, PublicInterfaces: true}
+ r.stuttersMsg = "stutters"
+ for _, flag := range arguments {
+ switch flag := flag.(type) {
+ case string:
+ switch flag {
+ case "checkPrivateReceivers":
+ r.disabledChecks.PrivateReceivers = false
+ case "disableStutteringCheck":
+ r.disabledChecks.Stuttering = true
+ case "sayRepetitiveInsteadOfStutters":
+ r.stuttersMsg = "is repetitive"
+ case "checkPublicInterface":
+ r.disabledChecks.PublicInterfaces = false
+ case "disableChecksOnConstants":
+ r.disabledChecks.Const = true
+ case "disableChecksOnFunctions":
+ r.disabledChecks.Function = true
+ case "disableChecksOnMethods":
+ r.disabledChecks.Method = true
+ case "disableChecksOnTypes":
+ r.disabledChecks.Type = true
+ case "disableChecksOnVariables":
+ r.disabledChecks.Var = true
+ default:
+ return fmt.Errorf("unknown configuration flag %s for %s rule", flag, r.Name())
+ }
+ default:
+ return fmt.Errorf("invalid argument for the %s rule: expecting a string, got %T", r.Name(), flag)
}
-
- r.configured = true
}
- r.Unlock()
+
+ return nil
}
// Apply applies the rule to given file.
-func (r *ExportedRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
- r.configure(args)
-
+func (r *ExportedRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
if file.IsTest() {
return failures
@@ -54,10 +123,9 @@ func (r *ExportedRule) Apply(file *lint.File, args lint.Arguments) []lint.Failur
onFailure: func(failure lint.Failure) {
failures = append(failures, failure)
},
- genDeclMissingComments: make(map[*ast.GenDecl]bool),
- checkPrivateReceivers: r.checkPrivateReceivers,
- disableStutteringCheck: r.disableStutteringCheck,
+ genDeclMissingComments: map[*ast.GenDecl]bool{},
stuttersMsg: r.stuttersMsg,
+ disabledChecks: r.disabledChecks,
}
ast.Walk(&walker, fileAst)
@@ -70,61 +138,36 @@ func (*ExportedRule) Name() string {
return "exported"
}
-func (r *ExportedRule) getConf(args lint.Arguments) (checkPrivateReceivers, disableStutteringCheck, sayRepetitiveInsteadOfStutters bool) {
- // if any, we expect a slice of strings as configuration
- if len(args) < 1 {
- return
- }
- for _, flag := range args {
- flagStr, ok := flag.(string)
- if !ok {
- panic(fmt.Sprintf("Invalid argument for the %s rule: expecting a string, got %T", r.Name(), flag))
- }
-
- switch flagStr {
- case "checkPrivateReceivers":
- checkPrivateReceivers = true
- case "disableStutteringCheck":
- disableStutteringCheck = true
- case "sayRepetitiveInsteadOfStutters":
- sayRepetitiveInsteadOfStutters = true
- default:
- panic(fmt.Sprintf("Unknown configuration flag %s for %s rule", flagStr, r.Name()))
- }
- }
-
- return
-}
-
type lintExported struct {
file *lint.File
fileAst *ast.File
lastGen *ast.GenDecl
genDeclMissingComments map[*ast.GenDecl]bool
onFailure func(lint.Failure)
- checkPrivateReceivers bool
- disableStutteringCheck bool
stuttersMsg string
+ disabledChecks disabledChecks
}
func (w *lintExported) lintFuncDoc(fn *ast.FuncDecl) {
if !ast.IsExported(fn.Name.Name) {
- // func is unexported
- return
+ return // func is unexported, nothing to do
}
+
kind := "function"
name := fn.Name.Name
- if fn.Recv != nil && len(fn.Recv.List) > 0 {
- // method
+ isMethod := fn.Recv != nil && len(fn.Recv.List) > 0
+ if isMethod {
kind = "method"
recv := typeparams.ReceiverType(fn)
- if !w.checkPrivateReceivers && !ast.IsExported(recv) {
- // receiver is unexported
+
+ if !ast.IsExported(recv) && w.disabledChecks.PrivateReceivers {
return
}
+
if commonMethods[name] {
return
}
+
switch name {
case "Len", "Less", "Swap":
sortables := w.file.Pkg.Sortable()
@@ -134,29 +177,35 @@ func (w *lintExported) lintFuncDoc(fn *ast.FuncDecl) {
}
name = recv + "." + name
}
- if fn.Doc == nil {
+
+ if w.disabledChecks.isDisabled(kind) {
+ return
+ }
+
+ if !hasTextComment(fn.Doc) {
w.onFailure(lint.Failure{
Node: fn,
Confidence: 1,
- Category: "comments",
+ Category: lint.FailureCategoryComments,
Failure: fmt.Sprintf("exported %s %s should have comment or be unexported", kind, name),
})
return
}
+
s := normalizeText(fn.Doc.Text())
prefix := fn.Name.Name + " "
if !strings.HasPrefix(s, prefix) {
w.onFailure(lint.Failure{
Node: fn.Doc,
Confidence: 0.8,
- Category: "comments",
+ Category: lint.FailureCategoryComments,
Failure: fmt.Sprintf(`comment on exported %s %s should be of the form "%s..."`, kind, name, prefix),
})
}
}
func (w *lintExported) checkStutter(id *ast.Ident, thing string) {
- if w.disableStutteringCheck {
+ if w.disabledChecks.Stuttering {
return
}
@@ -183,21 +232,26 @@ func (w *lintExported) checkStutter(id *ast.Ident, thing string) {
w.onFailure(lint.Failure{
Node: id,
Confidence: 0.8,
- Category: "naming",
+ Category: lint.FailureCategoryNaming,
Failure: fmt.Sprintf("%s name will be used as %s.%s by other packages, and that %s; consider calling this %s", thing, pkg, name, w.stuttersMsg, rem),
})
}
}
func (w *lintExported) lintTypeDoc(t *ast.TypeSpec, doc *ast.CommentGroup) {
+ if w.disabledChecks.isDisabled("type") {
+ return
+ }
+
if !ast.IsExported(t.Name.Name) {
return
}
- if doc == nil {
+
+ if !hasTextComment(doc) {
w.onFailure(lint.Failure{
Node: t,
Confidence: 1,
- Category: "comments",
+ Category: lint.FailureCategoryComments,
Failure: fmt.Sprintf("exported type %v should have comment or be unexported", t.Name),
})
return
@@ -209,19 +263,24 @@ func (w *lintExported) lintTypeDoc(t *ast.TypeSpec, doc *ast.CommentGroup) {
if t.Name.Name == a {
continue
}
- if strings.HasPrefix(s, a+" ") {
- s = s[len(a)+1:]
+ var found bool
+ if s, found = strings.CutPrefix(s, a+" "); found {
break
}
}
- if !strings.HasPrefix(s, t.Name.Name+" ") {
- w.onFailure(lint.Failure{
- Node: doc,
- Confidence: 1,
- Category: "comments",
- Failure: fmt.Sprintf(`comment on exported type %v should be of the form "%v ..." (with optional leading article)`, t.Name, t.Name),
- })
+
+ // if the comment starts with the type name and has some text after it, it's fine
+ expectedPrefix := t.Name.Name + " "
+ if strings.HasPrefix(s, expectedPrefix) {
+ return
}
+
+ w.onFailure(lint.Failure{
+ Node: doc,
+ Confidence: 1,
+ Category: lint.FailureCategoryComments,
+ Failure: fmt.Sprintf(`comment on exported type %v should be of the form "%s..." (with optional leading article)`, t.Name, expectedPrefix),
+ })
}
func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genDeclMissingComments map[*ast.GenDecl]bool) {
@@ -230,12 +289,16 @@ func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genD
kind = "const"
}
+ if w.disabledChecks.isDisabled(kind) {
+ return
+ }
+
if len(vs.Names) > 1 {
// Check that none are exported except for the first.
for _, n := range vs.Names[1:] {
if ast.IsExported(n.Name) {
w.onFailure(lint.Failure{
- Category: "comments",
+ Category: lint.FailureCategoryComments,
Confidence: 1,
Failure: fmt.Sprintf("exported %s %s should have its own declaration", kind, n.Name),
Node: vs,
@@ -251,7 +314,7 @@ func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genD
return
}
- if vs.Doc == nil && vs.Comment == nil && gd.Doc == nil {
+ if !hasTextComment(vs.Doc) && !hasTextComment(gd.Doc) {
if genDeclMissingComments[gd] {
return
}
@@ -262,23 +325,23 @@ func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genD
w.onFailure(lint.Failure{
Confidence: 1,
Node: vs,
- Category: "comments",
+ Category: lint.FailureCategoryComments,
Failure: fmt.Sprintf("exported %s %s should have comment%s or be unexported", kind, name, block),
})
genDeclMissingComments[gd] = true
return
}
// If this GenDecl has parens and a comment, we don't check its comment form.
- if gd.Doc != nil && gd.Lparen.IsValid() {
+ if hasTextComment(gd.Doc) && gd.Lparen.IsValid() {
return
}
// The relevant text to check will be on either vs.Doc or gd.Doc.
// Use vs.Doc preferentially.
var doc *ast.CommentGroup
switch {
- case vs.Doc != nil:
+ case hasTextComment(vs.Doc):
doc = vs.Doc
- case vs.Comment != nil && gd.Doc == nil:
+ case hasTextComment(vs.Comment) && !hasTextComment(gd.Doc):
doc = vs.Comment
default:
doc = gd.Doc
@@ -290,18 +353,31 @@ func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genD
w.onFailure(lint.Failure{
Confidence: 1,
Node: doc,
- Category: "comments",
+ Category: lint.FailureCategoryComments,
Failure: fmt.Sprintf(`comment on exported %s %s should be of the form "%s..."`, kind, name, prefix),
})
}
}
+// hasTextComment returns true if the comment contains a text comment
+// e.g. //go:embed foo.txt is a directive comment, not a text comment
+// e.g. //nolint:whatever is a directive comment, not a text comment
+func hasTextComment(comment *ast.CommentGroup) bool {
+ if comment == nil {
+ return false
+ }
+
+ // a comment group may consist only of directives; comment.Text() strips those, leaving an empty string
+ text := comment.Text()
+ return text != ""
+}
+
// normalizeText is a helper function that normalizes comment strings by:
// * removing one leading space
//
// This function is needed because ast.CommentGroup.Text() does not handle //-style and /*-style comments uniformly
func normalizeText(t string) string {
- return strings.TrimPrefix(t, " ")
+ return strings.TrimSpace(t)
}
func (w *lintExported) Visit(n ast.Node) ast.Visitor {
@@ -325,12 +401,20 @@ func (w *lintExported) Visit(n ast.Node) ast.Visitor {
case *ast.TypeSpec:
// inside a GenDecl, which usually has the doc
doc := v.Doc
- if doc == nil {
+ if !hasTextComment(doc) {
doc = w.lastGen.Doc
}
w.lintTypeDoc(v, doc)
w.checkStutter(v.Name, "type")
- // Don't proceed inside types.
+
+ if !w.disabledChecks.PublicInterfaces {
+ if iface, ok := v.Type.(*ast.InterfaceType); ok {
+ if ast.IsExported(v.Name.Name) {
+ w.doCheckPublicInterface(v.Name.Name, iface)
+ }
+ }
+ }
+
return nil
case *ast.ValueSpec:
w.lintValueSpecDoc(v, w.lastGen, w.genDeclMissingComments)
@@ -338,3 +422,38 @@ func (w *lintExported) Visit(n ast.Node) ast.Visitor {
}
return w
}
+
+func (w *lintExported) doCheckPublicInterface(typeName string, iface *ast.InterfaceType) {
+ for _, m := range iface.Methods.List {
+ w.lintInterfaceMethod(typeName, m)
+ }
+}
+
+func (w *lintExported) lintInterfaceMethod(typeName string, m *ast.Field) {
+ if len(m.Names) == 0 {
+ return
+ }
+ if !ast.IsExported(m.Names[0].Name) {
+ return
+ }
+ name := m.Names[0].Name
+ if !hasTextComment(m.Doc) {
+ w.onFailure(lint.Failure{
+ Node: m,
+ Confidence: 1,
+ Category: lint.FailureCategoryComments,
+ Failure: fmt.Sprintf("public interface method %s.%s should be commented", typeName, name),
+ })
+ return
+ }
+ s := normalizeText(m.Doc.Text())
+ expectedPrefix := m.Names[0].Name + " "
+ if !strings.HasPrefix(s, expectedPrefix) {
+ w.onFailure(lint.Failure{
+ Node: m.Doc,
+ Confidence: 0.8,
+ Category: lint.FailureCategoryComments,
+ Failure: fmt.Sprintf(`comment on exported interface method %s.%s should be of the form "%s..."`, typeName, name, expectedPrefix),
+ })
+ }
+}
diff --git a/vendor/github.com/mgechev/revive/rule/file-header.go b/vendor/github.com/mgechev/revive/rule/file_header.go
similarity index 61%
rename from vendor/github.com/mgechev/revive/rule/file-header.go
rename to vendor/github.com/mgechev/revive/rule/file_header.go
index a7d69ff2b1..53d7ea9d03 100644
--- a/vendor/github.com/mgechev/revive/rule/file-header.go
+++ b/vendor/github.com/mgechev/revive/rule/file_header.go
@@ -3,15 +3,13 @@ package rule
import (
"fmt"
"regexp"
- "sync"
"github.com/mgechev/revive/lint"
)
-// FileHeaderRule lints given else constructs.
+// FileHeaderRule lints the header that each file should have.
type FileHeaderRule struct {
header string
- sync.Mutex
}
var (
@@ -19,26 +17,24 @@ var (
singleRegexp = regexp.MustCompile("^//")
)
-func (r *FileHeaderRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
- if r.header == "" {
- if len(arguments) < 1 {
- return
- }
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *FileHeaderRule) Configure(arguments lint.Arguments) error {
+ if len(arguments) < 1 {
+ return nil
+ }
- var ok bool
- r.header, ok = arguments[0].(string)
- if !ok {
- panic(fmt.Sprintf("invalid argument for \"file-header\" rule: argument should be a string, got %T", arguments[0]))
- }
+ var ok bool
+ r.header, ok = arguments[0].(string)
+ if !ok {
+ return fmt.Errorf(`invalid argument for "file-header" rule: argument should be a string, got %T`, arguments[0])
}
+ return nil
}
// Apply applies the rule to given file.
-func (r *FileHeaderRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
+func (r *FileHeaderRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
if r.header == "" {
return nil
}
@@ -72,7 +68,7 @@ func (r *FileHeaderRule) Apply(file *lint.File, arguments lint.Arguments) []lint
regex, err := regexp.Compile(r.header)
if err != nil {
- panic(err.Error())
+ return newInternalFailureError(err)
}
if !regex.MatchString(comment) {
diff --git a/vendor/github.com/mgechev/revive/rule/file_length_limit.go b/vendor/github.com/mgechev/revive/rule/file_length_limit.go
new file mode 100644
index 0000000000..8a3b371266
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/file_length_limit.go
@@ -0,0 +1,131 @@
+package rule
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// FileLengthLimitRule lints the number of lines in a file.
+type FileLengthLimitRule struct {
+ // max is the maximum number of lines allowed in a file. 0 means the rule is disabled.
+ max int
+ // skipComments indicates whether to skip comment lines when counting lines.
+ skipComments bool
+ // skipBlankLines indicates whether to skip blank lines when counting lines.
+ skipBlankLines bool
+}
+
+// Apply applies the rule to given file.
+func (r *FileLengthLimitRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ if r.max <= 0 {
+ // when max is negative or 0 the rule is disabled
+ return nil
+ }
+
+ all := 0
+ blank := 0
+ scanner := bufio.NewScanner(bytes.NewReader(file.Content()))
+ for scanner.Scan() {
+ all++
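+ // count blank (whitespace-only) lines separately so they can be excluded when skipBlankLines is set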
+ if len(bytes.TrimSpace(scanner.Bytes())) == 0 {
+ blank++
+ }
+ }
+
+ if err := scanner.Err(); err != nil {
+ return newInternalFailureError(err)
+ }
+
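+ // start from the total line count and subtract whatever the configuration says to skip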
+ lines := all
+ if r.skipComments {
+ lines -= countCommentLines(file.AST.Comments)
+ }
+
+ if r.skipBlankLines {
+ lines -= blank
+ }
+
+ if lines <= r.max {
+ return nil
+ }
+
+ return []lint.Failure{
+ {
+ Category: lint.FailureCategoryCodeStyle,
+ Confidence: 1,
+ Position: lint.FailurePosition{
+ Start: token.Position{
+ Filename: file.Name,
+ Line: all,
+ },
+ },
+ Failure: fmt.Sprintf("file length is %d lines, which exceeds the limit of %d", lines, r.max),
+ },
+ }
+}
+
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *FileLengthLimitRule) Configure(arguments lint.Arguments) error {
+ if len(arguments) < 1 {
+ return nil // use default
+ }
+
+ argKV, ok := arguments[0].(map[string]any)
+ if !ok {
+ return fmt.Errorf(`invalid argument to the "file-length-limit" rule. Expecting a k,v map, got %T`, arguments[0])
+ }
+ for k, v := range argKV {
+ switch k {
+ case "max":
+ maxLines, ok := v.(int64)
+ if !ok || maxLines < 0 {
+ return fmt.Errorf(`invalid configuration value for max lines in "file-length-limit" rule; need positive int64 but got %T`, v)
+ }
+ r.max = int(maxLines)
+ case "skipComments":
+ skipComments, ok := v.(bool)
+ if !ok {
+ return fmt.Errorf(`invalid configuration value for skip comments in "file-length-limit" rule; need bool but got %T`, v)
+ }
+ r.skipComments = skipComments
+ case "skipBlankLines":
+ skipBlankLines, ok := v.(bool)
+ if !ok {
+ return fmt.Errorf(`invalid configuration value for skip blank lines in "file-length-limit" rule; need bool but got %T`, v)
+ }
+ r.skipBlankLines = skipBlankLines
+ }
+ }
+ return nil
+}
+
+// Name returns the rule name.
+func (*FileLengthLimitRule) Name() string {
+ return "file-length-limit"
+}
+
+func countCommentLines(comments []*ast.CommentGroup) int {
+ count := 0
+ for _, cg := range comments {
+ for _, comment := range cg.List {
+ if len(comment.Text) < 2 {
+ continue
+ }
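+ // comment.Text always starts with "//" or "/*", so the second character distinguishes the two forms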
+ switch comment.Text[1] {
+ case '/': // single-line comment
+ count++
+ case '*': // multi-line comment
+ count += strings.Count(comment.Text, "\n") + 1
+ }
+ }
+ }
+ return count
+}
diff --git a/vendor/github.com/mgechev/revive/rule/filename_format.go b/vendor/github.com/mgechev/revive/rule/filename_format.go
new file mode 100644
index 0000000000..6d4905f189
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/filename_format.go
@@ -0,0 +1,81 @@
+package rule
+
+import (
+ "fmt"
+ "path/filepath"
+ "regexp"
+ "unicode"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// FilenameFormatRule lints source filenames according to a set of regular expressions given as arguments
+type FilenameFormatRule struct {
+ format *regexp.Regexp
+}
+
+// Apply applies the rule to the given file.
+func (r *FilenameFormatRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ filename := filepath.Base(file.Name)
+ if r.format.MatchString(filename) {
+ return nil
+ }
+
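+ // the failure message reports the expected pattern and calls out any non-ASCII characters in the filename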
+ failureMsg := fmt.Sprintf("Filename %s is not of the format %s.%s", filename, r.format.String(), r.getMsgForNonASCIIChars(filename))
+ return []lint.Failure{{
+ Confidence: 1,
+ Failure: failureMsg,
+ RuleName: r.Name(),
+ Node: file.AST.Name,
+ }}
+}
+
+func (*FilenameFormatRule) getMsgForNonASCIIChars(str string) string {
+ result := ""
+ for _, c := range str {
+ if c <= unicode.MaxASCII {
+ continue
+ }
+
+ result += fmt.Sprintf(" Non ASCII character %c (%U) found.", c, c)
+ }
+
+ return result
+}
+
+// Name returns the rule name.
+func (*FilenameFormatRule) Name() string {
+ return "filename-format"
+}
+
+var defaultFormat = regexp.MustCompile(`^[_A-Za-z0-9][_A-Za-z0-9-]*\.go$`)
+
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *FilenameFormatRule) Configure(arguments lint.Arguments) error {
+ argsCount := len(arguments)
+ if argsCount == 0 {
+ r.format = defaultFormat
+ return nil
+ }
+
+ if argsCount > 1 {
+ return fmt.Errorf("rule %q expects only one argument, got %d %v", r.Name(), argsCount, arguments)
+ }
+
+ arg := arguments[0]
+ str, ok := arg.(string)
+ if !ok {
+ return fmt.Errorf("rule %q expects a string argument, got %v of type %T", r.Name(), arg, arg)
+ }
+
+ format, err := regexp.Compile(str)
+ if err != nil {
+ return fmt.Errorf("rule %q expects a valid regexp argument, got error for %s: %w", r.Name(), str, err)
+ }
+
+ r.format = format
+
+ return nil
+}
diff --git a/vendor/github.com/mgechev/revive/rule/flag-param.go b/vendor/github.com/mgechev/revive/rule/flag_param.go
similarity index 54%
rename from vendor/github.com/mgechev/revive/rule/flag-param.go
rename to vendor/github.com/mgechev/revive/rule/flag_param.go
index f9bfb712c4..2f69503cab 100644
--- a/vendor/github.com/mgechev/revive/rule/flag-param.go
+++ b/vendor/github.com/mgechev/revive/rule/flag_param.go
@@ -7,64 +7,54 @@ import (
"github.com/mgechev/revive/lint"
)
-// FlagParamRule lints given else constructs.
+// FlagParamRule warns on boolean parameters that create a control coupling.
type FlagParamRule struct{}
// Apply applies the rule to given file.
func (*FlagParamRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
-
onFailure := func(failure lint.Failure) {
failures = append(failures, failure)
}
- w := lintFlagParamRule{onFailure: onFailure}
- ast.Walk(w, file.AST)
- return failures
-}
-
-// Name returns the rule name.
-func (*FlagParamRule) Name() string {
- return "flag-parameter"
-}
-
-type lintFlagParamRule struct {
- onFailure func(lint.Failure)
-}
-
-func (w lintFlagParamRule) Visit(node ast.Node) ast.Visitor {
- fd, ok := node.(*ast.FuncDecl)
- if !ok {
- return w
- }
-
- if fd.Body == nil {
- return nil // skip whole function declaration
- }
+ for _, decl := range file.AST.Decls {
+ fd, ok := decl.(*ast.FuncDecl)
+ isFuncWithNonEmptyBody := ok && fd.Body != nil
+ if !isFuncWithNonEmptyBody {
+ continue
+ }
- for _, p := range fd.Type.Params.List {
- t := p.Type
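+ // collect the names of the function's bool parameters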
+ boolParams := map[string]struct{}{}
+ for _, param := range fd.Type.Params.List {
+ if !isIdent(param.Type, "bool") {
+ continue
+ }
- id, ok := t.(*ast.Ident)
- if !ok {
- continue
+ for _, paramIdent := range param.Names {
+ boolParams[paramIdent.Name] = struct{}{}
+ }
}
- if id.Name != "bool" {
+ if len(boolParams) == 0 {
continue
}
- cv := conditionVisitor{p.Names, fd, w}
+ cv := conditionVisitor{boolParams, fd, onFailure}
ast.Walk(cv, fd.Body)
}
- return w
+ return failures
+}
+
+// Name returns the rule name.
+func (*FlagParamRule) Name() string {
+ return "flag-parameter"
}
type conditionVisitor struct {
- ids []*ast.Ident
- fd *ast.FuncDecl
- linter lintFlagParamRule
+ idents map[string]struct{}
+ fd *ast.FuncDecl
+ onFailure func(lint.Failure)
}
func (w conditionVisitor) Visit(node ast.Node) ast.Visitor {
@@ -73,31 +63,30 @@ func (w conditionVisitor) Visit(node ast.Node) ast.Visitor {
return w
}
- fselect := func(n ast.Node) bool {
+ findUsesOfIdents := func(n ast.Node) bool {
ident, ok := n.(*ast.Ident)
if !ok {
return false
}
- for _, id := range w.ids {
- if ident.Name == id.Name {
- return true
- }
+ _, ok = w.idents[ident.Name]
+ if !ok {
+ return false
}
- return false
+ return true // ident.Name is one of the function's bool parameters
}
- uses := pick(ifStmt.Cond, fselect)
+ uses := pick(ifStmt.Cond, findUsesOfIdents)
if len(uses) < 1 {
return w
}
- w.linter.onFailure(lint.Failure{
+ w.onFailure(lint.Failure{
Confidence: 1,
Node: w.fd.Type.Params,
- Category: "bad practice",
+ Category: lint.FailureCategoryBadPractice,
Failure: fmt.Sprintf("parameter '%s' seems to be a control flag, avoid control coupling", uses[0]),
})
diff --git a/vendor/github.com/mgechev/revive/rule/function-length.go b/vendor/github.com/mgechev/revive/rule/function-length.go
deleted file mode 100644
index fd65884e97..0000000000
--- a/vendor/github.com/mgechev/revive/rule/function-length.go
+++ /dev/null
@@ -1,177 +0,0 @@
-package rule
-
-import (
- "fmt"
- "go/ast"
- "reflect"
- "sync"
-
- "github.com/mgechev/revive/lint"
-)
-
-// FunctionLength lint.
-type FunctionLength struct {
- maxStmt int
- maxLines int
- configured bool
- sync.Mutex
-}
-
-func (r *FunctionLength) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
- if !r.configured {
- maxStmt, maxLines := r.parseArguments(arguments)
- r.maxStmt = int(maxStmt)
- r.maxLines = int(maxLines)
- r.configured = true
- }
-}
-
-// Apply applies the rule to given file.
-func (r *FunctionLength) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
- var failures []lint.Failure
-
- walker := lintFuncLength{
- file: file,
- maxStmt: r.maxStmt,
- maxLines: r.maxLines,
- onFailure: func(failure lint.Failure) {
- failures = append(failures, failure)
- },
- }
-
- ast.Walk(walker, file.AST)
-
- return failures
-}
-
-// Name returns the rule name.
-func (*FunctionLength) Name() string {
- return "function-length"
-}
-
-const defaultFuncStmtsLimit = 50
-const defaultFuncLinesLimit = 75
-
-func (*FunctionLength) parseArguments(arguments lint.Arguments) (maxStmt, maxLines int64) {
- if len(arguments) == 0 {
- return defaultFuncStmtsLimit, defaultFuncLinesLimit
- }
-
- if len(arguments) != 2 {
- panic(fmt.Sprintf(`invalid configuration for "function-length" rule, expected 2 arguments but got %d`, len(arguments)))
- }
-
- maxStmt, maxStmtOk := arguments[0].(int64)
- if !maxStmtOk {
- panic(fmt.Sprintf(`invalid configuration value for max statements in "function-length" rule; need int64 but got %T`, arguments[0]))
- }
- if maxStmt < 0 {
- panic(fmt.Sprintf(`the configuration value for max statements in "function-length" rule cannot be negative, got %d`, maxStmt))
- }
-
- maxLines, maxLinesOk := arguments[1].(int64)
- if !maxLinesOk {
- panic(fmt.Sprintf(`invalid configuration value for max lines in "function-length" rule; need int64 but got %T`, arguments[1]))
- }
- if maxLines < 0 {
- panic(fmt.Sprintf(`the configuration value for max statements in "function-length" rule cannot be negative, got %d`, maxLines))
- }
-
- return maxStmt, maxLines
-}
-
-type lintFuncLength struct {
- file *lint.File
- maxStmt int
- maxLines int
- onFailure func(lint.Failure)
-}
-
-func (w lintFuncLength) Visit(n ast.Node) ast.Visitor {
- node, ok := n.(*ast.FuncDecl)
- if !ok {
- return w
- }
-
- body := node.Body
- if body == nil || len(node.Body.List) == 0 {
- return nil
- }
-
- if w.maxStmt > 0 {
- stmtCount := w.countStmts(node.Body.List)
- if stmtCount > w.maxStmt {
- w.onFailure(lint.Failure{
- Confidence: 1,
- Failure: fmt.Sprintf("maximum number of statements per function exceeded; max %d but got %d", w.maxStmt, stmtCount),
- Node: node,
- })
- }
- }
-
- if w.maxLines > 0 {
- lineCount := w.countLines(node.Body)
- if lineCount > w.maxLines {
- w.onFailure(lint.Failure{
- Confidence: 1,
- Failure: fmt.Sprintf("maximum number of lines per function exceeded; max %d but got %d", w.maxLines, lineCount),
- Node: node,
- })
- }
- }
-
- return nil
-}
-
-func (w lintFuncLength) countLines(b *ast.BlockStmt) int {
- return w.file.ToPosition(b.End()).Line - w.file.ToPosition(b.Pos()).Line - 1
-}
-
-func (w lintFuncLength) countStmts(b []ast.Stmt) int {
- count := 0
- for _, s := range b {
- switch stmt := s.(type) {
- case *ast.BlockStmt:
- count += w.countStmts(stmt.List)
- case *ast.IfStmt:
- count += 1 + w.countBodyListStmts(stmt)
- if stmt.Else != nil {
- elseBody, ok := stmt.Else.(*ast.BlockStmt)
- if ok {
- count += w.countStmts(elseBody.List)
- }
- }
- case *ast.ForStmt, *ast.RangeStmt,
- *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt:
- count += 1 + w.countBodyListStmts(stmt)
- case *ast.CaseClause:
- count += w.countStmts(stmt.Body)
- case *ast.AssignStmt:
- count += 1 + w.countFuncLitStmts(stmt.Rhs[0])
- case *ast.GoStmt:
- count += 1 + w.countFuncLitStmts(stmt.Call.Fun)
- case *ast.DeferStmt:
- count += 1 + w.countFuncLitStmts(stmt.Call.Fun)
- default:
- count++
- }
- }
-
- return count
-}
-
-func (w lintFuncLength) countFuncLitStmts(stmt ast.Expr) int {
- if block, ok := stmt.(*ast.FuncLit); ok {
- return w.countStmts(block.Body.List)
- }
- return 0
-}
-
-func (w lintFuncLength) countBodyListStmts(t any) int {
- i := reflect.ValueOf(t).Elem().FieldByName(`Body`).Elem().FieldByName(`List`).Interface()
- return w.countStmts(i.([]ast.Stmt))
-}
diff --git a/vendor/github.com/mgechev/revive/rule/function-result-limit.go b/vendor/github.com/mgechev/revive/rule/function-result-limit.go
deleted file mode 100644
index 6a0748011d..0000000000
--- a/vendor/github.com/mgechev/revive/rule/function-result-limit.go
+++ /dev/null
@@ -1,83 +0,0 @@
-package rule
-
-import (
- "fmt"
- "go/ast"
- "sync"
-
- "github.com/mgechev/revive/lint"
-)
-
-// FunctionResultsLimitRule lints given else constructs.
-type FunctionResultsLimitRule struct {
- max int
- sync.Mutex
-}
-
-const defaultResultsLimit = 3
-
-func (r *FunctionResultsLimitRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
- if r.max == 0 {
- if len(arguments) < 1 {
- r.max = defaultResultsLimit
- return
- }
- max, ok := arguments[0].(int64) // Alt. non panicking version
- if !ok {
- panic(fmt.Sprintf(`invalid value passed as return results number to the "function-result-limit" rule; need int64 but got %T`, arguments[0]))
- }
- if max < 0 {
- panic(`the value passed as return results number to the "function-result-limit" rule cannot be negative`)
- }
- r.max = int(max)
- }
-}
-
-// Apply applies the rule to given file.
-func (r *FunctionResultsLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
- var failures []lint.Failure
-
- walker := lintFunctionResultsNum{
- max: r.max,
- onFailure: func(failure lint.Failure) {
- failures = append(failures, failure)
- },
- }
-
- ast.Walk(walker, file.AST)
-
- return failures
-}
-
-// Name returns the rule name.
-func (*FunctionResultsLimitRule) Name() string {
- return "function-result-limit"
-}
-
-type lintFunctionResultsNum struct {
- max int
- onFailure func(lint.Failure)
-}
-
-func (w lintFunctionResultsNum) Visit(n ast.Node) ast.Visitor {
- node, ok := n.(*ast.FuncDecl)
- if ok {
- num := 0
- if node.Type.Results != nil {
- num = node.Type.Results.NumFields()
- }
- if num > w.max {
- w.onFailure(lint.Failure{
- Confidence: 1,
- Failure: fmt.Sprintf("maximum number of return results per function exceeded; max %d but got %d", w.max, num),
- Node: node.Type,
- })
- return w
- }
- }
- return w
-}
diff --git a/vendor/github.com/mgechev/revive/rule/function_length.go b/vendor/github.com/mgechev/revive/rule/function_length.go
new file mode 100644
index 0000000000..53cb6827c9
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/function_length.go
@@ -0,0 +1,158 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "reflect"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// FunctionLength lints the number of statements and lines in function bodies.
+type FunctionLength struct {
+ maxStmt int
+ maxLines int
+}
+
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *FunctionLength) Configure(arguments lint.Arguments) error {
+ maxStmt, maxLines, err := r.parseArguments(arguments)
+ if err != nil {
+ return err
+ }
+ r.maxStmt = int(maxStmt)
+ r.maxLines = int(maxLines)
+ return nil
+}
+
+// Apply applies the rule to given file.
+func (r *FunctionLength) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+ for _, decl := range file.AST.Decls {
+ funcDecl, ok := decl.(*ast.FuncDecl)
+ if !ok {
+ continue
+ }
+
+ body := funcDecl.Body
+ emptyBody := body == nil || len(body.List) == 0
+ if emptyBody {
+ continue // skip empty-bodied functions without dropping failures already found
+ }
+
+ if r.maxStmt > 0 {
+ stmtCount := r.countStmts(body.List)
+ if stmtCount > r.maxStmt {
+ failures = append(failures, lint.Failure{
+ Confidence: 1,
+ Failure: fmt.Sprintf("maximum number of statements per function exceeded; max %d but got %d", r.maxStmt, stmtCount),
+ Node: funcDecl,
+ })
+ }
+ }
+
+ if r.maxLines > 0 {
+ lineCount := r.countLines(body, file)
+ if lineCount > r.maxLines {
+ failures = append(failures, lint.Failure{
+ Confidence: 1,
+ Failure: fmt.Sprintf("maximum number of lines per function exceeded; max %d but got %d", r.maxLines, lineCount),
+ Node: funcDecl,
+ })
+ }
+ }
+ }
+
+ return failures
+}
+
+// Name returns the rule name.
+func (*FunctionLength) Name() string {
+ return "function-length"
+}
+
+const (
+ defaultFuncStmtsLimit = 50
+ defaultFuncLinesLimit = 75
+)
+
+func (*FunctionLength) parseArguments(arguments lint.Arguments) (maxStmt, maxLines int64, err error) {
+ if len(arguments) == 0 {
+ return defaultFuncStmtsLimit, defaultFuncLinesLimit, nil
+ }
+
+ const minArguments = 2
+ if len(arguments) != minArguments {
+ return 0, 0, fmt.Errorf(`invalid configuration for "function-length" rule, expected %d arguments but got %d`, minArguments, len(arguments))
+ }
+
+ maxStmt, maxStmtOk := arguments[0].(int64)
+ if !maxStmtOk {
+ return 0, 0, fmt.Errorf(`invalid configuration value for max statements in "function-length" rule; need int64 but got %T`, arguments[0])
+ }
+ if maxStmt < 0 {
+ return 0, 0, fmt.Errorf(`the configuration value for max statements in "function-length" rule cannot be negative, got %d`, maxStmt)
+ }
+
+ maxLines, maxLinesOk := arguments[1].(int64)
+ if !maxLinesOk {
+ return 0, 0, fmt.Errorf(`invalid configuration value for max lines in "function-length" rule; need int64 but got %T`, arguments[1])
+ }
+ if maxLines < 0 {
+ return 0, 0, fmt.Errorf(`the configuration value for max lines in "function-length" rule cannot be negative, got %d`, maxLines)
+ }
+
+ return maxStmt, maxLines, nil
+}
+
+func (*FunctionLength) countLines(b *ast.BlockStmt, file *lint.File) int {
+ return file.ToPosition(b.End()).Line - file.ToPosition(b.Pos()).Line - 1
+}
+
+func (r *FunctionLength) countStmts(b []ast.Stmt) int {
+ count := 0
+ for _, s := range b {
+ switch stmt := s.(type) {
+ case *ast.BlockStmt:
+ count += r.countStmts(stmt.List)
+ case *ast.IfStmt:
+ count += 1 + r.countBodyListStmts(stmt)
+ if stmt.Else != nil {
+ elseBody, ok := stmt.Else.(*ast.BlockStmt)
+ if ok {
+ count += r.countStmts(elseBody.List)
+ }
+ }
+ case *ast.ForStmt, *ast.RangeStmt,
+ *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt:
+ count += 1 + r.countBodyListStmts(stmt)
+ case *ast.CaseClause:
+ count += r.countStmts(stmt.Body)
+ case *ast.AssignStmt:
+ count += 1 + r.countFuncLitStmts(stmt.Rhs[0])
+ case *ast.GoStmt:
+ count += 1 + r.countFuncLitStmts(stmt.Call.Fun)
+ case *ast.DeferStmt:
+ count += 1 + r.countFuncLitStmts(stmt.Call.Fun)
+ default:
+ count++
+ }
+ }
+
+ return count
+}
+
+func (r *FunctionLength) countFuncLitStmts(stmt ast.Expr) int {
+ if block, ok := stmt.(*ast.FuncLit); ok {
+ return r.countStmts(block.Body.List)
+ }
+
+ return 0
+}
+
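+// countBodyListStmts counts the statements in the Body of the given statement node.
+// Reflection is used because if/for/range/switch/select nodes all expose a Body
+// field holding an *ast.BlockStmt but share no common interface.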
+func (r *FunctionLength) countBodyListStmts(t any) int {
+ i := reflect.ValueOf(t).Elem().FieldByName(`Body`).Elem().FieldByName(`List`).Interface()
+ return r.countStmts(i.([]ast.Stmt))
+}
diff --git a/vendor/github.com/mgechev/revive/rule/function_result_limit.go b/vendor/github.com/mgechev/revive/rule/function_result_limit.go
new file mode 100644
index 0000000000..b5508f3683
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/function_result_limit.go
@@ -0,0 +1,71 @@
+package rule
+
+import (
+ "errors"
+ "fmt"
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// FunctionResultsLimitRule limits the maximum number of results a function can return.
+type FunctionResultsLimitRule struct {
+ max int
+}
+
+// Apply applies the rule to given file.
+func (r *FunctionResultsLimitRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+ for _, decl := range file.AST.Decls {
+ funcDecl, ok := decl.(*ast.FuncDecl)
+ if !ok {
+ continue
+ }
+
+ num := 0
+ hasResults := funcDecl.Type.Results != nil
+ if hasResults {
+ num = funcDecl.Type.Results.NumFields()
+ }
+
+ if num <= r.max {
+ continue
+ }
+
+ failures = append(failures, lint.Failure{
+ Confidence: 1,
+ Failure: fmt.Sprintf("maximum number of return results per function exceeded; max %d but got %d", r.max, num),
+ Node: funcDecl.Type,
+ })
+ }
+
+ return failures
+}
+
+// Name returns the rule name.
+func (*FunctionResultsLimitRule) Name() string {
+ return "function-result-limit"
+}
+
+const defaultResultsLimit = 3
+
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *FunctionResultsLimitRule) Configure(arguments lint.Arguments) error {
+ if len(arguments) < 1 {
+ r.max = defaultResultsLimit
+ return nil
+ }
+
+ maxResults, ok := arguments[0].(int64) // Alt. non panicking version
+ if !ok {
+ return fmt.Errorf(`invalid value passed as return results number to the "function-result-limit" rule; need int64 but got %T`, arguments[0])
+ }
+ if maxResults < 0 {
+ return errors.New(`the value passed as return results number to the "function-result-limit" rule cannot be negative`)
+ }
+
+ r.max = int(maxResults)
+ return nil
+}
diff --git a/vendor/github.com/mgechev/revive/rule/get-return.go b/vendor/github.com/mgechev/revive/rule/get-return.go
deleted file mode 100644
index 600a40fac2..0000000000
--- a/vendor/github.com/mgechev/revive/rule/get-return.go
+++ /dev/null
@@ -1,70 +0,0 @@
-package rule
-
-import (
- "fmt"
- "go/ast"
- "strings"
-
- "github.com/mgechev/revive/lint"
-)
-
-// GetReturnRule lints given else constructs.
-type GetReturnRule struct{}
-
-// Apply applies the rule to given file.
-func (*GetReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
- var failures []lint.Failure
-
- onFailure := func(failure lint.Failure) {
- failures = append(failures, failure)
- }
-
- w := lintReturnRule{onFailure}
- ast.Walk(w, file.AST)
- return failures
-}
-
-// Name returns the rule name.
-func (*GetReturnRule) Name() string {
- return "get-return"
-}
-
-type lintReturnRule struct {
- onFailure func(lint.Failure)
-}
-
-func isGetter(name string) bool {
- if strings.HasPrefix(strings.ToUpper(name), "GET") {
- if len(name) > 3 {
- c := name[3]
- return !(c >= 'a' && c <= 'z')
- }
- }
-
- return false
-}
-
-func hasResults(rs *ast.FieldList) bool {
- return rs != nil && len(rs.List) > 0
-}
-
-func (w lintReturnRule) Visit(node ast.Node) ast.Visitor {
- fd, ok := node.(*ast.FuncDecl)
- if !ok {
- return w
- }
-
- if !isGetter(fd.Name.Name) {
- return w
- }
- if !hasResults(fd.Type.Results) {
- w.onFailure(lint.Failure{
- Confidence: 0.8,
- Node: fd,
- Category: "logic",
- Failure: fmt.Sprintf("function '%s' seems to be a getter but it does not return any result", fd.Name.Name),
- })
- }
-
- return w
-}
diff --git a/vendor/github.com/mgechev/revive/rule/get_return.go b/vendor/github.com/mgechev/revive/rule/get_return.go
new file mode 100644
index 0000000000..cf58a687c4
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/get_return.go
@@ -0,0 +1,71 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// GetReturnRule warns on getters that do not yield any result.
+type GetReturnRule struct{}
+
+// Apply applies the rule to given file.
+func (*GetReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ for _, decl := range file.AST.Decls {
+ fd, ok := decl.(*ast.FuncDecl)
+ if !ok {
+ continue
+ }
+
+ if !isGetter(fd.Name.Name) {
+ continue
+ }
+
+ if hasResults(fd.Type.Results) {
+ continue
+ }
+
+ failures = append(failures, lint.Failure{
+ Confidence: 0.8,
+ Node: fd,
+ Category: lint.FailureCategoryLogic,
+ Failure: fmt.Sprintf("function '%s' seems to be a getter but it does not return any result", fd.Name.Name),
+ })
+ }
+
+ return failures
+}
+
+// Name returns the rule name.
+func (*GetReturnRule) Name() string {
+ return "get-return"
+}
+
+const getterPrefix = "GET"
+
+var lenGetterPrefix = len(getterPrefix)
+
+func isGetter(name string) bool {
+ nameHasGetterPrefix := strings.HasPrefix(strings.ToUpper(name), getterPrefix)
+ if !nameHasGetterPrefix {
+ return false
+ }
+
+ isJustGet := len(name) == lenGetterPrefix
+ if isJustGet {
+ return false
+ }
+
+ c := name[lenGetterPrefix]
+ lowerCaseAfterGetterPrefix := c >= 'a' && c <= 'z'
+
+ return !lowerCaseAfterGetterPrefix
+}
+
+func hasResults(rs *ast.FieldList) bool {
+ return rs != nil && len(rs.List) > 0
+}
diff --git a/vendor/github.com/mgechev/revive/rule/identical-branches.go b/vendor/github.com/mgechev/revive/rule/identical_branches.go
similarity index 70%
rename from vendor/github.com/mgechev/revive/rule/identical-branches.go
rename to vendor/github.com/mgechev/revive/rule/identical_branches.go
index 9222c8a9c5..044b04147d 100644
--- a/vendor/github.com/mgechev/revive/rule/identical-branches.go
+++ b/vendor/github.com/mgechev/revive/rule/identical_branches.go
@@ -39,9 +39,11 @@ func (w *lintIdenticalBranches) Visit(node ast.Node) ast.Visitor {
return w
}
- if n.Else == nil {
+ noElseBranch := n.Else == nil
+ if noElseBranch {
return w
}
+
branches := []*ast.BlockStmt{n.Body}
elseBranch, ok := n.Else.(*ast.BlockStmt)
@@ -57,16 +59,17 @@ func (w *lintIdenticalBranches) Visit(node ast.Node) ast.Visitor {
return w
}
-func (lintIdenticalBranches) identicalBranches(branches []*ast.BlockStmt) bool {
+func (*lintIdenticalBranches) identicalBranches(branches []*ast.BlockStmt) bool {
if len(branches) < 2 {
- return false
+ return false // only one branch to compare thus we return
}
- ref := gofmt(branches[0])
- refSize := len(branches[0].List)
+ referenceBranch := gofmt(branches[0])
+ referenceBranchSize := len(branches[0].List)
for i := 1; i < len(branches); i++ {
- currentSize := len(branches[i].List)
- if currentSize != refSize || gofmt(branches[i]) != ref {
+ currentBranch := branches[i]
+ currentBranchSize := len(currentBranch.List)
+ if currentBranchSize != referenceBranchSize || gofmt(currentBranch) != referenceBranch {
return false
}
}
@@ -74,11 +77,11 @@ func (lintIdenticalBranches) identicalBranches(branches []*ast.BlockStmt) bool {
return true
}
-func (w lintIdenticalBranches) newFailure(node ast.Node, msg string) {
+func (w *lintIdenticalBranches) newFailure(node ast.Node, msg string) {
w.onFailure(lint.Failure{
Confidence: 1,
Node: node,
- Category: "logic",
+ Category: lint.FailureCategoryLogic,
Failure: msg,
})
}
diff --git a/vendor/github.com/mgechev/revive/rule/if-return.go b/vendor/github.com/mgechev/revive/rule/if_return.go
similarity index 97%
rename from vendor/github.com/mgechev/revive/rule/if-return.go
rename to vendor/github.com/mgechev/revive/rule/if_return.go
index a6a3113adb..f9e5ef2339 100644
--- a/vendor/github.com/mgechev/revive/rule/if-return.go
+++ b/vendor/github.com/mgechev/revive/rule/if_return.go
@@ -8,7 +8,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// IfReturnRule lints given else constructs.
+// IfReturnRule searches for redundant `if` when returning an error.
type IfReturnRule struct{}
// Apply applies the rule to given file.
diff --git a/vendor/github.com/mgechev/revive/rule/import-alias-naming.go b/vendor/github.com/mgechev/revive/rule/import_alias_naming.go
similarity index 59%
rename from vendor/github.com/mgechev/revive/rule/import-alias-naming.go
rename to vendor/github.com/mgechev/revive/rule/import_alias_naming.go
index a6d096c8b2..0859150a9c 100644
--- a/vendor/github.com/mgechev/revive/rule/import-alias-naming.go
+++ b/vendor/github.com/mgechev/revive/rule/import_alias_naming.go
@@ -3,62 +3,65 @@ package rule
import (
"fmt"
"regexp"
- "sync"
"github.com/mgechev/revive/lint"
)
// ImportAliasNamingRule lints import alias naming.
type ImportAliasNamingRule struct {
- configured bool
allowRegexp *regexp.Regexp
denyRegexp *regexp.Regexp
- sync.Mutex
}
const defaultImportAliasNamingAllowRule = "^[a-z][a-z0-9]{0,}$"
var defaultImportAliasNamingAllowRegexp = regexp.MustCompile(defaultImportAliasNamingAllowRule)
-func (r *ImportAliasNamingRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
- if r.configured {
- return
- }
-
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *ImportAliasNamingRule) Configure(arguments lint.Arguments) error {
if len(arguments) == 0 {
r.allowRegexp = defaultImportAliasNamingAllowRegexp
- return
+ return nil
}
switch namingRule := arguments[0].(type) {
case string:
- r.setAllowRule(namingRule)
+ err := r.setAllowRule(namingRule)
+ if err != nil {
+ return err
+ }
case map[string]any: // expecting map[string]string
for k, v := range namingRule {
switch k {
case "allowRegex":
- r.setAllowRule(v)
+ err := r.setAllowRule(v)
+ if err != nil {
+ return err
+ }
case "denyRegex":
- r.setDenyRule(v)
+ err := r.setDenyRule(v)
+ if err != nil {
+ return err
+ }
+
default:
- panic(fmt.Sprintf("Invalid map key for 'import-alias-naming' rule. Expecting 'allowRegex' or 'denyRegex', got %v", k))
+ return fmt.Errorf("invalid map key for 'import-alias-naming' rule. Expecting 'allowRegex' or 'denyRegex', got %v", k)
}
}
default:
- panic(fmt.Sprintf("Invalid argument '%v' for 'import-alias-naming' rule. Expecting string or map[string]string, got %T", arguments[0], arguments[0]))
+ return fmt.Errorf("invalid argument '%v' for 'import-alias-naming' rule. Expecting string or map[string]string, got %T", arguments[0], arguments[0])
}
if r.allowRegexp == nil && r.denyRegexp == nil {
r.allowRegexp = defaultImportAliasNamingAllowRegexp
}
+ return nil
}
// Apply applies the rule to given file.
-func (r *ImportAliasNamingRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
+func (r *ImportAliasNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
for _, is := range file.AST.Imports {
@@ -68,7 +71,7 @@ func (r *ImportAliasNamingRule) Apply(file *lint.File, arguments lint.Arguments)
}
alias := is.Name
- if alias == nil || alias.Name == "_" || alias.Name == "." { // "_" and "." are special types of import aiases and should be processed by another linter rule
+ if alias == nil || alias.Name == "_" || alias.Name == "." { // "_" and "." are special types of import aliases and should be processed by another linter rule
continue
}
@@ -77,7 +80,7 @@ func (r *ImportAliasNamingRule) Apply(file *lint.File, arguments lint.Arguments)
Confidence: 1,
Failure: fmt.Sprintf("import name (%s) must match the regular expression: %s", alias.Name, r.allowRegexp.String()),
Node: alias,
- Category: "imports",
+ Category: lint.FailureCategoryImports,
})
}
@@ -86,7 +89,7 @@ func (r *ImportAliasNamingRule) Apply(file *lint.File, arguments lint.Arguments)
Confidence: 1,
Failure: fmt.Sprintf("import name (%s) must NOT match the regular expression: %s", alias.Name, r.denyRegexp.String()),
Node: alias,
- Category: "imports",
+ Category: lint.FailureCategoryImports,
})
}
}
@@ -99,28 +102,30 @@ func (*ImportAliasNamingRule) Name() string {
return "import-alias-naming"
}
-func (r *ImportAliasNamingRule) setAllowRule(value any) {
+func (r *ImportAliasNamingRule) setAllowRule(value any) error {
namingRule, ok := value.(string)
if !ok {
- panic(fmt.Sprintf("Invalid argument '%v' for import-alias-naming allowRegexp rule. Expecting string, got %T", value, value))
+ return fmt.Errorf("invalid argument '%v' for import-alias-naming allowRegexp rule. Expecting string, got %T", value, value)
}
namingRuleRegexp, err := regexp.Compile(namingRule)
if err != nil {
- panic(fmt.Sprintf("Invalid argument to the import-alias-naming allowRegexp rule. Expecting %q to be a valid regular expression, got: %v", namingRule, err))
+ return fmt.Errorf("invalid argument to the import-alias-naming allowRegexp rule. Expecting %q to be a valid regular expression, got: %w", namingRule, err)
}
r.allowRegexp = namingRuleRegexp
+ return nil
}
-func (r *ImportAliasNamingRule) setDenyRule(value any) {
+func (r *ImportAliasNamingRule) setDenyRule(value any) error {
namingRule, ok := value.(string)
if !ok {
- panic(fmt.Sprintf("Invalid argument '%v' for import-alias-naming denyRegexp rule. Expecting string, got %T", value, value))
+ return fmt.Errorf("invalid argument '%v' for import-alias-naming denyRegexp rule. Expecting string, got %T", value, value)
}
namingRuleRegexp, err := regexp.Compile(namingRule)
if err != nil {
- panic(fmt.Sprintf("Invalid argument to the import-alias-naming denyRegexp rule. Expecting %q to be a valid regular expression, got: %v", namingRule, err))
+ return fmt.Errorf("invalid argument to the import-alias-naming denyRegexp rule. Expecting %q to be a valid regular expression, got: %w", namingRule, err)
}
r.denyRegexp = namingRuleRegexp
+ return nil
}
diff --git a/vendor/github.com/mgechev/revive/rule/import-shadowing.go b/vendor/github.com/mgechev/revive/rule/import_shadowing.go
similarity index 92%
rename from vendor/github.com/mgechev/revive/rule/import-shadowing.go
rename to vendor/github.com/mgechev/revive/rule/import_shadowing.go
index 046aeb688e..69d17f2b1d 100644
--- a/vendor/github.com/mgechev/revive/rule/import-shadowing.go
+++ b/vendor/github.com/mgechev/revive/rule/import_shadowing.go
@@ -9,7 +9,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// ImportShadowingRule lints given else constructs.
+// ImportShadowingRule spots identifiers that shadow an import.
type ImportShadowingRule struct{}
// Apply applies the rule to given file.
@@ -28,7 +28,7 @@ func (*ImportShadowingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Fail
onFailure: func(failure lint.Failure) {
failures = append(failures, failure)
},
- alreadySeen: map[*ast.Object]struct{}{},
+ alreadySeen: map[*ast.Object]struct{}{}, // TODO: ast.Object is deprecated
skipIdents: map[*ast.Ident]struct{}{},
}
@@ -62,7 +62,7 @@ type importShadowing struct {
packageNameIdent *ast.Ident
importNames map[string]struct{}
onFailure func(lint.Failure)
- alreadySeen map[*ast.Object]struct{}
+ alreadySeen map[*ast.Object]struct{} // TODO: ast.Object is deprecated
skipIdents map[*ast.Ident]struct{}
}
@@ -103,7 +103,7 @@ func (w importShadowing) Visit(n ast.Node) ast.Visitor {
w.onFailure(lint.Failure{
Confidence: 1,
Node: n,
- Category: "naming",
+ Category: lint.FailureCategoryNaming,
Failure: fmt.Sprintf("The name '%s' shadows an import name", id),
})
diff --git a/vendor/github.com/mgechev/revive/rule/imports-blocklist.go b/vendor/github.com/mgechev/revive/rule/imports-blocklist.go
deleted file mode 100644
index 431066403a..0000000000
--- a/vendor/github.com/mgechev/revive/rule/imports-blocklist.go
+++ /dev/null
@@ -1,73 +0,0 @@
-package rule
-
-import (
- "fmt"
- "regexp"
- "sync"
-
- "github.com/mgechev/revive/lint"
-)
-
-// ImportsBlocklistRule lints given else constructs.
-type ImportsBlocklistRule struct {
- blocklist []*regexp.Regexp
- sync.Mutex
-}
-
-var replaceImportRegexp = regexp.MustCompile(`/?\*\*/?`)
-
-func (r *ImportsBlocklistRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
-
- if r.blocklist == nil {
- r.blocklist = make([]*regexp.Regexp, 0)
-
- for _, arg := range arguments {
- argStr, ok := arg.(string)
- if !ok {
- panic(fmt.Sprintf("Invalid argument to the imports-blocklist rule. Expecting a string, got %T", arg))
- }
- regStr, err := regexp.Compile(fmt.Sprintf(`(?m)"%s"$`, replaceImportRegexp.ReplaceAllString(argStr, `(\W|\w)*`)))
- if err != nil {
- panic(fmt.Sprintf("Invalid argument to the imports-blocklist rule. Expecting %q to be a valid regular expression, got: %v", argStr, err))
- }
- r.blocklist = append(r.blocklist, regStr)
- }
- }
-}
-
-func (r *ImportsBlocklistRule) isBlocklisted(path string) bool {
- for _, regex := range r.blocklist {
- if regex.MatchString(path) {
- return true
- }
- }
- return false
-}
-
-// Apply applies the rule to given file.
-func (r *ImportsBlocklistRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
- var failures []lint.Failure
-
- for _, is := range file.AST.Imports {
- path := is.Path
- if path != nil && r.isBlocklisted(path.Value) {
- failures = append(failures, lint.Failure{
- Confidence: 1,
- Failure: "should not use the following blocklisted import: " + path.Value,
- Node: is,
- Category: "imports",
- })
- }
- }
-
- return failures
-}
-
-// Name returns the rule name.
-func (*ImportsBlocklistRule) Name() string {
- return "imports-blocklist"
-}
diff --git a/vendor/github.com/mgechev/revive/rule/imports_blocklist.go b/vendor/github.com/mgechev/revive/rule/imports_blocklist.go
new file mode 100644
index 0000000000..c96382daf9
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/imports_blocklist.go
@@ -0,0 +1,67 @@
+package rule
+
+import (
+ "fmt"
+ "regexp"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ImportsBlocklistRule disallows importing the specified packages.
+type ImportsBlocklistRule struct {
+ blocklist []*regexp.Regexp
+}
+
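+// replaceImportRegexp matches the "**" wildcard (with optional surrounding slashes)
+// in blocklist patterns; Configure replaces it with a catch-all regular expression.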
+var replaceImportRegexp = regexp.MustCompile(`/?\*\*/?`)
+
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *ImportsBlocklistRule) Configure(arguments lint.Arguments) error {
+ r.blocklist = []*regexp.Regexp{}
+ for _, arg := range arguments {
+ argStr, ok := arg.(string)
+ if !ok {
+ return fmt.Errorf("invalid argument to the imports-blocklist rule. Expecting a string, got %T", arg)
+ }
+ regStr, err := regexp.Compile(fmt.Sprintf(`(?m)"%s"$`, replaceImportRegexp.ReplaceAllString(argStr, `(\W|\w)*`)))
+ if err != nil {
+ return fmt.Errorf("invalid argument to the imports-blocklist rule. Expecting %q to be a valid regular expression, got: %w", argStr, err)
+ }
+ r.blocklist = append(r.blocklist, regStr)
+ }
+ return nil
+}
+
+func (r *ImportsBlocklistRule) isBlocklisted(path string) bool {
+ for _, regex := range r.blocklist {
+ if regex.MatchString(path) {
+ return true
+ }
+ }
+ return false
+}
+
+// Apply applies the rule to given file.
+func (r *ImportsBlocklistRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ for _, is := range file.AST.Imports {
+ path := is.Path
+ if path != nil && r.isBlocklisted(path.Value) {
+ failures = append(failures, lint.Failure{
+ Confidence: 1,
+ Failure: "should not use the following blocklisted import: " + path.Value,
+ Node: is,
+ Category: lint.FailureCategoryImports,
+ })
+ }
+ }
+
+ return failures
+}
+
+// Name returns the rule name.
+func (*ImportsBlocklistRule) Name() string {
+ return "imports-blocklist"
+}
diff --git a/vendor/github.com/mgechev/revive/rule/increment-decrement.go b/vendor/github.com/mgechev/revive/rule/increment_decrement.go
similarity index 92%
rename from vendor/github.com/mgechev/revive/rule/increment-decrement.go
rename to vendor/github.com/mgechev/revive/rule/increment_decrement.go
index 34a8e1ec52..d8cebcf252 100644
--- a/vendor/github.com/mgechev/revive/rule/increment-decrement.go
+++ b/vendor/github.com/mgechev/revive/rule/increment_decrement.go
@@ -8,7 +8,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// IncrementDecrementRule lints given else constructs.
+// IncrementDecrementRule lints `i += 1` and `i -= 1` constructs.
type IncrementDecrementRule struct{}
// Apply applies the rule to given file.
@@ -61,7 +61,7 @@ func (w lintIncrementDecrement) Visit(n ast.Node) ast.Visitor {
w.onFailure(lint.Failure{
Confidence: 0.8,
Node: as,
- Category: "unary-op",
+ Category: lint.FailureCategoryUnaryOp,
Failure: fmt.Sprintf("should replace %s with %s%s", w.file.Render(as), w.file.Render(as.Lhs[0]), suffix),
})
return w
diff --git a/vendor/github.com/mgechev/revive/rule/indent-error-flow.go b/vendor/github.com/mgechev/revive/rule/indent_error_flow.go
similarity index 64%
rename from vendor/github.com/mgechev/revive/rule/indent-error-flow.go
rename to vendor/github.com/mgechev/revive/rule/indent_error_flow.go
index 294ceef842..4dda64c14a 100644
--- a/vendor/github.com/mgechev/revive/rule/indent-error-flow.go
+++ b/vendor/github.com/mgechev/revive/rule/indent_error_flow.go
@@ -5,12 +5,12 @@ import (
"github.com/mgechev/revive/lint"
)
-// IndentErrorFlowRule lints given else constructs.
+// IndentErrorFlowRule prevents redundant else statements.
type IndentErrorFlowRule struct{}
// Apply applies the rule to given file.
func (e *IndentErrorFlowRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
- return ifelse.Apply(e, file.AST, ifelse.TargetElse, args)
+ return ifelse.Apply(e.checkIfElse, file.AST, ifelse.TargetElse, args)
}
// Name returns the rule name.
@@ -18,28 +18,31 @@ func (*IndentErrorFlowRule) Name() string {
return "indent-error-flow"
}
-// CheckIfElse evaluates the rule against an ifelse.Chain.
-func (*IndentErrorFlowRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) (failMsg string) {
+func (*IndentErrorFlowRule) checkIfElse(chain ifelse.Chain, args ifelse.Args) (string, bool) {
+ if !chain.HasElse {
+ return "", false
+ }
+
if !chain.If.Deviates() {
// this rule only applies if the if-block deviates control flow
- return
+ return "", false
}
if chain.HasPriorNonDeviating {
// if we de-indent the "else" block then a previous branch
- // might flow into it, affecting program behaviour
- return
+ // might flow into it, affecting program behavior
+ return "", false
}
if !chain.If.Returns() {
// avoid overlapping with superfluous-else
- return
+ return "", false
}
- if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.Else.HasDecls) {
+ if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.Else.HasDecls()) {
// avoid increasing variable scope
- return
+ return "", false
}
- return "if block ends with a return statement, so drop this else and outdent its block"
+ return "if block ends with a return statement, so drop this else and outdent its block", true
}
diff --git a/vendor/github.com/mgechev/revive/rule/line-length-limit.go b/vendor/github.com/mgechev/revive/rule/line_length_limit.go
similarity index 66%
rename from vendor/github.com/mgechev/revive/rule/line-length-limit.go
rename to vendor/github.com/mgechev/revive/rule/line_length_limit.go
index 1a414f6914..0c4c57691b 100644
--- a/vendor/github.com/mgechev/revive/rule/line-length-limit.go
+++ b/vendor/github.com/mgechev/revive/rule/line_length_limit.go
@@ -3,45 +3,42 @@ package rule
import (
"bufio"
"bytes"
+ "errors"
"fmt"
"go/token"
"strings"
- "sync"
"unicode/utf8"
"github.com/mgechev/revive/lint"
)
-// LineLengthLimitRule lints given else constructs.
+// LineLengthLimitRule lints the number of characters in a line.
type LineLengthLimitRule struct {
max int
- sync.Mutex
}
const defaultLineLengthLimit = 80
-func (r *LineLengthLimitRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
- if r.max == 0 {
- if len(arguments) < 1 {
- r.max = defaultLineLengthLimit
- return
- }
-
- max, ok := arguments[0].(int64) // Alt. non panicking version
- if !ok || max < 0 {
- panic(`invalid value passed as argument number to the "line-length-limit" rule`)
- }
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *LineLengthLimitRule) Configure(arguments lint.Arguments) error {
+ if len(arguments) < 1 {
+ r.max = defaultLineLengthLimit
+ return nil
+ }
- r.max = int(max)
+ maxLength, ok := arguments[0].(int64) // Alt. non panicking version
+ if !ok || maxLength < 0 {
+ return errors.New(`invalid value passed as argument number to the "line-length-limit" rule`)
}
+
+ r.max = int(maxLength)
+ return nil
}
// Apply applies the rule to given file.
-func (r *LineLengthLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
+func (r *LineLengthLimitRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
checker := lintLineLengthNum{
@@ -79,7 +76,7 @@ func (r lintLineLengthNum) check() {
c := utf8.RuneCountInString(t)
if c > r.max {
r.onFailure(lint.Failure{
- Category: "code-style",
+ Category: lint.FailureCategoryCodeStyle,
Position: lint.FailurePosition{
// Offset not set; it is non-trivial, and doesn't appear to be needed.
Start: token.Position{
diff --git a/vendor/github.com/mgechev/revive/rule/max-control-nesting.go b/vendor/github.com/mgechev/revive/rule/max_control_nesting.go
similarity index 76%
rename from vendor/github.com/mgechev/revive/rule/max-control-nesting.go
rename to vendor/github.com/mgechev/revive/rule/max_control_nesting.go
index c4eb361937..5bb11d098b 100644
--- a/vendor/github.com/mgechev/revive/rule/max-control-nesting.go
+++ b/vendor/github.com/mgechev/revive/rule/max_control_nesting.go
@@ -1,25 +1,22 @@
package rule
import (
+ "errors"
"fmt"
"go/ast"
- "sync"
"github.com/mgechev/revive/lint"
)
-// MaxControlNestingRule lints given else constructs.
+// MaxControlNestingRule restricts the maximum nesting of control structures.
type MaxControlNestingRule struct {
max int64
- sync.Mutex
}
const defaultMaxControlNesting = 5
// Apply applies the rule to given file.
-func (r *MaxControlNestingRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
+func (r *MaxControlNestingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
fileAst := file.AST
@@ -54,7 +51,7 @@ func (w *lintMaxControlNesting) Visit(n ast.Node) ast.Visitor {
Failure: fmt.Sprintf("control flow nesting exceeds %d", w.max),
Confidence: 1,
Node: w.lastCtrlStmt,
- Category: "complexity",
+ Category: lint.FailureCategoryComplexity,
})
return nil // stop visiting deeper
}
@@ -106,23 +103,24 @@ func (w *lintMaxControlNesting) walkControlledBlock(b ast.Node) {
w.nestingLevelAcc = oldNestingLevel
}
-func (r *MaxControlNestingRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
- if !(r.max < 1) {
- return // max already set
- }
-
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *MaxControlNestingRule) Configure(arguments lint.Arguments) error {
if len(arguments) < 1 {
r.max = defaultMaxControlNesting
- return
+ return nil
}
- checkNumberOfArguments(1, arguments, r.Name())
+ check := checkNumberOfArguments(1, arguments, r.Name())
+ if check != nil {
+ return check
+ }
- max, ok := arguments[0].(int64) // Alt. non panicking version
+ maxNesting, ok := arguments[0].(int64) // Alt. non panicking version
if !ok {
- panic(`invalid value passed as argument number to the "max-control-nesting" rule`)
+ return errors.New(`invalid value passed as argument number to the "max-control-nesting" rule`)
}
- r.max = max
+ r.max = maxNesting
+ return nil
}
diff --git a/vendor/github.com/mgechev/revive/rule/max-public-structs.go b/vendor/github.com/mgechev/revive/rule/max_public_structs.go
similarity index 52%
rename from vendor/github.com/mgechev/revive/rule/max-public-structs.go
rename to vendor/github.com/mgechev/revive/rule/max_public_structs.go
index 25be3e676f..f27edd7e6c 100644
--- a/vendor/github.com/mgechev/revive/rule/max-public-structs.go
+++ b/vendor/github.com/mgechev/revive/rule/max_public_structs.go
@@ -1,46 +1,51 @@
package rule
import (
+ "errors"
+ "fmt"
"go/ast"
"strings"
- "sync"
"github.com/mgechev/revive/lint"
)
-// MaxPublicStructsRule lints given else constructs.
+// MaxPublicStructsRule lints the number of public structs in a file.
type MaxPublicStructsRule struct {
max int64
- sync.Mutex
}
const defaultMaxPublicStructs = 5
-func (r *MaxPublicStructsRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
- if r.max < 1 {
- if len(arguments) < 1 {
- r.max = defaultMaxPublicStructs
- return
- }
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *MaxPublicStructsRule) Configure(arguments lint.Arguments) error {
+ if len(arguments) < 1 {
+ r.max = defaultMaxPublicStructs
+ return nil
+ }
- checkNumberOfArguments(1, arguments, r.Name())
+ err := checkNumberOfArguments(1, arguments, r.Name())
+ if err != nil {
+ return err
+ }
- max, ok := arguments[0].(int64) // Alt. non panicking version
- if !ok {
- panic(`invalid value passed as argument number to the "max-public-structs" rule`)
- }
- r.max = max
+ maxStructs, ok := arguments[0].(int64) // Alt. non panicking version
+ if !ok {
+ return errors.New(`invalid value passed as argument number to the "max-public-structs" rule`)
}
+ r.max = maxStructs
+ return nil
}
// Apply applies the rule to given file.
-func (r *MaxPublicStructsRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
+func (r *MaxPublicStructsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
+ if r.max < 1 {
+ return failures
+ }
+
fileAst := file.AST
walker := &lintMaxPublicStructs{
@@ -54,10 +59,10 @@ func (r *MaxPublicStructsRule) Apply(file *lint.File, arguments lint.Arguments)
if walker.current > r.max {
walker.onFailure(lint.Failure{
- Failure: "you have exceeded the maximum number of public struct declarations",
+ Failure: fmt.Sprintf("you have exceeded the maximum number (%d) of public struct declarations", r.max),
Confidence: 1,
Node: fileAst,
- Category: "style",
+ Category: lint.FailureCategoryStyle,
})
}
diff --git a/vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go b/vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go
deleted file mode 100644
index e9e64b9a6a..0000000000
--- a/vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go
+++ /dev/null
@@ -1,129 +0,0 @@
-package rule
-
-import (
- "go/ast"
- "strings"
-
- "github.com/mgechev/revive/lint"
-)
-
-// ModifiesValRecRule lints assignments to value method-receivers.
-type ModifiesValRecRule struct{}
-
-// Apply applies the rule to given file.
-func (*ModifiesValRecRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
- var failures []lint.Failure
-
- onFailure := func(failure lint.Failure) {
- failures = append(failures, failure)
- }
-
- w := lintModifiesValRecRule{file: file, onFailure: onFailure}
- file.Pkg.TypeCheck()
- ast.Walk(w, file.AST)
-
- return failures
-}
-
-// Name returns the rule name.
-func (*ModifiesValRecRule) Name() string {
- return "modifies-value-receiver"
-}
-
-type lintModifiesValRecRule struct {
- file *lint.File
- onFailure func(lint.Failure)
-}
-
-func (w lintModifiesValRecRule) Visit(node ast.Node) ast.Visitor {
- switch n := node.(type) {
- case *ast.FuncDecl:
- if n.Recv == nil {
- return nil // skip, not a method
- }
-
- receiver := n.Recv.List[0]
- if _, ok := receiver.Type.(*ast.StarExpr); ok {
- return nil // skip, method with pointer receiver
- }
-
- if w.skipType(receiver.Type) {
- return nil // skip, receiver is a map or array
- }
-
- if len(receiver.Names) < 1 {
- return nil // skip, anonymous receiver
- }
-
- receiverName := receiver.Names[0].Name
- if receiverName == "_" {
- return nil // skip, anonymous receiver
- }
-
- fselect := func(n ast.Node) bool {
- // look for assignments with the receiver in the right hand
- asgmt, ok := n.(*ast.AssignStmt)
- if !ok {
- return false
- }
-
- for _, exp := range asgmt.Lhs {
- switch e := exp.(type) {
- case *ast.IndexExpr: // receiver...[] = ...
- continue
- case *ast.StarExpr: // *receiver = ...
- continue
- case *ast.SelectorExpr: // receiver.field = ...
- name := w.getNameFromExpr(e.X)
- if name == "" || name != receiverName {
- continue
- }
- case *ast.Ident: // receiver := ...
- if e.Name != receiverName {
- continue
- }
- default:
- continue
- }
-
- return true
- }
-
- return false
- }
-
- assignmentsToReceiver := pick(n.Body, fselect)
-
- for _, assignment := range assignmentsToReceiver {
- w.onFailure(lint.Failure{
- Node: assignment,
- Confidence: 1,
- Failure: "suspicious assignment to a by-value method receiver",
- })
- }
- }
-
- return w
-}
-
-func (w lintModifiesValRecRule) skipType(t ast.Expr) bool {
- rt := w.file.Pkg.TypeOf(t)
- if rt == nil {
- return false
- }
-
- rt = rt.Underlying()
- rtName := rt.String()
-
- // skip when receiver is a map or array
- return strings.HasPrefix(rtName, "[]") || strings.HasPrefix(rtName, "map[")
-}
-
-func (lintModifiesValRecRule) getNameFromExpr(ie ast.Expr) string {
- ident, ok := ie.(*ast.Ident)
- if !ok {
- return ""
- }
-
- return ident.Name
-}
diff --git a/vendor/github.com/mgechev/revive/rule/modifies-param.go b/vendor/github.com/mgechev/revive/rule/modifies_param.go
similarity index 93%
rename from vendor/github.com/mgechev/revive/rule/modifies-param.go
rename to vendor/github.com/mgechev/revive/rule/modifies_param.go
index a68ae2501d..da509087d2 100644
--- a/vendor/github.com/mgechev/revive/rule/modifies-param.go
+++ b/vendor/github.com/mgechev/revive/rule/modifies_param.go
@@ -7,7 +7,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// ModifiesParamRule lints given else constructs.
+// ModifiesParamRule warns on assignments to function parameters.
type ModifiesParamRule struct{}
// Apply applies the rule to given file.
@@ -73,7 +73,7 @@ func checkParam(id *ast.Ident, w *lintModifiesParamRule) {
w.onFailure(lint.Failure{
Confidence: 0.5, // confidence is low because of shadow variables
Node: id,
- Category: "bad practice",
+ Category: lint.FailureCategoryBadPractice,
Failure: fmt.Sprintf("parameter '%s' seems to be modified", id),
})
}
diff --git a/vendor/github.com/mgechev/revive/rule/modifies_value_receiver.go b/vendor/github.com/mgechev/revive/rule/modifies_value_receiver.go
new file mode 100644
index 0000000000..9af91099f3
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/modifies_value_receiver.go
@@ -0,0 +1,183 @@
+package rule
+
+import (
+ "go/ast"
+ "go/token"
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ModifiesValRecRule lints assignments to value method-receivers.
+type ModifiesValRecRule struct{}
+
+// Apply applies the rule to given file.
+func (r *ModifiesValRecRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ file.Pkg.TypeCheck()
+ for _, decl := range file.AST.Decls {
+ funcDecl, ok := decl.(*ast.FuncDecl)
+ isAMethod := ok && funcDecl.Recv != nil
+ if !isAMethod {
+ continue // skip, not a method
+ }
+
+ receiver := funcDecl.Recv.List[0]
+ if r.mustSkip(receiver, file.Pkg) {
+ continue
+ }
+
+ receiverName := receiver.Names[0].Name
+ assignmentsToReceiver := r.getReceiverModifications(receiverName, funcDecl.Body)
+ if len(assignmentsToReceiver) == 0 {
+ continue // receiver is not modified
+ }
+
+ methodReturnsReceiver := len(r.findReturnReceiverStatements(receiverName, funcDecl.Body)) > 0
+ if methodReturnsReceiver {
+ continue // modification seems legit (see issue #1066)
+ }
+
+ for _, assignment := range assignmentsToReceiver {
+ failures = append(failures, lint.Failure{
+ Node: assignment,
+ Confidence: 1,
+ Failure: "suspicious assignment to a by-value method receiver",
+ })
+ }
+ }
+
+ return failures
+}
+
+// Name returns the rule name.
+func (*ModifiesValRecRule) Name() string {
+ return "modifies-value-receiver"
+}
+
+func (*ModifiesValRecRule) skipType(t ast.Expr, pkg *lint.Package) bool {
+ rt := pkg.TypeOf(t)
+ if rt == nil {
+ return false
+ }
+
+ rt = rt.Underlying()
+ rtName := rt.String()
+
+ // skip when receiver is a map or array
+ return strings.HasPrefix(rtName, "[]") || strings.HasPrefix(rtName, "map[")
+}
+
+func (*ModifiesValRecRule) getNameFromExpr(ie ast.Expr) string {
+ ident, ok := ie.(*ast.Ident)
+ if !ok {
+ return ""
+ }
+
+ return ident.Name
+}
+
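+// findReturnReceiverStatements returns the return statements in target that return
+// the receiver itself, one of its fields, or its address.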
+func (r *ModifiesValRecRule) findReturnReceiverStatements(receiverName string, target ast.Node) []ast.Node {
+ finder := func(n ast.Node) bool {
+ // look for returns with the receiver as value
+ returnStatement, ok := n.(*ast.ReturnStmt)
+ if !ok {
+ return false
+ }
+
+ for _, exp := range returnStatement.Results {
+ switch e := exp.(type) {
+ case *ast.SelectorExpr: // return receiver.field
+ name := r.getNameFromExpr(e.X)
+ if name == "" || name != receiverName {
+ continue
+ }
+ case *ast.Ident: // return receiver
+ if e.Name != receiverName {
+ continue
+ }
+ case *ast.UnaryExpr: // return &receiver
+ if e.Op != token.AND {
+ continue
+ }
+ name := r.getNameFromExpr(e.X)
+ if name == "" || name != receiverName {
+ continue
+ }
+
+ default:
+ continue
+ }
+
+ return true
+ }
+
+ return false
+ }
+
+ return pick(target, finder)
+}
+
+func (r *ModifiesValRecRule) mustSkip(receiver *ast.Field, pkg *lint.Package) bool {
+ if _, ok := receiver.Type.(*ast.StarExpr); ok {
+ return true // skip, method with pointer receiver
+ }
+
+ if len(receiver.Names) < 1 {
+ return true // skip, anonymous receiver
+ }
+
+ receiverName := receiver.Names[0].Name
+ if receiverName == "_" {
+ return true // skip, anonymous receiver
+ }
+
+ if r.skipType(receiver.Type, pkg) {
+ return true // skip, receiver is a map or array
+ }
+
+ return false
+}
+
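+// getReceiverModifications returns the statements in funcBody that modify the
+// receiver: increments/decrements of its fields, or assignments with the receiver
+// or one of its fields on the left-hand side.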
+func (r *ModifiesValRecRule) getReceiverModifications(receiverName string, funcBody *ast.BlockStmt) []ast.Node {
+ receiverModificationFinder := func(n ast.Node) bool {
+ switch node := n.(type) {
+ case *ast.IncDecStmt:
+ se, ok := node.X.(*ast.SelectorExpr)
+ if !ok {
+ return false
+ }
+
+ name := r.getNameFromExpr(se.X)
+ return name == receiverName
+ case *ast.AssignStmt:
+ // look for assignments with the receiver on the left-hand side
+ for _, exp := range node.Lhs {
+ switch e := exp.(type) {
+ case *ast.IndexExpr: // receiver...[] = ...
+ continue
+ case *ast.StarExpr: // *receiver = ...
+ continue
+ case *ast.SelectorExpr: // receiver.field = ...
+ name := r.getNameFromExpr(e.X)
+ if name == "" || name != receiverName {
+ continue
+ }
+ case *ast.Ident: // receiver := ...
+ if e.Name != receiverName {
+ continue
+ }
+ default:
+ continue
+ }
+
+ return true
+ }
+ }
+
+ return false
+ }
+
+ return pick(funcBody, receiverModificationFinder)
+}
diff --git a/vendor/github.com/mgechev/revive/rule/nested-structs.go b/vendor/github.com/mgechev/revive/rule/nested_structs.go
similarity index 96%
rename from vendor/github.com/mgechev/revive/rule/nested-structs.go
rename to vendor/github.com/mgechev/revive/rule/nested_structs.go
index 147bd482b1..49e240b6f0 100644
--- a/vendor/github.com/mgechev/revive/rule/nested-structs.go
+++ b/vendor/github.com/mgechev/revive/rule/nested_structs.go
@@ -68,7 +68,7 @@ func (l *lintStruct) Visit(n ast.Node) ast.Visitor {
func (l *lintStruct) fail(n ast.Node) {
l.onFailure(lint.Failure{
Failure: "no nested structs are allowed",
- Category: "style",
+ Category: lint.FailureCategoryStyle,
Node: n,
Confidence: 1,
})
diff --git a/vendor/github.com/mgechev/revive/rule/optimize-operands-order.go b/vendor/github.com/mgechev/revive/rule/optimize_operands_order.go
similarity index 84%
rename from vendor/github.com/mgechev/revive/rule/optimize-operands-order.go
rename to vendor/github.com/mgechev/revive/rule/optimize_operands_order.go
index 841bde56c0..c9297d2d46 100644
--- a/vendor/github.com/mgechev/revive/rule/optimize-operands-order.go
+++ b/vendor/github.com/mgechev/revive/rule/optimize_operands_order.go
@@ -8,7 +8,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// OptimizeOperandsOrderRule lints given else constructs.
+// OptimizeOperandsOrderRule checks for inefficient conditional expressions.
type OptimizeOperandsOrderRule struct{}
// Apply applies the rule to given file.
@@ -49,8 +49,17 @@ func (w lintOptimizeOperandsOrderlExpr) Visit(node ast.Node) ast.Visitor {
}
isCaller := func(n ast.Node) bool {
- _, ok := n.(*ast.CallExpr)
- return ok
+ ce, ok := n.(*ast.CallExpr)
+ if !ok {
+ return false
+ }
+
+ ident, isIdent := ce.Fun.(*ast.Ident)
+ if !isIdent {
+ return true
+ }
+
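+ // treat any call as potentially expensive, except calls to the built-in len
+ // (for which ident.Obj is nil)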
+ return ident.Name != "len" || ident.Obj != nil
}
// check if the left sub-expression contains a function call
@@ -69,7 +78,7 @@ func (w lintOptimizeOperandsOrderlExpr) Visit(node ast.Node) ast.Visitor {
w.onFailure(lint.Failure{
Failure: fmt.Sprintf("for better performance '%v' might be rewritten as '%v'", gofmt(binExpr), gofmt(&newExpr)),
Node: node,
- Category: "optimization",
+ Category: lint.FailureCategoryOptimization,
Confidence: 0.3,
})
diff --git a/vendor/github.com/mgechev/revive/rule/package-comments.go b/vendor/github.com/mgechev/revive/rule/package_comments.go
similarity index 91%
rename from vendor/github.com/mgechev/revive/rule/package-comments.go
rename to vendor/github.com/mgechev/revive/rule/package_comments.go
index 02f246be08..20afee88e8 100644
--- a/vendor/github.com/mgechev/revive/rule/package-comments.go
+++ b/vendor/github.com/mgechev/revive/rule/package_comments.go
@@ -88,7 +88,7 @@ func (l *lintPackageComments) checkPackageComment() []lint.Failure {
if docFile != nil {
pkgFile := l.file.Pkg.Files()[fileSource]
return []lint.Failure{{
- Category: "comments",
+ Category: lint.FailureCategoryComments,
Position: lint.FailurePosition{
Start: pkgFile.ToPosition(docFile.Pos()),
End: pkgFile.ToPosition(docFile.Name.End()),
@@ -131,7 +131,7 @@ func (l *lintPackageComments) Visit(_ ast.Node) ast.Visitor {
Column: 1,
}
l.onFailure(lint.Failure{
- Category: "comments",
+ Category: lint.FailureCategoryComments,
Position: lint.FailurePosition{
Start: pos,
End: pos,
@@ -150,19 +150,11 @@ func (l *lintPackageComments) Visit(_ ast.Node) ast.Visitor {
return nil
}
s := l.fileAst.Doc.Text()
- if ts := strings.TrimLeft(s, " \t"); ts != s {
- l.onFailure(lint.Failure{
- Category: "comments",
- Node: l.fileAst.Doc,
- Confidence: 1,
- Failure: "package comment should not have leading space",
- })
- s = ts
- }
+
// Only non-main packages need to keep to this form.
- if !l.file.Pkg.IsMain() && !strings.HasPrefix(s, prefix) {
+ if !l.file.Pkg.IsMain() && !strings.HasPrefix(s, prefix) && !isDirectiveComment(s) {
l.onFailure(lint.Failure{
- Category: "comments",
+ Category: lint.FailureCategoryComments,
Node: l.fileAst.Doc,
Confidence: 1,
Failure: fmt.Sprintf(`package comment should be of the form "%s..."`, prefix),
diff --git a/vendor/github.com/mgechev/revive/rule/range.go b/vendor/github.com/mgechev/revive/rule/range.go
index 9d483a6737..b54078e4de 100644
--- a/vendor/github.com/mgechev/revive/rule/range.go
+++ b/vendor/github.com/mgechev/revive/rule/range.go
@@ -8,7 +8,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// RangeRule lints given else constructs.
+// RangeRule prevents redundant variables when iterating over a collection.
type RangeRule struct{}
// Apply applies the rule to given file.
diff --git a/vendor/github.com/mgechev/revive/rule/range-val-address.go b/vendor/github.com/mgechev/revive/rule/range_val_address.go
similarity index 90%
rename from vendor/github.com/mgechev/revive/rule/range-val-address.go
rename to vendor/github.com/mgechev/revive/rule/range_val_address.go
index 51ad8e108b..239175c6eb 100644
--- a/vendor/github.com/mgechev/revive/rule/range-val-address.go
+++ b/vendor/github.com/mgechev/revive/rule/range_val_address.go
@@ -16,6 +16,10 @@ type RangeValAddress struct{}
func (*RangeValAddress) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
+ if file.Pkg.IsAtLeastGo122() {
+ return failures
+ }
+
walker := rangeValAddress{
file: file,
onFailure: func(failure lint.Failure) {
@@ -66,7 +70,7 @@ func (w rangeValAddress) Visit(node ast.Node) ast.Visitor {
type rangeBodyVisitor struct {
valueIsStarExpr bool
- valueID *ast.Object
+ valueID *ast.Object // TODO: ast.Object is deprecated
onFailure func(lint.Failure)
}
@@ -136,7 +140,7 @@ func (bw rangeBodyVisitor) isAccessingRangeValueAddress(exp ast.Expr) bool {
v, ok := u.X.(*ast.Ident)
if !ok {
var s *ast.SelectorExpr
- s, ok = u.X.(*ast.SelectorExpr)
+ s, ok = u.X.(*ast.SelectorExpr) // TODO: possible BUG: if it's `=` and not `:=`, it means that in the last return `ok` is always true
if !ok {
return false
}
@@ -150,7 +154,7 @@ func (bw rangeBodyVisitor) isAccessingRangeValueAddress(exp ast.Expr) bool {
}
}
- return ok && v.Obj == bw.valueID
+ return ok && v.Obj == bw.valueID // TODO: ok is always true due to the previous TODO remark
}
func (bw rangeBodyVisitor) newFailure(node ast.Node) lint.Failure {
diff --git a/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go b/vendor/github.com/mgechev/revive/rule/range_val_in_closure.go
similarity index 94%
rename from vendor/github.com/mgechev/revive/rule/range-val-in-closure.go
rename to vendor/github.com/mgechev/revive/rule/range_val_in_closure.go
index 1e85d0d0d1..92078288f0 100644
--- a/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go
+++ b/vendor/github.com/mgechev/revive/rule/range_val_in_closure.go
@@ -7,13 +7,17 @@ import (
"github.com/mgechev/revive/lint"
)
-// RangeValInClosureRule lints given else constructs.
+// RangeValInClosureRule warns if a range value is used in a closure dispatched as a goroutine.
type RangeValInClosureRule struct{}
// Apply applies the rule to given file.
func (*RangeValInClosureRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
+ if file.Pkg.IsAtLeastGo122() {
+ return failures
+ }
+
walker := rangeValInClosure{
onFailure: func(failure lint.Failure) {
failures = append(failures, failure)
diff --git a/vendor/github.com/mgechev/revive/rule/receiver-naming.go b/vendor/github.com/mgechev/revive/rule/receiver-naming.go
deleted file mode 100644
index d79bb9fe8f..0000000000
--- a/vendor/github.com/mgechev/revive/rule/receiver-naming.go
+++ /dev/null
@@ -1,81 +0,0 @@
-package rule
-
-import (
- "fmt"
- "go/ast"
-
- "github.com/mgechev/revive/internal/typeparams"
- "github.com/mgechev/revive/lint"
-)
-
-// ReceiverNamingRule lints given else constructs.
-type ReceiverNamingRule struct{}
-
-// Apply applies the rule to given file.
-func (*ReceiverNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
- var failures []lint.Failure
-
- fileAst := file.AST
- walker := lintReceiverName{
- onFailure: func(failure lint.Failure) {
- failures = append(failures, failure)
- },
- typeReceiver: map[string]string{},
- }
-
- ast.Walk(walker, fileAst)
-
- return failures
-}
-
-// Name returns the rule name.
-func (*ReceiverNamingRule) Name() string {
- return "receiver-naming"
-}
-
-type lintReceiverName struct {
- onFailure func(lint.Failure)
- typeReceiver map[string]string
-}
-
-func (w lintReceiverName) Visit(n ast.Node) ast.Visitor {
- fn, ok := n.(*ast.FuncDecl)
- if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 {
- return w
- }
- names := fn.Recv.List[0].Names
- if len(names) < 1 {
- return w
- }
- name := names[0].Name
- if name == "_" {
- w.onFailure(lint.Failure{
- Node: n,
- Confidence: 1,
- Category: "naming",
- Failure: "receiver name should not be an underscore, omit the name if it is unused",
- })
- return w
- }
- if name == "this" || name == "self" {
- w.onFailure(lint.Failure{
- Node: n,
- Confidence: 1,
- Category: "naming",
- Failure: `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`,
- })
- return w
- }
- recv := typeparams.ReceiverType(fn)
- if prev, ok := w.typeReceiver[recv]; ok && prev != name {
- w.onFailure(lint.Failure{
- Node: n,
- Confidence: 1,
- Category: "naming",
- Failure: fmt.Sprintf("receiver name %s should be consistent with previous receiver name %s for %s", name, prev, recv),
- })
- return w
- }
- w.typeReceiver[recv] = name
- return w
-}
diff --git a/vendor/github.com/mgechev/revive/rule/receiver_naming.go b/vendor/github.com/mgechev/revive/rule/receiver_naming.go
new file mode 100644
index 0000000000..0327feed61
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/receiver_naming.go
@@ -0,0 +1,112 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+
+ "github.com/mgechev/revive/internal/typeparams"
+ "github.com/mgechev/revive/lint"
+)
+
+// ReceiverNamingRule lints a receiver name.
+type ReceiverNamingRule struct {
+ receiverNameMaxLength int
+}
+
+const defaultReceiverNameMaxLength = -1 // a negative value disables the length check
+
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *ReceiverNamingRule) Configure(arguments lint.Arguments) error {
+ r.receiverNameMaxLength = defaultReceiverNameMaxLength
+ if len(arguments) < 1 {
+ return nil
+ }
+
+ args, ok := arguments[0].(map[string]any)
+ if !ok {
+ return fmt.Errorf("unable to get arguments for rule %s. Expected object of key-value-pairs", r.Name())
+ }
+
+ for k, v := range args {
+ switch k {
+ case "maxLength":
+ value, ok := v.(int64)
+ if !ok {
+ return fmt.Errorf("invalid value %v for argument %s of rule %s, expected integer value got %T", v, k, r.Name(), v)
+ }
+ r.receiverNameMaxLength = int(value)
+ default:
+ return fmt.Errorf("unknown argument %s for %s rule", k, r.Name())
+ }
+ }
+ return nil
+}
+
+// Apply applies the rule to given file.
+func (r *ReceiverNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ typeReceiver := map[string]string{}
+ var failures []lint.Failure
+ for _, decl := range file.AST.Decls {
+ fn, ok := decl.(*ast.FuncDecl)
+ if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 {
+ continue
+ }
+
+ names := fn.Recv.List[0].Names
+ if len(names) < 1 {
+ continue
+ }
+ name := names[0].Name
+
+ if name == "_" {
+ failures = append(failures, lint.Failure{
+ Node: decl,
+ Confidence: 1,
+ Category: lint.FailureCategoryNaming,
+ Failure: "receiver name should not be an underscore, omit the name if it is unused",
+ })
+ continue
+ }
+
+ if name == "this" || name == "self" {
+ failures = append(failures, lint.Failure{
+ Node: decl,
+ Confidence: 1,
+ Category: lint.FailureCategoryNaming,
+ Failure: `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`,
+ })
+ continue
+ }
+
+ if r.receiverNameMaxLength > 0 && len([]rune(name)) > r.receiverNameMaxLength {
+ failures = append(failures, lint.Failure{
+ Node: decl,
+ Confidence: 1,
+ Category: lint.FailureCategoryNaming,
+ Failure: fmt.Sprintf("receiver name %s is longer than %d characters", name, r.receiverNameMaxLength),
+ })
+ continue
+ }
+
+ recv := typeparams.ReceiverType(fn)
+ if prev, ok := typeReceiver[recv]; ok && prev != name {
+ failures = append(failures, lint.Failure{
+ Node: decl,
+ Confidence: 1,
+ Category: lint.FailureCategoryNaming,
+ Failure: fmt.Sprintf("receiver name %s should be consistent with previous receiver name %s for %s", name, prev, recv),
+ })
+ continue
+ }
+
+ typeReceiver[recv] = name
+ }
+
+ return failures
+}
+
+// Name returns the rule name.
+func (*ReceiverNamingRule) Name() string {
+ return "receiver-naming"
+}
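
Reviewer note, not part of the patch: the new maxLength option compares len([]rune(name)) rather than len(name), so the limit counts characters, not bytes. A minimal standalone sketch (hypothetical receiver name, plain Go):

package main

import "fmt"

func main() {
	// A receiver name containing a multi-byte character: 3 bytes but 2 characters.
	name := "sß"
	fmt.Println(len(name))         // 3, the byte length
	fmt.Println(len([]rune(name))) // 2, what the maxLength check compares against
}
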
diff --git a/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go b/vendor/github.com/mgechev/revive/rule/redefines_builtin_id.go
similarity index 69%
rename from vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go
rename to vendor/github.com/mgechev/revive/rule/redefines_builtin_id.go
index b3ff084563..62f8d7a213 100644
--- a/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go
+++ b/vendor/github.com/mgechev/revive/rule/redefines_builtin_id.go
@@ -4,6 +4,7 @@ import (
"fmt"
"go/ast"
"go/token"
+ "maps"
"github.com/mgechev/revive/lint"
)
@@ -33,6 +34,12 @@ var builtFunctions = map[string]bool{
"recover": true,
}
+var builtFunctionsAfterGo121 = map[string]bool{
+ "clear": true,
+ "max": true,
+ "min": true,
+}
+
var builtInTypes = map[string]bool{
"bool": true,
"byte": true,
@@ -69,7 +76,17 @@ func (*RedefinesBuiltinIDRule) Apply(file *lint.File, _ lint.Arguments) []lint.F
}
astFile := file.AST
- w := &lintRedefinesBuiltinID{onFailure}
+
+ builtFuncs := maps.Clone(builtFunctions)
+ if file.Pkg.IsAtLeastGo121() {
+ maps.Copy(builtFuncs, builtFunctionsAfterGo121)
+ }
+ w := &lintRedefinesBuiltinID{
+ onFailure: onFailure,
+ builtInConstAndVars: builtInConstAndVars,
+ builtFunctions: builtFuncs,
+ builtInTypes: builtInTypes,
+ }
ast.Walk(w, astFile)
return failures
@@ -81,7 +98,10 @@ func (*RedefinesBuiltinIDRule) Name() string {
}
type lintRedefinesBuiltinID struct {
- onFailure func(lint.Failure)
+ onFailure func(lint.Failure)
+ builtInConstAndVars map[string]bool
+ builtFunctions map[string]bool
+ builtInTypes map[string]bool
}
func (w *lintRedefinesBuiltinID) Visit(node ast.Node) ast.Visitor {
@@ -125,6 +145,31 @@ func (w *lintRedefinesBuiltinID) Visit(node ast.Node) ast.Visitor {
if ok, bt := w.isBuiltIn(id); ok {
w.addFailure(n, fmt.Sprintf("redefinition of the built-in %s %s", bt, id))
}
+ case *ast.FuncType:
+ var fields []*ast.Field
+ if n.TypeParams != nil {
+ fields = append(fields, n.TypeParams.List...)
+ }
+ if n.Params != nil {
+ fields = append(fields, n.Params.List...)
+ }
+ if n.Results != nil {
+ fields = append(fields, n.Results.List...)
+ }
+ for _, field := range fields {
+ for _, name := range field.Names {
+ obj := name.Obj
+ isTypeOrName := obj != nil && (obj.Kind == ast.Var || obj.Kind == ast.Typ)
+ if !isTypeOrName {
+ continue
+ }
+
+ id := obj.Name
+ if ok, bt := w.isBuiltIn(id); ok {
+ w.addFailure(name, fmt.Sprintf("redefinition of the built-in %s %s", bt, id))
+ }
+ }
+ }
case *ast.AssignStmt:
for _, e := range n.Lhs {
id, ok := e.(*ast.Ident)
@@ -153,25 +198,25 @@ func (w *lintRedefinesBuiltinID) Visit(node ast.Node) ast.Visitor {
return w
}
-func (w lintRedefinesBuiltinID) addFailure(node ast.Node, msg string) {
+func (w *lintRedefinesBuiltinID) addFailure(node ast.Node, msg string) {
w.onFailure(lint.Failure{
Confidence: 1,
Node: node,
- Category: "logic",
+ Category: lint.FailureCategoryLogic,
Failure: msg,
})
}
-func (lintRedefinesBuiltinID) isBuiltIn(id string) (r bool, builtInKind string) {
- if builtFunctions[id] {
+func (w *lintRedefinesBuiltinID) isBuiltIn(id string) (r bool, builtInKind string) {
+ if w.builtFunctions[id] {
return true, "function"
}
- if builtInConstAndVars[id] {
+ if w.builtInConstAndVars[id] {
return true, "constant or variable"
}
- if builtInTypes[id] {
+ if w.builtInTypes[id] {
return true, "type"
}
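
Not part of the patch: a standalone sketch, assuming a Go 1.21+ toolchain, of the maps.Clone and maps.Copy pattern the hunk uses so the shared package-level builtin set is never mutated when the Go 1.21 identifiers (clear, max, min) are merged in. The map contents here are stand-ins, not the full builtin lists.

package main

import (
	"fmt"
	"maps"
)

func main() {
	base := map[string]bool{"append": true, "len": true}              // stand-in for builtFunctions
	go121 := map[string]bool{"clear": true, "max": true, "min": true} // stand-in for builtFunctionsAfterGo121

	merged := maps.Clone(base) // clone first so the shared map stays untouched
	maps.Copy(merged, go121)

	fmt.Println(merged["min"]) // true
	fmt.Println(base["min"])   // false, the original map is unchanged
}
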
diff --git a/vendor/github.com/mgechev/revive/rule/redundant_build_tag.go b/vendor/github.com/mgechev/revive/rule/redundant_build_tag.go
new file mode 100644
index 0000000000..d195ce6e42
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/redundant_build_tag.go
@@ -0,0 +1,41 @@
+package rule
+
+import (
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// RedundantBuildTagRule lints the presence of redundant build tags.
+type RedundantBuildTagRule struct{}
+
+// Apply triggers if an old build tag `// +build` is found after a new one `//go:build`.
+// `//go:build` comments are automatically added by gofmt when Go 1.17+ is used.
+// See https://pkg.go.dev/cmd/go#hdr-Build_constraints
+func (*RedundantBuildTagRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ for _, group := range file.AST.Comments {
+ hasGoBuild := false
+ for _, comment := range group.List {
+ if strings.HasPrefix(comment.Text, "//go:build ") {
+ hasGoBuild = true
+ continue
+ }
+
+ if hasGoBuild && strings.HasPrefix(comment.Text, "// +build ") {
+ return []lint.Failure{{
+ Category: lint.FailureCategoryStyle,
+ Confidence: 1,
+ Node: comment,
+ Failure: `The build tag "// +build" is redundant since Go 1.17 and can be removed`,
+ }}
+ }
+ }
+ }
+
+ return []lint.Failure{}
+}
+
+// Name returns the rule name.
+func (*RedundantBuildTagRule) Name() string {
+ return "redundant-build-tag"
+}
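
Not part of the patch: a runnable sketch of the detection order the rule relies on; a "// +build" line is only redundant when a "//go:build" line precedes it in the same comment group. The comment strings are illustrative.

package main

import (
	"fmt"
	"strings"
)

func main() {
	comments := []string{
		"//go:build linux && amd64", // modern constraint, emitted by gofmt since Go 1.17
		"// +build linux,amd64",     // legacy form, reported as redundant
	}

	hasGoBuild := false
	for _, c := range comments {
		if strings.HasPrefix(c, "//go:build ") {
			hasGoBuild = true
			continue
		}
		if hasGoBuild && strings.HasPrefix(c, "// +build ") {
			fmt.Println("redundant build tag:", c)
		}
	}
}
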
diff --git a/vendor/github.com/mgechev/revive/rule/redundant-import-alias.go b/vendor/github.com/mgechev/revive/rule/redundant_import_alias.go
similarity index 88%
rename from vendor/github.com/mgechev/revive/rule/redundant-import-alias.go
rename to vendor/github.com/mgechev/revive/rule/redundant_import_alias.go
index fa5281f24b..692507a27d 100644
--- a/vendor/github.com/mgechev/revive/rule/redundant-import-alias.go
+++ b/vendor/github.com/mgechev/revive/rule/redundant_import_alias.go
@@ -8,7 +8,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// RedundantImportAlias lints given else constructs.
+// RedundantImportAlias warns on import aliases matching the imported package name.
type RedundantImportAlias struct{}
// Apply applies the rule to given file.
@@ -25,7 +25,7 @@ func (*RedundantImportAlias) Apply(file *lint.File, _ lint.Arguments) []lint.Fai
Confidence: 1,
Failure: fmt.Sprintf("Import alias \"%s\" is redundant", imp.Name.Name),
Node: imp,
- Category: "imports",
+ Category: lint.FailureCategoryImports,
})
}
}
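
Not part of the patch: the smallest example of what redundant-import-alias reports, an alias that repeats the package's own name.

package main

import fmt "fmt" // the alias adds nothing; the rule asks to drop it

func main() {
	fmt.Println("hello")
}
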
diff --git a/vendor/github.com/mgechev/revive/rule/redundant_test_main_exit.go b/vendor/github.com/mgechev/revive/rule/redundant_test_main_exit.go
new file mode 100644
index 0000000000..d456aa2152
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/redundant_test_main_exit.go
@@ -0,0 +1,79 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// RedundantTestMainExitRule suggests removing Exit call in TestMain function for test files.
+type RedundantTestMainExitRule struct{}
+
+// Apply applies the rule to given file.
+func (*RedundantTestMainExitRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ if !file.IsTest() || !file.Pkg.IsAtLeastGo115() {
+ // skip analysis for non-test files or for Go versions before 1.15
+ return failures
+ }
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := &lintRedundantTestMainExit{onFailure: onFailure}
+ ast.Walk(w, file.AST)
+ return failures
+}
+
+// Name returns the rule name.
+func (*RedundantTestMainExitRule) Name() string {
+ return "redundant-test-main-exit"
+}
+
+type lintRedundantTestMainExit struct {
+ onFailure func(lint.Failure)
+}
+
+func (w *lintRedundantTestMainExit) Visit(node ast.Node) ast.Visitor {
+ if fd, ok := node.(*ast.FuncDecl); ok {
+ if fd.Name.Name != "TestMain" {
+ return nil // skip analysis for other functions than TestMain
+ }
+
+ return w
+ }
+
+ se, ok := node.(*ast.ExprStmt)
+ if !ok {
+ return w
+ }
+ ce, ok := se.X.(*ast.CallExpr)
+ if !ok {
+ return w
+ }
+
+ fc, ok := ce.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return w
+ }
+ id, ok := fc.X.(*ast.Ident)
+ if !ok {
+ return w
+ }
+
+ pkg := id.Name
+ fn := fc.Sel.Name
+ if isCallToExitFunction(pkg, fn) {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: ce,
+ Category: lint.FailureCategoryStyle,
+ Failure: fmt.Sprintf("redundant call to %s.%s in TestMain function, the test runner will handle it automatically as of Go 1.15", pkg, fn),
+ })
+ }
+
+ return w
+}
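
Not part of the patch: the pattern the new rule targets, shown as a hypothetical _test.go file. Since Go 1.15 the test binary exits with m.Run's result on its own, so the explicit call is redundant.

package mypkg

import (
	"os"
	"testing"
)

func TestMain(m *testing.M) {
	code := m.Run()
	os.Exit(code) // flagged: the test runner already exits with this code as of Go 1.15
}
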
diff --git a/vendor/github.com/mgechev/revive/rule/string-format.go b/vendor/github.com/mgechev/revive/rule/string_format.go
similarity index 51%
rename from vendor/github.com/mgechev/revive/rule/string-format.go
rename to vendor/github.com/mgechev/revive/rule/string_format.go
index 70edf7387c..a3beac43d1 100644
--- a/vendor/github.com/mgechev/revive/rule/string-format.go
+++ b/vendor/github.com/mgechev/revive/rule/string_format.go
@@ -6,12 +6,11 @@ import (
"go/token"
"regexp"
"strconv"
+ "strings"
"github.com/mgechev/revive/lint"
)
-// #region Revive API
-
// StringFormatRule lints strings and/or comments according to a set of regular expressions given as Arguments
type StringFormatRule struct{}
@@ -23,8 +22,12 @@ func (*StringFormatRule) Apply(file *lint.File, arguments lint.Arguments) []lint
failures = append(failures, failure)
}
- w := lintStringFormatRule{onFailure: onFailure}
- w.parseArguments(arguments)
+ w := &lintStringFormatRule{onFailure: onFailure}
+ err := w.parseArguments(arguments)
+ if err != nil {
+ return newInternalFailureError(err)
+ }
+
ast.Walk(w, file.AST)
return failures
@@ -36,16 +39,15 @@ func (*StringFormatRule) Name() string {
}
// ParseArgumentsTest is a public wrapper around w.parseArguments used for testing. Returns the error message provided to panic, or nil if no error was encountered
-func (StringFormatRule) ParseArgumentsTest(arguments lint.Arguments) *string {
+func (*StringFormatRule) ParseArgumentsTest(arguments lint.Arguments) *string {
w := lintStringFormatRule{}
c := make(chan any)
// Parse the arguments in a goroutine, defer a recover() call, return the error encountered (or nil if there was no error)
go func() {
defer func() {
- err := recover()
+ err := w.parseArguments(arguments)
c <- err
}()
- w.parseArguments(arguments)
}()
err := <-c
if err != nil {
@@ -55,10 +57,6 @@ func (StringFormatRule) ParseArgumentsTest(arguments lint.Arguments) *string {
return nil
}
-// #endregion
-
-// #region Internal structure
-
type lintStringFormatRule struct {
onFailure func(lint.Failure)
rules []stringFormatSubrule
@@ -66,12 +64,14 @@ type lintStringFormatRule struct {
type stringFormatSubrule struct {
parent *lintStringFormatRule
- scope stringFormatSubruleScope
+ scopes stringFormatSubruleScopes
regexp *regexp.Regexp
negated bool
errorMessage string
}
+type stringFormatSubruleScopes []*stringFormatSubruleScope
+
type stringFormatSubruleScope struct {
funcName string // Function name the rule is scoped to
argument int // (optional) Which argument in calls to the function is checked against the rule (the first argument is checked by default)
@@ -84,67 +84,81 @@ const identRegex = "[_A-Za-z][_A-Za-z0-9]*"
var parseStringFormatScope = regexp.MustCompile(
fmt.Sprintf("^(%s(?:\\.%s)?)(?:\\[([0-9]+)\\](?:\\.(%s))?)?$", identRegex, identRegex, identRegex))
-// #endregion
-
-// #region Argument parsing
-
-func (w *lintStringFormatRule) parseArguments(arguments lint.Arguments) {
+func (w *lintStringFormatRule) parseArguments(arguments lint.Arguments) error {
for i, argument := range arguments {
- scope, regex, negated, errorMessage := w.parseArgument(argument, i)
+ scopes, regex, negated, errorMessage, err := w.parseArgument(argument, i)
+ if err != nil {
+ return err
+ }
w.rules = append(w.rules, stringFormatSubrule{
parent: w,
- scope: scope,
+ scopes: scopes,
regexp: regex,
negated: negated,
errorMessage: errorMessage,
})
}
+ return nil
}
-func (w lintStringFormatRule) parseArgument(argument any, ruleNum int) (scope stringFormatSubruleScope, regex *regexp.Regexp, negated bool, errorMessage string) {
+func (w *lintStringFormatRule) parseArgument(argument any, ruleNum int) (scopes stringFormatSubruleScopes, regex *regexp.Regexp, negated bool, errorMessage string, err error) {
g, ok := argument.([]any) // Cast to generic slice first
if !ok {
- w.configError("argument is not a slice", ruleNum, 0)
+ return stringFormatSubruleScopes{}, regex, false, "", w.configError("argument is not a slice", ruleNum, 0)
}
if len(g) < 2 {
- w.configError("less than two slices found in argument, scope and regex are required", ruleNum, len(g)-1)
+ return stringFormatSubruleScopes{}, regex, false, "", w.configError("less than two slices found in argument, scope and regex are required", ruleNum, len(g)-1)
}
rule := make([]string, len(g))
for i, obj := range g {
val, ok := obj.(string)
if !ok {
- w.configError("unexpected value, string was expected", ruleNum, i)
+ return stringFormatSubruleScopes{}, regex, false, "", w.configError("unexpected value, string was expected", ruleNum, i)
}
rule[i] = val
}
// Validate scope and regex length
if rule[0] == "" {
- w.configError("empty scope provided", ruleNum, 0)
+ return stringFormatSubruleScopes{}, regex, false, "", w.configError("empty scope provided", ruleNum, 0)
} else if len(rule[1]) < 2 {
- w.configError("regex is too small (regexes should begin and end with '/')", ruleNum, 1)
+ return stringFormatSubruleScopes{}, regex, false, "", w.configError("regex is too small (regexes should begin and end with '/')", ruleNum, 1)
}
- // Parse rule scope
- scope = stringFormatSubruleScope{}
- matches := parseStringFormatScope.FindStringSubmatch(rule[0])
- if matches == nil {
- // The rule's scope didn't match the parsing regex at all, probably a configuration error
- w.parseError("unable to parse rule scope", ruleNum, 0)
- } else if len(matches) != 4 {
- // The rule's scope matched the parsing regex, but an unexpected number of submatches was returned, probably a bug
- w.parseError(fmt.Sprintf("unexpected number of submatches when parsing scope: %d, expected 4", len(matches)), ruleNum, 0)
- }
- scope.funcName = matches[1]
- if len(matches[2]) > 0 {
- var err error
- scope.argument, err = strconv.Atoi(matches[2])
- if err != nil {
- w.parseError("unable to parse argument number in rule scope", ruleNum, 0)
+ // Parse rule scopes
+ rawScopes := strings.Split(rule[0], ",")
+
+ scopes = make([]*stringFormatSubruleScope, 0, len(rawScopes))
+ for scopeNum, rawScope := range rawScopes {
+ rawScope = strings.TrimSpace(rawScope)
+
+ if len(rawScope) == 0 {
+ return stringFormatSubruleScopes{}, regex, false, "", w.parseScopeError("empty scope in rule scopes:", ruleNum, 0, scopeNum)
}
- }
- if len(matches[3]) > 0 {
- scope.field = matches[3]
+
+ scope := stringFormatSubruleScope{}
+ matches := parseStringFormatScope.FindStringSubmatch(rawScope)
+ if matches == nil {
+ // The rule's scope didn't match the parsing regex at all, probably a configuration error
+ return stringFormatSubruleScopes{}, regex, false, "", w.parseScopeError("unable to parse rule scope", ruleNum, 0, scopeNum)
+ } else if len(matches) != 4 {
+ // The rule's scope matched the parsing regex, but an unexpected number of submatches was returned, probably a bug
+ return stringFormatSubruleScopes{}, regex, false, "",
+ w.parseScopeError(fmt.Sprintf("unexpected number of submatches when parsing scope: %d, expected 4", len(matches)), ruleNum, 0, scopeNum)
+ }
+ scope.funcName = matches[1]
+ if len(matches[2]) > 0 {
+ var err error
+ scope.argument, err = strconv.Atoi(matches[2])
+ if err != nil {
+ return stringFormatSubruleScopes{}, regex, false, "", w.parseScopeError("unable to parse argument number in rule scope", ruleNum, 0, scopeNum)
+ }
+ }
+ if len(matches[3]) > 0 {
+ scope.field = matches[3]
+ }
+
+ scopes = append(scopes, &scope)
}
// Strip / characters from the beginning and end of rule[1] before compiling
@@ -153,33 +167,34 @@ func (w lintStringFormatRule) parseArgument(argument any, ruleNum int) (scope st
if negated {
offset++
}
- regex, err := regexp.Compile(rule[1][offset : len(rule[1])-1])
- if err != nil {
- w.parseError(fmt.Sprintf("unable to compile %s as regexp", rule[1]), ruleNum, 1)
+ regex, errr := regexp.Compile(rule[1][offset : len(rule[1])-1])
+ if errr != nil {
+ return stringFormatSubruleScopes{}, regex, false, "", w.parseError(fmt.Sprintf("unable to compile %s as regexp", rule[1]), ruleNum, 1)
}
// Use custom error message if provided
if len(rule) == 3 {
errorMessage = rule[2]
}
- return scope, regex, negated, errorMessage
+ return scopes, regex, negated, errorMessage, nil
}
// Report an invalid config, this is specifically the user's fault
-func (lintStringFormatRule) configError(msg string, ruleNum, option int) {
- panic(fmt.Sprintf("invalid configuration for string-format: %s [argument %d, option %d]", msg, ruleNum, option))
+func (*lintStringFormatRule) configError(msg string, ruleNum, option int) error {
+ return fmt.Errorf("invalid configuration for string-format: %s [argument %d, option %d]", msg, ruleNum, option)
}
// Report a general config parsing failure, this may be the user's fault, but it isn't known for certain
-func (lintStringFormatRule) parseError(msg string, ruleNum, option int) {
- panic(fmt.Sprintf("failed to parse configuration for string-format: %s [argument %d, option %d]", msg, ruleNum, option))
+func (*lintStringFormatRule) parseError(msg string, ruleNum, option int) error {
+ return fmt.Errorf("failed to parse configuration for string-format: %s [argument %d, option %d]", msg, ruleNum, option)
}
-// #endregion
-
-// #region Node traversal
+// Report a general scope config parsing failure, this may be the user's fault, but it isn't known for certain
+func (*lintStringFormatRule) parseScopeError(msg string, ruleNum, option, scopeNum int) error {
+ return fmt.Errorf("failed to parse configuration for string-format: %s [argument %d, option %d, scope index %d]", msg, ruleNum, option, scopeNum)
+}
-func (w lintStringFormatRule) Visit(node ast.Node) ast.Visitor {
+func (w *lintStringFormatRule) Visit(node ast.Node) ast.Visitor {
// First, check if node is a call expression
call, ok := node.(*ast.CallExpr)
if !ok {
@@ -193,8 +208,10 @@ func (w lintStringFormatRule) Visit(node ast.Node) ast.Visitor {
}
for _, rule := range w.rules {
- if rule.scope.funcName == callName {
- rule.Apply(call)
+ for _, scope := range rule.scopes {
+ if scope.funcName == callName {
+ rule.apply(call, scope)
+ }
}
}
@@ -202,7 +219,7 @@ func (w lintStringFormatRule) Visit(node ast.Node) ast.Visitor {
}
// Return the name of a call expression in the form of package.Func or Func
-func (lintStringFormatRule) getCallName(call *ast.CallExpr) (callName string, ok bool) {
+func (*lintStringFormatRule) getCallName(call *ast.CallExpr) (callName string, ok bool) {
if ident, ok := call.Fun.(*ast.Ident); ok {
// Local function call
return ident.Name, true
@@ -224,19 +241,15 @@ func (lintStringFormatRule) getCallName(call *ast.CallExpr) (callName string, ok
return "", false
}
-// #endregion
-
-// #region Linting logic
-
-// Apply a single format rule to a call expression (should be done after verifying the that the call expression matches the rule's scope)
-func (r *stringFormatSubrule) Apply(call *ast.CallExpr) {
- if len(call.Args) <= r.scope.argument {
+// apply a single format rule to a call expression (should be done after verifying that the call expression matches the rule's scope)
+func (r *stringFormatSubrule) apply(call *ast.CallExpr, scope *stringFormatSubruleScope) {
+ if len(call.Args) <= scope.argument {
return
}
- arg := call.Args[r.scope.argument]
+ arg := call.Args[scope.argument]
var lit *ast.BasicLit
- if len(r.scope.field) > 0 {
+ if len(scope.field) > 0 {
// Try finding the scope's Field, treating arg as a composite literal
composite, ok := arg.(*ast.CompositeLit)
if !ok {
@@ -248,7 +261,7 @@ func (r *stringFormatSubrule) Apply(call *ast.CallExpr) {
continue
}
key, ok := kv.Key.(*ast.Ident)
- if !ok || key.Name != r.scope.field {
+ if !ok || key.Name != scope.field {
continue
}
@@ -266,46 +279,44 @@ func (r *stringFormatSubrule) Apply(call *ast.CallExpr) {
return
}
}
+
+ // extra safety check
+ if lit == nil {
+ return
+ }
+
// Unquote the string literal before linting
unquoted := lit.Value[1 : len(lit.Value)-1]
- r.lintMessage(unquoted, lit)
+ if r.stringIsOK(unquoted) {
+ return
+ }
+
+ r.generateFailure(lit)
}
-func (r *stringFormatSubrule) lintMessage(s string, node ast.Node) {
+func (r *stringFormatSubrule) stringIsOK(s string) bool {
+ matches := r.regexp.MatchString(s)
if r.negated {
- if !r.regexp.MatchString(s) {
- return
- }
- // Fail if the string does match the user's regex
- var failure string
- if len(r.errorMessage) > 0 {
- failure = r.errorMessage
- } else {
- failure = fmt.Sprintf("string literal matches user defined regex /%s/", r.regexp.String())
- }
- r.parent.onFailure(lint.Failure{
- Confidence: 1,
- Failure: failure,
- Node: node,
- })
- return
+ return !matches
}
- // Fail if the string does NOT match the user's regex
- if r.regexp.MatchString(s) {
- return
- }
+ return matches
+}
+
+func (r *stringFormatSubrule) generateFailure(node ast.Node) {
var failure string
- if len(r.errorMessage) > 0 {
+ switch {
+ case len(r.errorMessage) > 0:
failure = r.errorMessage
- } else {
+ case r.negated:
+ failure = fmt.Sprintf("string literal matches user defined regex /%s/", r.regexp.String())
+ case !r.negated:
failure = fmt.Sprintf("string literal doesn't match user defined regex /%s/", r.regexp.String())
}
+
r.parent.onFailure(lint.Failure{
Confidence: 1,
Failure: failure,
Node: node,
})
}
-
-// #endregion
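
Not part of the patch: a standalone sketch of the comma-separated scope parsing added above, using the same scope regular expression. The value "fmt.Errorf[0], errors.New" is an illustrative configuration string, not one taken from this repository.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

const ident = "[_A-Za-z][_A-Za-z0-9]*"

var scopeRE = regexp.MustCompile(
	fmt.Sprintf(`^(%s(?:\.%s)?)(?:\[([0-9]+)\](?:\.(%s))?)?$`, ident, ident, ident))

func main() {
	raw := "fmt.Errorf[0], errors.New" // one configured scope string, two scopes
	for _, s := range strings.Split(raw, ",") {
		s = strings.TrimSpace(s)
		m := scopeRE.FindStringSubmatch(s)
		if m == nil {
			fmt.Println("unparsable scope:", s) // the rule turns this into a configuration error
			continue
		}
		fmt.Printf("func=%q argument=%q field=%q\n", m[1], m[2], m[3])
	}
}
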
diff --git a/vendor/github.com/mgechev/revive/rule/string-of-int.go b/vendor/github.com/mgechev/revive/rule/string_of_int.go
similarity index 100%
rename from vendor/github.com/mgechev/revive/rule/string-of-int.go
rename to vendor/github.com/mgechev/revive/rule/string_of_int.go
diff --git a/vendor/github.com/mgechev/revive/rule/struct-tag.go b/vendor/github.com/mgechev/revive/rule/struct_tag.go
similarity index 80%
rename from vendor/github.com/mgechev/revive/rule/struct-tag.go
rename to vendor/github.com/mgechev/revive/rule/struct_tag.go
index f6ee47a731..00a2b964c8 100644
--- a/vendor/github.com/mgechev/revive/rule/struct-tag.go
+++ b/vendor/github.com/mgechev/revive/rule/struct_tag.go
@@ -5,7 +5,6 @@ import (
"go/ast"
"strconv"
"strings"
- "sync"
"github.com/fatih/structtag"
"github.com/mgechev/revive/lint"
@@ -14,45 +13,50 @@ import (
// StructTagRule lints struct tags.
type StructTagRule struct {
userDefined map[string][]string // map: key -> []option
- sync.Mutex
}
-func (r *StructTagRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
- if r.userDefined == nil && len(arguments) > 0 {
- checkNumberOfArguments(1, arguments, r.Name())
- r.userDefined = make(map[string][]string, len(arguments))
- for _, arg := range arguments {
- item, ok := arg.(string)
- if !ok {
- panic(fmt.Sprintf("Invalid argument to the %s rule. Expecting a string, got %v (of type %T)", r.Name(), arg, arg))
- }
- parts := strings.Split(item, ",")
- if len(parts) < 2 {
- panic(fmt.Sprintf("Invalid argument to the %s rule. Expecting a string of the form key[,option]+, got %s", r.Name(), item))
- }
- key := strings.TrimSpace(parts[0])
- for i := 1; i < len(parts); i++ {
- option := strings.TrimSpace(parts[i])
- r.userDefined[key] = append(r.userDefined[key], option)
- }
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *StructTagRule) Configure(arguments lint.Arguments) error {
+ if len(arguments) == 0 {
+ return nil
+ }
+
+ err := checkNumberOfArguments(1, arguments, r.Name())
+ if err != nil {
+ return err
+ }
+ r.userDefined = make(map[string][]string, len(arguments))
+ for _, arg := range arguments {
+ item, ok := arg.(string)
+ if !ok {
+ return fmt.Errorf("invalid argument to the %s rule. Expecting a string, got %v (of type %T)", r.Name(), arg, arg)
+ }
+ parts := strings.Split(item, ",")
+ if len(parts) < 2 {
+ return fmt.Errorf("invalid argument to the %s rule. Expecting a string of the form key[,option]+, got %s", r.Name(), item)
+ }
+ key := strings.TrimSpace(parts[0])
+ for i := 1; i < len(parts); i++ {
+ option := strings.TrimSpace(parts[i])
+ r.userDefined[key] = append(r.userDefined[key], option)
}
}
+ return nil
}
// Apply applies the rule to given file.
-func (r *StructTagRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
- r.configure(args)
-
+func (r *StructTagRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
onFailure := func(failure lint.Failure) {
failures = append(failures, failure)
}
w := lintStructTagRule{
- onFailure: onFailure,
- userDefined: r.userDefined,
+ onFailure: onFailure,
+ userDefined: r.userDefined,
+ isAtLeastGo124: file.Pkg.IsAtLeastGo124(),
}
ast.Walk(w, file.AST)
@@ -66,20 +70,23 @@ func (*StructTagRule) Name() string {
}
type lintStructTagRule struct {
- onFailure func(lint.Failure)
- userDefined map[string][]string // map: key -> []option
- usedTagNbr map[int]bool // list of used tag numbers
- usedTagName map[string]bool // list of used tag keys
+ onFailure func(lint.Failure)
+ userDefined map[string][]string // map: key -> []option
+ usedTagNbr map[int]bool // list of used tag numbers
+ usedTagName map[string]bool // list of used tag keys
+ isAtLeastGo124 bool
}
func (w lintStructTagRule) Visit(node ast.Node) ast.Visitor {
switch n := node.(type) {
case *ast.StructType:
- if n.Fields == nil || n.Fields.NumFields() < 1 {
+ isEmptyStruct := n.Fields == nil || n.Fields.NumFields() < 1
+ if isEmptyStruct {
return nil // skip empty structs
}
- w.usedTagNbr = map[int]bool{} // init
- w.usedTagName = map[string]bool{} // init
+
+ w.usedTagNbr = map[int]bool{}
+ w.usedTagName = map[string]bool{}
for _, f := range n.Fields.List {
if f.Tag != nil {
w.checkTaggedField(f)
@@ -90,14 +97,16 @@ func (w lintStructTagRule) Visit(node ast.Node) ast.Visitor {
return w
}
-const keyASN1 = "asn1"
-const keyBSON = "bson"
-const keyDefault = "default"
-const keyJSON = "json"
-const keyProtobuf = "protobuf"
-const keyRequired = "required"
-const keyXML = "xml"
-const keyYAML = "yaml"
+const (
+ keyASN1 = "asn1"
+ keyBSON = "bson"
+ keyDefault = "default"
+ keyJSON = "json"
+ keyProtobuf = "protobuf"
+ keyRequired = "required"
+ keyXML = "xml"
+ keyYAML = "yaml"
+)
func (w lintStructTagRule) checkTagNameIfNeed(tag *structtag.Tag) (string, bool) {
isUnnamedTag := tag.Name == "" || tag.Name == "-"
@@ -105,13 +114,9 @@ func (w lintStructTagRule) checkTagNameIfNeed(tag *structtag.Tag) (string, bool)
return "", true
}
- needsToCheckTagName := tag.Key == keyBSON ||
- tag.Key == keyJSON ||
- tag.Key == keyXML ||
- tag.Key == keyYAML ||
- tag.Key == keyProtobuf
-
- if !needsToCheckTagName {
+ switch tag.Key {
+ case keyBSON, keyJSON, keyXML, keyYAML, keyProtobuf:
+ default:
return "", true
}
@@ -136,8 +141,8 @@ func (lintStructTagRule) getTagName(tag *structtag.Tag) string {
switch tag.Key {
case keyProtobuf:
for _, option := range tag.Options {
- if strings.HasPrefix(option, "name=") {
- return strings.TrimPrefix(option, "name=")
+ if tagName, found := strings.CutPrefix(option, "name="); found {
+ return tagName
}
}
return "" // protobuf tag lacks 'name' option
@@ -278,6 +283,11 @@ func (w lintStructTagRule) checkJSONTag(name string, options []string) (string,
if name != "-" {
return "option can not be empty in JSON tag", false
}
+ case "omitzero":
+ if w.isAtLeastGo124 {
+ continue
+ }
+ fallthrough
default:
if w.isUserDefined(keyJSON, opt) {
continue
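
Not part of the patch: what the new omitzero branch accepts. The json option is honored by encoding/json from Go 1.24, which is why struct-tag only skips it when IsAtLeastGo124 reports true. The struct below is hypothetical; older toolchains still compile it but keep the field in the output.

package main

import (
	"encoding/json"
	"fmt"
	"time"
)

type Event struct {
	At time.Time `json:"at,omitzero"` // accepted by struct-tag on Go 1.24+ files
}

func main() {
	b, _ := json.Marshal(Event{})
	fmt.Println(string(b)) // {} once the encoder honors omitzero (Go 1.24+)
}
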
diff --git a/vendor/github.com/mgechev/revive/rule/superfluous-else.go b/vendor/github.com/mgechev/revive/rule/superfluous_else.go
similarity index 67%
rename from vendor/github.com/mgechev/revive/rule/superfluous-else.go
rename to vendor/github.com/mgechev/revive/rule/superfluous_else.go
index 2aa1b6b2ca..c9474d9c37 100644
--- a/vendor/github.com/mgechev/revive/rule/superfluous-else.go
+++ b/vendor/github.com/mgechev/revive/rule/superfluous_else.go
@@ -2,6 +2,7 @@ package rule
import (
"fmt"
+
"github.com/mgechev/revive/internal/ifelse"
"github.com/mgechev/revive/lint"
)
@@ -11,7 +12,7 @@ type SuperfluousElseRule struct{}
// Apply applies the rule to given file.
func (e *SuperfluousElseRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
- return ifelse.Apply(e, file.AST, ifelse.TargetElse, args)
+ return ifelse.Apply(e.checkIfElse, file.AST, ifelse.TargetElse, args)
}
// Name returns the rule name.
@@ -19,28 +20,31 @@ func (*SuperfluousElseRule) Name() string {
return "superfluous-else"
}
-// CheckIfElse evaluates the rule against an ifelse.Chain.
-func (*SuperfluousElseRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) (failMsg string) {
+func (*SuperfluousElseRule) checkIfElse(chain ifelse.Chain, args ifelse.Args) (string, bool) {
+ if !chain.HasElse {
+ return "", false
+ }
+
if !chain.If.Deviates() {
// this rule only applies if the if-block deviates control flow
- return
+ return "", false
}
if chain.HasPriorNonDeviating {
// if we de-indent the "else" block then a previous branch
- // might flow into it, affecting program behaviour
- return
+ // might flow into it, affecting program behavior
+ return "", false
}
if chain.If.Returns() {
// avoid overlapping with indent-error-flow
- return
+ return "", false
}
- if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.Else.HasDecls) {
+ if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.Else.HasDecls()) {
// avoid increasing variable scope
- return
+ return "", false
}
- return fmt.Sprintf("if block ends with %v, so drop this else and outdent its block", chain.If.LongString())
+ return fmt.Sprintf("if block ends with %v, so drop this else and outdent its block", chain.If.LongString()), true
}
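
Not part of the patch: the shape of code the rule still reports after this refactor, an else block that can be outdented because the if block already leaves the loop iteration.

package main

import "fmt"

func main() {
	for _, n := range []int{-1, 2, 0} {
		if n < 0 {
			continue
		} else { // superfluous-else: the if block ends with continue, drop the else and outdent its block
			fmt.Println("processing", n)
		}
	}
}
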
diff --git a/vendor/github.com/mgechev/revive/rule/time-equal.go b/vendor/github.com/mgechev/revive/rule/time_equal.go
similarity index 68%
rename from vendor/github.com/mgechev/revive/rule/time-equal.go
rename to vendor/github.com/mgechev/revive/rule/time_equal.go
index 3b85e18a8e..769b15406b 100644
--- a/vendor/github.com/mgechev/revive/rule/time-equal.go
+++ b/vendor/github.com/mgechev/revive/rule/time_equal.go
@@ -50,26 +50,23 @@ func (l *lintTimeEqual) Visit(node ast.Node) ast.Visitor {
return l
}
- xtyp := l.file.Pkg.TypeOf(expr.X)
- ytyp := l.file.Pkg.TypeOf(expr.Y)
-
- if !isNamedType(xtyp, "time", "Time") || !isNamedType(ytyp, "time", "Time") {
+ typeOfX := l.file.Pkg.TypeOf(expr.X)
+ typeOfY := l.file.Pkg.TypeOf(expr.Y)
+ bothAreOfTimeType := isNamedType(typeOfX, "time", "Time") && isNamedType(typeOfY, "time", "Time")
+ if !bothAreOfTimeType {
return l
}
- var failure string
- switch expr.Op {
- case token.EQL:
- failure = fmt.Sprintf("use %s.Equal(%s) instead of %q operator", gofmt(expr.X), gofmt(expr.Y), expr.Op)
- case token.NEQ:
- failure = fmt.Sprintf("use !%s.Equal(%s) instead of %q operator", gofmt(expr.X), gofmt(expr.Y), expr.Op)
+ negateStr := ""
+ if token.NEQ == expr.Op {
+ negateStr = "!"
}
l.onFailure(lint.Failure{
- Category: "time",
+ Category: lint.FailureCategoryTime,
Confidence: 1,
Node: node,
- Failure: failure,
+ Failure: fmt.Sprintf("use %s%s.Equal(%s) instead of %q operator", negateStr, gofmt(expr.X), gofmt(expr.Y), expr.Op),
})
return l
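
Not part of the patch: why the rule steers callers toward Equal. The == and != operators compare the location (and any monotonic reading) along with the instant, so the same instant viewed in another zone compares unequal. A runnable sketch:

package main

import (
	"fmt"
	"time"
)

func main() {
	base := time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC)
	other := base.In(time.FixedZone("UTC+2", 2*60*60)) // same instant, different location

	fmt.Println(base == other)      // false, the location differs
	fmt.Println(base.Equal(other))  // true, only the instant is compared
	fmt.Println(!base.Equal(other)) // the replacement the rule suggests for !=
}
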
diff --git a/vendor/github.com/mgechev/revive/rule/time-naming.go b/vendor/github.com/mgechev/revive/rule/time_naming.go
similarity index 89%
rename from vendor/github.com/mgechev/revive/rule/time-naming.go
rename to vendor/github.com/mgechev/revive/rule/time_naming.go
index cea452e613..5bbe8aa063 100644
--- a/vendor/github.com/mgechev/revive/rule/time-naming.go
+++ b/vendor/github.com/mgechev/revive/rule/time_naming.go
@@ -9,7 +9,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// TimeNamingRule lints given else constructs.
+// TimeNamingRule lints the name of a time variable.
type TimeNamingRule struct{}
// Apply applies the rule to given file.
@@ -64,7 +64,7 @@ func (w *lintTimeNames) Visit(node ast.Node) ast.Visitor {
continue
}
w.onFailure(lint.Failure{
- Category: "time",
+ Category: lint.FailureCategoryTime,
Confidence: 0.9,
Node: v,
Failure: fmt.Sprintf("var %s is of type %v; don't use unit-specific suffix %q", name.Name, origTyp, suffix),
@@ -90,6 +90,7 @@ func isNamedType(typ types.Type, importPath, name string) bool {
if !ok {
return false
}
- tn := n.Obj()
- return tn != nil && tn.Pkg() != nil && tn.Pkg().Path() == importPath && tn.Name() == name
+
+ typeName := n.Obj()
+ return typeName != nil && typeName.Pkg() != nil && typeName.Pkg().Path() == importPath && typeName.Name() == name
}
diff --git a/vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go b/vendor/github.com/mgechev/revive/rule/unchecked_type_assertion.go
similarity index 62%
rename from vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go
rename to vendor/github.com/mgechev/revive/rule/unchecked_type_assertion.go
index df27743cbd..a272724dd9 100644
--- a/vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go
+++ b/vendor/github.com/mgechev/revive/rule/unchecked_type_assertion.go
@@ -1,9 +1,9 @@
package rule
import (
+ "errors"
"fmt"
"go/ast"
- "sync"
"github.com/mgechev/revive/lint"
)
@@ -13,52 +13,47 @@ const (
ruleUTAMessageIgnored = "type assertion result ignored"
)
-// UncheckedTypeAssertionRule lints missing or ignored `ok`-value in danymic type casts.
+// UncheckedTypeAssertionRule lints missing or ignored `ok`-value in dynamic type casts.
type UncheckedTypeAssertionRule struct {
- sync.Mutex
acceptIgnoredAssertionResult bool
- configured bool
}
-func (u *UncheckedTypeAssertionRule) configure(arguments lint.Arguments) {
- u.Lock()
- defer u.Unlock()
-
- if len(arguments) == 0 || u.configured {
- return
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *UncheckedTypeAssertionRule) Configure(arguments lint.Arguments) error {
+ if len(arguments) == 0 {
+ return nil
}
- u.configured = true
-
args, ok := arguments[0].(map[string]any)
if !ok {
- panic("Unable to get arguments. Expected object of key-value-pairs.")
+ return errors.New("unable to get arguments. Expected object of key-value-pairs")
}
for k, v := range args {
switch k {
case "acceptIgnoredAssertionResult":
- u.acceptIgnoredAssertionResult, ok = v.(bool)
+ r.acceptIgnoredAssertionResult, ok = v.(bool)
if !ok {
- panic(fmt.Sprintf("Unable to parse argument '%s'. Expected boolean.", k))
+ return fmt.Errorf("unable to parse argument '%s'. Expected boolean", k)
}
default:
- panic(fmt.Sprintf("Unknown argument: %s", k))
+ return fmt.Errorf("unknown argument: %s", k)
}
}
+ return nil
}
// Apply applies the rule to given file.
-func (u *UncheckedTypeAssertionRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
- u.configure(args)
-
+func (r *UncheckedTypeAssertionRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
- walker := &lintUnchekedTypeAssertion{
+ walker := &lintUncheckedTypeAssertion{
onFailure: func(failure lint.Failure) {
failures = append(failures, failure)
},
- acceptIgnoredTypeAssertionResult: u.acceptIgnoredAssertionResult,
+ acceptIgnoredTypeAssertionResult: r.acceptIgnoredAssertionResult,
}
ast.Walk(walker, file.AST)
@@ -71,7 +66,7 @@ func (*UncheckedTypeAssertionRule) Name() string {
return "unchecked-type-assertion"
}
-type lintUnchekedTypeAssertion struct {
+type lintUncheckedTypeAssertion struct {
onFailure func(lint.Failure)
acceptIgnoredTypeAssertionResult bool
}
@@ -89,14 +84,14 @@ func isTypeSwitch(e *ast.TypeAssertExpr) bool {
return e.Type == nil
}
-func (w *lintUnchekedTypeAssertion) requireNoTypeAssert(expr ast.Expr) {
+func (w *lintUncheckedTypeAssertion) requireNoTypeAssert(expr ast.Expr) {
e, ok := expr.(*ast.TypeAssertExpr)
if ok && !isTypeSwitch(e) {
w.addFailure(e, ruleUTAMessagePanic)
}
}
-func (w *lintUnchekedTypeAssertion) handleIfStmt(n *ast.IfStmt) {
+func (w *lintUncheckedTypeAssertion) handleIfStmt(n *ast.IfStmt) {
ifCondition, ok := n.Cond.(*ast.BinaryExpr)
if ok {
w.requireNoTypeAssert(ifCondition.X)
@@ -104,7 +99,7 @@ func (w *lintUnchekedTypeAssertion) handleIfStmt(n *ast.IfStmt) {
}
}
-func (w *lintUnchekedTypeAssertion) requireBinaryExpressionWithoutTypeAssertion(expr ast.Expr) {
+func (w *lintUncheckedTypeAssertion) requireBinaryExpressionWithoutTypeAssertion(expr ast.Expr) {
binaryExpr, ok := expr.(*ast.BinaryExpr)
if ok {
w.requireNoTypeAssert(binaryExpr.X)
@@ -112,19 +107,19 @@ func (w *lintUnchekedTypeAssertion) requireBinaryExpressionWithoutTypeAssertion(
}
}
-func (w *lintUnchekedTypeAssertion) handleCaseClause(n *ast.CaseClause) {
+func (w *lintUncheckedTypeAssertion) handleCaseClause(n *ast.CaseClause) {
for _, expr := range n.List {
w.requireNoTypeAssert(expr)
w.requireBinaryExpressionWithoutTypeAssertion(expr)
}
}
-func (w *lintUnchekedTypeAssertion) handleSwitch(n *ast.SwitchStmt) {
+func (w *lintUncheckedTypeAssertion) handleSwitch(n *ast.SwitchStmt) {
w.requireNoTypeAssert(n.Tag)
w.requireBinaryExpressionWithoutTypeAssertion(n.Tag)
}
-func (w *lintUnchekedTypeAssertion) handleAssignment(n *ast.AssignStmt) {
+func (w *lintUncheckedTypeAssertion) handleAssignment(n *ast.AssignStmt) {
if len(n.Rhs) == 0 {
return
}
@@ -148,21 +143,21 @@ func (w *lintUnchekedTypeAssertion) handleAssignment(n *ast.AssignStmt) {
}
// handles "return foo(.*bar)" - one of them is enough to fail as golang does not forward the type cast tuples in return statements
-func (w *lintUnchekedTypeAssertion) handleReturn(n *ast.ReturnStmt) {
+func (w *lintUncheckedTypeAssertion) handleReturn(n *ast.ReturnStmt) {
for _, r := range n.Results {
w.requireNoTypeAssert(r)
}
}
-func (w *lintUnchekedTypeAssertion) handleRange(n *ast.RangeStmt) {
+func (w *lintUncheckedTypeAssertion) handleRange(n *ast.RangeStmt) {
w.requireNoTypeAssert(n.X)
}
-func (w *lintUnchekedTypeAssertion) handleChannelSend(n *ast.SendStmt) {
+func (w *lintUncheckedTypeAssertion) handleChannelSend(n *ast.SendStmt) {
w.requireNoTypeAssert(n.Value)
}
-func (w *lintUnchekedTypeAssertion) Visit(node ast.Node) ast.Visitor {
+func (w *lintUncheckedTypeAssertion) Visit(node ast.Node) ast.Visitor {
switch n := node.(type) {
case *ast.RangeStmt:
w.handleRange(n)
@@ -183,10 +178,10 @@ func (w *lintUnchekedTypeAssertion) Visit(node ast.Node) ast.Visitor {
return w
}
-func (w *lintUnchekedTypeAssertion) addFailure(n *ast.TypeAssertExpr, why string) {
+func (w *lintUncheckedTypeAssertion) addFailure(n *ast.TypeAssertExpr, why string) {
s := fmt.Sprintf("type cast result is unchecked in %v - %s", gofmt(n), why)
w.onFailure(lint.Failure{
- Category: "bad practice",
+ Category: lint.FailureCategoryBadPractice,
Confidence: 1,
Node: n,
Failure: s,
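
Not part of the patch: the comma-ok form the rule asks for, versus the single-result assertion that panics on a mismatch (illustrative values).

package main

import "fmt"

func main() {
	var v any = 42

	// s := v.(string) would panic here; unchecked-type-assertion reports that form.
	if s, ok := v.(string); ok {
		fmt.Println("string:", s)
	} else {
		fmt.Println("not a string, no panic")
	}
}
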
diff --git a/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go b/vendor/github.com/mgechev/revive/rule/unconditional_recursion.go
similarity index 80%
rename from vendor/github.com/mgechev/revive/rule/unconditional-recursion.go
rename to vendor/github.com/mgechev/revive/rule/unconditional_recursion.go
index 9ac2648cdd..b59275d893 100644
--- a/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go
+++ b/vendor/github.com/mgechev/revive/rule/unconditional_recursion.go
@@ -6,7 +6,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// UnconditionalRecursionRule lints given else constructs.
+// UnconditionalRecursionRule warns on function calls that will lead to infinite recursion.
type UnconditionalRecursionRule struct{}
// Apply applies the rule to given file.
@@ -17,8 +17,35 @@ func (*UnconditionalRecursionRule) Apply(file *lint.File, _ lint.Arguments) []li
failures = append(failures, failure)
}
- w := lintUnconditionalRecursionRule{onFailure: onFailure}
- ast.Walk(w, file.AST)
+ // Range over global declarations of the file to detect func/method declarations and analyze them
+ for _, decl := range file.AST.Decls {
+ n, ok := decl.(*ast.FuncDecl)
+ if !ok {
+ continue // not a func/method declaration
+ }
+
+ if n.Body == nil {
+ continue // func/method with empty body => it can not be recursive
+ }
+
+ var rec *ast.Ident
+ switch {
+ case n.Recv == nil:
+ rec = nil
+ case n.Recv.NumFields() < 1 || len(n.Recv.List[0].Names) < 1:
+ rec = &ast.Ident{Name: "_"}
+ default:
+ rec = n.Recv.List[0].Names[0]
+ }
+
+ w := &lintUnconditionalRecursionRule{
+ onFailure: onFailure,
+ currentFunc: &funcStatus{&funcDesc{rec, n.Name}, false},
+ }
+
+ ast.Walk(w, n.Body)
+ }
+
return failures
}
@@ -50,26 +77,14 @@ type lintUnconditionalRecursionRule struct {
inGoStatement bool
}
-// Visit will traverse the file AST.
-// The rule is based in the following algorithm: inside each function body we search for calls to the function itself.
+// Visit traverses the function's body searching for calls to the function itself.
// We do not search inside conditional control structures (if, for, switch, ...) because any recursive call inside them is conditioned
// We do search inside conditional control structures are statements that will take the control out of the function (return, exit, panic)
// If we find conditional control exits, it means the function is NOT unconditionally-recursive
// If we find a recursive call before finding any conditional exit, a failure is generated
-// In resume: if we found a recursive call control-dependant from the entry point of the function then we raise a failure.
-func (w lintUnconditionalRecursionRule) Visit(node ast.Node) ast.Visitor {
+// In short: if we find a recursive call that is control-dependent only on the function's entry point, we raise a failure.
+func (w *lintUnconditionalRecursionRule) Visit(node ast.Node) ast.Visitor {
switch n := node.(type) {
- case *ast.FuncDecl:
- var rec *ast.Ident
- switch {
- case n.Recv == nil:
- rec = nil
- case n.Recv.NumFields() < 1 || len(n.Recv.List[0].Names) < 1:
- rec = &ast.Ident{Name: "_"}
- default:
- rec = n.Recv.List[0].Names[0]
- }
- w.currentFunc = &funcStatus{&funcDesc{rec, n.Name}, false}
case *ast.CallExpr:
// check if call arguments has a recursive call
for _, arg := range n.Args {
@@ -100,7 +115,7 @@ func (w lintUnconditionalRecursionRule) Visit(node ast.Node) ast.Visitor {
!w.currentFunc.seenConditionalExit && // there is a conditional exit in the function
w.currentFunc.funcDesc.equal(&funcDesc{selector, funcID}) {
w.onFailure(lint.Failure{
- Category: "logic",
+ Category: lint.FailureCategoryLogic,
Confidence: 0.8,
Node: n,
Failure: "unconditional recursive call",
@@ -152,20 +167,7 @@ func (w *lintUnconditionalRecursionRule) updateFuncStatus(node ast.Node) {
w.currentFunc.seenConditionalExit = w.hasControlExit(node)
}
-var exitFunctions = map[string]map[string]bool{
- "os": {"Exit": true},
- "syscall": {"Exit": true},
- "log": {
- "Fatal": true,
- "Fatalf": true,
- "Fatalln": true,
- "Panic": true,
- "Panicf": true,
- "Panicln": true,
- },
-}
-
-func (lintUnconditionalRecursionRule) hasControlExit(node ast.Node) bool {
+func (*lintUnconditionalRecursionRule) hasControlExit(node ast.Node) bool {
// isExit returns true if the given node makes control exit the function
isExit := func(node ast.Node) bool {
switch n := node.(type) {
@@ -185,9 +187,9 @@ func (lintUnconditionalRecursionRule) hasControlExit(node ast.Node) bool {
return false
}
- fn := se.Sel.Name
- pkg := id.Name
- if exitFunctions[pkg] != nil && exitFunctions[pkg][fn] { // it's a call to an exit function
+ functionName := se.Sel.Name
+ pkgName := id.Name
+ if isCallToExitFunction(pkgName, functionName) {
return true
}
}
diff --git a/vendor/github.com/mgechev/revive/rule/unexported-return.go b/vendor/github.com/mgechev/revive/rule/unexported-return.go
deleted file mode 100644
index 10f8e3fbe0..0000000000
--- a/vendor/github.com/mgechev/revive/rule/unexported-return.go
+++ /dev/null
@@ -1,107 +0,0 @@
-package rule
-
-import (
- "fmt"
- "go/ast"
- "go/types"
-
- "github.com/mgechev/revive/internal/typeparams"
- "github.com/mgechev/revive/lint"
-)
-
-// UnexportedReturnRule lints given else constructs.
-type UnexportedReturnRule struct{}
-
-// Apply applies the rule to given file.
-func (*UnexportedReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
- var failures []lint.Failure
-
- fileAst := file.AST
- walker := lintUnexportedReturn{
- file: file,
- fileAst: fileAst,
- onFailure: func(failure lint.Failure) {
- failures = append(failures, failure)
- },
- }
-
- file.Pkg.TypeCheck()
- ast.Walk(walker, fileAst)
-
- return failures
-}
-
-// Name returns the rule name.
-func (*UnexportedReturnRule) Name() string {
- return "unexported-return"
-}
-
-type lintUnexportedReturn struct {
- file *lint.File
- fileAst *ast.File
- onFailure func(lint.Failure)
-}
-
-func (w lintUnexportedReturn) Visit(n ast.Node) ast.Visitor {
- fn, ok := n.(*ast.FuncDecl)
- if !ok {
- return w
- }
- if fn.Type.Results == nil {
- return nil
- }
- if !fn.Name.IsExported() {
- return nil
- }
- thing := "func"
- if fn.Recv != nil && len(fn.Recv.List) > 0 {
- thing = "method"
- if !ast.IsExported(typeparams.ReceiverType(fn)) {
- // Don't report exported methods of unexported types,
- // such as private implementations of sort.Interface.
- return nil
- }
- }
- for _, ret := range fn.Type.Results.List {
- typ := w.file.Pkg.TypeOf(ret.Type)
- if exportedType(typ) {
- continue
- }
- w.onFailure(lint.Failure{
- Category: "unexported-type-in-api",
- Node: ret.Type,
- Confidence: 0.8,
- Failure: fmt.Sprintf("exported %s %s returns unexported type %s, which can be annoying to use",
- thing, fn.Name.Name, typ),
- })
- break // only flag one
- }
- return nil
-}
-
-// exportedType reports whether typ is an exported type.
-// It is imprecise, and will err on the side of returning true,
-// such as for composite types.
-func exportedType(typ types.Type) bool {
- switch t := typ.(type) {
- case *types.Named:
- obj := t.Obj()
- switch {
- // Builtin types have no package.
- case obj.Pkg() == nil:
- case obj.Exported():
- default:
- _, ok := t.Underlying().(*types.Interface)
- return ok
- }
- return true
- case *types.Map:
- return exportedType(t.Key()) && exportedType(t.Elem())
- case interface {
- Elem() types.Type
- }: // array, slice, pointer, chan
- return exportedType(t.Elem())
- }
- // Be conservative about other types, such as struct, interface, etc.
- return true
-}
diff --git a/vendor/github.com/mgechev/revive/rule/unexported-naming.go b/vendor/github.com/mgechev/revive/rule/unexported_naming.go
similarity index 97%
rename from vendor/github.com/mgechev/revive/rule/unexported-naming.go
rename to vendor/github.com/mgechev/revive/rule/unexported_naming.go
index 0c2b39d410..ceb096a41a 100644
--- a/vendor/github.com/mgechev/revive/rule/unexported-naming.go
+++ b/vendor/github.com/mgechev/revive/rule/unexported_naming.go
@@ -107,7 +107,7 @@ func (unl unexportablenamingLinter) lintIDs(ids []*ast.Ident) {
unl.onFailure(lint.Failure{
Node: id,
Confidence: 1,
- Category: "naming",
+ Category: lint.FailureCategoryNaming,
Failure: fmt.Sprintf("the symbol %s is local, its name should start with a lowercase letter", id.String()),
})
}
diff --git a/vendor/github.com/mgechev/revive/rule/unexported_return.go b/vendor/github.com/mgechev/revive/rule/unexported_return.go
new file mode 100644
index 0000000000..0d88a957c7
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/unexported_return.go
@@ -0,0 +1,94 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "go/types"
+
+ "github.com/mgechev/revive/internal/typeparams"
+ "github.com/mgechev/revive/lint"
+)
+
+// UnexportedReturnRule warns when an exported function or method returns a value of an unexported type.
+type UnexportedReturnRule struct{}
+
+// Apply applies the rule to given file.
+func (*UnexportedReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ for _, decl := range file.AST.Decls {
+ fn, ok := decl.(*ast.FuncDecl)
+ if !ok {
+ continue
+ }
+
+ if fn.Type.Results == nil {
+ continue
+ }
+
+ if !fn.Name.IsExported() {
+ continue
+ }
+
+ thing := "func"
+ if fn.Recv != nil && len(fn.Recv.List) > 0 {
+ thing = "method"
+ if !ast.IsExported(typeparams.ReceiverType(fn)) {
+ // Don't report exported methods of unexported types,
+ // such as private implementations of sort.Interface.
+ continue
+ }
+ }
+
+ for _, ret := range fn.Type.Results.List {
+ typ := file.Pkg.TypeOf(ret.Type)
+ if exportedType(typ) {
+ continue
+ }
+
+ failures = append(failures, lint.Failure{
+ Category: lint.FailureCategoryUnexportedTypeInAPI,
+ Node: ret.Type,
+ Confidence: 0.8,
+ Failure: fmt.Sprintf("exported %s %s returns unexported type %s, which can be annoying to use",
+ thing, fn.Name.Name, typ),
+ })
+
+ break // only flag one
+ }
+ }
+
+ return failures
+}
+
+// Name returns the rule name.
+func (*UnexportedReturnRule) Name() string {
+ return "unexported-return"
+}
+
+// exportedType reports whether typ is an exported type.
+// It is imprecise, and will err on the side of returning true,
+// such as for composite types.
+func exportedType(typ types.Type) bool {
+ switch t := typ.(type) {
+ case *types.Named:
+ obj := t.Obj()
+ switch {
+ // Builtin types have no package.
+ case obj.Pkg() == nil:
+ case obj.Exported():
+ default:
+ _, ok := t.Underlying().(*types.Interface)
+ return ok
+ }
+ return true
+ case *types.Map:
+ return exportedType(t.Key()) && exportedType(t.Elem())
+ case interface {
+ Elem() types.Type
+ }: // array, slice, pointer, chan
+ return exportedType(t.Elem())
+ }
+ // Be conservative about other types, such as struct, interface, etc.
+ return true
+}
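
Not part of the patch: a compilable sketch of the situation the rewritten rule reports, an exported constructor returning a type that callers outside the package cannot name. The package and identifiers are hypothetical.

package repo

// result is unexported, so external callers cannot declare a variable of this type.
type result struct{ n int }

// New is exported yet returns the unexported type above; unexported-return flags the
// return type, as in the Apply loop shown in the diff.
func New() result { return result{n: 1} }
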
diff --git a/vendor/github.com/mgechev/revive/rule/unhandled-error.go b/vendor/github.com/mgechev/revive/rule/unhandled_error.go
similarity index 71%
rename from vendor/github.com/mgechev/revive/rule/unhandled-error.go
rename to vendor/github.com/mgechev/revive/rule/unhandled_error.go
index ce6fa38641..6ae919bcd5 100644
--- a/vendor/github.com/mgechev/revive/rule/unhandled-error.go
+++ b/vendor/github.com/mgechev/revive/rule/unhandled_error.go
@@ -1,51 +1,48 @@
package rule
import (
+ "errors"
"fmt"
"go/ast"
"go/types"
"regexp"
"strings"
- "sync"
"github.com/mgechev/revive/lint"
)
-// UnhandledErrorRule lints given else constructs.
+// UnhandledErrorRule warns on unhandled errors returned by function calls.
type UnhandledErrorRule struct {
ignoreList []*regexp.Regexp
- sync.Mutex
}
-func (r *UnhandledErrorRule) configure(arguments lint.Arguments) {
- r.Lock()
- if r.ignoreList == nil {
- for _, arg := range arguments {
- argStr, ok := arg.(string)
- if !ok {
- panic(fmt.Sprintf("Invalid argument to the unhandled-error rule. Expecting a string, got %T", arg))
- }
-
- argStr = strings.Trim(argStr, " ")
- if argStr == "" {
- panic("Invalid argument to the unhandled-error rule, expected regular expression must not be empty.")
- }
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *UnhandledErrorRule) Configure(arguments lint.Arguments) error {
+ for _, arg := range arguments {
+ argStr, ok := arg.(string)
+ if !ok {
+ return fmt.Errorf("invalid argument to the unhandled-error rule. Expecting a string, got %T", arg)
+ }
- exp, err := regexp.Compile(argStr)
- if err != nil {
- panic(fmt.Sprintf("Invalid argument to the unhandled-error rule: regexp %q does not compile: %v", argStr, err))
- }
+ argStr = strings.Trim(argStr, " ")
+ if argStr == "" {
+ return errors.New("invalid argument to the unhandled-error rule, expected regular expression must not be empty")
+ }
- r.ignoreList = append(r.ignoreList, exp)
+ exp, err := regexp.Compile(argStr)
+ if err != nil {
+ return fmt.Errorf("invalid argument to the unhandled-error rule: regexp %q does not compile: %w", argStr, err)
}
+
+ r.ignoreList = append(r.ignoreList, exp)
}
- r.Unlock()
+ return nil
}
// Apply applies the rule to given file.
-func (r *UnhandledErrorRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
- r.configure(args)
-
+func (r *UnhandledErrorRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
walker := &lintUnhandledErrors{
@@ -116,7 +113,7 @@ func (w *lintUnhandledErrors) addFailure(n *ast.CallExpr) {
}
w.onFailure(lint.Failure{
- Category: "bad practice",
+ Category: lint.FailureCategoryBadPractice,
Confidence: 1,
Node: n,
Failure: fmt.Sprintf("Unhandled error in call to function %v", name),
@@ -130,9 +127,9 @@ func (w *lintUnhandledErrors) funcName(call *ast.CallExpr) string {
}
name := fn.FullName()
- name = strings.Replace(name, "(", "", -1)
- name = strings.Replace(name, ")", "", -1)
- name = strings.Replace(name, "*", "", -1)
+ name = strings.ReplaceAll(name, "(", "")
+ name = strings.ReplaceAll(name, ")", "")
+ name = strings.ReplaceAll(name, "*", "")
return name
}
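
Not part of the patch: the kind of call unhandled-error reports, an error-returning call used as a bare statement (hypothetical temp-file handling).

package main

import (
	"fmt"
	"os"
)

func main() {
	f, err := os.CreateTemp("", "example")
	if err != nil {
		fmt.Println(err)
		return
	}

	f.Close() // the returned error is dropped, the rule reports this call

	if err := os.Remove(f.Name()); err != nil { // handled, not reported
		fmt.Println(err)
	}
}
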
diff --git a/vendor/github.com/mgechev/revive/rule/unnecessary-stmt.go b/vendor/github.com/mgechev/revive/rule/unnecessary_stmt.go
similarity index 98%
rename from vendor/github.com/mgechev/revive/rule/unnecessary-stmt.go
rename to vendor/github.com/mgechev/revive/rule/unnecessary_stmt.go
index 8e0784ba4a..c9369ca9fe 100644
--- a/vendor/github.com/mgechev/revive/rule/unnecessary-stmt.go
+++ b/vendor/github.com/mgechev/revive/rule/unnecessary_stmt.go
@@ -101,7 +101,7 @@ func (w lintUnnecessaryStmtRule) newFailure(node ast.Node, msg string) {
w.onFailure(lint.Failure{
Confidence: 1,
Node: node,
- Category: "style",
+ Category: lint.FailureCategoryStyle,
Failure: msg,
})
}
diff --git a/vendor/github.com/mgechev/revive/rule/unreachable-code.go b/vendor/github.com/mgechev/revive/rule/unreachable_code.go
similarity index 97%
rename from vendor/github.com/mgechev/revive/rule/unreachable-code.go
rename to vendor/github.com/mgechev/revive/rule/unreachable_code.go
index dcc5b79056..dbc877148d 100644
--- a/vendor/github.com/mgechev/revive/rule/unreachable-code.go
+++ b/vendor/github.com/mgechev/revive/rule/unreachable_code.go
@@ -62,7 +62,6 @@ func (w lintUnreachableCode) Visit(node ast.Node) ast.Visitor {
}
loop:
for i, stmt := range blk.List[:len(blk.List)-1] {
- // println("iterating ", len(blk.List))
next := blk.List[i+1]
if _, ok := next.(*ast.LabeledStmt); ok {
continue // skip if next statement is labeled
@@ -116,7 +115,7 @@ func newUnreachableCodeFailure(node ast.Node) lint.Failure {
return lint.Failure{
Confidence: 1,
Node: node,
- Category: "logic",
+ Category: lint.FailureCategoryLogic,
Failure: "unreachable code after this statement",
}
}
diff --git a/vendor/github.com/mgechev/revive/rule/unused-receiver.go b/vendor/github.com/mgechev/revive/rule/unused-receiver.go
deleted file mode 100644
index 715dba3383..0000000000
--- a/vendor/github.com/mgechev/revive/rule/unused-receiver.go
+++ /dev/null
@@ -1,133 +0,0 @@
-package rule
-
-import (
- "fmt"
- "go/ast"
- "regexp"
- "sync"
-
- "github.com/mgechev/revive/lint"
-)
-
-// UnusedReceiverRule lints unused params in functions.
-type UnusedReceiverRule struct {
- configured bool
- // regex to check if some name is valid for unused parameter, "^_$" by default
- allowRegex *regexp.Regexp
- failureMsg string
- sync.Mutex
-}
-
-func (r *UnusedReceiverRule) configure(args lint.Arguments) {
- r.Lock()
- defer r.Unlock()
-
- if r.configured {
- return
- }
- r.configured = true
-
- // while by default args is an array, i think it's good to provide structures inside it by default, not arrays or primitives
- // it's more compatible to JSON nature of configurations
- var allowedRegexStr string
- if len(args) == 0 {
- allowedRegexStr = "^_$"
- r.failureMsg = "method receiver '%s' is not referenced in method's body, consider removing or renaming it as _"
- } else {
- // Arguments = [{}]
- options := args[0].(map[string]any)
- // Arguments = [{allowedRegex="^_"}]
-
- if allowedRegexParam, ok := options["allowRegex"]; ok {
- allowedRegexStr, ok = allowedRegexParam.(string)
- if !ok {
- panic(fmt.Errorf("error configuring [unused-receiver] rule: allowedRegex is not string but [%T]", allowedRegexParam))
- }
- }
- }
- var err error
- r.allowRegex, err = regexp.Compile(allowedRegexStr)
- if err != nil {
- panic(fmt.Errorf("error configuring [unused-receiver] rule: allowedRegex is not valid regex [%s]: %v", allowedRegexStr, err))
- }
- if r.failureMsg == "" {
- r.failureMsg = "method receiver '%s' is not referenced in method's body, consider removing or renaming it to match " + r.allowRegex.String()
- }
-}
-
-// Apply applies the rule to given file.
-func (r *UnusedReceiverRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
- r.configure(args)
- var failures []lint.Failure
-
- onFailure := func(failure lint.Failure) {
- failures = append(failures, failure)
- }
-
- w := lintUnusedReceiverRule{
- onFailure: onFailure,
- allowRegex: r.allowRegex,
- failureMsg: r.failureMsg,
- }
-
- ast.Walk(w, file.AST)
-
- return failures
-}
-
-// Name returns the rule name.
-func (*UnusedReceiverRule) Name() string {
- return "unused-receiver"
-}
-
-type lintUnusedReceiverRule struct {
- onFailure func(lint.Failure)
- allowRegex *regexp.Regexp
- failureMsg string
-}
-
-func (w lintUnusedReceiverRule) Visit(node ast.Node) ast.Visitor {
- switch n := node.(type) {
- case *ast.FuncDecl:
- if n.Recv == nil {
- return nil // skip this func decl, not a method
- }
-
- rec := n.Recv.List[0] // safe to access only the first (unique) element of the list
- if len(rec.Names) < 1 {
- return nil // the receiver is anonymous: func (aType) Foo(...) ...
- }
-
- recID := rec.Names[0]
- if recID.Name == "_" {
- return nil // the receiver is already named _
- }
-
- if w.allowRegex != nil && w.allowRegex.FindStringIndex(recID.Name) != nil {
- return nil
- }
-
- // inspect the func body looking for references to the receiver id
- fselect := func(n ast.Node) bool {
- ident, isAnID := n.(*ast.Ident)
-
- return isAnID && ident.Obj == recID.Obj
- }
- refs2recID := pick(n.Body, fselect)
-
- if len(refs2recID) > 0 {
- return nil // the receiver is referenced in the func body
- }
-
- w.onFailure(lint.Failure{
- Confidence: 1,
- Node: recID,
- Category: "bad practice",
- Failure: fmt.Sprintf(w.failureMsg, recID.Name),
- })
-
- return nil // full method body already inspected
- }
-
- return w
-}
diff --git a/vendor/github.com/mgechev/revive/rule/unused-param.go b/vendor/github.com/mgechev/revive/rule/unused_param.go
similarity index 62%
rename from vendor/github.com/mgechev/revive/rule/unused-param.go
rename to vendor/github.com/mgechev/revive/rule/unused_param.go
index 4b04ee916b..f57c59b069 100644
--- a/vendor/github.com/mgechev/revive/rule/unused-param.go
+++ b/vendor/github.com/mgechev/revive/rule/unused_param.go
@@ -4,61 +4,53 @@ import (
"fmt"
"go/ast"
"regexp"
- "sync"
"github.com/mgechev/revive/lint"
)
+var allowBlankIdentifierRegex = regexp.MustCompile("^_$")
+
// UnusedParamRule lints unused params in functions.
type UnusedParamRule struct {
- configured bool
// regex to check if some name is valid for unused parameter, "^_$" by default
allowRegex *regexp.Regexp
failureMsg string
- sync.Mutex
}
-func (r *UnusedParamRule) configure(args lint.Arguments) {
- r.Lock()
- defer r.Unlock()
-
- if r.configured {
- return
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *UnusedParamRule) Configure(args lint.Arguments) error {
+ // while by default args is an array, it could be good to provide structures inside it by default, not arrays or primitives
+ // as it's more compatible to JSON nature of configurations
+ r.allowRegex = allowBlankIdentifierRegex
+ r.failureMsg = "parameter '%s' seems to be unused, consider removing or renaming it as _"
+ if len(args) == 0 {
+ return nil
}
- r.configured = true
+ // Arguments = [{}]
+ options := args[0].(map[string]any)
- // while by default args is an array, i think it's good to provide structures inside it by default, not arrays or primitives
- // it's more compatible to JSON nature of configurations
- var allowedRegexStr string
- if len(args) == 0 {
- allowedRegexStr = "^_$"
- r.failureMsg = "parameter '%s' seems to be unused, consider removing or renaming it as _"
- } else {
- // Arguments = [{}]
- options := args[0].(map[string]any)
- // Arguments = [{allowedRegex="^_"}]
-
- if allowedRegexParam, ok := options["allowRegex"]; ok {
- allowedRegexStr, ok = allowedRegexParam.(string)
- if !ok {
- panic(fmt.Errorf("error configuring %s rule: allowedRegex is not string but [%T]", r.Name(), allowedRegexParam))
- }
- }
+ allowRegexParam, ok := options["allowRegex"]
+ if !ok {
+ return nil
+ }
+ // Arguments = [{allowRegex="^_"}]
+ allowRegexStr, ok := allowRegexParam.(string)
+ if !ok {
+ panic(fmt.Errorf("error configuring %s rule: allowRegex is not string but [%T]", r.Name(), allowRegexParam))
}
var err error
- r.allowRegex, err = regexp.Compile(allowedRegexStr)
+ r.allowRegex, err = regexp.Compile(allowRegexStr)
if err != nil {
- panic(fmt.Errorf("error configuring %s rule: allowedRegex is not valid regex [%s]: %v", r.Name(), allowedRegexStr, err))
- }
-
- if r.failureMsg == "" {
- r.failureMsg = "parameter '%s' seems to be unused, consider removing or renaming it to match " + r.allowRegex.String()
+ return fmt.Errorf("error configuring %s rule: allowRegex is not valid regex [%s]: %w", r.Name(), allowRegexStr, err)
}
+ r.failureMsg = "parameter '%s' seems to be unused, consider removing or renaming it to match " + r.allowRegex.String()
+ return nil
}
// Apply applies the rule to given file.
-func (r *UnusedParamRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
- r.configure(args)
+func (r *UnusedParamRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
onFailure := func(failure lint.Failure) {
@@ -137,7 +129,7 @@ func (w lintUnusedParamRule) Visit(node ast.Node) ast.Visitor {
w.onFailure(lint.Failure{
Confidence: 1,
Node: n,
- Category: "bad practice",
+ Category: lint.FailureCategoryBadPractice,
Failure: fmt.Sprintf(w.failureMsg, n.Name),
})
}
@@ -147,6 +139,7 @@ func (w lintUnusedParamRule) Visit(node ast.Node) ast.Visitor {
return w // full method body was inspected
}
+// TODO: ast.Object is deprecated
func retrieveNamedParams(params *ast.FieldList) map[*ast.Object]bool {
result := map[*ast.Object]bool{}
if params.List == nil {
diff --git a/vendor/github.com/mgechev/revive/rule/unused_receiver.go b/vendor/github.com/mgechev/revive/rule/unused_receiver.go
new file mode 100644
index 0000000000..13ca39dff3
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/unused_receiver.go
@@ -0,0 +1,101 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "regexp"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// UnusedReceiverRule lints unused receivers in functions.
+type UnusedReceiverRule struct {
+ // regex to check if some name is valid for unused parameter, "^_$" by default
+ allowRegex *regexp.Regexp
+ failureMsg string
+}
+
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *UnusedReceiverRule) Configure(args lint.Arguments) error {
+ // while by default args is an array, it could be good to provide structures inside it by default, not arrays or primitives
+ // as it's more compatible to JSON nature of configurations
+ r.allowRegex = allowBlankIdentifierRegex
+ r.failureMsg = "method receiver '%s' is not referenced in method's body, consider removing or renaming it as _"
+ if len(args) == 0 {
+ return nil
+ }
+ // Arguments = [{}]
+ options := args[0].(map[string]any)
+
+ allowRegexParam, ok := options["allowRegex"]
+ if !ok {
+ return nil
+ }
+ // Arguments = [{allowRegex="^_"}]
+ allowRegexStr, ok := allowRegexParam.(string)
+ if !ok {
+ panic(fmt.Errorf("error configuring [unused-receiver] rule: allowRegex is not string but [%T]", allowRegexParam))
+ }
+ var err error
+ r.allowRegex, err = regexp.Compile(allowRegexStr)
+ if err != nil {
+ return fmt.Errorf("error configuring [unused-receiver] rule: allowRegex is not valid regex [%s]: %w", allowRegexStr, err)
+ }
+ r.failureMsg = "method receiver '%s' is not referenced in method's body, consider removing or renaming it to match " + r.allowRegex.String()
+ return nil
+}
+
+// Apply applies the rule to given file.
+func (r *UnusedReceiverRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ for _, decl := range file.AST.Decls {
+ funcDecl, ok := decl.(*ast.FuncDecl)
+ isMethod := ok && funcDecl.Recv != nil
+ if !isMethod {
+ continue
+ }
+
+ rec := funcDecl.Recv.List[0] // safe to access only the first (unique) element of the list
+ if len(rec.Names) < 1 {
+ continue // the receiver is anonymous: func (aType) Foo(...) ...
+ }
+
+ recID := rec.Names[0]
+ if recID.Name == "_" {
+ continue // the receiver is already named _
+ }
+
+ if r.allowRegex != nil && r.allowRegex.FindStringIndex(recID.Name) != nil {
+ continue
+ }
+
+ // inspect the func body looking for references to the receiver id
+ selectReceiverUses := func(n ast.Node) bool {
+ ident, isAnID := n.(*ast.Ident)
+
+ return isAnID && ident.Obj == recID.Obj
+ }
+ receiverUses := pick(funcDecl.Body, selectReceiverUses)
+
+ if len(receiverUses) > 0 {
+ continue // the receiver is referenced in the func body
+ }
+
+ failures = append(failures, lint.Failure{
+ Confidence: 1,
+ Node: recID,
+ Category: lint.FailureCategoryBadPractice,
+ Failure: fmt.Sprintf(r.failureMsg, recID.Name),
+ })
+ }
+
+ return failures
+}
+
+// Name returns the rule name.
+func (*UnusedReceiverRule) Name() string {
+ return "unused-receiver"
+}
diff --git a/vendor/github.com/mgechev/revive/rule/use-any.go b/vendor/github.com/mgechev/revive/rule/use_any.go
similarity index 82%
rename from vendor/github.com/mgechev/revive/rule/use-any.go
rename to vendor/github.com/mgechev/revive/rule/use_any.go
index bdf3c936dd..0ebb4d6280 100644
--- a/vendor/github.com/mgechev/revive/rule/use-any.go
+++ b/vendor/github.com/mgechev/revive/rule/use_any.go
@@ -6,7 +6,7 @@ import (
"github.com/mgechev/revive/lint"
)
-// UseAnyRule lints given else constructs.
+// UseAnyRule proposes to replace `interface{}` with its alias `any`.
type UseAnyRule struct{}
// Apply applies the rule to given file.
@@ -46,8 +46,8 @@ func (w lintUseAny) Visit(n ast.Node) ast.Visitor {
w.onFailure(lint.Failure{
Node: n,
Confidence: 1,
- Category: "naming",
- Failure: "since GO 1.18 'interface{}' can be replaced by 'any'",
+ Category: lint.FailureCategoryNaming,
+ Failure: "since Go 1.18 'interface{}' can be replaced by 'any'",
})
return w
diff --git a/vendor/github.com/mgechev/revive/rule/use_errors_new.go b/vendor/github.com/mgechev/revive/rule/use_errors_new.go
new file mode 100644
index 0000000000..a43505484c
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/use_errors_new.go
@@ -0,0 +1,60 @@
+package rule
+
+import (
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// UseErrorsNewRule spots calls to fmt.Errorf that can be replaced by errors.New.
+type UseErrorsNewRule struct{}
+
+// Apply applies the rule to given file.
+func (*UseErrorsNewRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ walker := lintFmtErrorf{
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(walker, file.AST)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (*UseErrorsNewRule) Name() string {
+ return "use-errors-new"
+}
+
+type lintFmtErrorf struct {
+ onFailure func(lint.Failure)
+}
+
+func (w lintFmtErrorf) Visit(n ast.Node) ast.Visitor {
+ funcCall, ok := n.(*ast.CallExpr)
+ if !ok {
+ return w // not a function call
+ }
+
+ isFmtErrorf := isPkgDot(funcCall.Fun, "fmt", "Errorf")
+ if !isFmtErrorf {
+ return w // not a call to fmt.Errorf
+ }
+
+ if len(funcCall.Args) > 1 {
+ return w // the use of fmt.Errorf is legit
+ }
+
+ // the call is of the form fmt.Errorf("...")
+ w.onFailure(lint.Failure{
+ Category: lint.FailureCategoryErrors,
+ Node: n,
+ Confidence: 1,
+ Failure: "replace fmt.Errorf by errors.New",
+ })
+
+ return w
+}
diff --git a/vendor/github.com/mgechev/revive/rule/useless-break.go b/vendor/github.com/mgechev/revive/rule/useless_break.go
similarity index 100%
rename from vendor/github.com/mgechev/revive/rule/useless-break.go
rename to vendor/github.com/mgechev/revive/rule/useless_break.go
diff --git a/vendor/github.com/mgechev/revive/rule/utils.go b/vendor/github.com/mgechev/revive/rule/utils.go
index 5778e76963..5075c78234 100644
--- a/vendor/github.com/mgechev/revive/rule/utils.go
+++ b/vendor/github.com/mgechev/revive/rule/utils.go
@@ -6,29 +6,23 @@ import (
"go/ast"
"go/printer"
"go/token"
- "go/types"
"regexp"
- "strings"
"github.com/mgechev/revive/lint"
)
-// isBlank returns whether id is the blank identifier "_".
-// If id == nil, the answer is false.
-func isBlank(id *ast.Ident) bool { return id != nil && id.Name == "_" }
-
-var commonMethods = map[string]bool{
- "Error": true,
- "Read": true,
- "ServeHTTP": true,
- "String": true,
- "Write": true,
- "Unwrap": true,
-}
-
-var knownNameExceptions = map[string]bool{
- "LastInsertId": true, // must match database/sql
- "kWh": true,
+// exitFunctions is a map of std packages and functions that are considered as exit functions.
+var exitFunctions = map[string]map[string]bool{
+ "os": {"Exit": true},
+ "syscall": {"Exit": true},
+ "log": {
+ "Fatal": true,
+ "Fatalf": true,
+ "Fatalln": true,
+ "Panic": true,
+ "Panicf": true,
+ "Panicln": true,
+ },
}
func isCgoExported(f *ast.FuncDecl) bool {
@@ -45,34 +39,11 @@ func isCgoExported(f *ast.FuncDecl) bool {
return false
}
-var allCapsRE = regexp.MustCompile(`^[A-Z0-9_]+$`)
-
func isIdent(expr ast.Expr, ident string) bool {
id, ok := expr.(*ast.Ident)
return ok && id.Name == ident
}
-var zeroLiteral = map[string]bool{
- "false": true, // bool
- // runes
- `'\x00'`: true,
- `'\000'`: true,
- // strings
- `""`: true,
- "``": true,
- // numerics
- "0": true,
- "0.": true,
- "0.0": true,
- "0i": true,
-}
-
-func validType(t types.Type) bool {
- return t != nil &&
- t != types.Typ[types.Invalid] &&
- !strings.Contains(t.String(), "invalid type") // good but not foolproof
-}
-
// isPkgDot checks if the expression is <pkg>.<name>
func isPkgDot(expr ast.Expr, pkg, name string) bool {
sel, ok := expr.(*ast.SelectorExpr)
@@ -125,32 +96,6 @@ func (p picker) Visit(node ast.Node) ast.Visitor {
return p
}
-// isBoolOp returns true if the given token corresponds to
-// a bool operator
-func isBoolOp(t token.Token) bool {
- switch t {
- case token.LAND, token.LOR, token.EQL, token.NEQ:
- return true
- }
-
- return false
-}
-
-const (
- trueName = "true"
- falseName = "false"
-)
-
-func isExprABooleanLit(n ast.Node) (lexeme string, ok bool) {
- oper, ok := n.(*ast.Ident)
-
- if !ok {
- return "", false
- }
-
- return oper.Name, (oper.Name == trueName || oper.Name == falseName)
-}
-
// gofmt returns a string representation of an AST subtree.
func gofmt(x any) string {
buf := bytes.Buffer{}
@@ -160,8 +105,25 @@ func gofmt(x any) string {
}
// checkNumberOfArguments fails if the given number of arguments is not, at least, the expected one
-func checkNumberOfArguments(expected int, args lint.Arguments, ruleName string) {
+func checkNumberOfArguments(expected int, args lint.Arguments, ruleName string) error {
if len(args) < expected {
- panic(fmt.Sprintf("not enough arguments for %s rule, expected %d, got %d. Please check the rule's documentation", ruleName, expected, len(args)))
+ return fmt.Errorf("not enough arguments for %s rule, expected %d, got %d. Please check the rule's documentation", ruleName, expected, len(args))
}
+ return nil
+}
+
+var directiveCommentRE = regexp.MustCompile("^//(line |extern |export |[a-z0-9]+:[a-z0-9])") // see https://go-review.googlesource.com/c/website/+/442516/1..2/_content/doc/comment.md#494
+
+func isDirectiveComment(line string) bool {
+ return directiveCommentRE.MatchString(line)
+}
+
+// isCallToExitFunction checks if the function call is a call to an exit function.
+func isCallToExitFunction(pkgName, functionName string) bool {
+ return exitFunctions[pkgName] != nil && exitFunctions[pkgName][functionName]
+}
+
+// newInternalFailureError returns a slice of Failure with a single internal failure in it
+func newInternalFailureError(e error) []lint.Failure {
+ return []lint.Failure{lint.NewInternalFailure(e.Error())}
}
diff --git a/vendor/github.com/mgechev/revive/rule/var-declarations.go b/vendor/github.com/mgechev/revive/rule/var_declarations.go
similarity index 71%
rename from vendor/github.com/mgechev/revive/rule/var-declarations.go
rename to vendor/github.com/mgechev/revive/rule/var_declarations.go
index a15ff1eb41..8119fc8d45 100644
--- a/vendor/github.com/mgechev/revive/rule/var-declarations.go
+++ b/vendor/github.com/mgechev/revive/rule/var_declarations.go
@@ -5,11 +5,27 @@ import (
"go/ast"
"go/token"
"go/types"
+ "strings"
"github.com/mgechev/revive/lint"
)
-// VarDeclarationsRule lints given else constructs.
+var zeroLiteral = map[string]bool{
+ "false": true, // bool
+ // runes
+ `'\x00'`: true,
+ `'\000'`: true,
+ // strings
+ `""`: true,
+ "``": true,
+ // numerics
+ "0": true,
+ "0.": true,
+ "0.0": true,
+ "0i": true,
+}
+
+// VarDeclarationsRule reduces redundancies around variable declaration.
type VarDeclarationsRule struct{}
// Apply applies the rule to given file.
@@ -46,13 +62,15 @@ type lintVarDeclarations struct {
func (w *lintVarDeclarations) Visit(node ast.Node) ast.Visitor {
switch v := node.(type) {
case *ast.GenDecl:
- if v.Tok != token.CONST && v.Tok != token.VAR {
+ isVarOrConstDeclaration := v.Tok == token.CONST || v.Tok == token.VAR
+ if !isVarOrConstDeclaration {
return nil
}
w.lastGen = v
return w
case *ast.ValueSpec:
- if w.lastGen.Tok == token.CONST {
+ isConstDeclaration := w.lastGen.Tok == token.CONST
+ if isConstDeclaration {
return nil
}
if len(v.Names) > 1 || v.Type == nil || len(v.Values) == 0 {
@@ -64,18 +82,18 @@ func (w *lintVarDeclarations) Visit(node ast.Node) ast.Visitor {
if isIdent(v.Names[0], "_") {
return nil
}
- // If the RHS is a zero value, suggest dropping it.
- zero := false
+ // If the RHS is a zero value, suggest dropping it.
+ isZero := false
if lit, ok := rhs.(*ast.BasicLit); ok {
- zero = zeroLiteral[lit.Value]
+ isZero = isZeroValue(lit.Value, v.Type)
} else if isIdent(rhs, "nil") {
- zero = true
+ isZero = true
}
- if zero {
+ if isZero {
w.onFailure(lint.Failure{
Confidence: 0.9,
Node: rhs,
- Category: "zero-value",
+ Category: lint.FailureCategoryZeroValue,
Failure: fmt.Sprintf("should drop = %s from declaration of var %s; it is the zero value", w.file.Render(rhs), v.Names[0]),
})
return nil
@@ -109,7 +127,7 @@ func (w *lintVarDeclarations) Visit(node ast.Node) ast.Visitor {
}
w.onFailure(lint.Failure{
- Category: "type-inference",
+ Category: lint.FailureCategoryTypeInference,
Confidence: 0.8,
Node: v.Type,
Failure: fmt.Sprintf("should omit type %s from declaration of var %s; it will be inferred from the right-hand side", w.file.Render(v.Type), v.Names[0]),
@@ -118,3 +136,22 @@ func (w *lintVarDeclarations) Visit(node ast.Node) ast.Visitor {
}
return w
}
+
+func validType(t types.Type) bool {
+ return t != nil &&
+ t != types.Typ[types.Invalid] &&
+ !strings.Contains(t.String(), "invalid type") // good but not foolproof
+}
+
+func isZeroValue(litValue string, typ ast.Expr) bool {
+ switch val := typ.(type) {
+ case *ast.Ident:
+ if val.Name == "any" {
+ return litValue == "nil"
+ }
+ case *ast.InterfaceType:
+ return litValue == "nil"
+ }
+
+ return zeroLiteral[litValue]
+}
diff --git a/vendor/github.com/mgechev/revive/rule/var-naming.go b/vendor/github.com/mgechev/revive/rule/var_naming.go
similarity index 72%
rename from vendor/github.com/mgechev/revive/rule/var-naming.go
rename to vendor/github.com/mgechev/revive/rule/var_naming.go
index e91c22dc21..bffcbb2764 100644
--- a/vendor/github.com/mgechev/revive/rule/var-naming.go
+++ b/vendor/github.com/mgechev/revive/rule/var_naming.go
@@ -6,40 +6,48 @@ import (
"go/token"
"regexp"
"strings"
- "sync"
"github.com/mgechev/revive/lint"
)
var anyCapsRE = regexp.MustCompile(`[A-Z]`)
+var allCapsRE = regexp.MustCompile(`^[A-Z0-9_]+$`)
+
// regexp for constant names like `SOME_CONST`, `SOME_CONST_2`, `X123_3`, `_SOME_PRIVATE_CONST` (#851, #865)
var upperCaseConstRE = regexp.MustCompile(`^_?[A-Z][A-Z\d]*(_[A-Z\d]+)*$`)
-// VarNamingRule lints given else constructs.
+var knownNameExceptions = map[string]bool{
+ "LastInsertId": true, // must match database/sql
+ "kWh": true,
+}
+
+// VarNamingRule lints the name of a variable.
type VarNamingRule struct {
- configured bool
- allowlist []string
- blocklist []string
- upperCaseConst bool // if true - allows to use UPPER_SOME_NAMES for constants
+ allowList []string
+ blockList []string
+ allowUpperCaseConst bool // if true - allows to use UPPER_SOME_NAMES for constants
skipPackageNameChecks bool
- sync.Mutex
}
-func (r *VarNamingRule) configure(arguments lint.Arguments) {
- r.Lock()
- defer r.Unlock()
- if r.configured {
- return
- }
-
- r.configured = true
+// Configure validates the rule configuration, and configures the rule accordingly.
+//
+// Configuration implements the [lint.ConfigurableRule] interface.
+func (r *VarNamingRule) Configure(arguments lint.Arguments) error {
if len(arguments) >= 1 {
- r.allowlist = getList(arguments[0], "allowlist")
+ list, err := getList(arguments[0], "allowlist")
+ if err != nil {
+ return err
+ }
+ r.allowList = list
}
if len(arguments) >= 2 {
- r.blocklist = getList(arguments[1], "blocklist")
+ list, err := getList(arguments[1], "blocklist")
+ if err != nil {
+ return err
+ }
+ r.blockList = list
}
if len(arguments) >= 3 {
@@ -47,28 +55,29 @@ func (r *VarNamingRule) configure(arguments lint.Arguments) {
thirdArgument := arguments[2]
asSlice, ok := thirdArgument.([]any)
if !ok {
- panic(fmt.Sprintf("Invalid third argument to the var-naming rule. Expecting a %s of type slice, got %T", "options", arguments[2]))
+ return fmt.Errorf("invalid third argument to the var-naming rule. Expecting a %s of type slice, got %T", "options", arguments[2])
}
if len(asSlice) != 1 {
- panic(fmt.Sprintf("Invalid third argument to the var-naming rule. Expecting a %s of type slice, of len==1, but %d", "options", len(asSlice)))
+ return fmt.Errorf("invalid third argument to the var-naming rule. Expecting a %s of type slice, of len==1, but %d", "options", len(asSlice))
}
args, ok := asSlice[0].(map[string]any)
if !ok {
- panic(fmt.Sprintf("Invalid third argument to the var-naming rule. Expecting a %s of type slice, of len==1, with map, but %T", "options", asSlice[0]))
+ return fmt.Errorf("invalid third argument to the var-naming rule. Expecting a %s of type slice, of len==1, with map, but %T", "options", asSlice[0])
}
- r.upperCaseConst = fmt.Sprint(args["upperCaseConst"]) == "true"
+ r.allowUpperCaseConst = fmt.Sprint(args["upperCaseConst"]) == "true"
r.skipPackageNameChecks = fmt.Sprint(args["skipPackageNameChecks"]) == "true"
}
+ return nil
}
-func (r *VarNamingRule) applyPackageCheckRules(walker *lintNames) {
+func (*VarNamingRule) applyPackageCheckRules(walker *lintNames) {
// Package names need slightly different handling than other names.
if strings.Contains(walker.fileAst.Name.Name, "_") && !strings.HasSuffix(walker.fileAst.Name.Name, "_test") {
walker.onFailure(lint.Failure{
Failure: "don't use an underscore in package name",
Confidence: 1,
Node: walker.fileAst.Name,
- Category: "naming",
+ Category: lint.FailureCategoryNaming,
})
}
if anyCapsRE.MatchString(walker.fileAst.Name.Name) {
@@ -76,16 +85,13 @@ func (r *VarNamingRule) applyPackageCheckRules(walker *lintNames) {
Failure: fmt.Sprintf("don't use MixedCaps in package name; %s should be %s", walker.fileAst.Name.Name, strings.ToLower(walker.fileAst.Name.Name)),
Confidence: 1,
Node: walker.fileAst.Name,
- Category: "naming",
+ Category: lint.FailureCategoryNaming,
})
}
-
}
// Apply applies the rule to given file.
-func (r *VarNamingRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
- r.configure(arguments)
-
+func (r *VarNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
fileAst := file.AST
@@ -93,12 +99,12 @@ func (r *VarNamingRule) Apply(file *lint.File, arguments lint.Arguments) []lint.
walker := lintNames{
file: file,
fileAst: fileAst,
- allowlist: r.allowlist,
- blocklist: r.blocklist,
+ allowList: r.allowList,
+ blockList: r.blockList,
onFailure: func(failure lint.Failure) {
failures = append(failures, failure)
},
- upperCaseConst: r.upperCaseConst,
+ upperCaseConst: r.allowUpperCaseConst,
}
if !r.skipPackageNameChecks {
@@ -146,12 +152,12 @@ func (w *lintNames) check(id *ast.Ident, thing string) {
Failure: "don't use ALL_CAPS in Go names; use CamelCase",
Confidence: 0.8,
Node: id,
- Category: "naming",
+ Category: lint.FailureCategoryNaming,
})
return
}
- should := lint.Name(id.Name, w.allowlist, w.blocklist)
+ should := lint.Name(id.Name, w.allowList, w.blockList)
if id.Name == should {
return
}
@@ -161,7 +167,7 @@ func (w *lintNames) check(id *ast.Ident, thing string) {
Failure: fmt.Sprintf("don't use underscores in Go names; %s %s should be %s", thing, id.Name, should),
Confidence: 0.9,
Node: id,
- Category: "naming",
+ Category: lint.FailureCategoryNaming,
})
return
}
@@ -169,7 +175,7 @@ func (w *lintNames) check(id *ast.Ident, thing string) {
Failure: fmt.Sprintf("%s %s should be %s", thing, id.Name, should),
Confidence: 0.8,
Node: id,
- Category: "naming",
+ Category: lint.FailureCategoryNaming,
})
}
@@ -177,8 +183,8 @@ type lintNames struct {
file *lint.File
fileAst *ast.File
onFailure func(lint.Failure)
- allowlist []string
- blocklist []string
+ allowList []string
+ blockList []string
upperCaseConst bool
}
@@ -264,18 +270,18 @@ func (w *lintNames) Visit(n ast.Node) ast.Visitor {
return w
}
-func getList(arg any, argName string) []string {
- temp, ok := arg.([]any)
+func getList(arg any, argName string) ([]string, error) {
+ args, ok := arg.([]any)
if !ok {
- panic(fmt.Sprintf("Invalid argument to the var-naming rule. Expecting a %s of type slice with initialisms, got %T", argName, arg))
+ return nil, fmt.Errorf("invalid argument to the var-naming rule. Expecting a %s of type slice with initialisms, got %T", argName, arg)
}
var list []string
- for _, v := range temp {
- if val, ok := v.(string); ok {
- list = append(list, val)
- } else {
- panic(fmt.Sprintf("Invalid %s values of the var-naming rule. Expecting slice of strings but got element of type %T", val, arg))
+ for _, v := range args {
+ val, ok := v.(string)
+ if !ok {
+ return nil, fmt.Errorf("invalid %s values of the var-naming rule. Expecting slice of strings but got element of type %T", val, arg)
}
+ list = append(list, val)
}
- return list
+ return list, nil
}
diff --git a/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go b/vendor/github.com/mgechev/revive/rule/waitgroup_by_value.go
similarity index 94%
rename from vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go
rename to vendor/github.com/mgechev/revive/rule/waitgroup_by_value.go
index 98644f41c4..1b8c2756cd 100644
--- a/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go
+++ b/vendor/github.com/mgechev/revive/rule/waitgroup_by_value.go
@@ -38,7 +38,7 @@ func (w lintWaitGroupByValueRule) Visit(node ast.Node) ast.Visitor {
return w
}
- // Check all function's parameters
+ // Check all function parameters
for _, field := range fd.Type.Params.List {
if !w.isWaitGroup(field.Type) {
continue
@@ -51,7 +51,7 @@ func (w lintWaitGroupByValueRule) Visit(node ast.Node) ast.Visitor {
})
}
- return nil
+ return nil // skip visiting function body
}
func (lintWaitGroupByValueRule) isWaitGroup(ft ast.Expr) bool {
diff --git a/vendor/github.com/moricho/tparallel/.goreleaser.yaml b/vendor/github.com/moricho/tparallel/.goreleaser.yaml
index 4a04fe25b5..5349004c58 100644
--- a/vendor/github.com/moricho/tparallel/.goreleaser.yaml
+++ b/vendor/github.com/moricho/tparallel/.goreleaser.yaml
@@ -1,7 +1,9 @@
project_name: tparallel
+
before:
hooks:
- go mod tidy
+
builds:
- main: ./cmd/tparallel
binary: tparallel
@@ -19,15 +21,17 @@ builds:
archives:
- format: tar.gz
name_template: >-
- {{ .ProjectName }}_
+ {{- .ProjectName }}_
{{- title .Os }}_
{{- if eq .Arch "amd64" }}x86_64
{{- else if eq .Arch "386" }}i386
{{- else }}{{ .Arch }}{{ end }}
- {{- if .Arm }}v{{ .Arm }}{{ end }}
+ {{- if .Arm }}v{{ .Arm }}{{ end -}}
+
format_overrides:
- - goos: windows
- format: zip
+ - goos: windows
+ format: zip
+
checksum:
name_template: 'checksums.txt'
snapshot:
@@ -38,10 +42,12 @@ changelog:
exclude:
- '^docs:'
- '^test:'
+
release:
prerelease: auto
+
brews:
- - tap:
+ - repository:
owner: moricho
name: homebrew-tparallel
homepage: https://github.com/moricho/tparallel
@@ -49,4 +55,4 @@ brews:
install: |
bin.install "tparallel"
test: |
- system "#{bin}/goreleaser -v"
+ system "#{bin}/tparallel help"
diff --git a/vendor/github.com/moricho/tparallel/README.md b/vendor/github.com/moricho/tparallel/README.md
index 65ed46c422..c4f1efd01a 100644
--- a/vendor/github.com/moricho/tparallel/README.md
+++ b/vendor/github.com/moricho/tparallel/README.md
@@ -10,7 +10,7 @@ It detects the following:
- `t.Parallel()` is called in either a top-level test function or a sub-test function only
- Although `t.Parallel()` is called in the sub-test function, it is post-processed by `defer` instead of `t.Cleanup()`
-This tool was inspired by this blog: [Go 言語でのテストの並列化 〜t.Parallel()メソッドを理解する〜](https://engineering.mercari.com/blog/entry/how_to_use_t_parallel/)
+This tool was inspired by this blog: [Test parallelization in Go: Understanding the t.Parallel() method](https://engineering.mercari.com/en/blog/entry/20220408-how_to_use_t_parallel/)
## Installation
diff --git a/vendor/github.com/moricho/tparallel/testmap.go b/vendor/github.com/moricho/tparallel/testmap.go
index fa9bed7082..fd6a3b4326 100644
--- a/vendor/github.com/moricho/tparallel/testmap.go
+++ b/vendor/github.com/moricho/tparallel/testmap.go
@@ -49,6 +49,10 @@ func appendTestMap(subtests []*ssa.Function, instr ssa.Instruction) []*ssa.Funct
}
ssaCall := call.Value()
+ if ssaCall == nil {
+ return subtests
+ }
+
for _, arg := range ssaCall.Call.Args {
switch arg := arg.(type) {
case *ssa.Function:
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/.gitignore b/vendor/github.com/nunnatsa/ginkgolinter/.gitignore
index 7d7f8b10ce..67467b7170 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/.gitignore
+++ b/vendor/github.com/nunnatsa/ginkgolinter/.gitignore
@@ -1,2 +1,3 @@
ginkgolinter
bin/
+e2e
\ No newline at end of file
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/Makefile b/vendor/github.com/nunnatsa/ginkgolinter/Makefile
index 586633006a..8ddd8c42ce 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/Makefile
+++ b/vendor/github.com/nunnatsa/ginkgolinter/Makefile
@@ -5,7 +5,7 @@ HASH_FLAG := -X github.com/nunnatsa/ginkgolinter/version.gitHash=$(COMMIT_HASH)
BUILD_ARGS := -ldflags "$(VERSION_FLAG) $(HASH_FLAG)"
-build: unit-test
+build: goimports
go build $(BUILD_ARGS) -o ginkgolinter ./cmd/ginkgolinter
unit-test:
@@ -23,5 +23,11 @@ build-for-linux:
build-all: build build-for-linux build-for-mac build-for-windows
-test: build
- ./tests/e2e.sh
+test-cli:
+ cd tests; go test -v ./
+
+test: unit-test test-cli
+
+goimports:
+ go install golang.org/x/tools/cmd/goimports@latest
+ goimports -w -local="github.com/nunnatsa/ginkgolinter" $(shell find . -type f -name '*.go' ! -path "*/vendor/*")
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/README.md b/vendor/github.com/nunnatsa/ginkgolinter/README.md
index 977cec903e..83c436359f 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/README.md
+++ b/vendor/github.com/nunnatsa/ginkgolinter/README.md
@@ -180,7 +180,7 @@ var _ = Describe("checking something", Focus, func() {
These container, or the `Focus` spec, must not be part of the final source code, and should only be used locally by the
developer.
-***This rule is disabled by default***. Use the `--forbid-focus-container=true` command line flag to enable it.
+***This rule is disabled by default***. Use the `--forbid-focus-container` command line flag to enable it.
### Comparing values from different types [BUG]
@@ -189,7 +189,7 @@ The `Equal` and the `BeIdentical` matchers also check the type, not only the val
The following code will fail in runtime:
```go
x := 5 // x is int
-Expect(x).Should(Eqaul(uint(5)) // x and uint(5) are with different
+Expect(x).Should(Equal(uint(5)) // x and uint(5) are with different
```
When using negative checks, it's even worse, because we get a false positive:
```
@@ -202,7 +202,7 @@ using casting, or use the `BeEquivalentTo` matcher.
The linter can't guess what is the best solution in each case, and so it won't auto-fix this warning.
-To suppress this warning entirely, use the `--suppress-type-compare-assertion=true` command line parameter.
+To suppress this warning entirely, use the `--suppress-type-compare-assertion` command line parameter.
To suppress a specific file or line, use the `// ginkgo-linter:ignore-type-compare-warning` comment (see [below](#suppress-warning-from-the-code))
@@ -234,7 +234,7 @@ flag **is** set.
***Note***: This rule work with best-effort approach. It can't find many cases, like const defined not in the same
package, or when using variables.
-The timeout and polling intervals may be passed as optional arguments to the `Eventually` or `Constanly` functions, or
+The timeout and polling intervals may be passed as optional arguments to the `Eventually` or `Consistently` functions, or
using the `WithTimeout` or , `Within` methods (timeout), and `WithPolling` or `ProbeEvery` methods (polling).
This rule checks if the async (`Eventually` or `Consistently`) timeout duration, is not shorter than the polling interval.
@@ -249,8 +249,32 @@ This will probably happen when using the old format:
Eventually(aFunc, 500 * time.Millisecond /*timeout*/, 10 * time.Second /*polling*/).Should(Succeed())
```
+### Prevent Wrong Actual Values with the Succeed() matcher [Bug]
+The `Succeed()` matcher only accepts a single error value. This rule validates that.
+
+For example:
+ ```go
+ Expect(42).To(Succeed())
+ ```
+
+But mostly, we want to avoid using this matcher with functions that return multiple values, even if their last
+returned value is an error, because this is not supported:
+ ```go
+ Expect(os.Open("myFile.txt")).To(Succeed())
+ ```
+
+In async assertions (like `Eventually()`), the `Succeed()` matcher may also be used with functions that accept
+a Gomega object as their first parameter and return nothing, e.g. this is a valid usage of `Eventually`
+ ```go
+ Eventually(func(g Gomega){
+ g.Expect(true).To(BeTrue())
+ }).WithTimeout(10 * time.Millisecond).WithPolling(time.Millisecond).Should(Succeed())
+ ```
+
+***Note***: This rule **does not** support auto-fix.
+
### Avoid Spec Pollution: Don't Initialize Variables in Container Nodes [BUG/STYLE]:
-***Note***: Only applied when the `--forbid-spec-pollution=true` flag is set (disabled by default).
+***Note***: Only applied when the `--forbid-spec-pollution` flag is set (disabled by default).
According to [ginkgo documentation](https://onsi.github.io/ginkgo/#avoid-spec-pollution-dont-initialize-variables-in-container-nodes),
no variable should be assigned within a container node (`Describe`, `Context`, `When` or their `F`, `P` or `X` forms)
@@ -427,13 +451,13 @@ Expect("abc").ShouldNot(BeEmpty()) // => Expect("abc").ToNot(BeEmpty())
```
This rule support auto fixing.
-***This rule is disabled by default***. Use the `--force-expect-to=true` command line flag to enable it.
+***This rule is disabled by default***. Use the `--force-expect-to` command line flag to enable it.
### Async timing interval: multiple timeout or polling intervals [STYLE]
***Note***: Only applied when the `suppress-async-assertion` flag is **not set** *and* the `validate-async-intervals`
flag **is** set.
-The timeout and polling intervals may be passed as optional arguments to the `Eventually` or `Constanly` functions, or
+The timeout and polling intervals may be passed as optional arguments to the `Eventually` or `Consistently` functions, or
using the `WithTimeout` or , `Within` methods (timeout), and `WithPolling` or `ProbeEvery` methods (polling).
The linter checks that there is up to one polling argument and up to one timeout argument.
@@ -451,7 +475,7 @@ Eventually(aFunc, time.Second*10, time.Millisecond * 500).WithPolling(time.Milli
***Note***: Only applied when the `suppress-async-assertion` flag is **not set** *and* the `validate-async-intervals`
flag **is** set.
-gomega supports a few formats for timeout and polling intervals, when using the old format (the last two parameters of Eventually and Constantly):
+gomega supports a few formats for timeout and polling intervals, when using the old format (the last two parameters of Eventually and Consistently):
* a `time.Duration` value
* any kind of numeric value (int(8/16/32/64), uint(8/16/32/64) or float(32/64), as the number of seconds.
* duration string like `"12s"`
@@ -476,16 +500,42 @@ will be changed to:
```go
Eventually(aFunc, time.Second*5, time.Second*polling)
```
+
+### Correct usage of the `Succeed()` and the `HaveOccurred()` matchers
+This rule enforces using the `Succeed()` matcher only for functions, and the `HaveOccurred()` matcher only for error
+values.
+
+For example:
+ ```go
+ Expect(err).To(Succeed())
+ ```
+will trigger a warning with a suggestion to replace the matcher with
+ ```go
+ Expect(err).ToNot(HaveOccurred())
+ ```
+
+and vice versa:
+ ```go
+ Expect(myErrorFunc()).ToNot(HaveOccurred())
+ ```
+will trigger a warning with a suggestion to replace the matcher with
+ ```go
+ Expect(myErrorFunc()).To(Succeed())
+ ```
+***This rule is disabled by default***. Use the `--force-succeed` command line flag to enable it.
+
+***Note***: This rule **does** support auto-fix, when the `--fix` command line parameter is used.
+
## Suppress the linter
### Suppress warning from command line
-* Use the `--suppress-len-assertion=true` flag to suppress the wrong length and cap assertions warning
-* Use the `--suppress-nil-assertion=true` flag to suppress the wrong nil assertion warning
-* Use the `--suppress-err-assertion=true` flag to suppress the wrong error assertion warning
-* Use the `--suppress-compare-assertion=true` flag to suppress the wrong comparison assertion warning
-* Use the `--suppress-async-assertion=true` flag to suppress the function call in async assertion warning
-* Use the `--forbid-focus-container=true` flag to activate the focused container assertion (deactivated by default)
-* Use the `--suppress-type-compare-assertion=true` to suppress the type compare assertion warning
-* Use the `--allow-havelen-0=true` flag to avoid warnings about `HaveLen(0)`; Note: this parameter is only supported from
+* Use the `--suppress-len-assertion` flag to suppress the wrong length and cap assertions warning
+* Use the `--suppress-nil-assertion` flag to suppress the wrong nil assertion warning
+* Use the `--suppress-err-assertion` flag to suppress the wrong error assertion warning
+* Use the `--suppress-compare-assertion` flag to suppress the wrong comparison assertion warning
+* Use the `--suppress-async-assertion` flag to suppress the function call in async assertion warning
+* Use the `--forbid-focus-container` flag to activate the focused container assertion (deactivated by default)
+* Use the `--suppress-type-compare-assertion` to suppress the type compare assertion warning
+* Use the `--allow-havelen-0` flag to avoid warnings about `HaveLen(0)`; Note: this parameter is only supported from
command line, and not from a comment.
### Suppress warning from the code
@@ -509,7 +559,7 @@ To suppress the wrong async assertion warning, add a comment with (only)
`ginkgo-linter:ignore-async-assert-warning`.
-To supress the focus container warning, add a comment with (only)
+To suppress the focus container warning, add a comment with (only)
`ginkgo-linter:ignore-focus-container-warning`
@@ -522,10 +572,10 @@ Notice that this comment will not work for an anonymous variable container like
// ginkgo-linter:ignore-focus-container-warning (not working!!)
var _ = FDescribe(...)
```
-In this case, use the file comment (see bellow).
+In this case, use the file comment (see below).
There are two options to use these comments:
-1. If the comment is at the top of the file, supress the warning for the whole file; e.g.:
+1. If the comment is at the top of the file, suppress the warning for the whole file; e.g.:
```go
package mypackage
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/analyzer.go b/vendor/github.com/nunnatsa/ginkgolinter/analyzer.go
index edff57acd1..ac762cd9b6 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/analyzer.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/analyzer.go
@@ -25,31 +25,31 @@ func NewAnalyzerWithConfig(config *types.Config) *analysis.Analyzer {
// NewAnalyzer returns an Analyzer - the package interface with nogo
func NewAnalyzer() *analysis.Analyzer {
config := &types.Config{
- SuppressLen: false,
- SuppressNil: false,
- SuppressErr: false,
- SuppressCompare: false,
- ForbidFocus: false,
- AllowHaveLen0: false,
- ForceExpectTo: false,
+ SuppressLen: false,
+ SuppressNil: false,
+ SuppressErr: false,
+ SuppressCompare: false,
+ ForbidFocus: false,
+ AllowHaveLen0: false,
+ ForceExpectTo: false,
+ ForceSucceedForFuncs: false,
}
a := NewAnalyzerWithConfig(config)
- var ignored bool
a.Flags.Init("ginkgolinter", flag.ExitOnError)
- a.Flags.Var(&config.SuppressLen, "suppress-len-assertion", "Suppress warning for wrong length assertions")
- a.Flags.Var(&config.SuppressNil, "suppress-nil-assertion", "Suppress warning for wrong nil assertions")
- a.Flags.Var(&config.SuppressErr, "suppress-err-assertion", "Suppress warning for wrong error assertions")
- a.Flags.Var(&config.SuppressCompare, "suppress-compare-assertion", "Suppress warning for wrong comparison assertions")
- a.Flags.Var(&config.SuppressAsync, "suppress-async-assertion", "Suppress warning for function call in async assertion, like Eventually")
- a.Flags.Var(&config.ValidateAsyncIntervals, "validate-async-intervals", "best effort validation of async intervals (timeout and polling); ignored the suppress-async-assertion flag is true")
- a.Flags.Var(&config.SuppressTypeCompare, "suppress-type-compare-assertion", "Suppress warning for comparing values from different types, like int32 and uint32")
- a.Flags.Var(&config.AllowHaveLen0, "allow-havelen-0", "Do not warn for HaveLen(0); default = false")
- a.Flags.Var(&config.ForceExpectTo, "force-expect-to", "force using `Expect` with `To`, `ToNot` or `NotTo`. reject using `Expect` with `Should` or `ShouldNot`; default = false (not forced)")
- a.Flags.BoolVar(&ignored, "suppress-focus-container", true, "Suppress warning for ginkgo focus containers like FDescribe, FContext, FWhen or FIt. Deprecated and ignored: use --forbid-focus-container instead")
- a.Flags.Var(&config.ForbidFocus, "forbid-focus-container", "trigger a warning for ginkgo focus containers like FDescribe, FContext, FWhen or FIt; default = false.")
- a.Flags.Var(&config.ForbidSpecPollution, "forbid-spec-pollution", "trigger a warning for variable assignments in ginkgo containers like Describe, Context and When, instead of in BeforeEach(); default = false.")
+ a.Flags.BoolVar(&config.SuppressLen, "suppress-len-assertion", config.SuppressLen, "Suppress warning for wrong length assertions")
+ a.Flags.BoolVar(&config.SuppressNil, "suppress-nil-assertion", config.SuppressNil, "Suppress warning for wrong nil assertions")
+ a.Flags.BoolVar(&config.SuppressErr, "suppress-err-assertion", config.SuppressErr, "Suppress warning for wrong error assertions")
+ a.Flags.BoolVar(&config.SuppressCompare, "suppress-compare-assertion", config.SuppressCompare, "Suppress warning for wrong comparison assertions")
+ a.Flags.BoolVar(&config.SuppressAsync, "suppress-async-assertion", config.SuppressAsync, "Suppress warning for function call in async assertion, like Eventually")
+ a.Flags.BoolVar(&config.ValidateAsyncIntervals, "validate-async-intervals", config.ValidateAsyncIntervals, "best effort validation of async intervals (timeout and polling); ignored if the suppress-async-assertion flag is true")
+ a.Flags.BoolVar(&config.SuppressTypeCompare, "suppress-type-compare-assertion", config.SuppressTypeCompare, "Suppress warning for comparing values from different types, like int32 and uint32")
+ a.Flags.BoolVar(&config.AllowHaveLen0, "allow-havelen-0", config.AllowHaveLen0, "Do not warn for HaveLen(0); default = false")
+ a.Flags.BoolVar(&config.ForceExpectTo, "force-expect-to", config.ForceExpectTo, "force using `Expect` with `To`, `ToNot` or `NotTo`. reject using `Expect` with `Should` or `ShouldNot`; default = false (not forced)")
+ a.Flags.BoolVar(&config.ForbidFocus, "forbid-focus-container", config.ForbidFocus, "trigger a warning for ginkgo focus containers like FDescribe, FContext, FWhen or FIt; default = false.")
+ a.Flags.BoolVar(&config.ForbidSpecPollution, "forbid-spec-pollution", config.ForbidSpecPollution, "trigger a warning for variable assignments in ginkgo containers like Describe, Context and When, instead of in BeforeEach(); default = false.")
+ a.Flags.BoolVar(&config.ForceSucceedForFuncs, "force-succeed", config.ForceSucceedForFuncs, "force using the Succeed matcher for error functions, and the HaveOccurred matcher for non-function error values")
return a
}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/doc.go b/vendor/github.com/nunnatsa/ginkgolinter/doc.go
index dd9ecf58a8..2a935e9b34 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/doc.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/doc.go
@@ -30,6 +30,14 @@ For example:
This will probably happen when using the old format:
Eventually(aFunc, 500 * time.Millisecond, 10 * time.Second).Should(Succeed())
+* Success matcher validation: [BUG]
+ The Success matcher expects that the actual argument will be a single error. In async actual assertions, it also allows
+ functions with a Gomega object as the function's first parameter.
+For example:
+ Expect(myInt).To(Succeed())
+or
+ Eventually(func() int { return 42 }).Should(Succeed())
+
* reject variable assignments in ginkgo containers [Bug/Style]:
For example:
var _ = Describe("description", func(){
@@ -86,7 +94,7 @@ For example:
Eventually(func() bool { return true }, time.Second*10, 500*time.Millisecond).ProbeEvery(time.Millisecond * 500).Should(BeTrue())
* async timing interval: non-time.Duration intervals [Style]
-gomega supports a few formats for timeout and polling intervals, when using the old format (the last two parameters of Eventually and Constantly):
+gomega supports a few formats for timeout and polling intervals, when using the old format (the last two parameters of Eventually and Consistently):
* time.Duration
* any kind of numeric value, as number of seconds
* duration string like "12s"
@@ -96,4 +104,13 @@ methods.
For example:
Eventually(context.Background(), func() bool { return true }, "1s").Should(BeTrue())
Eventually(context.Background(), func() bool { return true }, time.Second*60, 15).Should(BeTrue())
+
+* Success <=> Eventually usage [Style]
+ enforce that the Succeed() matcher will be used for error functions, and the HaveOccurred() matcher will
+ be used for error values.
+
+For example:
+ Expect(err).ToNot(Succeed())
+or
+ Expect(funcRetError()).ToNot(HaveOccurred())
`
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actual.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actual.go
new file mode 100644
index 0000000000..5bd6dd6e7e
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actual.go
@@ -0,0 +1,113 @@
+package actual
+
+import (
+ "go/ast"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
+ "github.com/nunnatsa/ginkgolinter/internal/gomegainfo"
+)
+
+type Actual struct {
+ Orig *ast.CallExpr
+ Clone *ast.CallExpr
+ Arg ArgPayload
+ argType gotypes.Type
+ isTuple bool
+ isAsync bool
+ asyncArg *AsyncArg
+ actualOffset int
+}
+
+func New(origExpr, cloneExpr *ast.CallExpr, orig *ast.CallExpr, clone *ast.CallExpr, pass *analysis.Pass, timePkg string, info *gomegahandler.GomegaBasicInfo) (*Actual, bool) {
+ arg, actualOffset := getActualArgPayload(orig, clone, pass, info)
+ if arg == nil {
+ return nil, false
+ }
+
+ argType := pass.TypesInfo.TypeOf(orig.Args[actualOffset])
+ isTuple := false
+
+ if tpl, ok := argType.(*gotypes.Tuple); ok {
+ if tpl.Len() > 0 {
+ argType = tpl.At(0).Type()
+ } else {
+ argType = nil
+ }
+
+ isTuple = tpl.Len() > 1
+ }
+
+ isAsyncExpr := gomegainfo.IsAsyncActualMethod(info.MethodName)
+
+ var asyncArg *AsyncArg
+ if isAsyncExpr {
+ asyncArg = newAsyncArg(origExpr, cloneExpr, orig, clone, argType, pass, actualOffset, timePkg)
+ }
+
+ return &Actual{
+ Orig: orig,
+ Clone: clone,
+ Arg: arg,
+ argType: argType,
+ isTuple: isTuple,
+ isAsync: isAsyncExpr,
+ asyncArg: asyncArg,
+ actualOffset: actualOffset,
+ }, true
+}
+
+func (a *Actual) ReplaceActual(newArgs ast.Expr) {
+ a.Clone.Args[a.actualOffset] = newArgs
+}
+
+func (a *Actual) ReplaceActualWithItsFirstArg() {
+ firstArgOfArg := a.Clone.Args[a.actualOffset].(*ast.CallExpr).Args[0]
+ a.ReplaceActual(firstArgOfArg)
+}
+
+func (a *Actual) IsAsync() bool {
+ return a.isAsync
+}
+
+func (a *Actual) IsTuple() bool {
+ return a.isTuple
+}
+
+func (a *Actual) ArgGOType() gotypes.Type {
+ return a.argType
+}
+
+func (a *Actual) GetAsyncArg() *AsyncArg {
+ return a.asyncArg
+}
+
+func (a *Actual) AppendWithArgsMethod() {
+ if a.asyncArg.fun != nil {
+ if len(a.asyncArg.fun.Args) > 0 {
+ actualOrigFunc := a.Clone.Fun
+ actualOrigArgs := a.Clone.Args
+
+ actualOrigArgs[a.actualOffset] = a.asyncArg.fun.Fun
+ call := &ast.SelectorExpr{
+ Sel: ast.NewIdent("WithArguments"),
+ X: &ast.CallExpr{
+ Fun: actualOrigFunc,
+ Args: actualOrigArgs,
+ },
+ }
+
+ a.Clone.Fun = call
+ a.Clone.Args = a.asyncArg.fun.Args
+ a.Clone = a.Clone.Fun.(*ast.SelectorExpr).X.(*ast.CallExpr)
+ } else {
+ a.Clone.Args[a.actualOffset] = a.asyncArg.fun.Fun
+ }
+ }
+}
+
+func (a *Actual) GetActualArg() ast.Expr {
+ return a.Clone.Args[a.actualOffset]
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actualarg.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actualarg.go
new file mode 100644
index 0000000000..7ba83c5869
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actualarg.go
@@ -0,0 +1,248 @@
+package actual
+
+import (
+ "go/ast"
+ "go/token"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+ "github.com/nunnatsa/ginkgolinter/internal/ginkgoinfo"
+ "github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
+ "github.com/nunnatsa/ginkgolinter/internal/gomegainfo"
+ "github.com/nunnatsa/ginkgolinter/internal/reverseassertion"
+)
+
+type ArgType uint64
+
+const (
+ UnknownActualArgType ArgType = 1 << iota
+ ErrActualArgType
+ LenFuncActualArgType
+ CapFuncActualArgType
+ ComparisonActualArgType
+ LenComparisonActualArgType
+ CapComparisonActualArgType
+ NilComparisonActualArgType
+ BinaryComparisonActualArgType
+ FuncSigArgType
+ ErrFuncActualArgType
+ GomegaParamArgType
+ MultiRetsArgType
+ ErrorMethodArgType
+
+ ErrorTypeArgType
+
+ EqualZero
+ GreaterThanZero
+)
+
+func (a ArgType) Is(val ArgType) bool {
+ return a&val != 0
+}
+
+func getActualArgPayload(origActualExpr, actualExprClone *ast.CallExpr, pass *analysis.Pass, info *gomegahandler.GomegaBasicInfo) (ArgPayload, int) {
+ origArgExpr, argExprClone, actualOffset, isGomegaExpr := getActualArg(origActualExpr, actualExprClone, info.MethodName, pass)
+ if !isGomegaExpr {
+ return nil, 0
+ }
+
+ var arg ArgPayload
+
+ if info.HasErrorMethod {
+ arg = &ErrorMethodPayload{}
+ } else if value.IsExprError(pass, origArgExpr) {
+ arg = newErrPayload(origArgExpr, argExprClone, pass)
+ } else {
+ switch expr := origArgExpr.(type) {
+ case *ast.CallExpr:
+ arg = newFuncCallArgPayload(expr, argExprClone.(*ast.CallExpr))
+
+ case *ast.BinaryExpr:
+ arg = parseBinaryExpr(expr, argExprClone.(*ast.BinaryExpr), pass)
+ }
+
+ }
+
+ if arg != nil {
+ return arg, actualOffset
+ }
+
+ t := pass.TypesInfo.TypeOf(origArgExpr)
+ if sig, ok := t.(*gotypes.Signature); ok {
+ arg = getAsyncFuncArg(sig)
+ if arg != nil {
+ return arg, actualOffset
+ }
+ }
+
+ return newRegularArgPayload(origArgExpr, argExprClone, pass), actualOffset
+}
+
+func getActualArg(origActualExpr *ast.CallExpr, actualExprClone *ast.CallExpr, actualMethodName string, pass *analysis.Pass) (ast.Expr, ast.Expr, int, bool) {
+ var (
+ origArgExpr ast.Expr
+ argExprClone ast.Expr
+ )
+
+ funcOffset := gomegainfo.ActualArgOffset(actualMethodName)
+ if funcOffset < 0 {
+ return nil, nil, 0, false
+ }
+
+ if len(origActualExpr.Args) <= funcOffset {
+ return nil, nil, 0, false
+ }
+
+ origArgExpr = origActualExpr.Args[funcOffset]
+ argExprClone = actualExprClone.Args[funcOffset]
+
+ if gomegainfo.IsAsyncActualMethod(actualMethodName) {
+ if ginkgoinfo.IsGinkgoContext(pass.TypesInfo.TypeOf(origArgExpr)) {
+ funcOffset++
+ if len(origActualExpr.Args) <= funcOffset {
+ return nil, nil, 0, false
+ }
+
+ origArgExpr = origActualExpr.Args[funcOffset]
+ argExprClone = actualExprClone.Args[funcOffset]
+ }
+ }
+
+ return origArgExpr, argExprClone, funcOffset, true
+}
+
+type ArgPayload interface {
+ ArgType() ArgType
+}
+
+type RegularArgPayload struct {
+ value.Value
+}
+
+func newRegularArgPayload(orig, clone ast.Expr, pass *analysis.Pass) *RegularArgPayload {
+ return &RegularArgPayload{
+ Value: value.New(orig, clone, pass),
+ }
+}
+
+func (*RegularArgPayload) ArgType() ArgType {
+ return UnknownActualArgType
+}
+
+type FuncCallArgPayload struct {
+ argType ArgType
+
+ origFunc *ast.CallExpr
+ cloneFunc *ast.CallExpr
+
+ origVal ast.Expr
+ cloneVal ast.Expr
+}
+
+func newFuncCallArgPayload(orig, clone *ast.CallExpr) ArgPayload {
+ funcName, ok := builtinFuncName(orig)
+ if !ok {
+ return nil
+ }
+
+ if len(orig.Args) != 1 {
+ return nil
+ }
+
+ var argType ArgType
+ switch funcName {
+ case "len":
+ argType = LenFuncActualArgType
+ case "cap":
+ argType = CapFuncActualArgType
+ default:
+ return nil
+ }
+
+ return &FuncCallArgPayload{
+ argType: argType,
+ origFunc: orig,
+ cloneFunc: clone,
+ origVal: orig.Args[0],
+ cloneVal: clone.Args[0],
+ }
+}
+
+func (f *FuncCallArgPayload) ArgType() ArgType {
+ return f.argType
+}
+
+type ErrPayload struct {
+ value.Valuer
+}
+
+func newErrPayload(orig, clone ast.Expr, pass *analysis.Pass) *ErrPayload {
+ return &ErrPayload{
+ Valuer: value.GetValuer(orig, clone, pass),
+ }
+}
+
+func (*ErrPayload) ArgType() ArgType {
+ return ErrActualArgType | ErrorTypeArgType
+}
+
+type ErrorMethodPayload struct{}
+
+func (ErrorMethodPayload) ArgType() ArgType {
+ return ErrorMethodArgType | ErrorTypeArgType
+}
+
+func parseBinaryExpr(origActualExpr, argExprClone *ast.BinaryExpr, pass *analysis.Pass) ArgPayload {
+ left, right, op := origActualExpr.X, origActualExpr.Y, origActualExpr.Op
+ replace := false
+ switch realFirst := left.(type) {
+ case *ast.Ident: // check if const
+ info, ok := pass.TypesInfo.Types[realFirst]
+ if ok {
+ if value.Is[*gotypes.Basic](info.Type) && (info.Value != nil || info.IsNil()) {
+ replace = true
+ }
+ }
+
+ case *ast.BasicLit:
+ replace = true
+ }
+
+ if replace {
+ left, right = right, left
+ }
+
+ switch op {
+ case token.EQL:
+ case token.NEQ:
+ case token.GTR, token.GEQ, token.LSS, token.LEQ:
+ if replace {
+ op = reverseassertion.ChangeCompareOperator(op)
+ }
+ default:
+ return nil
+ }
+
+ leftClone, rightClone := argExprClone.X, argExprClone.Y
+ if replace {
+ leftClone, rightClone = rightClone, leftClone
+ }
+
+ leftVal := value.GetValuer(left, leftClone, pass)
+ rightVal := value.GetValuer(right, rightClone, pass)
+
+ if value.IsNil(right, pass) {
+ return newNilComparisonPayload(leftVal, rightVal, op)
+ }
+
+ leftVal.IsFunc()
+ if firstFunc, ok := left.(*ast.CallExpr); ok {
+ if payload, ok := newFuncComparisonPayload(firstFunc, leftClone.(*ast.CallExpr), right, rightClone, op, pass); ok {
+ return payload
+ }
+ }
+
+ return newComparisonArgPayload(leftVal, rightVal, op)
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncactual.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncactual.go
new file mode 100644
index 0000000000..7c5df2a341
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncactual.go
@@ -0,0 +1,123 @@
+package actual
+
+import (
+ "go/ast"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/intervals"
+)
+
+type AsyncArg struct {
+ valid bool
+ fun *ast.CallExpr
+
+ timeoutInterval intervals.DurationValue
+ pollingInterval intervals.DurationValue
+ tooManyTimeouts bool
+ tooManyPolling bool
+}
+
+func newAsyncArg(origExpr, cloneExpr, orig, clone *ast.CallExpr, argType gotypes.Type, pass *analysis.Pass, actualOffset int, timePkg string) *AsyncArg {
+ var (
+ fun *ast.CallExpr
+ valid = true
+ timeout intervals.DurationValue
+ polling intervals.DurationValue
+ )
+
+ if _, isActualFuncCall := orig.Args[actualOffset].(*ast.CallExpr); isActualFuncCall {
+ fun = clone.Args[actualOffset].(*ast.CallExpr)
+ valid = isValidAsyncValueType(argType)
+ }
+
+ timeoutOffset := actualOffset + 1
+ tooManyTimeouts := false
+ tooManyPolling := false
+
+ if len(orig.Args) > timeoutOffset {
+ timeout = intervals.GetDuration(pass, timeoutOffset, orig.Args[timeoutOffset], clone.Args[timeoutOffset], timePkg)
+ pollingOffset := actualOffset + 2
+ if len(orig.Args) > pollingOffset {
+ polling = intervals.GetDuration(pass, pollingOffset, orig.Args[pollingOffset], clone.Args[pollingOffset], timePkg)
+ }
+ }
+ selOrig := origExpr.Fun.(*ast.SelectorExpr)
+ selClone := cloneExpr.Fun.(*ast.SelectorExpr)
+
+ for {
+ callOrig, ok := selOrig.X.(*ast.CallExpr)
+ if !ok {
+ break
+ }
+ callClone := selClone.X.(*ast.CallExpr)
+
+ funOrig, ok := callOrig.Fun.(*ast.SelectorExpr)
+ if !ok {
+ break
+ }
+ funClone := callClone.Fun.(*ast.SelectorExpr)
+
+ switch funOrig.Sel.Name {
+ case "WithTimeout", "Within":
+ if timeout != nil {
+ tooManyTimeouts = true
+ } else if len(callOrig.Args) == 1 {
+ timeout = intervals.GetDurationFromValue(pass, callOrig.Args[0], callClone.Args[0])
+ }
+
+ case "WithPolling", "ProbeEvery":
+ if polling != nil {
+ tooManyPolling = true
+ } else if len(callOrig.Args) == 1 {
+ polling = intervals.GetDurationFromValue(pass, callOrig.Args[0], callClone.Args[0])
+ }
+ }
+
+ selOrig = funOrig
+ selClone = funClone
+ }
+
+ return &AsyncArg{
+ valid: valid,
+ fun: fun,
+ timeoutInterval: timeout,
+ pollingInterval: polling,
+ tooManyTimeouts: tooManyTimeouts,
+ tooManyPolling: tooManyPolling,
+ }
+}
+
+func (a *AsyncArg) IsValid() bool {
+ return a.valid
+}
+
+func (a *AsyncArg) Timeout() intervals.DurationValue {
+ return a.timeoutInterval
+}
+
+func (a *AsyncArg) Polling() intervals.DurationValue {
+ return a.pollingInterval
+}
+
+func (a *AsyncArg) TooManyTimeouts() bool {
+ return a.tooManyTimeouts
+}
+
+func (a *AsyncArg) TooManyPolling() bool {
+ return a.tooManyPolling
+}
+
+func isValidAsyncValueType(t gotypes.Type) bool {
+ switch t.(type) {
+ // allow functions that return a function, a channel or a pointer.
+ case *gotypes.Signature, *gotypes.Chan, *gotypes.Pointer:
+ return true
+ case *gotypes.Named:
+ return isValidAsyncValueType(t.Underlying())
+ }
+
+ return false
+}
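
For orientation, a hedged example (not vendored code; the gomega module and the fetch name are assumed) of the call shapes newAsyncArg walks: the timeout and polling interval may be passed positionally to Eventually or through the WithTimeout/Within and WithPolling/ProbeEvery chain, and supplying the same interval both ways is what sets tooManyTimeouts or tooManyPolling.

package sample

import (
    "time"

    . "github.com/onsi/gomega"
)

func callShapes(fetch func() error) {
    // timeout and polling passed positionally to Eventually:
    Eventually(fetch, 5*time.Second, 200*time.Millisecond).Should(Succeed())

    // the same intervals supplied through the fluent chain that newAsyncArg walks:
    Eventually(fetch).WithTimeout(5 * time.Second).WithPolling(200 * time.Millisecond).Should(Succeed())

    // timeout given twice (positional and WithTimeout) -> tooManyTimeouts
    Eventually(fetch, 5*time.Second).WithTimeout(10 * time.Second).Should(Succeed())
}
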
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncfuncarg.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncfuncarg.go
new file mode 100644
index 0000000000..c777cd4a70
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncfuncarg.go
@@ -0,0 +1,38 @@
+package actual
+
+import (
+ gotypes "go/types"
+
+ "github.com/nunnatsa/ginkgolinter/internal/gomegainfo"
+ "github.com/nunnatsa/ginkgolinter/internal/interfaces"
+)
+
+func getAsyncFuncArg(sig *gotypes.Signature) ArgPayload {
+ argType := FuncSigArgType
+ if sig.Results().Len() == 1 {
+ if interfaces.ImplementsError(sig.Results().At(0).Type().Underlying()) {
+ argType |= ErrFuncActualArgType | ErrorTypeArgType
+ }
+ }
+
+ if sig.Params().Len() > 0 {
+ arg := sig.Params().At(0).Type()
+ if gomegainfo.IsGomegaType(arg) && sig.Results().Len() == 0 {
+ argType |= FuncSigArgType | GomegaParamArgType
+ }
+ }
+
+ if sig.Results().Len() > 1 {
+ argType |= FuncSigArgType | MultiRetsArgType
+ }
+
+ return &FuncSigArgPayload{argType: argType}
+}
+
+type FuncSigArgPayload struct {
+ argType ArgType
+}
+
+func (f FuncSigArgPayload) ArgType() ArgType {
+ return f.argType
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/comparisonAsserion.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/comparisonAsserion.go
new file mode 100644
index 0000000000..2b16402db9
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/comparisonAsserion.go
@@ -0,0 +1,260 @@
+package actual
+
+import (
+ "go/ast"
+ "go/constant"
+ "go/token"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+)
+
+type ComparisonActualPayload interface {
+ GetOp() token.Token
+ GetLeft() value.Valuer
+ GetRight() value.Valuer
+}
+
+type FuncComparisonPayload struct {
+ op token.Token
+ argType ArgType
+ val value.Valuer
+ left value.Valuer
+ arg ast.Expr
+}
+
+func newFuncComparisonPayload(origLeft, leftClone *ast.CallExpr, origRight, rightClone ast.Expr, op token.Token, pass *analysis.Pass) (*FuncComparisonPayload, bool) {
+
+ funcName, ok := builtinFuncName(origLeft)
+ if !ok {
+ return nil, false
+ }
+
+ if len(origLeft.Args) != 1 {
+ return nil, false
+ }
+
+ left := value.GetValuer(origLeft, leftClone, pass)
+ val := value.GetValuer(origRight, rightClone, pass)
+
+ argType := ComparisonActualArgType
+ switch funcName {
+ case "len":
+ argType |= LenComparisonActualArgType
+
+ if val.IsValueNumeric() {
+ if val.IsValueZero() {
+ switch op {
+ case token.EQL:
+ argType |= EqualZero
+
+ case token.NEQ, token.GTR:
+ argType |= GreaterThanZero
+ }
+ } else if val.GetValue().String() == "1" && op == token.GEQ {
+ argType |= GreaterThanZero
+ }
+ }
+
+ if !argType.Is(GreaterThanZero) && op != token.EQL && op != token.NEQ {
+ return nil, false
+ }
+
+ case "cap":
+ if op != token.EQL && op != token.NEQ {
+ return nil, false
+ }
+ argType |= CapComparisonActualArgType
+
+ default:
+ return nil, false
+ }
+
+ return &FuncComparisonPayload{
+ op: op,
+ argType: argType,
+ val: val,
+ left: left,
+ arg: leftClone.Args[0],
+ }, true
+}
+
+func (f *FuncComparisonPayload) GetLeft() value.Valuer {
+ return f.left
+}
+
+func (f *FuncComparisonPayload) GetRight() value.Valuer {
+ return f.val
+}
+
+func (f *FuncComparisonPayload) ArgType() ArgType {
+ return f.argType
+}
+
+func (f *FuncComparisonPayload) GetOp() token.Token {
+ return f.op
+}
+
+func (f *FuncComparisonPayload) GetValue() constant.Value {
+ return f.val.GetValue()
+}
+
+func (f *FuncComparisonPayload) GetType() gotypes.Type {
+ return f.val.GetType()
+}
+
+func (f *FuncComparisonPayload) GetValueExpr() ast.Expr {
+ return f.val.GetValueExpr()
+}
+
+func (f *FuncComparisonPayload) IsError() bool {
+ return f.val.IsError()
+}
+
+func (f *FuncComparisonPayload) IsValueZero() bool {
+ return f.val.IsValueZero()
+}
+
+func (f *FuncComparisonPayload) IsFunc() bool {
+ return true
+}
+
+func (f *FuncComparisonPayload) IsValueNumeric() bool {
+ return f.val.IsValueNumeric()
+}
+
+func (f *FuncComparisonPayload) IsValueInt() bool {
+ return f.val.IsValueInt()
+}
+
+func (f *FuncComparisonPayload) IsInterface() bool {
+ return f.val.IsInterface()
+}
+
+func (f *FuncComparisonPayload) IsPointer() bool {
+ return f.val.IsPointer()
+}
+
+func (f *FuncComparisonPayload) GetFuncArg() ast.Expr {
+ return f.arg
+}
+
+type ComparisonArgPayload struct {
+ left value.Valuer
+ right value.Valuer
+ op token.Token
+}
+
+func newComparisonArgPayload(left, right value.Valuer, op token.Token) *ComparisonArgPayload {
+ return &ComparisonArgPayload{
+ left: left,
+ right: right,
+ op: op,
+ }
+}
+
+func (*ComparisonArgPayload) ArgType() ArgType {
+ return BinaryComparisonActualArgType | ComparisonActualArgType
+}
+
+func (c *ComparisonArgPayload) GetOp() token.Token {
+ return c.op
+}
+
+func (c *ComparisonArgPayload) GetLeft() value.Valuer {
+ return c.left
+}
+
+func (c *ComparisonArgPayload) GetRight() value.Valuer {
+ return c.right
+}
+
+type NilComparisonPayload struct {
+ val value.Valuer
+ right value.Valuer
+ op token.Token
+}
+
+func newNilComparisonPayload(val, right value.Valuer, op token.Token) *NilComparisonPayload {
+ return &NilComparisonPayload{
+ val: val,
+ right: right,
+ op: op,
+ }
+}
+
+func (*NilComparisonPayload) ArgType() ArgType {
+ return NilComparisonActualArgType
+}
+
+func (n *NilComparisonPayload) GetLeft() value.Valuer {
+ return n.val
+}
+
+func (n *NilComparisonPayload) GetRight() value.Valuer {
+ return n.right
+}
+
+func (n *NilComparisonPayload) GetType() gotypes.Type {
+ return n.val.GetType()
+}
+
+func (n *NilComparisonPayload) GetValue() constant.Value {
+ return n.val.GetValue()
+}
+
+func (n *NilComparisonPayload) GetValueExpr() ast.Expr {
+ return n.val.GetValueExpr()
+}
+
+func (n *NilComparisonPayload) IsValueInt() bool {
+ return n.val.IsValueInt()
+}
+
+func (n *NilComparisonPayload) IsError() bool {
+ return n.val.IsError()
+}
+
+func (n *NilComparisonPayload) IsValueNumeric() bool {
+ return n.val.IsValueNumeric()
+}
+
+func (n *NilComparisonPayload) IsFunc() bool {
+ return n.val.IsFunc()
+}
+
+func (n *NilComparisonPayload) IsValueZero() bool {
+ return n.val.IsValueZero()
+}
+
+func (n *NilComparisonPayload) IsInterface() bool {
+ return n.val.IsInterface()
+}
+
+func (n *NilComparisonPayload) IsPointer() bool {
+ return n.val.IsPointer()
+}
+
+func (n *NilComparisonPayload) GetOp() token.Token {
+ return n.op
+}
+
+func builtinFuncName(callExpr *ast.CallExpr) (string, bool) {
+ argFunc, ok := callExpr.Fun.(*ast.Ident)
+ if !ok {
+ return "", false
+ }
+
+ if len(callExpr.Args) != 1 {
+ return "", false
+ }
+
+ switch name := argFunc.Name; name {
+ case "len", "cap", "min", "max":
+ return name, true
+ default:
+ return "", false
+ }
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/expression.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/expression.go
new file mode 100644
index 0000000000..6e8e0db6ac
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/expression.go
@@ -0,0 +1,321 @@
+package expression
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ gotypes "go/types"
+
+ "github.com/nunnatsa/ginkgolinter/internal/formatter"
+
+ "github.com/go-toolsmith/astcopy"
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+ "github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
+ "github.com/nunnatsa/ginkgolinter/internal/gomegainfo"
+ "github.com/nunnatsa/ginkgolinter/internal/reverseassertion"
+)
+
+type GomegaExpression struct {
+ orig *ast.CallExpr
+ clone *ast.CallExpr
+
+ assertionFuncName string
+ origAssertionFuncName string
+ actualFuncName string
+
+ isAsync bool
+ isUsingGomegaVar bool
+
+ actual *actual.Actual
+ matcher *matcher.Matcher
+
+ handler gomegahandler.Handler
+}
+
+func New(origExpr *ast.CallExpr, pass *analysis.Pass, handler gomegahandler.Handler, timePkg string) (*GomegaExpression, bool) {
+ info, ok := handler.GetGomegaBasicInfo(origExpr)
+ if !ok || !gomegainfo.IsActualMethod(info.MethodName) {
+ return nil, false
+ }
+
+ origSel, ok := origExpr.Fun.(*ast.SelectorExpr)
+ if !ok || !gomegainfo.IsAssertionFunc(origSel.Sel.Name) {
+ return &GomegaExpression{
+ orig: origExpr,
+ actualFuncName: info.MethodName,
+ }, true
+ }
+
+ exprClone := astcopy.CallExpr(origExpr)
+ selClone := exprClone.Fun.(*ast.SelectorExpr)
+
+ origActual := handler.GetActualExpr(origSel)
+ if origActual == nil {
+ return nil, false
+ }
+
+ actualClone := handler.GetActualExprClone(origSel, selClone)
+ if actualClone == nil {
+ return nil, false
+ }
+
+ actl, ok := actual.New(origExpr, exprClone, origActual, actualClone, pass, timePkg, info)
+ if !ok {
+ return nil, false
+ }
+
+ origMatcher, ok := origExpr.Args[0].(*ast.CallExpr)
+ if !ok {
+ return nil, false
+ }
+
+ matcherClone := exprClone.Args[0].(*ast.CallExpr)
+
+ mtchr, ok := matcher.New(origMatcher, matcherClone, pass, handler)
+ if !ok {
+ return nil, false
+ }
+
+ exprClone.Args[0] = mtchr.Clone
+
+ gexp := &GomegaExpression{
+ orig: origExpr,
+ clone: exprClone,
+
+ assertionFuncName: origSel.Sel.Name,
+ origAssertionFuncName: origSel.Sel.Name,
+ actualFuncName: info.MethodName,
+
+ isAsync: actl.IsAsync(),
+ isUsingGomegaVar: info.UseGomegaVar,
+
+ actual: actl,
+ matcher: mtchr,
+
+ handler: handler,
+ }
+
+ if mtchr.ShouldReverseLogic() {
+ gexp.ReverseAssertionFuncLogic()
+ }
+
+ return gexp, true
+}
+
+func (e *GomegaExpression) IsMissingAssertion() bool {
+ return e.matcher == nil
+}
+
+func (e *GomegaExpression) GetActualFuncName() string {
+ if e == nil {
+ return ""
+ }
+ return e.actualFuncName
+}
+
+func (e *GomegaExpression) GetAssertFuncName() string {
+ if e == nil {
+ return ""
+ }
+ return e.assertionFuncName
+}
+
+func (e *GomegaExpression) GetOrigAssertFuncName() string {
+ if e == nil {
+ return ""
+ }
+ return e.origAssertionFuncName
+}
+
+func (e *GomegaExpression) IsAsync() bool {
+ return e.isAsync
+}
+
+func (e *GomegaExpression) IsUsingGomegaVar() bool {
+ return e.isUsingGomegaVar
+}
+
+func (e *GomegaExpression) ReverseAssertionFuncLogic() {
+ assertionFunc := e.clone.Fun.(*ast.SelectorExpr).Sel
+ newName := reverseassertion.ChangeAssertionLogic(assertionFunc.Name)
+ assertionFunc.Name = newName
+ e.assertionFuncName = newName
+}
+
+func (e *GomegaExpression) ReplaceAssertionMethod(name string) {
+ e.clone.Fun.(*ast.SelectorExpr).Sel.Name = name
+}
+
+func (e *GomegaExpression) ReplaceMatcherFuncName(name string) {
+ e.matcher.ReplaceMatcherFuncName(name)
+}
+
+func (e *GomegaExpression) ReplaceMatcherArgs(newArgs []ast.Expr) {
+ e.matcher.ReplaceMatcherArgs(newArgs)
+}
+
+func (e *GomegaExpression) RemoveMatcherArgs() {
+ e.matcher.ReplaceMatcherArgs(nil)
+}
+
+func (e *GomegaExpression) ReplaceActual(newArg ast.Expr) {
+ e.actual.ReplaceActual(newArg)
+}
+
+func (e *GomegaExpression) ReplaceActualWithItsFirstArg() {
+ e.actual.ReplaceActualWithItsFirstArg()
+}
+
+func (e *GomegaExpression) replaceMathcerFuncNoArgs(name string) {
+ e.matcher.ReplaceMatcherFuncName(name)
+ e.RemoveMatcherArgs()
+}
+
+func (e *GomegaExpression) SetMatcherBeZero() {
+ e.replaceMathcerFuncNoArgs("BeZero")
+}
+
+func (e *GomegaExpression) SetMatcherBeEmpty() {
+ e.replaceMathcerFuncNoArgs("BeEmpty")
+}
+
+func (e *GomegaExpression) SetLenNumericMatcher() {
+ if m, ok := e.matcher.GetMatcherInfo().(value.Valuer); ok && m.IsValueZero() {
+ e.SetMatcherBeEmpty()
+ } else {
+ e.ReplaceMatcherFuncName("HaveLen")
+ e.ReplaceMatcherArgs([]ast.Expr{m.GetValueExpr()})
+ }
+}
+
+func (e *GomegaExpression) SetLenNumericActual() {
+ if m, ok := e.matcher.GetMatcherInfo().(value.Valuer); ok && m.IsValueZero() {
+ e.SetMatcherBeEmpty()
+ } else {
+ e.ReplaceMatcherFuncName("HaveLen")
+ e.ReplaceMatcherArgs([]ast.Expr{m.GetValueExpr()})
+ }
+}
+
+func (e *GomegaExpression) SetMatcherLen(arg ast.Expr) {
+ e.ReplaceMatcherFuncName("HaveLen")
+ e.ReplaceMatcherArgs([]ast.Expr{arg})
+}
+
+func (e *GomegaExpression) SetMatcherCap(arg ast.Expr) {
+ e.ReplaceMatcherFuncName("HaveCap")
+ e.ReplaceMatcherArgs([]ast.Expr{arg})
+}
+
+func (e *GomegaExpression) SetMatcherCapZero() {
+ e.ReplaceMatcherFuncName("HaveCap")
+ e.ReplaceMatcherArgs([]ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "0"}})
+}
+
+func (e *GomegaExpression) SetMatcherSucceed() {
+ e.replaceMathcerFuncNoArgs("Succeed")
+}
+
+func (e *GomegaExpression) SetMatcherHaveOccurred() {
+ e.replaceMathcerFuncNoArgs("HaveOccurred")
+}
+
+func (e *GomegaExpression) SetMatcherBeNil() {
+ e.replaceMathcerFuncNoArgs("BeNil")
+}
+
+func (e *GomegaExpression) SetMatcherBeTrue() {
+ e.replaceMathcerFuncNoArgs("BeTrue")
+}
+
+func (e *GomegaExpression) SetMatcherBeFalse() {
+ e.replaceMathcerFuncNoArgs("BeFalse")
+}
+
+func (e *GomegaExpression) SetMatcherHaveValue() {
+ newMatcherExp := e.handler.GetNewWrapperMatcher("HaveValue", e.matcher.Clone)
+ e.clone.Args[0] = newMatcherExp
+ e.matcher.Clone = newMatcherExp
+}
+
+func (e *GomegaExpression) SetMatcherEqual(arg ast.Expr) {
+ e.ReplaceMatcherFuncName("Equal")
+ e.ReplaceMatcherArgs([]ast.Expr{arg})
+}
+
+func (e *GomegaExpression) SetMatcherBeIdenticalTo(arg ast.Expr) {
+ e.ReplaceMatcherFuncName("BeIdenticalTo")
+ e.ReplaceMatcherArgs([]ast.Expr{arg})
+}
+
+func (e *GomegaExpression) SetMatcherBeNumerically(op token.Token, arg ast.Expr) {
+ e.ReplaceMatcherFuncName("BeNumerically")
+ e.ReplaceMatcherArgs([]ast.Expr{
+ &ast.BasicLit{Kind: token.STRING, Value: fmt.Sprintf("%q", op.String())},
+ arg,
+ })
+}
+
+func (e *GomegaExpression) IsNegativeAssertion() bool {
+ return reverseassertion.IsNegativeLogic(e.assertionFuncName)
+}
+
+func (e *GomegaExpression) GetClone() *ast.CallExpr {
+ return e.clone
+}
+
+// Actual proxies:
+
+func (e *GomegaExpression) GetActualClone() *ast.CallExpr {
+ return e.actual.Clone
+}
+
+func (e *GomegaExpression) AppendWithArgsToActual() {
+ e.actual.AppendWithArgsMethod()
+}
+
+func (e *GomegaExpression) GetAsyncActualArg() *actual.AsyncArg {
+ return e.actual.GetAsyncArg()
+}
+
+func (e *GomegaExpression) GetActualArg() actual.ArgPayload {
+ return e.actual.Arg
+}
+
+func (e *GomegaExpression) GetActualArgExpr() ast.Expr {
+ return e.actual.GetActualArg()
+}
+
+func (e *GomegaExpression) GetActualArgGOType() gotypes.Type {
+ return e.actual.ArgGOType()
+}
+
+func (e *GomegaExpression) ActualArgTypeIs(other actual.ArgType) bool {
+ return e.actual.Arg.ArgType().Is(other)
+}
+
+func (e *GomegaExpression) IsActualTuple() bool {
+ return e.actual.IsTuple()
+}
+
+// Matcher proxies
+
+func (e *GomegaExpression) GetMatcher() *matcher.Matcher {
+ return e.matcher
+}
+
+func (e *GomegaExpression) GetMatcherInfo() matcher.Info {
+ return e.matcher.GetMatcherInfo()
+}
+
+func (e *GomegaExpression) MatcherTypeIs(other matcher.Type) bool {
+ return e.matcher.GetMatcherInfo().Type().Is(other)
+}
+
+func (e *GomegaExpression) FormatOrig(frm *formatter.GoFmtFormatter) string {
+ return frm.Format(e.orig)
+}
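
A minimal sketch of the clone-and-rewrite approach GomegaExpression builds on, using the same astcopy dependency imported above: keep the original *ast.CallExpr untouched for reporting, mutate a deep copy, and print the copy as the suggested fix. The concrete rewrite shown (Equal(0) on len(x) becoming BeEmpty() on x) is only an assumed illustration, not the linter's full rule logic.

package main

import (
    "fmt"
    "go/ast"
    "go/parser"
    "go/printer"
    "go/token"
    "os"

    "github.com/go-toolsmith/astcopy"
)

func main() {
    fset := token.NewFileSet()
    expr, err := parser.ParseExprFrom(fset, "demo.go", "Expect(len(x)).To(Equal(0))", 0)
    if err != nil {
        panic(err)
    }
    orig := expr.(*ast.CallExpr)
    clone := astcopy.CallExpr(orig)

    // rewrite the matcher on the clone only: Equal(0) -> BeEmpty()
    matcher := clone.Args[0].(*ast.CallExpr)
    matcher.Fun = ast.NewIdent("BeEmpty")
    matcher.Args = nil

    // rewrite the actual value on the clone: len(x) -> x
    actual := clone.Fun.(*ast.SelectorExpr).X.(*ast.CallExpr)
    actual.Args[0] = actual.Args[0].(*ast.CallExpr).Args[0]

    // prints: suggested: Expect(x).To(BeEmpty())
    fmt.Print("suggested: ")
    _ = printer.Fprint(os.Stdout, fset, clone)
    fmt.Println()
}
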
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/bematchers.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/bematchers.go
new file mode 100644
index 0000000000..24272535dc
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/bematchers.go
@@ -0,0 +1,77 @@
+package matcher
+
+import "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+
+type BeIdenticalToMatcher struct {
+ value.Value
+}
+
+func (BeIdenticalToMatcher) Type() Type {
+ return BeIdenticalToMatcherType
+}
+
+func (BeIdenticalToMatcher) MatcherName() string {
+ return beIdenticalTo
+}
+
+type BeEquivalentToMatcher struct {
+ value.Value
+}
+
+func (BeEquivalentToMatcher) Type() Type {
+ return BeEquivalentToMatcherType
+}
+
+func (BeEquivalentToMatcher) MatcherName() string {
+ return beEquivalentTo
+}
+
+type BeZeroMatcher struct{}
+
+func (BeZeroMatcher) Type() Type {
+ return BeZeroMatcherType
+}
+
+func (BeZeroMatcher) MatcherName() string {
+ return beZero
+}
+
+type BeEmptyMatcher struct{}
+
+func (BeEmptyMatcher) Type() Type {
+ return BeEmptyMatcherType
+}
+
+func (BeEmptyMatcher) MatcherName() string {
+ return beEmpty
+}
+
+type BeTrueMatcher struct{}
+
+func (BeTrueMatcher) Type() Type {
+ return BeTrueMatcherType | BoolValueTrue
+}
+
+func (BeTrueMatcher) MatcherName() string {
+ return beTrue
+}
+
+type BeFalseMatcher struct{}
+
+func (BeFalseMatcher) Type() Type {
+ return BeFalseMatcherType | BoolValueFalse
+}
+
+func (BeFalseMatcher) MatcherName() string {
+ return beFalse
+}
+
+type BeNilMatcher struct{}
+
+func (BeNilMatcher) Type() Type {
+ return BeNilMatcherType
+}
+
+func (BeNilMatcher) MatcherName() string {
+ return beNil
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/benumericmatcher.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/benumericmatcher.go
new file mode 100644
index 0000000000..8683f02918
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/benumericmatcher.go
@@ -0,0 +1,128 @@
+package matcher
+
+import (
+ "go/ast"
+ "go/constant"
+ "go/token"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+)
+
+type BeNumericallyMatcher struct {
+ op token.Token
+ value value.Valuer
+ argType Type
+}
+
+var compareOps = map[string]token.Token{
+ `"=="`: token.EQL,
+ `"<"`: token.LSS,
+ `">"`: token.GTR,
+ `"="`: token.ASSIGN,
+ `"!="`: token.NEQ,
+ `"<="`: token.LEQ,
+ `">="`: token.GEQ,
+}
+
+func getCompareOp(opExp ast.Expr) token.Token {
+ basic, ok := opExp.(*ast.BasicLit)
+ if !ok {
+ return token.ILLEGAL
+ }
+ if basic.Kind != token.STRING {
+ return token.ILLEGAL
+ }
+
+ if tk, ok := compareOps[basic.Value]; ok {
+ return tk
+ }
+
+ return token.ILLEGAL
+}
+
+func newBeNumericallyMatcher(opExp, orig, clone ast.Expr, pass *analysis.Pass) Info {
+ op := getCompareOp(opExp)
+ if op == token.ILLEGAL {
+ return &UnspecifiedMatcher{
+ matcherName: beNumerically,
+ }
+ }
+
+ val := value.GetValuer(orig, clone, pass)
+ argType := BeNumericallyMatcherType
+
+ if val.IsValueNumeric() {
+ if v := val.GetValue().String(); v == "0" {
+ switch op {
+ case token.EQL:
+ argType |= EqualZero
+
+ case token.NEQ, token.GTR:
+ argType |= GreaterThanZero
+ }
+ } else if v == "1" && op == token.GEQ {
+ argType |= GreaterThanZero
+ }
+ }
+
+ return &BeNumericallyMatcher{
+ op: op,
+ value: val,
+ argType: argType,
+ }
+}
+
+func (m BeNumericallyMatcher) Type() Type {
+ return m.argType
+}
+
+func (BeNumericallyMatcher) MatcherName() string {
+ return beNumerically
+}
+
+func (m BeNumericallyMatcher) GetValueExpr() ast.Expr {
+ return m.value.GetValueExpr()
+}
+
+func (m BeNumericallyMatcher) GetValue() constant.Value {
+ return m.value.GetValue()
+}
+
+func (m BeNumericallyMatcher) GetType() gotypes.Type {
+ return m.value.GetType()
+}
+
+func (m BeNumericallyMatcher) GetOp() token.Token {
+ return m.op
+}
+
+func (m BeNumericallyMatcher) IsValueZero() bool {
+ return m.value.IsValueZero()
+}
+
+func (m BeNumericallyMatcher) IsValueInt() bool {
+ return m.value.IsValueInt()
+}
+
+func (m BeNumericallyMatcher) IsValueNumeric() bool {
+ return m.value.IsValueNumeric()
+}
+
+func (m BeNumericallyMatcher) IsError() bool {
+ return m.value.IsError()
+}
+
+func (m BeNumericallyMatcher) IsFunc() bool {
+ return m.value.IsFunc()
+}
+
+func (m BeNumericallyMatcher) IsInterface() bool {
+ return m.value.IsInterface()
+}
+
+func (m BeNumericallyMatcher) IsPointer() bool {
+ return m.value.IsPointer()
+}
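
A short, self-contained illustration (not vendored code) of why compareOps above is keyed on quoted strings: in the AST, the operator argument of BeNumerically(">", 0) is a string literal whose Value field still carries the surrounding quotes.

package main

import (
    "fmt"
    "go/ast"
    "go/parser"
    "go/token"
)

var compareOps = map[string]token.Token{
    `"=="`: token.EQL, `"!="`: token.NEQ,
    `">"`:  token.GTR, `">="`: token.GEQ,
    `"<"`:  token.LSS, `"<="`: token.LEQ,
    `"="`:  token.ASSIGN,
}

func main() {
    expr, _ := parser.ParseExpr(`BeNumerically(">", 0)`)
    call := expr.(*ast.CallExpr)
    lit := call.Args[0].(*ast.BasicLit) // the operator argument, including its quotes

    fmt.Printf("literal value: %s\n", lit.Value)              // ">"
    fmt.Printf("mapped token:  %v\n", compareOps[lit.Value])  // >
}
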
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/equalmatcher.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/equalmatcher.go
new file mode 100644
index 0000000000..8cee8e408e
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/equalmatcher.go
@@ -0,0 +1,124 @@
+package matcher
+
+import (
+ "go/ast"
+ "go/constant"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+)
+
+func newEqualMatcher(orig, clone ast.Expr, pass *analysis.Pass) Info {
+ t := pass.TypesInfo.Types[orig]
+
+ if t.Value != nil {
+ if t.Value.Kind() == constant.Bool {
+ if t.Value.String() == "true" {
+ return &EqualTrueMatcher{}
+ }
+ return &EqualFalseMatcher{}
+ }
+ }
+
+ if value.IsNil(orig, pass) {
+ return &EqualNilMatcher{
+ gotype: pass.TypesInfo.TypeOf(orig),
+ }
+ }
+
+ val := value.GetValuer(orig, clone, pass)
+
+ return &EqualMatcher{
+ val: val,
+ }
+}
+
+type EqualMatcher struct {
+ val value.Valuer
+}
+
+func (EqualMatcher) Type() Type {
+ return EqualMatcherType
+}
+
+func (EqualMatcher) MatcherName() string {
+ return equal
+}
+
+func (m EqualMatcher) GetValue() constant.Value {
+ return m.val.GetValue()
+}
+
+func (m EqualMatcher) GetType() gotypes.Type {
+ return m.val.GetType()
+}
+
+func (m EqualMatcher) GetValueExpr() ast.Expr {
+ return m.val.GetValueExpr()
+}
+
+func (m EqualMatcher) IsValueZero() bool {
+ return m.val.IsValueZero()
+}
+
+func (m EqualMatcher) IsValueInt() bool {
+ return m.val.IsValueInt()
+}
+
+func (m EqualMatcher) IsValueNumeric() bool {
+ return m.val.IsValueNumeric()
+}
+
+func (m EqualMatcher) IsError() bool {
+ return m.val.IsError()
+}
+
+func (m EqualMatcher) IsFunc() bool {
+ return m.val.IsFunc()
+}
+
+func (m EqualMatcher) IsInterface() bool {
+ return m.val.IsInterface()
+}
+
+func (m EqualMatcher) IsPointer() bool {
+ return m.val.IsPointer()
+}
+
+type EqualNilMatcher struct {
+ gotype gotypes.Type
+}
+
+func (EqualNilMatcher) Type() Type {
+ return EqualNilMatcherType | EqualMatcherType | EqualValueMatcherType
+}
+
+func (EqualNilMatcher) MatcherName() string {
+ return equal
+}
+
+func (n EqualNilMatcher) GetType() gotypes.Type {
+ return n.gotype
+}
+
+type EqualTrueMatcher struct{}
+
+func (EqualTrueMatcher) Type() Type {
+ return EqualMatcherType | EqualBoolValueMatcherType | BoolValueTrue
+}
+
+func (EqualTrueMatcher) MatcherName() string {
+ return equal
+}
+
+type EqualFalseMatcher struct{}
+
+func (EqualFalseMatcher) Type() Type {
+ return EqualMatcherType | EqualBoolValueMatcherType | BoolValueFalse
+}
+
+func (EqualFalseMatcher) MatcherName() string {
+ return equal
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/errormatchers.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/errormatchers.go
new file mode 100644
index 0000000000..a493287e01
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/errormatchers.go
@@ -0,0 +1,199 @@
+package matcher
+
+import (
+ "go/ast"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+ "github.com/nunnatsa/ginkgolinter/internal/interfaces"
+)
+
+type HaveOccurredMatcher struct{}
+
+func (m *HaveOccurredMatcher) Type() Type {
+ return HaveOccurredMatcherType
+}
+func (m *HaveOccurredMatcher) MatcherName() string {
+ return haveOccurred
+}
+
+type SucceedMatcher struct{}
+
+func (m *SucceedMatcher) Type() Type {
+ return SucceedMatcherType
+}
+func (m *SucceedMatcher) MatcherName() string {
+ return succeed
+}
+
+type MatchErrorMatcher interface {
+ Info
+ AllowedNumArgs() int
+ NumArgs() int
+}
+
+type InvalidMatchErrorMatcher struct {
+ firstAgr ast.Expr
+ numArgs int
+}
+
+func (m *InvalidMatchErrorMatcher) Type() Type {
+ return MatchErrorMatcherType
+}
+
+func (m *InvalidMatchErrorMatcher) MatcherName() string {
+ return matchError
+}
+
+func (m *InvalidMatchErrorMatcher) AllowedNumArgs() int {
+ return 1
+}
+
+func (m *InvalidMatchErrorMatcher) NumArgs() int {
+ return m.numArgs
+}
+
+func (m *InvalidMatchErrorMatcher) GetValueExpr() ast.Expr {
+ return m.firstAgr
+}
+
+type MatchErrorMatcherWithErr struct {
+ numArgs int
+}
+
+func (m *MatchErrorMatcherWithErr) Type() Type {
+ return MatchErrorMatcherType | ErrMatchWithErr
+}
+
+func (m *MatchErrorMatcherWithErr) MatcherName() string {
+ return matchError
+}
+
+func (m *MatchErrorMatcherWithErr) AllowedNumArgs() int {
+ return 1
+}
+
+func (m *MatchErrorMatcherWithErr) NumArgs() int {
+ return m.numArgs
+}
+
+type MatchErrorMatcherWithErrFunc struct {
+ numArgs int
+ secondArgIsString bool
+}
+
+func (m *MatchErrorMatcherWithErrFunc) Type() Type {
+ return MatchErrorMatcherType | ErrMatchWithErrFunc
+}
+
+func (m *MatchErrorMatcherWithErrFunc) MatcherName() string {
+ return matchError
+}
+
+func (m *MatchErrorMatcherWithErrFunc) AllowedNumArgs() int {
+ return 2
+}
+
+func (m *MatchErrorMatcherWithErrFunc) NumArgs() int {
+ return m.numArgs
+}
+
+func (m *MatchErrorMatcherWithErrFunc) IsSecondArgString() bool {
+ return m.secondArgIsString
+}
+
+type MatchErrorMatcherWithString struct {
+ numArgs int
+}
+
+func (m *MatchErrorMatcherWithString) Type() Type {
+ return MatchErrorMatcherType | ErrMatchWithString
+}
+
+func (m *MatchErrorMatcherWithString) MatcherName() string {
+ return matchError
+}
+
+func (m *MatchErrorMatcherWithString) AllowedNumArgs() int {
+ return 1
+}
+
+func (m *MatchErrorMatcherWithString) NumArgs() int {
+ return m.numArgs
+}
+
+type MatchErrorMatcherWithMatcher struct {
+ numArgs int
+}
+
+func (m *MatchErrorMatcherWithMatcher) Type() Type {
+ return MatchErrorMatcherType | ErrMatchWithMatcher
+}
+
+func (m *MatchErrorMatcherWithMatcher) MatcherName() string {
+ return matchError
+}
+
+func (m *MatchErrorMatcherWithMatcher) AllowedNumArgs() int {
+ return 1
+}
+
+func (m *MatchErrorMatcherWithMatcher) NumArgs() int {
+ return m.numArgs
+}
+
+func newMatchErrorMatcher(args []ast.Expr, pass *analysis.Pass) MatchErrorMatcher {
+ numArgs := len(args)
+ if value.IsExprError(pass, args[0]) {
+ return &MatchErrorMatcherWithErr{numArgs: numArgs}
+ }
+
+ t := pass.TypesInfo.TypeOf(args[0])
+ if isString(args[0], pass) {
+ return &MatchErrorMatcherWithString{numArgs: numArgs}
+ }
+
+ if interfaces.ImplementsGomegaMatcher(t) {
+ return &MatchErrorMatcherWithMatcher{numArgs: numArgs}
+ }
+
+ if isFuncErrBool(t) {
+ isString := false
+ if numArgs > 1 {
+ t2 := pass.TypesInfo.TypeOf(args[1])
+ isString = gotypes.Identical(t2, gotypes.Typ[gotypes.String])
+ }
+ return &MatchErrorMatcherWithErrFunc{numArgs: numArgs, secondArgIsString: isString}
+ }
+
+ return &InvalidMatchErrorMatcher{numArgs: numArgs}
+}
+
+func isString(exp ast.Expr, pass *analysis.Pass) bool {
+ t := pass.TypesInfo.TypeOf(exp)
+ return gotypes.Identical(t, gotypes.Typ[gotypes.String])
+}
+
+// isFuncErrBool checks if a function is with the signature `func(error) bool`
+func isFuncErrBool(t gotypes.Type) bool {
+ sig, ok := t.(*gotypes.Signature)
+ if !ok {
+ return false
+ }
+ if sig.Params().Len() != 1 || sig.Results().Len() != 1 {
+ return false
+ }
+
+ if !interfaces.ImplementsError(sig.Params().At(0).Type()) {
+ return false
+ }
+
+ b, ok := sig.Results().At(0).Type().(*gotypes.Basic)
+ if ok && b.Name() == "bool" && b.Info() == gotypes.IsBoolean && b.Kind() == gotypes.Bool {
+ return true
+ }
+
+ return false
+}
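
A hedged sketch of the signature check isFuncErrBool performs, using only go/types: MatchError accepts a func(error) bool, so the classifier has to recognize exactly that shape. For brevity this version tests identity with the universe error type instead of the interfaces.ImplementsError helper used by the vendored code.

package main

import (
    "fmt"
    "go/token"
    "go/types"
)

// isFuncErrBool reports whether t is a func(error) bool (simplified check).
func isFuncErrBool(t types.Type) bool {
    sig, ok := t.(*types.Signature)
    if !ok || sig.Params().Len() != 1 || sig.Results().Len() != 1 {
        return false
    }
    errType := types.Universe.Lookup("error").Type()
    return types.Identical(sig.Params().At(0).Type(), errType) &&
        types.Identical(sig.Results().At(0).Type(), types.Typ[types.Bool])
}

func main() {
    // build a func(error) bool signature programmatically, just for the demo
    errType := types.Universe.Lookup("error").Type()
    params := types.NewTuple(types.NewVar(token.NoPos, nil, "err", errType))
    results := types.NewTuple(types.NewVar(token.NoPos, nil, "", types.Typ[types.Bool]))
    sig := types.NewSignatureType(nil, nil, nil, params, results, false)

    fmt.Println(isFuncErrBool(sig)) // true
}
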
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/lenmatchers.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/lenmatchers.go
new file mode 100644
index 0000000000..8e4f438e87
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/lenmatchers.go
@@ -0,0 +1,11 @@
+package matcher
+
+type HaveLenZeroMatcher struct{}
+
+func (HaveLenZeroMatcher) Type() Type {
+ return HaveLenZeroMatcherType
+}
+
+func (HaveLenZeroMatcher) MatcherName() string {
+ return haveLen
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcher.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcher.go
new file mode 100644
index 0000000000..7a983cc9e8
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcher.go
@@ -0,0 +1,86 @@
+package matcher
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
+)
+
+const ( // gomega matchers
+ beEmpty = "BeEmpty"
+ beEquivalentTo = "BeEquivalentTo"
+ beFalse = "BeFalse"
+ beIdenticalTo = "BeIdenticalTo"
+ beNil = "BeNil"
+ beNumerically = "BeNumerically"
+ beTrue = "BeTrue"
+ beZero = "BeZero"
+ equal = "Equal"
+ haveLen = "HaveLen"
+ haveValue = "HaveValue"
+ and = "And"
+ or = "Or"
+ withTransform = "WithTransform"
+ matchError = "MatchError"
+ haveOccurred = "HaveOccurred"
+ succeed = "Succeed"
+)
+
+type Matcher struct {
+ funcName string
+ Orig *ast.CallExpr
+ Clone *ast.CallExpr
+ info Info
+ reverseLogic bool
+ handler gomegahandler.Handler
+}
+
+func New(origMatcher, matcherClone *ast.CallExpr, pass *analysis.Pass, handler gomegahandler.Handler) (*Matcher, bool) {
+ reverse := false
+ var assertFuncName string
+ for {
+ info, ok := handler.GetGomegaBasicInfo(origMatcher)
+ if !ok {
+ return nil, false
+ }
+
+ if info.MethodName != "Not" {
+ assertFuncName = info.MethodName
+ break
+ }
+
+ reverse = !reverse
+ origMatcher, ok = origMatcher.Args[0].(*ast.CallExpr)
+ if !ok {
+ return nil, false
+ }
+ matcherClone = matcherClone.Args[0].(*ast.CallExpr)
+ }
+
+ return &Matcher{
+ funcName: assertFuncName,
+ Orig: origMatcher,
+ Clone: matcherClone,
+ info: getMatcherInfo(origMatcher, matcherClone, assertFuncName, pass, handler),
+ reverseLogic: reverse,
+ handler: handler,
+ }, true
+}
+
+func (m *Matcher) ShouldReverseLogic() bool {
+ return m.reverseLogic
+}
+
+func (m *Matcher) GetMatcherInfo() Info {
+ return m.info
+}
+
+func (m *Matcher) ReplaceMatcherFuncName(name string) {
+ m.handler.ReplaceFunction(m.Clone, ast.NewIdent(name))
+}
+
+func (m *Matcher) ReplaceMatcherArgs(newArgs []ast.Expr) {
+ m.Clone.Args = newArgs
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherinfo.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherinfo.go
new file mode 100644
index 0000000000..084226bcca
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherinfo.go
@@ -0,0 +1,148 @@
+package matcher
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+ "github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
+)
+
+type Type uint64
+
+const (
+ Unspecified Type = 1 << iota
+ EqualMatcherType
+ BeZeroMatcherType
+ BeEmptyMatcherType
+ BeTrueMatcherType
+ BeFalseMatcherType
+ BeNumericallyMatcherType
+ HaveLenZeroMatcherType
+ BeEquivalentToMatcherType
+ BeIdenticalToMatcherType
+ BeNilMatcherType
+ MatchErrorMatcherType
+ MultipleMatcherMatherType
+ HaveValueMatherType
+ WithTransformMatherType
+ EqualBoolValueMatcherType
+ EqualValueMatcherType
+ HaveOccurredMatcherType
+ SucceedMatcherType
+ EqualNilMatcherType
+
+ BoolValueFalse
+ BoolValueTrue
+
+ OrMatherType
+ AndMatherType
+
+ ErrMatchWithErr
+ ErrMatchWithErrFunc
+ ErrMatchWithString
+ ErrMatchWithMatcher
+
+ EqualZero
+ GreaterThanZero
+)
+
+type Info interface {
+ Type() Type
+ MatcherName() string
+}
+
+func getMatcherInfo(orig, clone *ast.CallExpr, matcherName string, pass *analysis.Pass, handler gomegahandler.Handler) Info {
+ switch matcherName {
+ case equal:
+ return newEqualMatcher(orig.Args[0], clone.Args[0], pass)
+
+ case beZero:
+ return &BeZeroMatcher{}
+
+ case beEmpty:
+ return &BeEmptyMatcher{}
+
+ case beTrue:
+ return &BeTrueMatcher{}
+
+ case beFalse:
+ return &BeFalseMatcher{}
+
+ case beNil:
+ return &BeNilMatcher{}
+
+ case beNumerically:
+ if len(orig.Args) == 2 {
+ return newBeNumericallyMatcher(orig.Args[0], orig.Args[1], clone.Args[1], pass)
+ }
+
+ case haveLen:
+ if value.GetValuer(orig.Args[0], clone.Args[0], pass).IsValueZero() {
+ return &HaveLenZeroMatcher{}
+ }
+
+ case beEquivalentTo:
+ return &BeEquivalentToMatcher{
+ Value: value.New(orig.Args[0], clone.Args[0], pass),
+ }
+
+ case beIdenticalTo:
+ return &BeIdenticalToMatcher{
+ Value: value.New(orig.Args[0], clone.Args[0], pass),
+ }
+
+ case matchError:
+ return newMatchErrorMatcher(orig.Args, pass)
+
+ case haveValue:
+ if nestedMatcher, ok := getNestedMatcher(orig, clone, 0, pass, handler); ok {
+ return &HaveValueMatcher{
+ nested: nestedMatcher,
+ }
+ }
+
+ case withTransform:
+ if nestedMatcher, ok := getNestedMatcher(orig, clone, 1, pass, handler); ok {
+ return newWithTransformMatcher(orig.Args[0], nestedMatcher, pass)
+ }
+
+ case or, and:
+ matcherType := MultipleMatcherMatherType
+ if matcherName == or {
+ matcherType |= OrMatherType
+ } else {
+ matcherType |= AndMatherType
+ }
+
+ if m, ok := newMultipleMatchersMatcher(matcherType, orig.Args, clone.Args, pass, handler); ok {
+ return m
+ }
+
+ case succeed:
+ return &SucceedMatcher{}
+
+ case haveOccurred:
+ return &HaveOccurredMatcher{}
+
+ }
+
+ return &UnspecifiedMatcher{matcherName: matcherName}
+}
+
+type UnspecifiedMatcher struct {
+ matcherName string
+}
+
+func (UnspecifiedMatcher) Type() Type {
+ return Unspecified
+}
+
+func (u UnspecifiedMatcher) MatcherName() string {
+ return u.matcherName
+}
+
+func (t Type) Is(other Type) bool {
+ return t&other != 0
+}
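
A minimal example (not vendored code) of the bit-flag pattern behind Type and Is above: a concrete matcher reports several flags OR-ed together, and Is answers whether any of the queried flags are set.

package main

import "fmt"

type Type uint64

const (
    EqualMatcherType Type = 1 << iota
    EqualBoolValueMatcherType
    BoolValueTrue
    BoolValueFalse
)

func (t Type) Is(other Type) bool { return t&other != 0 }

func main() {
    // roughly what EqualTrueMatcher.Type() reports: several flags combined
    t := EqualMatcherType | EqualBoolValueMatcherType | BoolValueTrue

    fmt.Println(t.Is(EqualMatcherType)) // true
    fmt.Println(t.Is(BoolValueFalse))   // false
}
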
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherwithnest.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherwithnest.go
new file mode 100644
index 0000000000..cc26e5ac2c
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherwithnest.go
@@ -0,0 +1,66 @@
+package matcher
+
+import (
+ "go/ast"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
+)
+
+type HaveValueMatcher struct {
+ nested *Matcher
+}
+
+func (m *HaveValueMatcher) Type() Type {
+ return HaveValueMatherType
+}
+func (m *HaveValueMatcher) MatcherName() string {
+ return haveValue
+}
+
+func (m *HaveValueMatcher) GetNested() *Matcher {
+ return m.nested
+}
+
+type WithTransformMatcher struct {
+ funcType gotypes.Type
+ nested *Matcher
+}
+
+func (m *WithTransformMatcher) Type() Type {
+ return WithTransformMatherType
+}
+func (m *WithTransformMatcher) MatcherName() string {
+ return withTransform
+}
+
+func (m *WithTransformMatcher) GetNested() *Matcher {
+ return m.nested
+}
+
+func (m *WithTransformMatcher) GetFuncType() gotypes.Type {
+ return m.funcType
+}
+
+func getNestedMatcher(orig, clone *ast.CallExpr, offset int, pass *analysis.Pass, handler gomegahandler.Handler) (*Matcher, bool) {
+ if origNested, ok := orig.Args[offset].(*ast.CallExpr); ok {
+ cloneNested := clone.Args[offset].(*ast.CallExpr)
+
+ return New(origNested, cloneNested, pass, handler)
+ }
+
+ return nil, false
+}
+
+func newWithTransformMatcher(fun ast.Expr, nested *Matcher, pass *analysis.Pass) *WithTransformMatcher {
+ funcType := pass.TypesInfo.TypeOf(fun)
+ if sig, ok := funcType.(*gotypes.Signature); ok && sig.Results().Len() > 0 {
+ funcType = sig.Results().At(0).Type()
+ }
+ return &WithTransformMatcher{
+ funcType: funcType,
+ nested: nested,
+ }
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/multiplematchers.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/multiplematchers.go
new file mode 100644
index 0000000000..9ce0cf5b83
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/multiplematchers.go
@@ -0,0 +1,62 @@
+package matcher
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
+)
+
+type MultipleMatchersMatcher struct {
+ matherType Type
+ matchers []*Matcher
+}
+
+func (m *MultipleMatchersMatcher) Type() Type {
+ return m.matherType
+}
+
+func (m *MultipleMatchersMatcher) MatcherName() string {
+ if m.matherType.Is(OrMatherType) {
+ return or
+ }
+ return and
+}
+
+func newMultipleMatchersMatcher(matherType Type, orig, clone []ast.Expr, pass *analysis.Pass, handler gomegahandler.Handler) (*MultipleMatchersMatcher, bool) {
+ matchers := make([]*Matcher, len(orig))
+
+ for i := range orig {
+ nestedOrig, ok := orig[i].(*ast.CallExpr)
+ if !ok {
+ return nil, false
+ }
+
+ m, ok := New(nestedOrig, clone[i].(*ast.CallExpr), pass, handler)
+ if !ok {
+ return nil, false
+ }
+
+ m.reverseLogic = false
+
+ matchers[i] = m
+ }
+
+ return &MultipleMatchersMatcher{
+ matherType: matherType,
+ matchers: matchers,
+ }, true
+}
+
+func (m *MultipleMatchersMatcher) Len() int {
+ return len(m.matchers)
+}
+
+func (m *MultipleMatchersMatcher) At(i int) *Matcher {
+ if i >= len(m.matchers) {
+ panic("index out of range")
+ }
+
+ return m.matchers[i]
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/value/value.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/value/value.go
new file mode 100644
index 0000000000..ba74722d27
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/value/value.go
@@ -0,0 +1,225 @@
+package value
+
+import (
+ "go/ast"
+ "go/constant"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/interfaces"
+)
+
+type Valuer interface {
+ GetValue() constant.Value
+ GetType() gotypes.Type
+ GetValueExpr() ast.Expr
+ IsValueZero() bool
+ IsValueInt() bool
+ IsValueNumeric() bool
+ IsError() bool
+ IsFunc() bool
+ IsInterface() bool
+ IsPointer() bool
+}
+
+func GetValuer(orig, clone ast.Expr, pass *analysis.Pass) Valuer {
+ val := New(orig, clone, pass)
+ unspecified := UnspecifiedValue{
+ Value: val,
+ }
+
+ if orig == nil {
+ return unspecified
+ }
+
+ if IsExprError(pass, orig) {
+ return &ErrValue{
+ Value: val,
+ err: clone,
+ }
+ }
+
+ if val.GetValue() == nil || !val.tv.IsValue() {
+ return unspecified
+ }
+
+ if val.GetValue().Kind() == constant.Int {
+ num, ok := constant.Int64Val(val.GetValue())
+ if !ok {
+ return unspecified
+ }
+ return &IntValue{
+ Value: val,
+ val: num,
+ }
+ }
+
+ return unspecified
+}
+
+type Value struct {
+ expr ast.Expr
+ tv gotypes.TypeAndValue
+}
+
+func New(orig, clone ast.Expr, pass *analysis.Pass) Value {
+ tv := pass.TypesInfo.Types[orig]
+
+ return Value{
+ expr: clone,
+ tv: tv,
+ }
+}
+
+func (v Value) GetValueExpr() ast.Expr {
+ return v.expr
+}
+
+func (v Value) GetValue() constant.Value {
+ return v.tv.Value
+}
+
+func (v Value) GetType() gotypes.Type {
+ return v.tv.Type
+}
+
+func (v Value) IsInterface() bool {
+ return gotypes.IsInterface(v.tv.Type)
+}
+
+func (v Value) IsPointer() bool {
+ return Is[*gotypes.Pointer](v.tv.Type)
+}
+
+func (v Value) IsNil() bool {
+ return v.tv.IsNil()
+}
+
+type UnspecifiedValue struct {
+ Value
+}
+
+func (u UnspecifiedValue) IsValueZero() bool {
+ return false
+}
+
+func (u UnspecifiedValue) IsValueInt() bool {
+ return false
+}
+
+func (u UnspecifiedValue) IsValueNumeric() bool {
+ return false
+}
+
+func (u UnspecifiedValue) IsError() bool {
+ return false
+}
+
+func (u UnspecifiedValue) IsFunc() bool {
+ return isFunc(u.GetValueExpr())
+}
+
+type ErrValue struct {
+ Value
+ err ast.Expr
+}
+
+func (e ErrValue) IsValueZero() bool {
+ return false
+}
+
+func (e ErrValue) IsValueInt() bool {
+ return false
+}
+
+func (e ErrValue) IsValueNumeric() bool {
+ return false
+}
+
+func (e ErrValue) IsError() bool {
+ return true
+}
+
+func (e ErrValue) IsFunc() bool {
+ return isFunc(e.GetValueExpr())
+}
+
+type IntValuer interface {
+ GetIntValue() int64
+}
+
+type IntValue struct {
+ Value
+ val int64
+}
+
+func (i IntValue) IsValueZero() bool {
+ return i.val == 0
+}
+
+func (i IntValue) IsValueInt() bool {
+ return true
+}
+
+func (i IntValue) IsValueNumeric() bool {
+ return true
+}
+
+func (i IntValue) IsError() bool {
+ return false
+}
+
+func (i IntValue) IsFunc() bool {
+ return false
+}
+
+func (i IntValue) GetIntValue() int64 {
+ return i.val
+}
+
+func isFunc(exp ast.Expr) bool {
+ return Is[*ast.CallExpr](exp)
+}
+
+func Is[T any](x any) bool {
+ _, matchType := x.(T)
+ return matchType
+}
+
+func IsExprError(pass *analysis.Pass, expr ast.Expr) bool {
+ actualArgType := pass.TypesInfo.TypeOf(expr)
+ switch t := actualArgType.(type) {
+ case *gotypes.Named:
+ return interfaces.ImplementsError(actualArgType)
+
+ case *gotypes.Pointer:
+ if interfaces.ImplementsError(t) {
+ return true
+ }
+
+ if tt, ok := t.Elem().(*gotypes.Named); ok {
+ return interfaces.ImplementsError(tt)
+ }
+
+ case *gotypes.Tuple:
+ if t.Len() > 0 {
+ switch t0 := t.At(0).Type().(type) {
+ case *gotypes.Named, *gotypes.Pointer:
+ if interfaces.ImplementsError(t0) {
+ return true
+ }
+ }
+ }
+ }
+ return false
+}
+
+func IsNil(exp ast.Expr, pass *analysis.Pass) bool {
+ id, ok := exp.(*ast.Ident)
+ if !ok {
+ return false
+ }
+
+ return pass.TypesInfo.Types[id].IsNil()
+}
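
A small sketch (not vendored code) of the constant extraction GetValuer relies on: go/types records a constant.Value for constant expressions, and constant.Int64Val converts it to an int64 when it fits, which is what backs the IntValue and IsValueZero checks above. types.Eval with a nil package is used here only to keep the snippet self-contained.

package main

import (
    "fmt"
    "go/constant"
    "go/token"
    "go/types"
)

func main() {
    fset := token.NewFileSet()
    // type-check a constant expression against the universe scope
    tv, err := types.Eval(fset, nil, token.NoPos, "2 + 3")
    if err != nil {
        panic(err)
    }

    if tv.Value != nil && tv.Value.Kind() == constant.Int {
        n, exact := constant.Int64Val(tv.Value)
        fmt.Println(n, exact, n == 0) // 5 true false
    }
}
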
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/formatter/formatter.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/formatter/formatter.go
new file mode 100644
index 0000000000..64f3d99ad6
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/formatter/formatter.go
@@ -0,0 +1,22 @@
+package formatter
+
+import (
+ "bytes"
+ "go/ast"
+ "go/printer"
+ "go/token"
+)
+
+type GoFmtFormatter struct {
+ fset *token.FileSet
+}
+
+func NewGoFmtFormatter(fset *token.FileSet) *GoFmtFormatter {
+ return &GoFmtFormatter{fset: fset}
+}
+
+func (f GoFmtFormatter) Format(exp ast.Expr) string {
+ var buf bytes.Buffer
+ _ = printer.Fprint(&buf, f.fset, exp)
+ return buf.String()
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/dothandler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/dothandler.go
new file mode 100644
index 0000000000..9c54b43346
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/dothandler.go
@@ -0,0 +1,36 @@
+package ginkgohandler
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+// dotHandler is used when importing ginkgo with dot; i.e.
+// import . "github.com/onsi/ginkgo"
+type dotHandler struct{}
+
+func (h dotHandler) HandleGinkgoSpecs(expr ast.Expr, config types.Config, pass *analysis.Pass) bool {
+ return handleGinkgoSpecs(expr, config, pass, h)
+}
+
+func (h dotHandler) getFocusContainerName(exp *ast.CallExpr) (bool, *ast.Ident) {
+ if fun, ok := exp.Fun.(*ast.Ident); ok {
+ return isFocusContainer(fun.Name), fun
+ }
+ return false, nil
+}
+
+func (h dotHandler) isWrapContainer(exp *ast.CallExpr) bool {
+ if fun, ok := exp.Fun.(*ast.Ident); ok {
+ return isWrapContainer(fun.Name)
+ }
+ return false
+}
+
+func (h dotHandler) isFocusSpec(exp ast.Expr) bool {
+ id, ok := exp.(*ast.Ident)
+ return ok && id.Name == focusSpec
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/ginkgoinfo.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/ginkgoinfo.go
new file mode 100644
index 0000000000..d8bb753992
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/ginkgoinfo.go
@@ -0,0 +1,63 @@
+package ginkgohandler
+
+const ( // container names
+ describe = "Describe"
+ pdescribe = "PDescribe"
+ xdescribe = "XDescribe"
+ fdescribe = "FDescribe"
+
+ when = "When"
+ pwhen = "PWhen"
+ xwhen = "XWhen"
+ fwhen = "FWhen"
+
+ contextContainer = "Context"
+ pcontext = "PContext"
+ xcontext = "XContext"
+ fcontext = "FContext"
+
+ it = "It"
+ pit = "PIt"
+ xit = "XIt"
+ fit = "FIt"
+
+ describeTable = "DescribeTable"
+ pdescribeTable = "PDescribeTable"
+ xdescribeTable = "XDescribeTable"
+ fdescribeTable = "FDescribeTable"
+
+ entry = "Entry"
+ pentry = "PEntry"
+ xentry = "XEntry"
+ fentry = "FEntry"
+)
+
+func isFocusContainer(name string) bool {
+ switch name {
+ case fdescribe, fcontext, fwhen, fit, fdescribeTable, fentry:
+ return true
+ }
+ return false
+}
+
+func isContainer(name string) bool {
+ switch name {
+ case it, when, contextContainer, describe, describeTable, entry,
+ pit, pwhen, pcontext, pdescribe, pdescribeTable, pentry,
+ xit, xwhen, xcontext, xdescribe, xdescribeTable, xentry:
+ return true
+ }
+ return isFocusContainer(name)
+}
+
+func isWrapContainer(name string) bool {
+ switch name {
+ case when, contextContainer, describe,
+ fwhen, fcontext, fdescribe,
+ pwhen, pcontext, pdescribe,
+ xwhen, xcontext, xdescribe:
+ return true
+ }
+
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handler.go
index f10d831840..c44e3e8d8c 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handler.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handler.go
@@ -2,6 +2,10 @@ package ginkgohandler
import (
"go/ast"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/types"
)
const (
@@ -14,116 +18,31 @@ const (
// Handler provide different handling, depend on the way ginkgo was imported, whether
// in imported with "." name, custom name or without any name.
type Handler interface {
- GetFocusContainerName(*ast.CallExpr) (bool, *ast.Ident)
- IsWrapContainer(*ast.CallExpr) bool
- IsFocusSpec(ident ast.Expr) bool
+ HandleGinkgoSpecs(ast.Expr, types.Config, *analysis.Pass) bool
+ getFocusContainerName(*ast.CallExpr) (bool, *ast.Ident)
+ isWrapContainer(*ast.CallExpr) bool
+ isFocusSpec(ident ast.Expr) bool
}
// GetGinkgoHandler returns a ginkgor handler according to the way ginkgo was imported in the specific file
func GetGinkgoHandler(file *ast.File) Handler {
for _, imp := range file.Imports {
- if imp.Path.Value != importPath && imp.Path.Value != importPathV2 {
- continue
- }
+ switch imp.Path.Value {
+
+ case importPath, importPathV2:
+ switch name := imp.Name.String(); {
+ case name == ".":
+ return dotHandler{}
+ case name == "": // import with no local name
+ return nameHandler("ginkgo")
+ default:
+ return nameHandler(name)
+ }
- switch name := imp.Name.String(); {
- case name == ".":
- return dotHandler{}
- case name == "": // import with no local name
- return nameHandler("ginkgo")
default:
- return nameHandler(name)
- }
- }
-
- return nil // no ginkgo import; this file does not use ginkgo
-}
-
-// dotHandler is used when importing ginkgo with dot; i.e.
-// import . "github.com/onsi/ginkgo"
-type dotHandler struct{}
-
-func (h dotHandler) GetFocusContainerName(exp *ast.CallExpr) (bool, *ast.Ident) {
- if fun, ok := exp.Fun.(*ast.Ident); ok {
- return isFocusContainer(fun.Name), fun
- }
- return false, nil
-}
-
-func (h dotHandler) IsWrapContainer(exp *ast.CallExpr) bool {
- if fun, ok := exp.Fun.(*ast.Ident); ok {
- return IsWrapContainer(fun.Name)
- }
- return false
-}
-
-func (h dotHandler) IsFocusSpec(exp ast.Expr) bool {
- id, ok := exp.(*ast.Ident)
- return ok && id.Name == focusSpec
-}
-
-// nameHandler is used when importing ginkgo without name; i.e.
-// import "github.com/onsi/ginkgo"
-//
-// or with a custom name; e.g.
-// import customname "github.com/onsi/ginkgo"
-type nameHandler string
-
-func (h nameHandler) GetFocusContainerName(exp *ast.CallExpr) (bool, *ast.Ident) {
- if sel, ok := exp.Fun.(*ast.SelectorExpr); ok {
- if id, ok := sel.X.(*ast.Ident); ok && id.Name == string(h) {
- return isFocusContainer(sel.Sel.Name), sel.Sel
- }
- }
- return false, nil
-}
-
-func (h nameHandler) IsWrapContainer(exp *ast.CallExpr) bool {
- if sel, ok := exp.Fun.(*ast.SelectorExpr); ok {
- if id, ok := sel.X.(*ast.Ident); ok && id.Name == string(h) {
- return IsWrapContainer(sel.Sel.Name)
- }
- }
- return false
-
-}
-
-func (h nameHandler) IsFocusSpec(exp ast.Expr) bool {
- if selExp, ok := exp.(*ast.SelectorExpr); ok {
- if x, ok := selExp.X.(*ast.Ident); ok && x.Name == string(h) {
- return selExp.Sel.Name == focusSpec
+ continue
}
}
- return false
-}
-
-func isFocusContainer(name string) bool {
- switch name {
- case "FDescribe", "FContext", "FWhen", "FIt", "FDescribeTable", "FEntry":
- return true
- }
- return false
-}
-
-func IsContainer(name string) bool {
- switch name {
- case "It", "When", "Context", "Describe", "DescribeTable", "Entry",
- "PIt", "PWhen", "PContext", "PDescribe", "PDescribeTable", "PEntry",
- "XIt", "XWhen", "XContext", "XDescribe", "XDescribeTable", "XEntry":
- return true
- }
- return isFocusContainer(name)
-}
-
-func IsWrapContainer(name string) bool {
- switch name {
- case "When", "Context", "Describe",
- "FWhen", "FContext", "FDescribe",
- "PWhen", "PContext", "PDescribe",
- "XWhen", "XContext", "XDescribe":
- return true
- }
-
- return false
+ return nil
}
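
For context, a standalone sketch of the information GetGinkgoHandler switches on: each import's path plus its local name, which decides between the dot handler and a name handler. The sample source below is an assumed illustration, not part of the linter.

package main

import (
    "fmt"
    "go/parser"
    "go/token"
)

const src = `package sample

import (
    . "github.com/onsi/ginkgo/v2"
    g "github.com/onsi/gomega"
    "context"
)
`

func main() {
    fset := token.NewFileSet()
    file, err := parser.ParseFile(fset, "sample.go", src, parser.ImportsOnly)
    if err != nil {
        panic(err)
    }

    for _, imp := range file.Imports {
        local := "(none)" // no local name: the package is referenced by its default name
        if imp.Name != nil {
            local = imp.Name.Name
        }
        fmt.Printf("path=%s local=%s\n", imp.Path.Value, local)
    }
}
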
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handling.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handling.go
new file mode 100644
index 0000000000..322bbc4533
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handling.go
@@ -0,0 +1,195 @@
+package ginkgohandler
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const (
+ linterName = "ginkgo-linter"
+ focusContainerFound = linterName + ": Focus container found. This is used only for local debug and should not be part of the actual source code. Consider to replace with %q"
+ focusSpecFound = linterName + ": Focus spec found. This is used only for local debug and should not be part of the actual source code. Consider to remove it"
+ useBeforeEachTemplate = "use BeforeEach() to assign variable %s"
+)
+
+func handleGinkgoSpecs(expr ast.Expr, config types.Config, pass *analysis.Pass, ginkgoHndlr Handler) bool {
+ goDeeper := false
+ if exp, ok := expr.(*ast.CallExpr); ok {
+ if config.ForbidFocus && checkFocusContainer(pass, ginkgoHndlr, exp) {
+ goDeeper = true
+ }
+
+ if config.ForbidSpecPollution && checkAssignmentsInContainer(pass, ginkgoHndlr, exp) {
+ goDeeper = true
+ }
+ }
+ return goDeeper
+}
+
+func checkAssignmentsInContainer(pass *analysis.Pass, ginkgoHndlr Handler, exp *ast.CallExpr) bool {
+ foundSomething := false
+ if ginkgoHndlr.isWrapContainer(exp) {
+ for _, arg := range exp.Args {
+ if fn, ok := arg.(*ast.FuncLit); ok {
+ if fn.Body != nil {
+ if checkAssignments(pass, fn.Body.List) {
+ foundSomething = true
+ }
+ break
+ }
+ }
+ }
+ }
+
+ return foundSomething
+}
+
+func checkAssignments(pass *analysis.Pass, list []ast.Stmt) bool {
+ foundSomething := false
+ for _, stmt := range list {
+ switch st := stmt.(type) {
+ case *ast.DeclStmt:
+ if checkAssignmentDecl(pass, st) {
+ foundSomething = true
+ }
+
+ case *ast.AssignStmt:
+ if checkAssignmentAssign(pass, st) {
+ foundSomething = true
+ }
+
+ case *ast.IfStmt:
+ if checkAssignmentIf(pass, st) {
+ foundSomething = true
+ }
+ }
+ }
+
+ return foundSomething
+}
+
+func checkAssignmentsValues(pass *analysis.Pass, names []*ast.Ident, values []ast.Expr) bool {
+ foundSomething := false
+ for i, val := range values {
+ if !is[*ast.FuncLit](val) {
+ reportNoFix(pass, names[i].Pos(), useBeforeEachTemplate, names[i].Name)
+ foundSomething = true
+ }
+ }
+
+ return foundSomething
+}
+
+func checkAssignmentDecl(pass *analysis.Pass, ds *ast.DeclStmt) bool {
+ foundSomething := false
+ if gen, ok := ds.Decl.(*ast.GenDecl); ok {
+ if gen.Tok != token.VAR {
+ return false
+ }
+ for _, spec := range gen.Specs {
+ if valSpec, ok := spec.(*ast.ValueSpec); ok {
+ if checkAssignmentsValues(pass, valSpec.Names, valSpec.Values) {
+ foundSomething = true
+ }
+ }
+ }
+ }
+
+ return foundSomething
+}
+
+func checkAssignmentAssign(pass *analysis.Pass, as *ast.AssignStmt) bool {
+ foundSomething := false
+ for i, val := range as.Rhs {
+ if !is[*ast.FuncLit](val) {
+ if id, isIdent := as.Lhs[i].(*ast.Ident); isIdent && id.Name != "_" {
+ reportNoFix(pass, id.Pos(), useBeforeEachTemplate, id.Name)
+ foundSomething = true
+ }
+ }
+ }
+ return foundSomething
+}
+
+func checkAssignmentIf(pass *analysis.Pass, is *ast.IfStmt) bool {
+ foundSomething := false
+
+ if is.Body != nil {
+ if checkAssignments(pass, is.Body.List) {
+ foundSomething = true
+ }
+ }
+ if is.Else != nil {
+ if block, isBlock := is.Else.(*ast.BlockStmt); isBlock {
+ if checkAssignments(pass, block.List) {
+ foundSomething = true
+ }
+ }
+ }
+
+ return foundSomething
+}
+
+func checkFocusContainer(pass *analysis.Pass, handler Handler, exp *ast.CallExpr) bool {
+ foundFocus := false
+ isFocus, id := handler.getFocusContainerName(exp)
+ if isFocus {
+ reportNewName(pass, id, id.Name[1:], id.Name)
+ foundFocus = true
+ }
+
+ if id != nil && isContainer(id.Name) {
+ for _, arg := range exp.Args {
+ if handler.isFocusSpec(arg) {
+ reportNoFix(pass, arg.Pos(), focusSpecFound)
+ foundFocus = true
+ } else if callExp, ok := arg.(*ast.CallExpr); ok {
+ if checkFocusContainer(pass, handler, callExp) { // handle table entries
+ foundFocus = true
+ }
+ }
+ }
+ }
+
+ return foundFocus
+}
+
+func reportNewName(pass *analysis.Pass, id *ast.Ident, newName string, oldExpr string) {
+ pass.Report(analysis.Diagnostic{
+ Pos: id.Pos(),
+ Message: fmt.Sprintf(focusContainerFound, newName),
+ SuggestedFixes: []analysis.SuggestedFix{
+ {
+ Message: fmt.Sprintf("should replace %s with %s", oldExpr, newName),
+ TextEdits: []analysis.TextEdit{
+ {
+ Pos: id.Pos(),
+ End: id.End(),
+ NewText: []byte(newName),
+ },
+ },
+ },
+ },
+ })
+}
+
+func reportNoFix(pass *analysis.Pass, pos token.Pos, message string, args ...any) {
+ if len(args) > 0 {
+ message = fmt.Sprintf(message, args...)
+ }
+
+ pass.Report(analysis.Diagnostic{
+ Pos: pos,
+ Message: message,
+ })
+}
+
+func is[T any](x any) bool {
+ _, matchType := x.(T)
+ return matchType
+}
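
An assumed test snippet (not vendored code; it presumes the ginkgo v2 module is available) showing the two findings handleGinkgoSpecs reports: a focused container left in the code, for which the suggested fix strips the leading F, and a container-level assignment that the spec-pollution check says should move into BeforeEach.

package sample

import (
    . "github.com/onsi/ginkgo/v2"
)

var _ = FDescribe("payments", func() { // focus container: suggested fix replaces FDescribe with Describe
    client := newClient() // flagged: "use BeforeEach() to assign variable client"

    It("charges the card", func() {
        _ = client
    })
})

func newClient() *struct{} { return &struct{}{} }
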
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/namehandler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/namehandler.go
new file mode 100644
index 0000000000..2ef9fe703c
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/namehandler.go
@@ -0,0 +1,49 @@
+package ginkgohandler
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+// nameHandler is used when importing ginkgo without a name; i.e.
+// import "github.com/onsi/ginkgo"
+//
+// or with a custom name; e.g.
+// import customname "github.com/onsi/ginkgo"
+type nameHandler string
+
+func (h nameHandler) HandleGinkgoSpecs(expr ast.Expr, config types.Config, pass *analysis.Pass) bool {
+ return handleGinkgoSpecs(expr, config, pass, h)
+}
+
+func (h nameHandler) getFocusContainerName(exp *ast.CallExpr) (bool, *ast.Ident) {
+ if sel, ok := exp.Fun.(*ast.SelectorExpr); ok {
+ if id, ok := sel.X.(*ast.Ident); ok && id.Name == string(h) {
+ return isFocusContainer(sel.Sel.Name), sel.Sel
+ }
+ }
+ return false, nil
+}
+
+func (h nameHandler) isWrapContainer(exp *ast.CallExpr) bool {
+ if sel, ok := exp.Fun.(*ast.SelectorExpr); ok {
+ if id, ok := sel.X.(*ast.Ident); ok && id.Name == string(h) {
+ return isWrapContainer(sel.Sel.Name)
+ }
+ }
+ return false
+
+}
+
+func (h nameHandler) isFocusSpec(exp ast.Expr) bool {
+ if selExp, ok := exp.(*ast.SelectorExpr); ok {
+ if x, ok := selExp.X.(*ast.Ident); ok && x.Name == string(h) {
+ return selExp.Sel.Name == focusSpec
+ }
+ }
+
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgoinfo/ginkgoinfo.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgoinfo/ginkgoinfo.go
new file mode 100644
index 0000000000..bdc8b2e16c
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgoinfo/ginkgoinfo.go
@@ -0,0 +1,26 @@
+package ginkgoinfo
+
+import (
+ gotypes "go/types"
+ "strings"
+)
+
+const (
+ ctxTypeName = "context.Context"
+ ginkgoCtxSuffix = "github.com/onsi/ginkgo/v2/internal.SpecContext"
+)
+
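+// IsGinkgoContext reports whether t is the standard context.Context type or
+// ginkgo's SpecContext.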
+func IsGinkgoContext(t gotypes.Type) bool {
+ maybeCtx := gotypes.Unalias(t)
+
+ typeName := maybeCtx.String()
+ if typeName == ctxTypeName {
+ return true
+ }
+
+ if strings.HasSuffix(typeName, ginkgoCtxSuffix) {
+ return true
+ }
+
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/dothandler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/dothandler.go
new file mode 100644
index 0000000000..8ab87c76e9
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/dothandler.go
@@ -0,0 +1,109 @@
+package gomegahandler
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/gomegainfo"
+)
+
+// dotHandler is used when importing gomega with a dot; i.e.
+// import . "github.com/onsi/gomega"
+type dotHandler struct {
+ pass *analysis.Pass
+}
+
+// GetGomegaBasicInfo returns the name of the gomega function (e.g. `Expect`), plus some additional info
+func (h dotHandler) GetGomegaBasicInfo(expr *ast.CallExpr) (*GomegaBasicInfo, bool) {
+ info := &GomegaBasicInfo{}
+ for {
+ switch actualFunc := expr.Fun.(type) {
+ case *ast.Ident:
+ info.MethodName = actualFunc.Name
+ return info, true
+ case *ast.SelectorExpr:
+ if h.isGomegaVar(actualFunc.X) {
+ info.UseGomegaVar = true
+ info.MethodName = actualFunc.Sel.Name
+ return info, true
+ }
+
+ if actualFunc.Sel.Name == "Error" {
+ info.HasErrorMethod = true
+ }
+
+ if x, ok := actualFunc.X.(*ast.CallExpr); ok {
+ expr = x
+ } else {
+ return nil, false
+ }
+ default:
+ return nil, false
+ }
+ }
+}
+
+// ReplaceFunction replaces the function with another one, for fix suggestions
+func (dotHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) {
+ switch f := caller.Fun.(type) {
+ case *ast.Ident:
+ caller.Fun = newExpr
+ case *ast.SelectorExpr:
+ f.Sel = newExpr
+ }
+}
+
+func (dotHandler) GetNewWrapperMatcher(name string, existing *ast.CallExpr) *ast.CallExpr {
+ return &ast.CallExpr{
+ Fun: ast.NewIdent(name),
+ Args: []ast.Expr{existing},
+ }
+}
+
+func (h dotHandler) GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr {
+ actualExpr, ok := assertionFunc.X.(*ast.CallExpr)
+ if !ok {
+ return nil
+ }
+
+ switch fun := actualExpr.Fun.(type) {
+ case *ast.Ident:
+ return actualExpr
+ case *ast.SelectorExpr:
+ if gomegainfo.IsActualMethod(fun.Sel.Name) {
+ if h.isGomegaVar(fun.X) {
+ return actualExpr
+ }
+ } else {
+ return h.GetActualExpr(fun)
+ }
+ }
+ return nil
+}
+
+func (h dotHandler) GetActualExprClone(origFunc, funcClone *ast.SelectorExpr) *ast.CallExpr {
+ actualExpr, ok := funcClone.X.(*ast.CallExpr)
+ if !ok {
+ return nil
+ }
+
+ switch funClone := actualExpr.Fun.(type) {
+ case *ast.Ident:
+ return actualExpr
+ case *ast.SelectorExpr:
+ origFun := origFunc.X.(*ast.CallExpr).Fun.(*ast.SelectorExpr)
+ if gomegainfo.IsActualMethod(funClone.Sel.Name) {
+ if h.isGomegaVar(origFun.X) {
+ return actualExpr
+ }
+ } else {
+ return h.GetActualExprClone(origFun, funClone)
+ }
+ }
+ return nil
+}
+
+func (h dotHandler) isGomegaVar(x ast.Expr) bool {
+ return gomegainfo.IsGomegaVar(x, h.pass)
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/handler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/handler.go
index 4290e73736..881ec87896 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/handler.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/handler.go
@@ -2,7 +2,8 @@ package gomegahandler
import (
"go/ast"
- "go/token"
+
+ "golang.org/x/tools/go/analysis"
)
const (
@@ -13,19 +14,25 @@ const (
// in imported with "." name, custom name or without any name.
type Handler interface {
- // GetActualFuncName returns the name of the gomega function, e.g. `Expect`
- GetActualFuncName(*ast.CallExpr) (string, bool)
+ // GetGomegaBasicInfo returns the name of the gomega function (e.g. `Expect`), plus some additional info
+ GetGomegaBasicInfo(*ast.CallExpr) (*GomegaBasicInfo, bool)
// ReplaceFunction replaces the function with another one, for fix suggestions
ReplaceFunction(*ast.CallExpr, *ast.Ident)
- getDefFuncName(expr *ast.CallExpr) string
+ GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr
- getFieldType(field *ast.Field) string
+ GetActualExprClone(origFunc, funcClone *ast.SelectorExpr) *ast.CallExpr
- GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr
+ GetNewWrapperMatcher(name string, existing *ast.CallExpr) *ast.CallExpr
+}
+
+type GomegaBasicInfo struct {
+ MethodName string
+ UseGomegaVar bool
+ HasErrorMethod bool
}
// GetGomegaHandler returns a gomega handler according to the way gomega was imported in the specific file
-func GetGomegaHandler(file *ast.File) Handler {
+func GetGomegaHandler(file *ast.File, pass *analysis.Pass) Handler {
for _, imp := range file.Imports {
if imp.Path.Value != importPath {
continue
@@ -33,209 +40,15 @@ func GetGomegaHandler(file *ast.File) Handler {
switch name := imp.Name.String(); {
case name == ".":
- return dotHandler{}
+ return &dotHandler{
+ pass: pass,
+ }
case name == "": // import with no local name
- return nameHandler("gomega")
+ return &nameHandler{name: "gomega", pass: pass}
default:
- return nameHandler(name)
+ return &nameHandler{name: name, pass: pass}
}
}
return nil // no gomega import; this file does not use gomega
}
-
-// dotHandler is used when importing gomega with dot; i.e.
-// import . "github.com/onsi/gomega"
-type dotHandler struct{}
-
-// GetActualFuncName returns the name of the gomega function, e.g. `Expect`
-func (h dotHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) {
- switch actualFunc := expr.Fun.(type) {
- case *ast.Ident:
- return actualFunc.Name, true
- case *ast.SelectorExpr:
- if isGomegaVar(actualFunc.X, h) {
- return actualFunc.Sel.Name, true
- }
-
- if x, ok := actualFunc.X.(*ast.CallExpr); ok {
- return h.GetActualFuncName(x)
- }
-
- case *ast.CallExpr:
- return h.GetActualFuncName(actualFunc)
- }
- return "", false
-}
-
-// ReplaceFunction replaces the function with another one, for fix suggestions
-func (dotHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) {
- switch f := caller.Fun.(type) {
- case *ast.Ident:
- caller.Fun = newExpr
- case *ast.SelectorExpr:
- f.Sel = newExpr
- }
-}
-
-func (dotHandler) getDefFuncName(expr *ast.CallExpr) string {
- if f, ok := expr.Fun.(*ast.Ident); ok {
- return f.Name
- }
- return ""
-}
-
-func (dotHandler) getFieldType(field *ast.Field) string {
- switch t := field.Type.(type) {
- case *ast.Ident:
- return t.Name
- case *ast.StarExpr:
- if name, ok := t.X.(*ast.Ident); ok {
- return name.Name
- }
- }
- return ""
-}
-
-// nameHandler is used when importing gomega without name; i.e.
-// import "github.com/onsi/gomega"
-//
-// or with a custom name; e.g.
-// import customname "github.com/onsi/gomega"
-type nameHandler string
-
-// GetActualFuncName returns the name of the gomega function, e.g. `Expect`
-func (g nameHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) {
- selector, ok := expr.Fun.(*ast.SelectorExpr)
- if !ok {
- return "", false
- }
-
- switch x := selector.X.(type) {
- case *ast.Ident:
- if x.Name != string(g) {
- if !isGomegaVar(x, g) {
- return "", false
- }
- }
-
- return selector.Sel.Name, true
-
- case *ast.CallExpr:
- return g.GetActualFuncName(x)
- }
-
- return "", false
-}
-
-// ReplaceFunction replaces the function with another one, for fix suggestions
-func (nameHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) {
- caller.Fun.(*ast.SelectorExpr).Sel = newExpr
-}
-
-func (g nameHandler) getDefFuncName(expr *ast.CallExpr) string {
- if sel, ok := expr.Fun.(*ast.SelectorExpr); ok {
- if f, ok := sel.X.(*ast.Ident); ok && f.Name == string(g) {
- return sel.Sel.Name
- }
- }
- return ""
-}
-
-func (g nameHandler) getFieldType(field *ast.Field) string {
- switch t := field.Type.(type) {
- case *ast.SelectorExpr:
- if id, ok := t.X.(*ast.Ident); ok {
- if id.Name == string(g) {
- return t.Sel.Name
- }
- }
- case *ast.StarExpr:
- if sel, ok := t.X.(*ast.SelectorExpr); ok {
- if x, ok := sel.X.(*ast.Ident); ok && x.Name == string(g) {
- return sel.Sel.Name
- }
- }
-
- }
- return ""
-}
-
-func isGomegaVar(x ast.Expr, handler Handler) bool {
- if i, ok := x.(*ast.Ident); ok {
- if i.Obj != nil && i.Obj.Kind == ast.Var {
- switch decl := i.Obj.Decl.(type) {
- case *ast.AssignStmt:
- if decl.Tok == token.DEFINE {
- if defFunc, ok := decl.Rhs[0].(*ast.CallExpr); ok {
- fName := handler.getDefFuncName(defFunc)
- switch fName {
- case "NewGomega", "NewWithT", "NewGomegaWithT":
- return true
- }
- }
- }
- case *ast.Field:
- name := handler.getFieldType(decl)
- switch name {
- case "Gomega", "WithT", "GomegaWithT":
- return true
- }
- }
- }
- }
- return false
-}
-
-func (h dotHandler) GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr {
- actualExpr, ok := assertionFunc.X.(*ast.CallExpr)
- if !ok {
- return nil
- }
-
- switch fun := actualExpr.Fun.(type) {
- case *ast.Ident:
- return actualExpr
- case *ast.SelectorExpr:
- if isHelperMethods(fun.Sel.Name) {
- return h.GetActualExpr(fun)
- }
- if isGomegaVar(fun.X, h) {
- return actualExpr
- }
- }
- return nil
-}
-
-func (g nameHandler) GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr {
- actualExpr, ok := assertionFunc.X.(*ast.CallExpr)
- if !ok {
- return nil
- }
-
- switch fun := actualExpr.Fun.(type) {
- case *ast.Ident:
- return actualExpr
- case *ast.SelectorExpr:
- if x, ok := fun.X.(*ast.Ident); ok && x.Name == string(g) {
- return actualExpr
- }
- if isHelperMethods(fun.Sel.Name) {
- return g.GetActualExpr(fun)
- }
-
- if isGomegaVar(fun.X, g) {
- return actualExpr
- }
- }
- return nil
-}
-
-func isHelperMethods(funcName string) bool {
- switch funcName {
- case "WithOffset", "WithTimeout", "WithPolling", "Within", "ProbeEvery", "WithContext", "WithArguments", "MustPassRepeatedly":
- return true
- }
-
- return false
-}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/namedhandler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/namedhandler.go
new file mode 100644
index 0000000000..61c471f4c2
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/namedhandler.go
@@ -0,0 +1,123 @@
+package gomegahandler
+
+import (
+ "go/ast"
+
+ "github.com/nunnatsa/ginkgolinter/internal/gomegainfo"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+// nameHandler is used when importing gomega without a name; i.e.
+// import "github.com/onsi/gomega"
+//
+// or with a custom name; e.g.
+// import customname "github.com/onsi/gomega"
+type nameHandler struct {
+ name string
+ pass *analysis.Pass
+}
+
+// GetGomegaBasicInfo returns the name of the gomega function (e.g. `Expect`), plus some additional info
+func (g nameHandler) GetGomegaBasicInfo(expr *ast.CallExpr) (*GomegaBasicInfo, bool) {
+ info := &GomegaBasicInfo{}
+ for {
+ selector, ok := expr.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return nil, false
+ }
+
+ if selector.Sel.Name == "Error" {
+ info.HasErrorMethod = true
+ }
+
+ switch x := selector.X.(type) {
+ case *ast.Ident:
+ if x.Name != g.name {
+ if !g.isGomegaVar(x) {
+ return nil, false
+ }
+ info.UseGomegaVar = true
+ }
+
+ info.MethodName = selector.Sel.Name
+
+ return info, true
+
+ case *ast.CallExpr:
+ expr = x
+
+ default:
+ return nil, false
+ }
+ }
+}
+
+// ReplaceFunction replaces the function with another one, for fix suggestions
+func (nameHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) {
+ caller.Fun.(*ast.SelectorExpr).Sel = newExpr
+}
+
+func (g nameHandler) isGomegaVar(x ast.Expr) bool {
+ return gomegainfo.IsGomegaVar(x, g.pass)
+}
+
+func (g nameHandler) GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr {
+ actualExpr, ok := assertionFunc.X.(*ast.CallExpr)
+ if !ok {
+ return nil
+ }
+
+ switch fun := actualExpr.Fun.(type) {
+ case *ast.Ident:
+ return actualExpr
+ case *ast.SelectorExpr:
+ if x, ok := fun.X.(*ast.Ident); ok && x.Name == g.name {
+ return actualExpr
+ }
+ if gomegainfo.IsActualMethod(fun.Sel.Name) {
+ if g.isGomegaVar(fun.X) {
+ return actualExpr
+ }
+ } else {
+ return g.GetActualExpr(fun)
+ }
+ }
+ return nil
+}
+
+func (g nameHandler) GetActualExprClone(origFunc, funcClone *ast.SelectorExpr) *ast.CallExpr {
+ actualExpr, ok := funcClone.X.(*ast.CallExpr)
+ if !ok {
+ return nil
+ }
+
+ switch funClone := actualExpr.Fun.(type) {
+ case *ast.Ident:
+ return actualExpr
+ case *ast.SelectorExpr:
+ if x, ok := funClone.X.(*ast.Ident); ok && x.Name == g.name {
+ return actualExpr
+ }
+ origFun := origFunc.X.(*ast.CallExpr).Fun.(*ast.SelectorExpr)
+ if gomegainfo.IsActualMethod(funClone.Sel.Name) {
+ if g.isGomegaVar(origFun.X) {
+ return actualExpr
+ }
+ } else {
+ return g.GetActualExprClone(origFun, funClone)
+ }
+
+ }
+ return nil
+}
+
+func (g nameHandler) GetNewWrapperMatcher(name string, existing *ast.CallExpr) *ast.CallExpr {
+ return &ast.CallExpr{
+ Fun: &ast.SelectorExpr{
+ X: ast.NewIdent(g.name),
+ Sel: ast.NewIdent(name),
+ },
+ Args: []ast.Expr{existing},
+ }
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegainfo/gomegainfo.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegainfo/gomegainfo.go
new file mode 100644
index 0000000000..93be55ec0a
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegainfo/gomegainfo.go
@@ -0,0 +1,117 @@
+package gomegainfo
+
+import (
+ "go/ast"
+ gotypes "go/types"
+ "strings"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+const ( // gomega actual method names
+ expect = "Expect"
+ expectWithOffset = "ExpectWithOffset"
+ omega = "Ω"
+ eventually = "Eventually"
+ eventuallyWithOffset = "EventuallyWithOffset"
+ consistently = "Consistently"
+ consistentlyWithOffset = "ConsistentlyWithOffset"
+)
+
+const ( // assertion methods
+ to = "To"
+ toNot = "ToNot"
+ notTo = "NotTo"
+ should = "Should"
+ shouldNot = "ShouldNot"
+)
+
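+// funcOffsetMap maps each gomega actual method to the index of its actual-value
+// argument: the *WithOffset variants take the offset as their first argument, so
+// the actual value is shifted by one.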
+var funcOffsetMap = map[string]int{
+ expect: 0,
+ expectWithOffset: 1,
+ omega: 0,
+ eventually: 0,
+ eventuallyWithOffset: 1,
+ consistently: 0,
+ consistentlyWithOffset: 1,
+}
+
+func IsActualMethod(name string) bool {
+ _, found := funcOffsetMap[name]
+ return found
+}
+
+func ActualArgOffset(methodName string) int {
+ funcOffset, ok := funcOffsetMap[methodName]
+ if !ok {
+ return -1
+ }
+ return funcOffset
+}
+
+func GetAllowedAssertionMethods(actualMethodName string) string {
+ switch actualMethodName {
+ case expect, expectWithOffset:
+ return `"To()", "ToNot()" or "NotTo()"`
+
+ case eventually, eventuallyWithOffset, consistently, consistentlyWithOffset:
+ return `"Should()" or "ShouldNot()"`
+
+ case omega:
+ return `"Should()", "To()", "ShouldNot()", "ToNot()" or "NotTo()"`
+
+ default:
+ return ""
+ }
+}
+
+var asyncFuncSet = map[string]struct{}{
+ eventually: {},
+ eventuallyWithOffset: {},
+ consistently: {},
+ consistentlyWithOffset: {},
+}
+
+func IsAsyncActualMethod(name string) bool {
+ _, ok := asyncFuncSet[name]
+ return ok
+}
+
+func IsAssertionFunc(name string) bool {
+ switch name {
+ case to, toNot, notTo, should, shouldNot:
+ return true
+ }
+ return false
+}
+
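+// IsGomegaVar reports whether the identifier x is typed as a gomega instance
+// (the types.Gomega interface or the internal Gomega struct).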
+func IsGomegaVar(x ast.Expr, pass *analysis.Pass) bool {
+ if _, isIdent := x.(*ast.Ident); !isIdent {
+ return false
+ }
+
+ tx, ok := pass.TypesInfo.Types[x]
+ if !ok {
+ return false
+ }
+
+ return IsGomegaType(tx.Type)
+}
+
+const (
+ gomegaStructType = "github.com/onsi/gomega/internal.Gomega"
+ gomegaInterface = "github.com/onsi/gomega/types.Gomega"
+)
+
+func IsGomegaType(t gotypes.Type) bool {
+ switch ttx := gotypes.Unalias(t).(type) {
+ case *gotypes.Pointer:
+ return IsGomegaType(ttx.Elem())
+
+ case *gotypes.Named:
+ name := ttx.String()
+ return strings.HasSuffix(name, gomegaStructType) || strings.HasSuffix(name, gomegaInterface)
+ }
+
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/interfaces/interfaces.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/interfaces/interfaces.go
index dafeacd4ff..91849ca563 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/internal/interfaces/interfaces.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/interfaces/interfaces.go
@@ -72,5 +72,5 @@ func ImplementsError(t gotypes.Type) bool {
}
func ImplementsGomegaMatcher(t gotypes.Type) bool {
- return gotypes.Implements(t, gomegaMatcherType)
+ return t != nil && gotypes.Implements(t, gomegaMatcherType)
}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/intervals/intervals.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/intervals/intervals.go
index b8166bdb21..51d55166de 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/internal/intervals/intervals.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/intervals/intervals.go
@@ -1,285 +1,166 @@
package intervals
import (
- "errors"
"go/ast"
"go/constant"
"go/token"
gotypes "go/types"
- "strconv"
"time"
"golang.org/x/tools/go/analysis"
-
- "github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
- "github.com/nunnatsa/ginkgolinter/internal/reports"
)
-type noDurationIntervalErr struct {
- value string
-}
-
-func (err noDurationIntervalErr) Error() string {
- return "only use time.Duration for timeout and polling in Eventually() or Consistently()"
-}
-
-func CheckIntervals(pass *analysis.Pass, expr *ast.CallExpr, actualExpr *ast.CallExpr, reportBuilder *reports.Builder, handler gomegahandler.Handler, timePkg string, funcIndex int) {
- var (
- timeout time.Duration
- polling time.Duration
- err error
- )
-
- timeoutOffset := funcIndex + 1
- if len(actualExpr.Args) > timeoutOffset {
- timeout, err = getDuration(pass, actualExpr.Args[timeoutOffset], timePkg)
- if err != nil {
- suggestFix := false
- if tryFixIntDuration(expr, err, handler, timePkg, timeoutOffset) {
- suggestFix = true
- }
- reportBuilder.AddIssue(suggestFix, err.Error())
- }
- pollingOffset := funcIndex + 2
- if len(actualExpr.Args) > pollingOffset {
- polling, err = getDuration(pass, actualExpr.Args[pollingOffset], timePkg)
- if err != nil {
- suggestFix := false
- if tryFixIntDuration(expr, err, handler, timePkg, pollingOffset) {
- suggestFix = true
+func GetDuration(pass *analysis.Pass, argOffset int, origInterval, intervalClone ast.Expr, timePkg string) DurationValue {
+ tv := pass.TypesInfo.Types[origInterval]
+ argType := tv.Type
+ if durType, ok := argType.(*gotypes.Named); ok {
+ if durType.String() == "time.Duration" {
+ if tv.Value != nil {
+ if val, ok := constant.Int64Val(tv.Value); ok {
+ return &RealDurationValue{
+ dur: time.Duration(val),
+ expr: intervalClone,
+ }
}
- reportBuilder.AddIssue(suggestFix, err.Error())
+ }
+ return &UnknownDurationTypeValue{
+ expr: intervalClone,
}
}
}
- selExp := expr.Fun.(*ast.SelectorExpr)
- for {
- call, ok := selExp.X.(*ast.CallExpr)
- if !ok {
- break
- }
-
- fun, ok := call.Fun.(*ast.SelectorExpr)
- if !ok {
- break
- }
-
- switch fun.Sel.Name {
- case "WithTimeout", "Within":
- if timeout != 0 {
- reportBuilder.AddIssue(false, "timeout defined more than once")
- } else if len(call.Args) == 1 {
- timeout, err = getDurationFromValue(pass, call.Args[0], timePkg)
- if err != nil {
- reportBuilder.AddIssue(false, err.Error())
+ if basic, ok := argType.(*gotypes.Basic); ok && tv.Value != nil {
+ if basic.Info()&gotypes.IsInteger != 0 {
+ if num, ok := constant.Int64Val(tv.Value); ok {
+ return &NumericDurationValue{
+ timePkg: timePkg,
+ numSeconds: num,
+ offset: argOffset,
+ dur: time.Duration(num) * time.Second,
+ expr: intervalClone,
}
}
+ }
- case "WithPolling", "ProbeEvery":
- if polling != 0 {
- reportBuilder.AddIssue(false, "polling defined more than once")
- } else if len(call.Args) == 1 {
- polling, err = getDurationFromValue(pass, call.Args[0], timePkg)
- if err != nil {
- reportBuilder.AddIssue(false, err.Error())
+ if basic.Info()&gotypes.IsFloat != 0 {
+ if num, ok := constant.Float64Val(tv.Value); ok {
+ return &NumericDurationValue{
+ timePkg: timePkg,
+ numSeconds: int64(num),
+ offset: argOffset,
+ dur: time.Duration(num) * time.Second,
+ expr: intervalClone,
}
}
}
-
- selExp = fun
}
- if timeout != 0 && polling != 0 && timeout < polling {
- reportBuilder.AddIssue(false, "timeout must not be shorter than the polling interval")
- }
+ return &UnknownDurationValue{expr: intervalClone}
}
-func tryFixIntDuration(expr *ast.CallExpr, err error, handler gomegahandler.Handler, timePkg string, offset int) bool {
- suggestFix := false
- var durErr noDurationIntervalErr
- if errors.As(err, &durErr) {
- if len(durErr.value) > 0 {
- actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr))
- var newArg ast.Expr
- second := &ast.SelectorExpr{
- Sel: ast.NewIdent("Second"),
- X: ast.NewIdent(timePkg),
+func GetDurationFromValue(pass *analysis.Pass, orig, clone ast.Expr) DurationValue {
+ tv := pass.TypesInfo.Types[orig]
+ interval := tv.Value
+ if interval != nil {
+ if val, ok := constant.Int64Val(interval); ok {
+ return RealDurationValue{
+ dur: time.Duration(val),
+ expr: orig,
}
- if durErr.value == "1" {
- newArg = second
- } else {
- newArg = &ast.BinaryExpr{
- X: second,
- Op: token.MUL,
- Y: actualExpr.Args[offset],
- }
- }
- actualExpr.Args[offset] = newArg
- suggestFix = true
}
}
-
- return suggestFix
+ return UnknownDurationTypeValue{expr: clone}
}
-func getDuration(pass *analysis.Pass, interval ast.Expr, timePkg string) (time.Duration, error) {
- argType := pass.TypesInfo.TypeOf(interval)
- if durType, ok := argType.(*gotypes.Named); ok {
- if durType.Obj().Name() == "Duration" && durType.Obj().Pkg().Name() == "time" {
- return getDurationFromValue(pass, interval, timePkg)
- }
- }
+type DurationValue interface {
+ Duration() time.Duration
+}
- value := ""
- switch val := interval.(type) {
- case *ast.BasicLit:
- if val.Kind == token.INT {
- value = val.Value
- }
- case *ast.Ident:
- i, err := getConstDuration(pass, val, timePkg)
- if err != nil || i == 0 {
- return 0, nil
- }
- value = val.Name
- }
+type NumericValue interface {
+ GetOffset() int
+ GetDurationExpr() ast.Expr
+}
+type RealDurationValue struct {
+ dur time.Duration
+ expr ast.Expr
+}
- return 0, noDurationIntervalErr{value: value}
+func (r RealDurationValue) Duration() time.Duration {
+ return r.dur
}
-func getDurationFromValue(pass *analysis.Pass, interval ast.Expr, timePkg string) (time.Duration, error) {
- switch dur := interval.(type) {
- case *ast.SelectorExpr:
- ident, ok := dur.X.(*ast.Ident)
- if ok {
- if ident.Name == timePkg {
- return getTimeDurationValue(dur)
- }
- return getDurationFromValue(pass, dur.Sel, timePkg)
- }
- case *ast.BinaryExpr:
- return getBinaryExprDuration(pass, dur, timePkg)
+type NumericDurationValue struct {
+ timePkg string
+ numSeconds int64
+ offset int
+ dur time.Duration
+ expr ast.Expr
+}
- case *ast.Ident:
- return getConstDuration(pass, dur, timePkg)
- }
+func (r *NumericDurationValue) Duration() time.Duration {
+ return r.dur
+}
- return 0, nil
+func (r *NumericDurationValue) GetOffset() int {
+ return r.offset
}
-func getConstDuration(pass *analysis.Pass, ident *ast.Ident, timePkg string) (time.Duration, error) {
- o := pass.TypesInfo.ObjectOf(ident)
- if o != nil {
- if c, ok := o.(*gotypes.Const); ok {
- if c.Val().Kind() == constant.Int {
- i, err := strconv.Atoi(c.Val().String())
- if err != nil {
- return 0, nil
- }
- return time.Duration(i), nil
- }
- }
+func (r *NumericDurationValue) GetDurationExpr() ast.Expr {
+ var newArg ast.Expr
+ second := &ast.SelectorExpr{
+ Sel: ast.NewIdent("Second"),
+ X: ast.NewIdent(r.timePkg),
}
- if ident.Obj != nil && ident.Obj.Kind == ast.Con && ident.Obj.Decl != nil {
- if vals, ok := ident.Obj.Decl.(*ast.ValueSpec); ok {
- if len(vals.Values) == 1 {
- switch val := vals.Values[0].(type) {
- case *ast.BasicLit:
- if val.Kind == token.INT {
- i, err := strconv.Atoi(val.Value)
- if err != nil {
- return 0, nil
- }
- return time.Duration(i), nil
- }
- return 0, nil
- case *ast.BinaryExpr:
- return getBinaryExprDuration(pass, val, timePkg)
- }
- }
+ if r.numSeconds == 1 {
+ newArg = second
+ } else {
+ newArg = &ast.BinaryExpr{
+ X: second,
+ Op: token.MUL,
+ Y: r.expr,
}
}
- return 0, nil
+ return newArg
}
-func getTimeDurationValue(dur *ast.SelectorExpr) (time.Duration, error) {
- switch dur.Sel.Name {
- case "Nanosecond":
- return time.Nanosecond, nil
- case "Microsecond":
- return time.Microsecond, nil
- case "Millisecond":
- return time.Millisecond, nil
- case "Second":
- return time.Second, nil
- case "Minute":
- return time.Minute, nil
- case "Hour":
- return time.Hour, nil
- default:
- return 0, errors.New("unknown duration value") // should never happen
- }
+type UnknownDurationValue struct {
+ expr ast.Expr
}
-func getBinaryExprDuration(pass *analysis.Pass, expr *ast.BinaryExpr, timePkg string) (time.Duration, error) {
- x, err := getBinaryDurValue(pass, expr.X, timePkg)
- if err != nil || x == 0 {
- return 0, nil
- }
- y, err := getBinaryDurValue(pass, expr.Y, timePkg)
- if err != nil || y == 0 {
- return 0, nil
- }
+func (r UnknownDurationValue) Duration() time.Duration {
+ return 0
+}
- switch expr.Op {
- case token.ADD:
- return x + y, nil
- case token.SUB:
- val := x - y
- if val > 0 {
- return val, nil
- }
- return 0, nil
- case token.MUL:
- return x * y, nil
- case token.QUO:
- if y == 0 {
- return 0, nil
- }
- return x / y, nil
- case token.REM:
- if y == 0 {
- return 0, nil
- }
- return x % y, nil
- default:
- return 0, nil
- }
+type UnknownNumericValue struct {
+ expr ast.Expr
+ offset int
}
-func getBinaryDurValue(pass *analysis.Pass, expr ast.Expr, timePkg string) (time.Duration, error) {
- switch x := expr.(type) {
- case *ast.SelectorExpr:
- return getDurationFromValue(pass, x, timePkg)
- case *ast.BinaryExpr:
- return getBinaryExprDuration(pass, x, timePkg)
- case *ast.BasicLit:
- if x.Kind == token.INT {
- val, err := strconv.Atoi(x.Value)
- if err != nil {
- return 0, err
- }
- return time.Duration(val), nil
- }
- case *ast.ParenExpr:
- return getBinaryDurValue(pass, x.X, timePkg)
+func (r UnknownNumericValue) Duration() time.Duration {
+ return 0
+}
- case *ast.Ident:
- return getConstDuration(pass, x, timePkg)
+func (r UnknownNumericValue) GetDurationExpr() ast.Expr {
+ return &ast.BinaryExpr{
+ X: &ast.SelectorExpr{
+ Sel: ast.NewIdent("Second"),
+ X: ast.NewIdent("time"),
+ },
+ Op: token.MUL,
+ Y: r.expr,
}
+}
+
+func (r UnknownNumericValue) GetOffset() int {
+ return r.offset
+}
+
+type UnknownDurationTypeValue struct {
+ expr ast.Expr
+}
- return 0, nil
+func (r UnknownDurationTypeValue) Duration() time.Duration {
+ return 0
}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/reports/report-builder.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/reports/report-builder.go
index c7f931ca75..dee88bd2c8 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/internal/reports/report-builder.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/reports/report-builder.go
@@ -1,13 +1,13 @@
package reports
import (
- "bytes"
"fmt"
"go/ast"
- "go/printer"
"go/token"
"strings"
+ "github.com/nunnatsa/ginkgolinter/internal/formatter"
+
"golang.org/x/tools/go/analysis"
)
@@ -18,19 +18,25 @@ type Builder struct {
issues []string
fixOffer string
suggestFix bool
+ formatter *formatter.GoFmtFormatter
}
-func NewBuilder(fset *token.FileSet, oldExpr ast.Expr) *Builder {
+func NewBuilder(oldExpr ast.Expr, expFormatter *formatter.GoFmtFormatter) *Builder {
b := &Builder{
pos: oldExpr.Pos(),
end: oldExpr.End(),
- oldExpr: goFmt(fset, oldExpr),
+ oldExpr: expFormatter.Format(oldExpr),
suggestFix: false,
+ formatter: expFormatter,
}
return b
}
+func (b *Builder) OldExp() string {
+ return b.oldExpr
+}
+
func (b *Builder) AddIssue(suggestFix bool, issue string, args ...any) {
if len(args) > 0 {
issue = fmt.Sprintf(issue, args...)
@@ -42,9 +48,11 @@ func (b *Builder) AddIssue(suggestFix bool, issue string, args ...any) {
}
}
-func (b *Builder) SetFixOffer(fset *token.FileSet, fixOffer ast.Expr) {
- if offer := goFmt(fset, fixOffer); offer != b.oldExpr {
- b.fixOffer = offer
+func (b *Builder) SetFixOffer(fixOffer ast.Expr) {
+ if b.suggestFix {
+ if offer := b.formatter.Format(fixOffer); offer != b.oldExpr {
+ b.fixOffer = offer
+ }
}
}
@@ -76,10 +84,8 @@ func (b *Builder) Build() analysis.Diagnostic {
return diagnostic
}
-func goFmt(fset *token.FileSet, x ast.Expr) string {
- var b bytes.Buffer
- _ = printer.Fprint(&b, fset, x)
- return b.String()
+func (b *Builder) FormatExpr(expr ast.Expr) string {
+ return b.formatter.Format(expr)
}
func (b *Builder) getMessage() string {
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncfunccallrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncfunccallrule.go
new file mode 100644
index 0000000000..307cd2d125
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncfunccallrule.go
@@ -0,0 +1,41 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const valueInEventually = "use a function call in %[1]s. This actually checks nothing, because %[1]s receives the function returned value, instead of function itself, and this value is never changed"
+
+// AsyncFuncCallRule checks that the actual parameter of an async actual method
+// (e.g. Eventually) is not a function call.
+//
+// Async actual methods should get the function itself, not a function call, because
+// otherwise there is no async operation at all: the function is called once, and we
+// wait for it to return before calling the assertion.
+//
+// We do allow functions that return a function, a channel or a pointer.
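+//
+// For example (illustrative only, not from the original source):
+//
+//   Eventually(fetchStatus()).Should(Equal("ready")) // checks a value that never changes
+//   Eventually(fetchStatus).Should(Equal("ready"))   // polls the function, as intended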
+type AsyncFuncCallRule struct{}
+
+func (r AsyncFuncCallRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ if config.SuppressAsync || !gexp.IsAsync() {
+ return false
+ }
+
+ if asyncArg := gexp.GetAsyncActualArg(); asyncArg != nil {
+ return !asyncArg.IsValid()
+ }
+
+ return false
+}
+
+func (r AsyncFuncCallRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if r.isApplied(gexp, config) {
+
+ gexp.AppendWithArgsToActual()
+
+ reportBuilder.AddIssue(true, valueInEventually, gexp.GetActualFuncName())
+ }
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncsucceedrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncsucceedrule.go
new file mode 100644
index 0000000000..803c705deb
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncsucceedrule.go
@@ -0,0 +1,30 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+type AsyncSucceedRule struct{}
+
+func (AsyncSucceedRule) isApply(gexp *expression.GomegaExpression) bool {
+ return gexp.IsAsync() &&
+ gexp.MatcherTypeIs(matcher.SucceedMatcherType) &&
+ gexp.ActualArgTypeIs(actual.FuncSigArgType) &&
+ !gexp.ActualArgTypeIs(actual.ErrorTypeArgType|actual.GomegaParamArgType)
+}
+
+func (r AsyncSucceedRule) Apply(gexp *expression.GomegaExpression, _ types.Config, reportBuilder *reports.Builder) bool {
+ if r.isApply(gexp) {
+ if gexp.ActualArgTypeIs(actual.MultiRetsArgType) {
+ reportBuilder.AddIssue(false, "Success matcher does not support multiple values")
+ } else {
+ reportBuilder.AddIssue(false, "Success matcher only support a single error value, or function with Gomega as its first parameter")
+ }
+ }
+
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asynctimeintervalsrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asynctimeintervalsrule.go
new file mode 100644
index 0000000000..ca5c326195
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asynctimeintervalsrule.go
@@ -0,0 +1,79 @@
+package rules
+
+import (
+ "go/ast"
+ "time"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/intervals"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const (
+ multipleTimeouts = "timeout defined more than once"
+ multiplePolling = "polling defined more than once"
+ onlyUseTimeDurationForInterval = "only use time.Duration for timeout and polling in Eventually() or Consistently()"
+ pollingGreaterThanTimeout = "timeout must not be shorter than the polling interval"
+)
+
+type AsyncTimeIntervalsRule struct{}
+
+func (r AsyncTimeIntervalsRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ return !config.SuppressAsync && config.ValidateAsyncIntervals && gexp.IsAsync()
+}
+
+func (r AsyncTimeIntervalsRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if r.isApplied(gexp, config) {
+ asyncArg := gexp.GetAsyncActualArg()
+ if asyncArg.TooManyTimeouts() {
+ reportBuilder.AddIssue(false, multipleTimeouts)
+ }
+
+ if asyncArg.TooManyPolling() {
+ reportBuilder.AddIssue(false, multiplePolling)
+ }
+
+ timeoutDuration := checkInterval(gexp, asyncArg.Timeout(), reportBuilder)
+ pollingDuration := checkInterval(gexp, asyncArg.Polling(), reportBuilder)
+
+ if timeoutDuration > 0 && pollingDuration > 0 && pollingDuration > timeoutDuration {
+ reportBuilder.AddIssue(false, pollingGreaterThanTimeout)
+ }
+ }
+
+ return false
+}
+
+func checkInterval(gexp *expression.GomegaExpression, durVal intervals.DurationValue, reportBuilder *reports.Builder) time.Duration {
+ if durVal != nil {
+ switch to := durVal.(type) {
+ case *intervals.RealDurationValue, *intervals.UnknownDurationTypeValue:
+
+ case *intervals.NumericDurationValue:
+ if checkNumericInterval(gexp.GetActualClone(), to) {
+ reportBuilder.AddIssue(true, onlyUseTimeDurationForInterval)
+ }
+
+ case *intervals.UnknownDurationValue:
+ reportBuilder.AddIssue(true, onlyUseTimeDurationForInterval)
+ }
+
+ return durVal.Duration()
+ }
+
+ return 0
+}
+
+func checkNumericInterval(intervalMethod *ast.CallExpr, interval intervals.DurationValue) bool {
+ if interval != nil {
+ if numVal, ok := interval.(intervals.NumericValue); ok {
+ if offset := numVal.GetOffset(); offset > 0 {
+ intervalMethod.Args[offset] = numVal.GetDurationExpr()
+ return true
+ }
+ }
+ }
+
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/caprule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/caprule.go
new file mode 100644
index 0000000000..e3ad45d960
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/caprule.go
@@ -0,0 +1,128 @@
+package rules
+
+import (
+ "go/token"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const wrongCapWarningTemplate = "wrong cap assertion"
+
+// CapRule does not allow using the cap() function in actual with numeric comparison.
+// It suggests using the HaveCap matcher instead.
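+//
+// For example (illustrative only):
+//
+//   Expect(cap(buf)).To(Equal(64)) // triggers this rule
+//   Expect(buf).To(HaveCap(64))    // suggested replacement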
+type CapRule struct{}
+
+func (r *CapRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+
+ if !r.isApplied(gexp, config) {
+ return false
+ }
+
+ if r.fixExpression(gexp) {
+ reportBuilder.AddIssue(true, wrongCapWarningTemplate)
+ return true
+ }
+ return false
+}
+
+func (r *CapRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ if config.SuppressLen {
+ return false
+ }
+
+ //matcherType := gexp.matcher.GetMatcherInfo().Type()
+ if gexp.ActualArgTypeIs(actual.CapFuncActualArgType) {
+ if gexp.MatcherTypeIs(matcher.EqualMatcherType | matcher.BeZeroMatcherType) {
+ return true
+ }
+
+ if gexp.MatcherTypeIs(matcher.BeNumericallyMatcherType) {
+ mtchr := gexp.GetMatcherInfo().(*matcher.BeNumericallyMatcher)
+ return mtchr.GetOp() == token.EQL || mtchr.GetOp() == token.NEQ || gexp.MatcherTypeIs(matcher.EqualZero|matcher.GreaterThanZero)
+ }
+ }
+
+ if gexp.ActualArgTypeIs(actual.CapComparisonActualArgType) && gexp.MatcherTypeIs(matcher.BeTrueMatcherType|matcher.BeFalseMatcherType|matcher.EqualBoolValueMatcherType) {
+ return true
+ }
+
+ return false
+}
+
+func (r *CapRule) fixExpression(gexp *expression.GomegaExpression) bool {
+ if gexp.ActualArgTypeIs(actual.CapFuncActualArgType) {
+ return r.fixEqual(gexp)
+ }
+
+ if gexp.ActualArgTypeIs(actual.CapComparisonActualArgType) {
+ return r.fixComparison(gexp)
+ }
+
+ return false
+}
+
+func (r *CapRule) fixEqual(gexp *expression.GomegaExpression) bool {
+ matcherInfo := gexp.GetMatcherInfo()
+ switch mtchr := matcherInfo.(type) {
+ case *matcher.EqualMatcher:
+ gexp.SetMatcherCap(mtchr.GetValueExpr())
+
+ case *matcher.BeZeroMatcher:
+ gexp.SetMatcherCapZero()
+
+ case *matcher.BeNumericallyMatcher:
+ if !r.handleBeNumerically(gexp, mtchr) {
+ return false
+ }
+
+ default:
+ return false
+ }
+
+ gexp.ReplaceActualWithItsFirstArg()
+
+ return true
+}
+
+func (r *CapRule) fixComparison(gexp *expression.GomegaExpression) bool {
+ actl := gexp.GetActualArg().(*actual.FuncComparisonPayload)
+ if op := actl.GetOp(); op == token.NEQ {
+ gexp.ReverseAssertionFuncLogic()
+ } else if op != token.EQL {
+ return false
+ }
+
+ gexp.SetMatcherCap(actl.GetValueExpr())
+ gexp.ReplaceActual(actl.GetFuncArg())
+
+ if gexp.MatcherTypeIs(matcher.BoolValueFalse) {
+ gexp.ReverseAssertionFuncLogic()
+ }
+
+ return true
+}
+
+func (r *CapRule) handleBeNumerically(gexp *expression.GomegaExpression, matcher *matcher.BeNumericallyMatcher) bool {
+ op := matcher.GetOp()
+ val := matcher.GetValue()
+ isValZero := val.String() == "0"
+ isValOne := val.String() == "1"
+
+ if (op == token.GTR && isValZero) || (op == token.GEQ && isValOne) {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherCapZero()
+ } else if op == token.EQL {
+ gexp.SetMatcherCap(matcher.GetValueExpr())
+ } else if op == token.NEQ {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherCap(matcher.GetValueExpr())
+ } else {
+ return false
+ }
+
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparepointerrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparepointerrule.go
new file mode 100644
index 0000000000..dcbea1bc97
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparepointerrule.go
@@ -0,0 +1,64 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const comparePointerToValue = "comparing a pointer to a value will always fail"
+
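+// ComparePointRule warns when a pointer actual value is compared to a non-pointer
+// value (e.g. with Equal or BeEquivalentTo), since such an assertion always fails;
+// it suggests wrapping the matcher with HaveValue so the pointed-to value is compared.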
+type ComparePointRule struct{}
+
+func (r ComparePointRule) isApplied(gexp *expression.GomegaExpression) bool {
+ actl, ok := gexp.GetActualArg().(*actual.RegularArgPayload)
+ if !ok {
+ return false
+ }
+
+ return actl.IsPointer()
+}
+
+func (r ComparePointRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp) {
+ return false
+ }
+
+ switch mtchr := gexp.GetMatcherInfo().(type) {
+ case *matcher.EqualMatcher:
+ if mtchr.IsPointer() || mtchr.IsInterface() {
+ return false
+ }
+
+ case *matcher.BeEquivalentToMatcher:
+ if mtchr.IsPointer() || mtchr.IsInterface() || mtchr.IsNil() {
+ return false
+ }
+
+ case *matcher.BeIdenticalToMatcher:
+ if mtchr.IsPointer() || mtchr.IsInterface() || mtchr.IsNil() {
+ return false
+ }
+
+ case *matcher.EqualNilMatcher:
+ return false
+
+ case *matcher.BeTrueMatcher,
+ *matcher.BeFalseMatcher,
+ *matcher.BeNumericallyMatcher,
+ *matcher.EqualTrueMatcher,
+ *matcher.EqualFalseMatcher:
+
+ default:
+ return false
+ }
+
+ getMatcherOnlyRules().Apply(gexp, config, reportBuilder)
+
+ gexp.SetMatcherHaveValue()
+ reportBuilder.AddIssue(true, comparePointerToValue)
+
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparisonrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparisonrule.go
new file mode 100644
index 0000000000..fb38529e0e
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparisonrule.go
@@ -0,0 +1,75 @@
+package rules
+
+import (
+ "go/token"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const wrongCompareWarningTemplate = "wrong comparison assertion"
+
+type ComparisonRule struct{}
+
+func (r ComparisonRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ if config.SuppressCompare {
+ return false
+ }
+
+ return gexp.ActualArgTypeIs(actual.ComparisonActualArgType)
+}
+
+func (r ComparisonRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp, config) {
+ return false
+ }
+
+ actl, ok := gexp.GetActualArg().(actual.ComparisonActualPayload)
+ if !ok {
+ return false
+ }
+
+ switch actl.GetOp() {
+ case token.EQL:
+ r.handleEqualComparison(gexp, actl)
+
+ case token.NEQ:
+ gexp.ReverseAssertionFuncLogic()
+ r.handleEqualComparison(gexp, actl)
+ case token.GTR, token.GEQ, token.LSS, token.LEQ:
+ if !actl.GetRight().IsValueNumeric() {
+ return false
+ }
+
+ gexp.SetMatcherBeNumerically(actl.GetOp(), actl.GetRight().GetValueExpr())
+
+ default:
+ return false
+ }
+
+ if gexp.MatcherTypeIs(matcher.BoolValueFalse) {
+ gexp.ReverseAssertionFuncLogic()
+ }
+
+ gexp.ReplaceActual(actl.GetLeft().GetValueExpr())
+
+ reportBuilder.AddIssue(true, wrongCompareWarningTemplate)
+ return true
+}
+
+func (r ComparisonRule) handleEqualComparison(gexp *expression.GomegaExpression, actual actual.ComparisonActualPayload) {
+ if actual.GetRight().IsValueZero() {
+ gexp.SetMatcherBeZero()
+ } else {
+ left := actual.GetLeft()
+ arg := actual.GetRight().GetValueExpr()
+ if left.IsInterface() || left.IsPointer() {
+ gexp.SetMatcherBeIdenticalTo(arg)
+ } else {
+ gexp.SetMatcherEqual(arg)
+ }
+ }
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/doublenegativerule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/doublenegativerule.go
new file mode 100644
index 0000000000..6ce7be5a54
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/doublenegativerule.go
@@ -0,0 +1,30 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const doubleNegativeWarningTemplate = "avoid double negative assertion"
+
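+// DoubleNegativeRule rewrites a negative assertion of the BeFalse matcher,
+// e.g. Expect(ok).ToNot(BeFalse()), to the equivalent positive form
+// Expect(ok).To(BeTrue()).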
+type DoubleNegativeRule struct{}
+
+func (DoubleNegativeRule) isApplied(gexp *expression.GomegaExpression) bool {
+ return gexp.MatcherTypeIs(matcher.BeFalseMatcherType) &&
+ gexp.IsNegativeAssertion()
+}
+
+func (r DoubleNegativeRule) Apply(gexp *expression.GomegaExpression, _ types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp) {
+ return false
+ }
+
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherBeTrue()
+
+ reportBuilder.AddIssue(true, doubleNegativeWarningTemplate)
+
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalboolrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalboolrule.go
new file mode 100644
index 0000000000..e9eaa1b801
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalboolrule.go
@@ -0,0 +1,36 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const wrongBoolWarningTemplate = "wrong boolean assertion"
+
+type EqualBoolRule struct{}
+
+func (r EqualBoolRule) isApplied(gexp *expression.GomegaExpression) bool {
+ return gexp.MatcherTypeIs(matcher.EqualBoolValueMatcherType)
+}
+
+func (r EqualBoolRule) Apply(gexp *expression.GomegaExpression, _ types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp) {
+ return false
+ }
+
+ if gexp.MatcherTypeIs(matcher.BoolValueTrue) {
+ gexp.SetMatcherBeTrue()
+ } else {
+ if gexp.IsNegativeAssertion() {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherBeTrue()
+ } else {
+ gexp.SetMatcherBeFalse()
+ }
+ }
+
+ reportBuilder.AddIssue(true, wrongBoolWarningTemplate)
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equaldifferenttypesrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equaldifferenttypesrule.go
new file mode 100644
index 0000000000..4b6eafdda0
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equaldifferenttypesrule.go
@@ -0,0 +1,119 @@
+package rules
+
+import (
+ gotypes "go/types"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const compareDifferentTypes = "use %[1]s with different types: Comparing %[2]s with %[3]s; either change the expected value type if possible, or use the BeEquivalentTo() matcher, instead of %[1]s()"
+
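+// EqualDifferentTypesRule warns when Equal or BeIdenticalTo compares the actual
+// value with an expected value of a different Go type, which can never match;
+// BeEquivalentTo should be used instead.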
+type EqualDifferentTypesRule struct{}
+
+func (r EqualDifferentTypesRule) isApplied(config types.Config) bool {
+ return !config.SuppressTypeCompare
+}
+
+func (r EqualDifferentTypesRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(config) {
+ return false
+ }
+
+ return r.checkEqualDifferentTypes(gexp, gexp.GetMatcher(), false, reportBuilder)
+}
+
+func (r EqualDifferentTypesRule) checkEqualDifferentTypes(gexp *expression.GomegaExpression, mtchr *matcher.Matcher, parentPointer bool, reportBuilder *reports.Builder) bool {
+ actualType := gexp.GetActualArgGOType()
+
+ if parentPointer {
+ if t, ok := actualType.(*gotypes.Pointer); ok {
+ actualType = t.Elem()
+ }
+ }
+
+ var (
+ matcherType gotypes.Type
+ matcherName string
+ )
+
+ switch specificMatcher := mtchr.GetMatcherInfo().(type) {
+ case *matcher.EqualMatcher:
+ matcherType = specificMatcher.GetType()
+ matcherName = specificMatcher.MatcherName()
+
+ case *matcher.BeIdenticalToMatcher:
+ matcherType = specificMatcher.GetType()
+ matcherName = specificMatcher.MatcherName()
+
+ case *matcher.HaveValueMatcher:
+ return r.checkEqualDifferentTypes(gexp, specificMatcher.GetNested(), true, reportBuilder)
+
+ case *matcher.MultipleMatchersMatcher:
+ foundIssue := false
+ for i := range specificMatcher.Len() {
+ if r.checkEqualDifferentTypes(gexp, specificMatcher.At(i), parentPointer, reportBuilder) {
+ foundIssue = true
+ }
+
+ }
+ return foundIssue
+
+ case *matcher.EqualNilMatcher:
+ matcherType = specificMatcher.GetType()
+ matcherName = specificMatcher.MatcherName()
+
+ case *matcher.WithTransformMatcher:
+ nested := specificMatcher.GetNested()
+ switch specificNested := nested.GetMatcherInfo().(type) {
+ case *matcher.EqualMatcher:
+ matcherType = specificNested.GetType()
+ matcherName = specificNested.MatcherName()
+
+ case *matcher.BeIdenticalToMatcher:
+ matcherType = specificNested.GetType()
+ matcherName = specificNested.MatcherName()
+
+ default:
+ return false
+ }
+
+ actualType = specificMatcher.GetFuncType()
+ default:
+ return false
+ }
+
+ if !gotypes.Identical(matcherType, actualType) {
+ if r.isImplementing(matcherType, actualType) || r.isImplementing(actualType, matcherType) {
+ return false
+ }
+
+ reportBuilder.AddIssue(false, compareDifferentTypes, matcherName, actualType, matcherType)
+ return true
+ }
+
+ return false
+}
+
+func (r EqualDifferentTypesRule) isImplementing(ifs, impl gotypes.Type) bool {
+ if gotypes.IsInterface(ifs) {
+
+ var (
+ theIfs *gotypes.Interface
+ ok bool
+ )
+
+ for {
+ theIfs, ok = ifs.(*gotypes.Interface)
+ if ok {
+ break
+ }
+ ifs = ifs.Underlying()
+ }
+
+ return gotypes.Implements(impl, theIfs)
+ }
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalnilrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalnilrule.go
new file mode 100644
index 0000000000..f27dfb0d88
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalnilrule.go
@@ -0,0 +1,29 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+// EqualNilRule validates that there is no use of Equal(nil) in the code.
+// It is part of the assertion-only rules.
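+//
+// For example (illustrative only): Expect(x).To(Equal(nil)) is rewritten to
+// Expect(x).To(BeNil()).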
+type EqualNilRule struct{}
+
+func (r EqualNilRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ return !config.SuppressNil &&
+ gexp.MatcherTypeIs(matcher.EqualValueMatcherType)
+}
+
+func (r EqualNilRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp, config) {
+ return false
+ }
+
+ gexp.SetMatcherBeNil()
+
+ reportBuilder.AddIssue(true, wrongNilWarningTemplate)
+
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/errorequalnilrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/errorequalnilrule.go
new file mode 100644
index 0000000000..81932cc2c5
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/errorequalnilrule.go
@@ -0,0 +1,42 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
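+// ErrorEqualNilRule handles error-typed actual values asserted with a nil matcher
+// (BeNil or Equal(nil)): for error-returning functions it suggests Succeed(),
+// otherwise it reverses the assertion and suggests HaveOccurred().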
+type ErrorEqualNilRule struct{}
+
+func (ErrorEqualNilRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ if config.SuppressErr {
+ return false
+ }
+
+ if !gexp.IsAsync() && gexp.ActualArgTypeIs(actual.FuncSigArgType) {
+ return false
+ }
+
+ return gexp.ActualArgTypeIs(actual.ErrorTypeArgType) &&
+ gexp.MatcherTypeIs(matcher.BeNilMatcherType|matcher.EqualNilMatcherType)
+}
+
+func (r ErrorEqualNilRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp, config) {
+ return false
+ }
+
+ if v, ok := gexp.GetActualArg().(value.Valuer); ok && v.IsFunc() || gexp.ActualArgTypeIs(actual.ErrFuncActualArgType) {
+ gexp.SetMatcherSucceed()
+ } else {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherHaveOccurred()
+ }
+
+ reportBuilder.AddIssue(true, wrongErrWarningTemplate)
+
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/forceexpecttorule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/forceexpecttorule.go
new file mode 100644
index 0000000000..391d1d449b
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/forceexpecttorule.go
@@ -0,0 +1,43 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const forceExpectToTemplate = "must not use %s with %s"
+
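+// ForceExpectToRule enforces the ForceExpectTo configuration: when asserting with
+// Expect or ExpectWithOffset, Should is replaced with To and ShouldNot with ToNot.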
+type ForceExpectToRule struct{}
+
+func (ForceExpectToRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ if !config.ForceExpectTo {
+ return false
+ }
+
+ actlName := gexp.GetActualFuncName()
+ return actlName == "Expect" || actlName == "ExpectWithOffset"
+}
+
+func (r ForceExpectToRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp, config) {
+ return false
+ }
+
+ var newName string
+
+ switch gexp.GetAssertFuncName() {
+ case "Should":
+ newName = "To"
+ case "ShouldNot":
+ newName = "ToNot"
+ default:
+ return false
+ }
+
+ gexp.ReplaceAssertionMethod(newName)
+ reportBuilder.AddIssue(true, forceExpectToTemplate, gexp.GetActualFuncName(), gexp.GetOrigAssertFuncName())
+
+ // always return false, to keep checking other rules.
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/havelen0.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/havelen0.go
new file mode 100644
index 0000000000..159fb615a0
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/havelen0.go
@@ -0,0 +1,23 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+type HaveLen0 struct{}
+
+func (r *HaveLen0) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ return gexp.MatcherTypeIs(matcher.HaveLenZeroMatcherType) && !config.AllowHaveLen0
+}
+
+func (r *HaveLen0) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp, config) {
+ return false
+ }
+ gexp.SetMatcherBeEmpty()
+ reportBuilder.AddIssue(true, wrongLengthWarningTemplate)
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/haveoccurredrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/haveoccurredrule.go
new file mode 100644
index 0000000000..317e22ed3d
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/haveoccurredrule.go
@@ -0,0 +1,35 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
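+// HaveOccurredRule validates that the HaveOccurred matcher is only asserted
+// against error values; when ForceSucceedForFuncs is set, it prefers Succeed()
+// for error-returning functions.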
+type HaveOccurredRule struct{}
+
+func (r HaveOccurredRule) isApplied(gexp *expression.GomegaExpression) bool {
+ return gexp.MatcherTypeIs(matcher.HaveOccurredMatcherType)
+}
+
+func (r HaveOccurredRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp) {
+ return false
+ }
+
+ if !gexp.ActualArgTypeIs(actual.ErrorTypeArgType) {
+ reportBuilder.AddIssue(false, "asserting a non-error type with HaveOccurred matcher")
+ return true
+ }
+
+ if config.ForceSucceedForFuncs && gexp.GetActualArg().(*actual.ErrPayload).IsFunc() {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherSucceed()
+ reportBuilder.AddIssue(true, "prefer using the Succeed matcher for error function, instead of HaveOccurred")
+ return true
+ }
+
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/lenrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/lenrule.go
new file mode 100644
index 0000000000..06d6f2c687
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/lenrule.go
@@ -0,0 +1,119 @@
+package rules
+
+import (
+ "go/token"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const wrongLengthWarningTemplate = "wrong length assertion"
+
+// LenRule does not allow using the len() function in the actual value with a numeric comparison.
+// Instead, it suggests using the HaveLen matcher, or the BeEmpty matcher when comparing to zero.
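+//
+// Illustrative examples:
+//
+// Expect(len(s)).To(Equal(3))      => Expect(s).To(HaveLen(3))
+// Expect(len(s)).To(Equal(0))      => Expect(s).To(BeEmpty())
+// Expect(len(s) == 0).To(BeTrue()) => Expect(s).To(BeEmpty())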
+type LenRule struct{}
+
+func (r *LenRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+
+ if !r.isApplied(gexp, config) {
+ return false
+ }
+
+ if r.fixExpression(gexp) {
+ reportBuilder.AddIssue(true, wrongLengthWarningTemplate)
+ return true
+ }
+ return false
+}
+
+func (r *LenRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ if config.SuppressLen {
+ return false
+ }
+
+ if gexp.ActualArgTypeIs(actual.LenFuncActualArgType) {
+ if gexp.MatcherTypeIs(matcher.EqualMatcherType | matcher.BeZeroMatcherType) {
+ return true
+ }
+
+ if gexp.MatcherTypeIs(matcher.BeNumericallyMatcherType) {
+ mtchr := gexp.GetMatcherInfo().(*matcher.BeNumericallyMatcher)
+ return mtchr.GetOp() == token.EQL || mtchr.GetOp() == token.NEQ || gexp.MatcherTypeIs(matcher.EqualZero|matcher.GreaterThanZero)
+ }
+ }
+
+ if gexp.ActualArgTypeIs(actual.LenComparisonActualArgType) && gexp.MatcherTypeIs(matcher.BeTrueMatcherType|matcher.BeFalseMatcherType|matcher.EqualBoolValueMatcherType) {
+ return true
+ }
+
+ return false
+}
+
+func (r *LenRule) fixExpression(gexp *expression.GomegaExpression) bool {
+ if gexp.ActualArgTypeIs(actual.LenFuncActualArgType) {
+ return r.fixEqual(gexp)
+ }
+
+ if gexp.ActualArgTypeIs(actual.LenComparisonActualArgType) {
+ return r.fixComparison(gexp)
+ }
+
+ return false
+}
+
+func (r *LenRule) fixEqual(gexp *expression.GomegaExpression) bool {
+
+ if gexp.MatcherTypeIs(matcher.EqualMatcherType) {
+ gexp.SetLenNumericMatcher()
+
+ } else if gexp.MatcherTypeIs(matcher.BeZeroMatcherType) {
+ gexp.SetMatcherBeEmpty()
+
+ } else if gexp.MatcherTypeIs(matcher.BeNumericallyMatcherType) {
+ mtchr := gexp.GetMatcherInfo().(*matcher.BeNumericallyMatcher)
+ op := mtchr.GetOp()
+
+ if op == token.EQL {
+ gexp.SetLenNumericMatcher()
+ } else if op == token.NEQ {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetLenNumericMatcher()
+ } else if gexp.MatcherTypeIs(matcher.GreaterThanZero) {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherBeEmpty()
+ } else {
+ return false
+ }
+ } else {
+ return false
+ }
+
+ gexp.ReplaceActualWithItsFirstArg()
+ return true
+}
+
+func (r *LenRule) fixComparison(gexp *expression.GomegaExpression) bool {
+ actl := gexp.GetActualArg().(*actual.FuncComparisonPayload)
+ if op := actl.GetOp(); op == token.NEQ {
+ gexp.ReverseAssertionFuncLogic()
+ } else if op != token.EQL {
+ return false
+ }
+
+ if gexp.MatcherTypeIs(matcher.BoolValueFalse) {
+ gexp.ReverseAssertionFuncLogic()
+ }
+
+ if actl.IsValueZero() {
+ gexp.SetMatcherBeEmpty()
+ } else {
+ gexp.SetMatcherLen(actl.GetValueExpr())
+ }
+
+ gexp.ReplaceActual(actl.GetFuncArg())
+
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcheronlyrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcheronlyrule.go
new file mode 100644
index 0000000000..1174393c6b
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcheronlyrule.go
@@ -0,0 +1,12 @@
+package rules
+
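+// matcherOnlyRules are rules that only inspect the matcher side of the assertion, so they
+// are shared by both the regular and the async rule sets (see rule.go).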
+var matcherOnlyRules = Rules{
+ &HaveLen0{},
+ &EqualBoolRule{},
+ &EqualNilRule{},
+ &DoubleNegativeRule{},
+}
+
+func getMatcherOnlyRules() Rules {
+ return matcherOnlyRules
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcherrorrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcherrorrule.go
new file mode 100644
index 0000000000..767b4b621e
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcherrorrule.go
@@ -0,0 +1,110 @@
+package rules
+
+import (
+ "go/ast"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const (
+ matchErrorArgWrongType = "the MatchError matcher used to assert a non error type (%s)"
+ matchErrorWrongTypeAssertion = "MatchError first parameter (%s) must be error, string, GomegaMatcher or func(error)bool are allowed"
+ matchErrorMissingDescription = "missing function description as second parameter of MatchError"
+ matchErrorRedundantArg = "redundant MatchError arguments; consider removing them"
+ matchErrorNoFuncDescription = "The second parameter of MatchError must be the function description (string)"
+)
+
+// MatchErrorRule validates the usage of the MatchError matcher.
+//
+// First, it checks that the actual value is actually an error.
+//
+// Then, it checks the matcher itself. The matcher can be used in three different ways:
+//  1. With an error variable
+//  2. With another gomega matcher, to check the actual err.Error() value
+//  3. With a function with the signature func(error) bool. In this case, an additional
+//     description string argument is required.
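+//
+// Illustrative accepted forms (isNotFound here is a placeholder for a func(error) bool helper):
+//
+// Expect(err).To(MatchError(fs.ErrNotExist))
+// Expect(err).To(MatchError(ContainSubstring("not found")))
+// Expect(err).To(MatchError(isNotFound, "isNotFound matches err"))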
+type MatchErrorRule struct{}
+
+func (r MatchErrorRule) isApplied(gexp *expression.GomegaExpression) bool {
+ return gexp.MatcherTypeIs(matcher.MatchErrorMatcherType | matcher.MultipleMatcherMatherType)
+}
+
+func (r MatchErrorRule) Apply(gexp *expression.GomegaExpression, _ types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp) {
+ return false
+ }
+
+ return checkMatchError(gexp, reportBuilder)
+}
+
+func checkMatchError(gexp *expression.GomegaExpression, reportBuilder *reports.Builder) bool {
+ mtchr := gexp.GetMatcherInfo()
+ switch m := mtchr.(type) {
+ case matcher.MatchErrorMatcher:
+ return checkMatchErrorMatcher(gexp, gexp.GetMatcher(), m, reportBuilder)
+
+ case *matcher.MultipleMatchersMatcher:
+ res := false
+ for i := range m.Len() {
+ nested := m.At(i)
+ if specific, ok := nested.GetMatcherInfo().(matcher.MatchErrorMatcher); ok {
+ if valid := checkMatchErrorMatcher(gexp, gexp.GetMatcher(), specific, reportBuilder); valid {
+ res = true
+ }
+ }
+ }
+ return res
+ default:
+ return false
+ }
+}
+
+func checkMatchErrorMatcher(gexp *expression.GomegaExpression, mtchr *matcher.Matcher, mtchrInfo matcher.MatchErrorMatcher, reportBuilder *reports.Builder) bool {
+ if !gexp.ActualArgTypeIs(actual.ErrorTypeArgType) {
+ reportBuilder.AddIssue(false, matchErrorArgWrongType, reportBuilder.FormatExpr(gexp.GetActualArgExpr()))
+ }
+
+ switch m := mtchrInfo.(type) {
+ case *matcher.InvalidMatchErrorMatcher:
+ reportBuilder.AddIssue(false, matchErrorWrongTypeAssertion, reportBuilder.FormatExpr(mtchr.Clone.Args[0]))
+
+ case *matcher.MatchErrorMatcherWithErrFunc:
+ if m.NumArgs() == m.AllowedNumArgs() {
+ if !m.IsSecondArgString() {
+ reportBuilder.AddIssue(false, matchErrorNoFuncDescription)
+ }
+ return true
+ }
+
+ if m.NumArgs() == 1 {
+ reportBuilder.AddIssue(false, matchErrorMissingDescription)
+ return true
+ }
+
+ case *matcher.MatchErrorMatcherWithErr,
+ *matcher.MatchErrorMatcherWithMatcher,
+ *matcher.MatchErrorMatcherWithString:
+ // continue
+ default:
+ return false
+ }
+
+ if mtchrInfo.NumArgs() == mtchrInfo.AllowedNumArgs() {
+ return true
+ }
+
+ if mtchrInfo.NumArgs() > mtchrInfo.AllowedNumArgs() {
+ var newArgsSuggestion []ast.Expr
+ for i := 0; i < mtchrInfo.AllowedNumArgs(); i++ {
+ newArgsSuggestion = append(newArgsSuggestion, mtchr.Clone.Args[i])
+ }
+ mtchr.Clone.Args = newArgsSuggestion
+ reportBuilder.AddIssue(false, matchErrorRedundantArg)
+ return true
+ }
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/missingassertionrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/missingassertionrule.go
new file mode 100644
index 0000000000..43fc58bf6b
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/missingassertionrule.go
@@ -0,0 +1,27 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/gomegainfo"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const missingAssertionMessage = `%q: missing assertion method. Expected %s`
+
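+// MissingAssertionRule warns when a gomega wrapper such as Expect or Eventually is never
+// followed by an assertion method; e.g. a bare Expect(x) with no To/Should call checks nothing.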
+type MissingAssertionRule struct{}
+
+func (r MissingAssertionRule) isApplied(gexp *expression.GomegaExpression) bool {
+ return gexp.IsMissingAssertion()
+}
+
+func (r MissingAssertionRule) Apply(gexp *expression.GomegaExpression, _ types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp) {
+ return false
+ }
+
+ actualMethodName := gexp.GetActualFuncName()
+ reportBuilder.AddIssue(false, missingAssertionMessage, actualMethodName, gomegainfo.GetAllowedAssertionMethods(actualMethodName))
+
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/nilcomparerule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/nilcomparerule.go
new file mode 100644
index 0000000000..6677dce3bb
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/nilcomparerule.go
@@ -0,0 +1,75 @@
+package rules
+
+import (
+ "go/token"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const (
+ wrongNilWarningTemplate = "wrong nil assertion"
+ wrongErrWarningTemplate = "wrong error assertion"
+)
+
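+// NilCompareRule handles boolean assertions over a nil comparison. Illustrative examples:
+//
+// Expect(x == nil).To(BeTrue())   => Expect(x).To(BeNil())
+// Expect(err != nil).To(BeTrue()) => Expect(err).To(HaveOccurred())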
+type NilCompareRule struct{}
+
+func (r NilCompareRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ isErr, ruleApplied := r.isApplied(gexp, config)
+ if !ruleApplied {
+ return false
+ }
+
+ if gexp.MatcherTypeIs(matcher.BoolValueFalse) {
+ gexp.ReverseAssertionFuncLogic()
+ }
+
+ r.handleNilBeBoolMatcher(gexp, gexp.GetActualArg().(*actual.NilComparisonPayload), reportBuilder, isErr)
+
+ return true
+}
+
+func (r NilCompareRule) isApplied(gexp *expression.GomegaExpression, config types.Config) (bool, bool) {
+ if !gexp.MatcherTypeIs(matcher.EqualBoolValueMatcherType | matcher.BeTrueMatcherType | matcher.BeFalseMatcherType) {
+ return false, false
+ }
+
+ actl, ok := gexp.GetActualArg().(*actual.NilComparisonPayload)
+ if !ok {
+ return false, false
+ }
+
+ isErr := actl.IsError() && !config.SuppressErr
+
+ if !isErr && config.SuppressNil {
+ return isErr, false
+ }
+
+ return isErr, true
+}
+
+func (r NilCompareRule) handleNilBeBoolMatcher(gexp *expression.GomegaExpression, actl *actual.NilComparisonPayload, reportBuilder *reports.Builder, isErr bool) {
+ template := wrongNilWarningTemplate
+ if isErr {
+ template = wrongErrWarningTemplate
+ if actl.IsFunc() {
+ gexp.SetMatcherSucceed()
+ } else {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherHaveOccurred()
+ }
+ } else {
+ gexp.SetMatcherBeNil()
+ }
+
+ gexp.ReplaceActual(actl.GetValueExpr())
+
+ if actl.GetOp() == token.NEQ {
+ gexp.ReverseAssertionFuncLogic()
+ }
+
+ reportBuilder.AddIssue(true, template)
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/rule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/rule.go
new file mode 100644
index 0000000000..cf331c21c4
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/rule.go
@@ -0,0 +1,61 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+type Rule interface {
+ Apply(*expression.GomegaExpression, types.Config, *reports.Builder) bool
+}
+
+var rules = Rules{
+ &ForceExpectToRule{},
+ &LenRule{},
+ &CapRule{},
+ &ComparisonRule{},
+ &NilCompareRule{},
+ &ComparePointRule{},
+ &ErrorEqualNilRule{},
+ &MatchErrorRule{},
+ getMatcherOnlyRules(),
+ &EqualDifferentTypesRule{},
+ &HaveOccurredRule{},
+ &SucceedRule{},
+}
+
+var asyncRules = Rules{
+ &AsyncFuncCallRule{},
+ &AsyncTimeIntervalsRule{},
+ &ErrorEqualNilRule{},
+ &MatchErrorRule{},
+ &AsyncSucceedRule{},
+ getMatcherOnlyRules(),
+}
+
+func GetRules() Rules {
+ return rules
+}
+
+func GetAsyncRules() Rules {
+ return asyncRules
+}
+
+type Rules []Rule
+
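+// Apply runs the rules in order and stops at the first rule that handles the expression.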
+func (r Rules) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ for _, rule := range r {
+ if rule.Apply(gexp, config, reportBuilder) {
+ return true
+ }
+ }
+
+ return false
+}
+
+var missingAssertionRule = MissingAssertionRule{}
+
+func GetMissingAssertionRule() Rule {
+ return missingAssertionRule
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/succeedrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/succeedrule.go
new file mode 100644
index 0000000000..45a8d948b4
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/succeedrule.go
@@ -0,0 +1,41 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
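+// SucceedRule validates the usage of the Succeed matcher in non-async assertions: it warns
+// when the actual value is not an error, and, when the force-succeed-for-funcs option is set,
+// it suggests HaveOccurred for plain error values. Illustrative example:
+//
+// Expect(err).To(Succeed()) => Expect(err).ToNot(HaveOccurred())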
+type SucceedRule struct{}
+
+func (r SucceedRule) isApplied(gexp *expression.GomegaExpression) bool {
+ return !gexp.IsAsync() && gexp.MatcherTypeIs(matcher.SucceedMatcherType)
+}
+
+func (r SucceedRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp) {
+ return false
+ }
+
+ if !gexp.ActualArgTypeIs(actual.ErrorTypeArgType) {
+ if gexp.IsActualTuple() {
+ reportBuilder.AddIssue(false, "the Success matcher does not support multiple values")
+ } else {
+ reportBuilder.AddIssue(false, "asserting a non-error type with Succeed matcher")
+ }
+ return true
+ }
+
+ if config.ForceSucceedForFuncs && !gexp.GetActualArg().(*actual.ErrPayload).IsFunc() {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherHaveOccurred()
+
+ reportBuilder.AddIssue(true, "prefer using the HaveOccurred matcher for non-function error value, instead of Succeed")
+
+ return true
+ }
+
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/linter/ginkgo_linter.go b/vendor/github.com/nunnatsa/ginkgolinter/linter/ginkgo_linter.go
index 574fdfadf3..188b2b5f91 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/linter/ginkgo_linter.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/linter/ginkgo_linter.go
@@ -1,24 +1,16 @@
package linter
import (
- "bytes"
- "fmt"
"go/ast"
- "go/constant"
- "go/printer"
- "go/token"
- gotypes "go/types"
- "reflect"
- "github.com/go-toolsmith/astcopy"
"golang.org/x/tools/go/analysis"
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/formatter"
"github.com/nunnatsa/ginkgolinter/internal/ginkgohandler"
"github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
- "github.com/nunnatsa/ginkgolinter/internal/interfaces"
- "github.com/nunnatsa/ginkgolinter/internal/intervals"
"github.com/nunnatsa/ginkgolinter/internal/reports"
- "github.com/nunnatsa/ginkgolinter/internal/reverseassertion"
+ "github.com/nunnatsa/ginkgolinter/internal/rules"
"github.com/nunnatsa/ginkgolinter/types"
)
@@ -26,62 +18,6 @@ import (
//
// For more details, look at the README.md file
-const (
- linterName = "ginkgo-linter"
- wrongLengthWarningTemplate = "wrong length assertion"
- wrongCapWarningTemplate = "wrong cap assertion"
- wrongNilWarningTemplate = "wrong nil assertion"
- wrongBoolWarningTemplate = "wrong boolean assertion"
- wrongErrWarningTemplate = "wrong error assertion"
- wrongCompareWarningTemplate = "wrong comparison assertion"
- doubleNegativeWarningTemplate = "avoid double negative assertion"
- valueInEventually = "use a function call in %s. This actually checks nothing, because %s receives the function returned value, instead of function itself, and this value is never changed"
- comparePointerToValue = "comparing a pointer to a value will always fail"
- missingAssertionMessage = linterName + `: %q: missing assertion method. Expected %s`
- focusContainerFound = linterName + ": Focus container found. This is used only for local debug and should not be part of the actual source code. Consider to replace with %q"
- focusSpecFound = linterName + ": Focus spec found. This is used only for local debug and should not be part of the actual source code. Consider to remove it"
- compareDifferentTypes = "use %[1]s with different types: Comparing %[2]s with %[3]s; either change the expected value type if possible, or use the BeEquivalentTo() matcher, instead of %[1]s()"
- matchErrorArgWrongType = "the MatchError matcher used to assert a non error type (%s)"
- matchErrorWrongTypeAssertion = "MatchError first parameter (%s) must be error, string, GomegaMatcher or func(error)bool are allowed"
- matchErrorMissingDescription = "missing function description as second parameter of MatchError"
- matchErrorRedundantArg = "redundant MatchError arguments; consider removing them"
- matchErrorNoFuncDescription = "The second parameter of MatchError must be the function description (string)"
- forceExpectToTemplate = "must not use Expect with %s"
- useBeforeEachTemplate = "use BeforeEach() to assign variable %s"
-)
-
-const ( // gomega matchers
- beEmpty = "BeEmpty"
- beEquivalentTo = "BeEquivalentTo"
- beFalse = "BeFalse"
- beIdenticalTo = "BeIdenticalTo"
- beNil = "BeNil"
- beNumerically = "BeNumerically"
- beTrue = "BeTrue"
- beZero = "BeZero"
- equal = "Equal"
- haveLen = "HaveLen"
- haveCap = "HaveCap"
- haveOccurred = "HaveOccurred"
- haveValue = "HaveValue"
- not = "Not"
- omega = "Ω"
- succeed = "Succeed"
- and = "And"
- or = "Or"
- withTransform = "WithTransform"
- matchError = "MatchError"
-)
-
-const ( // gomega actuals
- expect = "Expect"
- expectWithOffset = "ExpectWithOffset"
- eventually = "Eventually"
- eventuallyWithOffset = "EventuallyWithOffset"
- consistently = "Consistently"
- consistentlyWithOffset = "ConsistentlyWithOffset"
-)
-
type GinkgoLinter struct {
config *types.Config
}
@@ -94,7 +30,7 @@ func NewGinkgoLinter(config *types.Config) *GinkgoLinter {
}
// Run is the main assertion function
-func (l *GinkgoLinter) Run(pass *analysis.Pass) (interface{}, error) {
+func (l *GinkgoLinter) Run(pass *analysis.Pass) (any, error) {
for _, file := range pass.Files {
fileConfig := l.config.Clone()
@@ -102,39 +38,20 @@ func (l *GinkgoLinter) Run(pass *analysis.Pass) (interface{}, error) {
fileConfig.UpdateFromFile(cm)
- gomegaHndlr := gomegahandler.GetGomegaHandler(file)
+ gomegaHndlr := gomegahandler.GetGomegaHandler(file, pass)
ginkgoHndlr := ginkgohandler.GetGinkgoHandler(file)
if gomegaHndlr == nil && ginkgoHndlr == nil { // no gomega or ginkgo imports => no use in gomega in this file; nothing to do here
continue
}
- timePks := ""
- for _, imp := range file.Imports {
- if imp.Path.Value == `"time"` {
- if imp.Name == nil {
- timePks = "time"
- } else {
- timePks = imp.Name.Name
- }
- }
- }
-
ast.Inspect(file, func(n ast.Node) bool {
if ginkgoHndlr != nil {
goDeeper := false
spec, ok := n.(*ast.ValueSpec)
if ok {
for _, val := range spec.Values {
- if exp, ok := val.(*ast.CallExpr); ok {
- if bool(fileConfig.ForbidFocus) && checkFocusContainer(pass, ginkgoHndlr, exp) {
- goDeeper = true
- }
-
- if bool(fileConfig.ForbidSpecPollution) && checkAssignmentsInContainer(pass, ginkgoHndlr, exp) {
- goDeeper = true
- }
- }
+ goDeeper = ginkgoHndlr.HandleGinkgoSpecs(val, fileConfig, pass) || goDeeper
}
}
if goDeeper {
@@ -147,1527 +64,68 @@ func (l *GinkgoLinter) Run(pass *analysis.Pass) (interface{}, error) {
return true
}
- config := fileConfig.Clone()
-
- if comments, ok := cm[stmt]; ok {
- config.UpdateFromComment(comments)
- }
-
// search for function calls
assertionExp, ok := stmt.X.(*ast.CallExpr)
if !ok {
return true
}
+ config := fileConfig.Clone()
+ if comments, ok := cm[stmt]; ok {
+ config.UpdateFromComment(comments)
+ }
+
if ginkgoHndlr != nil {
- goDeeper := false
- if bool(config.ForbidFocus) && checkFocusContainer(pass, ginkgoHndlr, assertionExp) {
- goDeeper = true
- }
- if bool(config.ForbidSpecPollution) && checkAssignmentsInContainer(pass, ginkgoHndlr, assertionExp) {
- goDeeper = true
- }
- if goDeeper {
+ if ginkgoHndlr.HandleGinkgoSpecs(assertionExp, config, pass) {
return true
}
}
- // no more ginkgo checks. From here it's only gomega. So if there is no gomega handler, exit here. This is
- // mostly to prevent nil pointer error.
+ // no more ginkgo checks. From here it's only gomega. So if there is no gomega handler, exit here.
if gomegaHndlr == nil {
return true
}
- assertionFunc, ok := assertionExp.Fun.(*ast.SelectorExpr)
- if !ok {
- checkNoAssertion(pass, assertionExp, gomegaHndlr)
- return true
- }
-
- if !isAssertionFunc(assertionFunc.Sel.Name) {
- checkNoAssertion(pass, assertionExp, gomegaHndlr)
- return true
- }
-
- actualExpr := gomegaHndlr.GetActualExpr(assertionFunc)
- if actualExpr == nil {
+ gexp, ok := expression.New(assertionExp, pass, gomegaHndlr, getTimePkg(file))
+ if !ok || gexp == nil {
return true
}
- return checkExpression(pass, config, assertionExp, actualExpr, gomegaHndlr, timePks)
+ reportBuilder := reports.NewBuilder(assertionExp, formatter.NewGoFmtFormatter(pass.Fset))
+ return checkGomegaExpression(gexp, config, reportBuilder, pass)
})
}
return nil, nil
}
-func checkAssignmentsInContainer(pass *analysis.Pass, ginkgoHndlr ginkgohandler.Handler, exp *ast.CallExpr) bool {
- foundSomething := false
- if ginkgoHndlr.IsWrapContainer(exp) {
- for _, arg := range exp.Args {
- if fn, ok := arg.(*ast.FuncLit); ok {
- if fn.Body != nil {
- if checkAssignments(pass, fn.Body.List) {
- foundSomething = true
- }
- break
- }
- }
- }
- }
-
- return foundSomething
-}
-
-func checkAssignments(pass *analysis.Pass, list []ast.Stmt) bool {
- foundSomething := false
- for _, stmt := range list {
- switch st := stmt.(type) {
- case *ast.DeclStmt:
- if gen, ok := st.Decl.(*ast.GenDecl); ok {
- if gen.Tok != token.VAR {
- continue
- }
- for _, spec := range gen.Specs {
- if valSpec, ok := spec.(*ast.ValueSpec); ok {
- if checkAssignmentsValues(pass, valSpec.Names, valSpec.Values) {
- foundSomething = true
- }
- }
- }
- }
-
- case *ast.AssignStmt:
- for i, val := range st.Rhs {
- if !is[*ast.FuncLit](val) {
- if id, isIdent := st.Lhs[i].(*ast.Ident); isIdent && id.Name != "_" {
- reportNoFix(pass, id.Pos(), useBeforeEachTemplate, id.Name)
- foundSomething = true
- }
- }
- }
-
- case *ast.IfStmt:
- if st.Body != nil {
- if checkAssignments(pass, st.Body.List) {
- foundSomething = true
- }
- }
- if st.Else != nil {
- if block, isBlock := st.Else.(*ast.BlockStmt); isBlock {
- if checkAssignments(pass, block.List) {
- foundSomething = true
- }
- }
- }
- }
- }
-
- return foundSomething
-}
-
-func checkAssignmentsValues(pass *analysis.Pass, names []*ast.Ident, values []ast.Expr) bool {
- foundSomething := false
- for i, val := range values {
- if !is[*ast.FuncLit](val) {
- reportNoFix(pass, names[i].Pos(), useBeforeEachTemplate, names[i].Name)
- foundSomething = true
- }
- }
-
- return foundSomething
-}
-
-func checkFocusContainer(pass *analysis.Pass, ginkgoHndlr ginkgohandler.Handler, exp *ast.CallExpr) bool {
- foundFocus := false
- isFocus, id := ginkgoHndlr.GetFocusContainerName(exp)
- if isFocus {
- reportNewName(pass, id, id.Name[1:], focusContainerFound, id.Name)
- foundFocus = true
- }
-
- if id != nil && ginkgohandler.IsContainer(id.Name) {
- for _, arg := range exp.Args {
- if ginkgoHndlr.IsFocusSpec(arg) {
- reportNoFix(pass, arg.Pos(), focusSpecFound)
- foundFocus = true
- } else if callExp, ok := arg.(*ast.CallExpr); ok {
- if checkFocusContainer(pass, ginkgoHndlr, callExp) { // handle table entries
- foundFocus = true
- }
- }
- }
- }
-
- return foundFocus
-}
-
-func checkExpression(pass *analysis.Pass, config types.Config, assertionExp *ast.CallExpr, actualExpr *ast.CallExpr, handler gomegahandler.Handler, timePkg string) bool {
- expr := astcopy.CallExpr(assertionExp)
-
- reportBuilder := reports.NewBuilder(pass.Fset, expr)
-
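+// checkGomegaExpression first applies the missing-assertion rule; otherwise it applies either
+// the async or the regular rule set, and reports any issue found together with its fix offer.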
+func checkGomegaExpression(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder, pass *analysis.Pass) bool {
goNested := false
- if checkAsyncAssertion(pass, config, expr, actualExpr, handler, reportBuilder, timePkg) {
+ if rules.GetMissingAssertionRule().Apply(gexp, config, reportBuilder) {
goNested = true
} else {
-
- actualArg := getActualArg(actualExpr, handler)
- if actualArg == nil {
- return true
- }
-
- if config.ForceExpectTo {
- goNested = forceExpectTo(expr, handler, reportBuilder) || goNested
+ if gexp.IsAsync() {
+ rules.GetAsyncRules().Apply(gexp, config, reportBuilder)
+ goNested = true
+ } else {
+ rules.GetRules().Apply(gexp, config, reportBuilder)
}
-
- goNested = doCheckExpression(pass, config, assertionExp, actualArg, expr, handler, reportBuilder) || goNested
}
if reportBuilder.HasReport() {
- reportBuilder.SetFixOffer(pass.Fset, expr)
+ reportBuilder.SetFixOffer(gexp.GetClone())
pass.Report(reportBuilder.Build())
}
return goNested
}
-func forceExpectTo(expr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- if asrtFun, ok := expr.Fun.(*ast.SelectorExpr); ok {
- if actualFuncName, ok := handler.GetActualFuncName(expr); ok && actualFuncName == expect {
- var (
- name string
- newIdent *ast.Ident
- )
-
- switch name = asrtFun.Sel.Name; name {
- case "Should":
- newIdent = ast.NewIdent("To")
- case "ShouldNot":
- newIdent = ast.NewIdent("ToNot")
- default:
- return false
- }
-
- handler.ReplaceFunction(expr, newIdent)
- reportBuilder.AddIssue(true, fmt.Sprintf(forceExpectToTemplate, name))
- return true
- }
- }
-
- return false
-}
-
-func doCheckExpression(pass *analysis.Pass, config types.Config, assertionExp *ast.CallExpr, actualArg ast.Expr, expr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- if !bool(config.SuppressLen) && isActualIsLenFunc(actualArg) {
- return checkLengthMatcher(expr, pass, handler, reportBuilder)
-
- } else if !bool(config.SuppressLen) && isActualIsCapFunc(actualArg) {
- return checkCapMatcher(expr, handler, reportBuilder)
-
- } else if nilable, compOp := getNilableFromComparison(actualArg); nilable != nil {
- if isExprError(pass, nilable) {
- if config.SuppressErr {
- return true
- }
- } else if config.SuppressNil {
- return true
- }
-
- return checkNilMatcher(expr, pass, nilable, handler, compOp == token.NEQ, reportBuilder)
-
- } else if first, second, op, ok := isComparison(pass, actualArg); ok {
- matcher, shouldContinue := startCheckComparison(expr, handler)
- if !shouldContinue {
- return false
- }
- if !config.SuppressLen {
- if isActualIsLenFunc(first) {
- if handleLenComparison(pass, expr, matcher, first, second, op, handler, reportBuilder) {
- return false
- }
- }
- if isActualIsCapFunc(first) {
- if handleCapComparison(expr, matcher, first, second, op, handler, reportBuilder) {
- return false
- }
- }
- }
- return bool(config.SuppressCompare) || checkComparison(expr, pass, matcher, handler, first, second, op, reportBuilder)
-
- } else if checkMatchError(pass, assertionExp, actualArg, handler, reportBuilder) {
- return false
- } else if isExprError(pass, actualArg) {
- return bool(config.SuppressErr) || checkNilError(pass, expr, handler, actualArg, reportBuilder)
-
- } else if checkPointerComparison(pass, config, assertionExp, expr, actualArg, handler, reportBuilder) {
- return false
- } else if !handleAssertionOnly(pass, config, expr, handler, actualArg, reportBuilder) {
- return false
- } else if !config.SuppressTypeCompare {
- return !checkEqualWrongType(pass, assertionExp, actualArg, handler, reportBuilder)
- }
-
- return true
-}
-
-func checkMatchError(pass *analysis.Pass, origExp *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- matcher, ok := origExp.Args[0].(*ast.CallExpr)
- if !ok {
- return false
- }
-
- return doCheckMatchError(pass, origExp, matcher, actualArg, handler, reportBuilder)
-}
-
-func doCheckMatchError(pass *analysis.Pass, origExp *ast.CallExpr, matcher *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- name, ok := handler.GetActualFuncName(matcher)
- if !ok {
- return false
- }
- switch name {
- case matchError:
- case not:
- nested, ok := matcher.Args[0].(*ast.CallExpr)
- if !ok {
- return false
- }
-
- return doCheckMatchError(pass, origExp, nested, actualArg, handler, reportBuilder)
- case and, or:
- res := false
- for _, arg := range matcher.Args {
- if nested, ok := arg.(*ast.CallExpr); ok {
- if valid := doCheckMatchError(pass, origExp, nested, actualArg, handler, reportBuilder); valid {
- res = true
- }
- }
- }
- return res
- default:
- return false
- }
-
- if !isExprError(pass, actualArg) {
- reportBuilder.AddIssue(false, matchErrorArgWrongType, goFmt(pass.Fset, actualArg))
- }
-
- expr := astcopy.CallExpr(matcher)
-
- validAssertion, requiredParams := checkMatchErrorAssertion(pass, matcher)
- if !validAssertion {
- reportBuilder.AddIssue(false, matchErrorWrongTypeAssertion, goFmt(pass.Fset, matcher.Args[0]))
- }
-
- numParams := len(matcher.Args)
- if numParams == requiredParams {
- if numParams == 2 {
- t := pass.TypesInfo.TypeOf(matcher.Args[1])
- if !gotypes.Identical(t, gotypes.Typ[gotypes.String]) {
- reportBuilder.AddIssue(false, matchErrorNoFuncDescription)
- return true
- }
- }
- return true
- }
-
- if requiredParams == 2 && numParams == 1 {
- reportBuilder.AddIssue(false, matchErrorMissingDescription)
- return true
- }
-
- var newArgsSuggestion = []ast.Expr{expr.Args[0]}
- if requiredParams == 2 {
- newArgsSuggestion = append(newArgsSuggestion, expr.Args[1])
- }
- expr.Args = newArgsSuggestion
-
- reportBuilder.AddIssue(true, matchErrorRedundantArg)
- return true
-}
-
-func checkMatchErrorAssertion(pass *analysis.Pass, matcher *ast.CallExpr) (bool, int) {
- if isErrorMatcherValidArg(pass, matcher.Args[0]) {
- return true, 1
- }
-
- t1 := pass.TypesInfo.TypeOf(matcher.Args[0])
- if isFuncErrBool(t1) {
- return true, 2
- }
-
- return false, 0
-}
-
-// isFuncErrBool checks if a function is with the signature `func(error) bool`
-func isFuncErrBool(t gotypes.Type) bool {
- sig, ok := t.(*gotypes.Signature)
- if !ok {
- return false
- }
- if sig.Params().Len() != 1 || sig.Results().Len() != 1 {
- return false
- }
-
- if !interfaces.ImplementsError(sig.Params().At(0).Type()) {
- return false
- }
-
- b, ok := sig.Results().At(0).Type().(*gotypes.Basic)
- if ok && b.Name() == "bool" && b.Info() == gotypes.IsBoolean && b.Kind() == gotypes.Bool {
- return true
- }
-
- return false
-}
-
-func isErrorMatcherValidArg(pass *analysis.Pass, arg ast.Expr) bool {
- if isExprError(pass, arg) {
- return true
- }
-
- if t, ok := pass.TypesInfo.TypeOf(arg).(*gotypes.Basic); ok && t.Kind() == gotypes.String {
- return true
- }
-
- t := pass.TypesInfo.TypeOf(arg)
-
- return interfaces.ImplementsGomegaMatcher(t)
-}
-
-func checkEqualWrongType(pass *analysis.Pass, origExp *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- matcher, ok := origExp.Args[0].(*ast.CallExpr)
- if !ok {
- return false
- }
-
- return checkEqualDifferentTypes(pass, matcher, actualArg, handler, false, reportBuilder)
-}
-
-func checkEqualDifferentTypes(pass *analysis.Pass, matcher *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, parentPointer bool, reportBuilder *reports.Builder) bool {
- matcherFuncName, ok := handler.GetActualFuncName(matcher)
- if !ok {
- return false
- }
-
- actualType := pass.TypesInfo.TypeOf(actualArg)
-
- switch matcherFuncName {
- case equal, beIdenticalTo: // continue
- case and, or:
- foundIssue := false
- for _, nestedExp := range matcher.Args {
- nested, ok := nestedExp.(*ast.CallExpr)
- if !ok {
- continue
- }
- if checkEqualDifferentTypes(pass, nested, actualArg, handler, parentPointer, reportBuilder) {
- foundIssue = true
- }
- }
-
- return foundIssue
- case withTransform:
- nested, ok := matcher.Args[1].(*ast.CallExpr)
- if !ok {
- return false
- }
-
- matcherFuncName, ok = handler.GetActualFuncName(nested)
- switch matcherFuncName {
- case equal, beIdenticalTo:
- case not:
- return checkEqualDifferentTypes(pass, nested, actualArg, handler, parentPointer, reportBuilder)
- default:
- return false
- }
-
- if t := getFuncType(pass, matcher.Args[0]); t != nil {
- actualType = t
- matcher = nested
-
- if !ok {
- return false
- }
- } else {
- return checkEqualDifferentTypes(pass, nested, actualArg, handler, parentPointer, reportBuilder)
- }
-
- case not:
- nested, ok := matcher.Args[0].(*ast.CallExpr)
- if !ok {
- return false
- }
-
- return checkEqualDifferentTypes(pass, nested, actualArg, handler, parentPointer, reportBuilder)
-
- case haveValue:
- nested, ok := matcher.Args[0].(*ast.CallExpr)
- if !ok {
- return false
- }
-
- return checkEqualDifferentTypes(pass, nested, actualArg, handler, true, reportBuilder)
- default:
- return false
- }
-
- matcherValue := matcher.Args[0]
-
- switch act := actualType.(type) {
- case *gotypes.Tuple:
- actualType = act.At(0).Type()
- case *gotypes.Pointer:
- if parentPointer {
- actualType = act.Elem()
- }
- }
-
- matcherType := pass.TypesInfo.TypeOf(matcherValue)
-
- if !reflect.DeepEqual(matcherType, actualType) {
- // Equal can handle comparison of interface and a value that implements it
- if isImplementing(matcherType, actualType) || isImplementing(actualType, matcherType) {
- return false
- }
-
- reportBuilder.AddIssue(false, compareDifferentTypes, matcherFuncName, actualType, matcherType)
- return true
- }
-
- return false
-}
-
-func getFuncType(pass *analysis.Pass, expr ast.Expr) gotypes.Type {
- switch f := expr.(type) {
- case *ast.FuncLit:
- if f.Type != nil && f.Type.Results != nil && len(f.Type.Results.List) > 0 {
- return pass.TypesInfo.TypeOf(f.Type.Results.List[0].Type)
- }
- case *ast.Ident:
- a := pass.TypesInfo.TypeOf(f)
- if sig, ok := a.(*gotypes.Signature); ok && sig.Results().Len() > 0 {
- return sig.Results().At(0).Type()
- }
- }
-
- return nil
-}
-
-func isImplementing(ifs, impl gotypes.Type) bool {
- if gotypes.IsInterface(ifs) {
-
- var (
- theIfs *gotypes.Interface
- ok bool
- )
-
- for {
- theIfs, ok = ifs.(*gotypes.Interface)
- if ok {
- break
- }
- ifs = ifs.Underlying()
- }
-
- return gotypes.Implements(impl, theIfs)
- }
- return false
-}
-
-// be careful - never change origExp!!! only modify its clone, expr!!!
-func checkPointerComparison(pass *analysis.Pass, config types.Config, origExp *ast.CallExpr, expr *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- if !isPointer(pass, actualArg) {
- return false
- }
- matcher, ok := origExp.Args[0].(*ast.CallExpr)
- if !ok {
- return false
- }
-
- matcherFuncName, ok := handler.GetActualFuncName(matcher)
- if !ok {
- return false
- }
-
- // not using recurse here, since we need the original expression, in order to get the TypeInfo, while we should not
- // modify it.
- for matcherFuncName == not {
- reverseAssertionFuncLogic(expr)
- expr.Args[0] = expr.Args[0].(*ast.CallExpr).Args[0]
- matcher, ok = matcher.Args[0].(*ast.CallExpr)
- if !ok {
- return false
- }
-
- matcherFuncName, ok = handler.GetActualFuncName(matcher)
- if !ok {
- return false
- }
- }
-
- switch matcherFuncName {
- case equal, beIdenticalTo, beEquivalentTo:
- arg := matcher.Args[0]
- if isPointer(pass, arg) {
- return false
- }
- if isNil(arg) {
- return false
- }
- if isInterface(pass, arg) {
- return false
- }
- case beFalse, beTrue, beNumerically:
- default:
- return false
- }
-
- handleAssertionOnly(pass, config, expr, handler, actualArg, reportBuilder)
-
- args := []ast.Expr{astcopy.CallExpr(expr.Args[0].(*ast.CallExpr))}
- handler.ReplaceFunction(expr.Args[0].(*ast.CallExpr), ast.NewIdent(haveValue))
- expr.Args[0].(*ast.CallExpr).Args = args
-
- reportBuilder.AddIssue(true, comparePointerToValue)
- return true
-}
-
-// check async assertion does not assert function call. This is a real bug in the test. In this case, the assertion is
-// done on the returned value, instead of polling the result of a function, for instance.
-func checkAsyncAssertion(pass *analysis.Pass, config types.Config, expr *ast.CallExpr, actualExpr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder, timePkg string) bool {
- funcName, ok := handler.GetActualFuncName(actualExpr)
- if !ok {
- return false
- }
-
- var funcIndex int
- switch funcName {
- case eventually, consistently:
- funcIndex = 0
- case eventuallyWithOffset, consistentlyWithOffset:
- funcIndex = 1
- default:
- return false
- }
-
- if !config.SuppressAsync && len(actualExpr.Args) > funcIndex {
- t := pass.TypesInfo.TypeOf(actualExpr.Args[funcIndex])
-
- // skip context variable, if used as first argument
- if "context.Context" == t.String() {
- funcIndex++
- }
-
- if len(actualExpr.Args) > funcIndex {
- if fun, funcCall := actualExpr.Args[funcIndex].(*ast.CallExpr); funcCall {
- t = pass.TypesInfo.TypeOf(fun)
- if !isValidAsyncValueType(t) {
- actualExpr = handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr))
-
- if len(fun.Args) > 0 {
- origArgs := actualExpr.Args
- origFunc := actualExpr.Fun
- actualExpr.Args = fun.Args
-
- origArgs[funcIndex] = fun.Fun
- call := &ast.SelectorExpr{
- Sel: ast.NewIdent("WithArguments"),
- X: &ast.CallExpr{
- Fun: origFunc,
- Args: origArgs,
- },
- }
-
- actualExpr.Fun = call
- actualExpr.Args = fun.Args
- actualExpr = actualExpr.Fun.(*ast.SelectorExpr).X.(*ast.CallExpr)
- } else {
- actualExpr.Args[funcIndex] = fun.Fun
- }
-
- reportBuilder.AddIssue(true, valueInEventually, funcName, funcName)
- }
- }
- }
-
- if config.ValidateAsyncIntervals {
- intervals.CheckIntervals(pass, expr, actualExpr, reportBuilder, handler, timePkg, funcIndex)
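+// getTimePkg returns the name under which the "time" package is imported in the file,
+// i.e. its local alias if it was renamed, or "time" otherwise.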
+func getTimePkg(file *ast.File) string {
+ timePkg := "time"
+ for _, imp := range file.Imports {
+ if imp.Path.Value == `"time"` && imp.Name != nil {
+ timePkg = imp.Name.Name
}
}
- handleAssertionOnly(pass, config, expr, handler, actualExpr, reportBuilder)
- return true
-}
-
-func isValidAsyncValueType(t gotypes.Type) bool {
- switch t.(type) {
- // allow functions that return function or channel.
- case *gotypes.Signature, *gotypes.Chan, *gotypes.Pointer:
- return true
- case *gotypes.Named:
- return isValidAsyncValueType(t.Underlying())
- }
-
- return false
-}
-
-func startCheckComparison(exp *ast.CallExpr, handler gomegahandler.Handler) (*ast.CallExpr, bool) {
- matcher, ok := exp.Args[0].(*ast.CallExpr)
- if !ok {
- return nil, false
- }
-
- matcherFuncName, ok := handler.GetActualFuncName(matcher)
- if !ok {
- return nil, false
- }
-
- switch matcherFuncName {
- case beTrue:
- case beFalse:
- reverseAssertionFuncLogic(exp)
- case equal:
- boolean, found := matcher.Args[0].(*ast.Ident)
- if !found {
- return nil, false
- }
-
- if boolean.Name == "false" {
- reverseAssertionFuncLogic(exp)
- } else if boolean.Name != "true" {
- return nil, false
- }
-
- case not:
- reverseAssertionFuncLogic(exp)
- exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0]
- return startCheckComparison(exp, handler)
-
- default:
- return nil, false
- }
-
- return matcher, true
-}
-
-func checkComparison(exp *ast.CallExpr, pass *analysis.Pass, matcher *ast.CallExpr, handler gomegahandler.Handler, first ast.Expr, second ast.Expr, op token.Token, reportBuilder *reports.Builder) bool {
- fun, ok := exp.Fun.(*ast.SelectorExpr)
- if !ok {
- return true
- }
-
- call := handler.GetActualExpr(fun)
- if call == nil {
- return true
- }
-
- switch op {
- case token.EQL:
- handleEqualComparison(pass, matcher, first, second, handler)
-
- case token.NEQ:
- reverseAssertionFuncLogic(exp)
- handleEqualComparison(pass, matcher, first, second, handler)
- case token.GTR, token.GEQ, token.LSS, token.LEQ:
- if !isNumeric(pass, first) {
- return true
- }
- handler.ReplaceFunction(matcher, ast.NewIdent(beNumerically))
- matcher.Args = []ast.Expr{
- &ast.BasicLit{Kind: token.STRING, Value: fmt.Sprintf(`"%s"`, op.String())},
- second,
- }
- default:
- return true
- }
-
- call.Args = []ast.Expr{first}
- reportBuilder.AddIssue(true, wrongCompareWarningTemplate)
- return false
-}
-
-func handleEqualComparison(pass *analysis.Pass, matcher *ast.CallExpr, first ast.Expr, second ast.Expr, handler gomegahandler.Handler) {
- if isZero(pass, second) {
- handler.ReplaceFunction(matcher, ast.NewIdent(beZero))
- matcher.Args = nil
- } else {
- t := pass.TypesInfo.TypeOf(first)
- if gotypes.IsInterface(t) {
- handler.ReplaceFunction(matcher, ast.NewIdent(beIdenticalTo))
- } else if is[*gotypes.Pointer](t) {
- handler.ReplaceFunction(matcher, ast.NewIdent(beIdenticalTo))
- } else {
- handler.ReplaceFunction(matcher, ast.NewIdent(equal))
- }
-
- matcher.Args = []ast.Expr{second}
- }
-}
-
-func handleLenComparison(pass *analysis.Pass, exp *ast.CallExpr, matcher *ast.CallExpr, first ast.Expr, second ast.Expr, op token.Token, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- switch op {
- case token.EQL:
- case token.NEQ:
- reverseAssertionFuncLogic(exp)
- default:
- return false
- }
-
- var eql *ast.Ident
- if isZero(pass, second) {
- eql = ast.NewIdent(beEmpty)
- } else {
- eql = ast.NewIdent(haveLen)
- matcher.Args = []ast.Expr{second}
- }
-
- handler.ReplaceFunction(matcher, eql)
- firstLen, ok := first.(*ast.CallExpr) // assuming it's len()
- if !ok {
- return false // should never happen
- }
-
- val := firstLen.Args[0]
- fun := handler.GetActualExpr(exp.Fun.(*ast.SelectorExpr))
- fun.Args = []ast.Expr{val}
-
- reportBuilder.AddIssue(true, wrongLengthWarningTemplate)
- return true
-}
-
-func handleCapComparison(exp *ast.CallExpr, matcher *ast.CallExpr, first ast.Expr, second ast.Expr, op token.Token, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- switch op {
- case token.EQL:
- case token.NEQ:
- reverseAssertionFuncLogic(exp)
- default:
- return false
- }
-
- eql := ast.NewIdent(haveCap)
- matcher.Args = []ast.Expr{second}
-
- handler.ReplaceFunction(matcher, eql)
- firstLen, ok := first.(*ast.CallExpr) // assuming it's len()
- if !ok {
- return false // should never happen
- }
-
- val := firstLen.Args[0]
- fun := handler.GetActualExpr(exp.Fun.(*ast.SelectorExpr))
- fun.Args = []ast.Expr{val}
-
- reportBuilder.AddIssue(true, wrongCapWarningTemplate)
- return true
-}
-
-// Check if the "actual" argument is a call to the golang built-in len() function
-func isActualIsLenFunc(actualArg ast.Expr) bool {
- return checkActualFuncName(actualArg, "len")
-}
-
-// Check if the "actual" argument is a call to the golang built-in len() function
-func isActualIsCapFunc(actualArg ast.Expr) bool {
- return checkActualFuncName(actualArg, "cap")
-}
-
-func checkActualFuncName(actualArg ast.Expr, name string) bool {
- lenArgExp, ok := actualArg.(*ast.CallExpr)
- if !ok {
- return false
- }
-
- lenFunc, ok := lenArgExp.Fun.(*ast.Ident)
- return ok && lenFunc.Name == name
-}
-
-// Check if matcher function is in one of the patterns we want to avoid
-func checkLengthMatcher(exp *ast.CallExpr, pass *analysis.Pass, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- matcher, ok := exp.Args[0].(*ast.CallExpr)
- if !ok {
- return true
- }
-
- matcherFuncName, ok := handler.GetActualFuncName(matcher)
- if !ok {
- return true
- }
-
- switch matcherFuncName {
- case equal:
- handleEqualLenMatcher(matcher, pass, exp, handler, reportBuilder)
- return false
-
- case beZero:
- handleBeZero(exp, handler, reportBuilder)
- return false
-
- case beNumerically:
- return handleBeNumerically(matcher, pass, exp, handler, reportBuilder)
-
- case not:
- reverseAssertionFuncLogic(exp)
- exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0]
- return checkLengthMatcher(exp, pass, handler, reportBuilder)
-
- default:
- return true
- }
-}
-
-// Check if matcher function is in one of the patterns we want to avoid
-func checkCapMatcher(exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- matcher, ok := exp.Args[0].(*ast.CallExpr)
- if !ok {
- return true
- }
-
- matcherFuncName, ok := handler.GetActualFuncName(matcher)
- if !ok {
- return true
- }
-
- switch matcherFuncName {
- case equal:
- handleEqualCapMatcher(matcher, exp, handler, reportBuilder)
- return false
-
- case beZero:
- handleCapBeZero(exp, handler, reportBuilder)
- return false
-
- case beNumerically:
- return handleCapBeNumerically(matcher, exp, handler, reportBuilder)
-
- case not:
- reverseAssertionFuncLogic(exp)
- exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0]
- return checkCapMatcher(exp, handler, reportBuilder)
-
- default:
- return true
- }
-}
-
-// Check if matcher function is in one of the patterns we want to avoid
-func checkNilMatcher(exp *ast.CallExpr, pass *analysis.Pass, nilable ast.Expr, handler gomegahandler.Handler, notEqual bool, reportBuilder *reports.Builder) bool {
- matcher, ok := exp.Args[0].(*ast.CallExpr)
- if !ok {
- return true
- }
-
- matcherFuncName, ok := handler.GetActualFuncName(matcher)
- if !ok {
- return true
- }
-
- switch matcherFuncName {
- case equal:
- handleEqualNilMatcher(matcher, pass, exp, handler, nilable, notEqual, reportBuilder)
-
- case beTrue:
- handleNilBeBoolMatcher(pass, exp, handler, nilable, notEqual, reportBuilder)
-
- case beFalse:
- reverseAssertionFuncLogic(exp)
- handleNilBeBoolMatcher(pass, exp, handler, nilable, notEqual, reportBuilder)
-
- case not:
- reverseAssertionFuncLogic(exp)
- exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0]
- return checkNilMatcher(exp, pass, nilable, handler, notEqual, reportBuilder)
-
- default:
- return true
- }
- return false
-}
-
-func checkNilError(pass *analysis.Pass, assertionExp *ast.CallExpr, handler gomegahandler.Handler, actualArg ast.Expr, reportBuilder *reports.Builder) bool {
- if len(assertionExp.Args) == 0 {
- return true
- }
-
- equalFuncExpr, ok := assertionExp.Args[0].(*ast.CallExpr)
- if !ok {
- return true
- }
-
- funcName, ok := handler.GetActualFuncName(equalFuncExpr)
- if !ok {
- return true
- }
-
- switch funcName {
- case beNil: // no additional processing needed.
- case equal:
-
- if len(equalFuncExpr.Args) == 0 {
- return true
- }
-
- nilable, ok := equalFuncExpr.Args[0].(*ast.Ident)
- if !ok || nilable.Name != "nil" {
- return true
- }
-
- case not:
- reverseAssertionFuncLogic(assertionExp)
- assertionExp.Args[0] = assertionExp.Args[0].(*ast.CallExpr).Args[0]
- return checkNilError(pass, assertionExp, handler, actualArg, reportBuilder)
- default:
- return true
- }
-
- var newFuncName string
- if is[*ast.CallExpr](actualArg) {
- newFuncName = succeed
- } else {
- reverseAssertionFuncLogic(assertionExp)
- newFuncName = haveOccurred
- }
-
- handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(newFuncName))
- equalFuncExpr.Args = nil
-
- reportBuilder.AddIssue(true, wrongErrWarningTemplate)
- return false
-}
-
-// handleAssertionOnly checks use-cases when the actual value is valid, but only the assertion should be fixed
-// it handles:
-//
-// Equal(nil) => BeNil()
-// Equal(true) => BeTrue()
-// Equal(false) => BeFalse()
-// HaveLen(0) => BeEmpty()
-func handleAssertionOnly(pass *analysis.Pass, config types.Config, expr *ast.CallExpr, handler gomegahandler.Handler, actualArg ast.Expr, reportBuilder *reports.Builder) bool {
- if len(expr.Args) == 0 {
- return true
- }
-
- equalFuncExpr, ok := expr.Args[0].(*ast.CallExpr)
- if !ok {
- return true
- }
-
- funcName, ok := handler.GetActualFuncName(equalFuncExpr)
- if !ok {
- return true
- }
-
- switch funcName {
- case equal:
- if len(equalFuncExpr.Args) == 0 {
- return true
- }
-
- tkn, ok := equalFuncExpr.Args[0].(*ast.Ident)
- if !ok {
- return true
- }
-
- var replacement string
- var template string
- switch tkn.Name {
- case "nil":
- if config.SuppressNil {
- return true
- }
- replacement = beNil
- template = wrongNilWarningTemplate
- case "true":
- replacement = beTrue
- template = wrongBoolWarningTemplate
- case "false":
- if isNegativeAssertion(expr) {
- reverseAssertionFuncLogic(expr)
- replacement = beTrue
- } else {
- replacement = beFalse
- }
- template = wrongBoolWarningTemplate
- default:
- return true
- }
-
- handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(replacement))
- equalFuncExpr.Args = nil
-
- reportBuilder.AddIssue(true, template)
- return false
-
- case beFalse:
- if isNegativeAssertion(expr) {
- reverseAssertionFuncLogic(expr)
- handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(beTrue))
- reportBuilder.AddIssue(true, doubleNegativeWarningTemplate)
- return false
- }
- return false
-
- case haveLen:
- if config.AllowHaveLen0 {
- return true
- }
-
- if len(equalFuncExpr.Args) > 0 {
- if isZero(pass, equalFuncExpr.Args[0]) {
- handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(beEmpty))
- equalFuncExpr.Args = nil
- reportBuilder.AddIssue(true, wrongLengthWarningTemplate)
- return false
- }
- }
-
- return true
-
- case not:
- reverseAssertionFuncLogic(expr)
- expr.Args[0] = expr.Args[0].(*ast.CallExpr).Args[0]
- return handleAssertionOnly(pass, config, expr, handler, actualArg, reportBuilder)
- default:
- return true
- }
-}
-
-func isZero(pass *analysis.Pass, arg ast.Expr) bool {
- if val, ok := arg.(*ast.BasicLit); ok && val.Kind == token.INT && val.Value == "0" {
- return true
- }
- info, ok := pass.TypesInfo.Types[arg]
- if ok {
- if t, ok := info.Type.(*gotypes.Basic); ok && t.Kind() == gotypes.Int && info.Value != nil {
- if i, ok := constant.Int64Val(info.Value); ok && i == 0 {
- return true
- }
- }
- } else if val, ok := arg.(*ast.Ident); ok && val.Obj != nil && val.Obj.Kind == ast.Con {
- if spec, ok := val.Obj.Decl.(*ast.ValueSpec); ok {
- if len(spec.Values) == 1 {
- if value, ok := spec.Values[0].(*ast.BasicLit); ok && value.Kind == token.INT && value.Value == "0" {
- return true
- }
- }
- }
- }
-
- return false
-}
-
-// getActualArg checks that the function is an assertion's actual function and return the "actual" parameter. If the
-// function is not assertion's actual function, return nil.
-func getActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler) ast.Expr {
- funcName, ok := handler.GetActualFuncName(actualExpr)
- if !ok {
- return nil
- }
-
- switch funcName {
- case expect, omega:
- return actualExpr.Args[0]
- case expectWithOffset:
- return actualExpr.Args[1]
- default:
- return nil
- }
-}
-
-// Replace the len function call by its parameter, to create a fix suggestion
-func replaceLenActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler) {
- name, ok := handler.GetActualFuncName(actualExpr)
- if !ok {
- return
- }
-
- switch name {
- case expect, omega:
- arg := actualExpr.Args[0]
- if isActualIsLenFunc(arg) || isActualIsCapFunc(arg) {
- // replace the len function call by its parameter, to create a fix suggestion
- actualExpr.Args[0] = arg.(*ast.CallExpr).Args[0]
- }
- case expectWithOffset:
- arg := actualExpr.Args[1]
- if isActualIsLenFunc(arg) || isActualIsCapFunc(arg) {
- // replace the len function call by its parameter, to create a fix suggestion
- actualExpr.Args[1] = arg.(*ast.CallExpr).Args[0]
- }
- }
-}
-
-// Replace the nil comparison with the compared object, to create a fix suggestion
-func replaceNilActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr) bool {
- actualFuncName, ok := handler.GetActualFuncName(actualExpr)
- if !ok {
- return false
- }
-
- switch actualFuncName {
- case expect, omega:
- actualExpr.Args[0] = nilable
- return true
-
- case expectWithOffset:
- actualExpr.Args[1] = nilable
- return true
-
- default:
- return false
- }
-}
-
-// For the BeNumerically matcher, we want to avoid the assertion of length to be > 0 or >= 1, or just == number
-func handleBeNumerically(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- opExp, ok1 := matcher.Args[0].(*ast.BasicLit)
- valExp, ok2 := matcher.Args[1].(*ast.BasicLit)
-
- if ok1 && ok2 {
- op := opExp.Value
- val := valExp.Value
-
- if (op == `">"` && val == "0") || (op == `">="` && val == "1") {
- reverseAssertionFuncLogic(exp)
- handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(beEmpty))
- exp.Args[0].(*ast.CallExpr).Args = nil
- } else if op == `"=="` {
- chooseNumericMatcher(pass, exp, handler, valExp)
- } else if op == `"!="` {
- reverseAssertionFuncLogic(exp)
- chooseNumericMatcher(pass, exp, handler, valExp)
- } else {
- return true
- }
-
- reportLengthAssertion(exp, handler, reportBuilder)
- return false
- }
- return true
-}
-
-// For the BeNumerically matcher, we want to avoid the assertion of length to be > 0 or >= 1, or just == number
-func handleCapBeNumerically(matcher *ast.CallExpr, exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- opExp, ok1 := matcher.Args[0].(*ast.BasicLit)
- valExp, ok2 := matcher.Args[1].(*ast.BasicLit)
-
- if ok1 && ok2 {
- op := opExp.Value
- val := valExp.Value
-
- if (op == `">"` && val == "0") || (op == `">="` && val == "1") {
- reverseAssertionFuncLogic(exp)
- handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveCap))
- exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "0"}}
- } else if op == `"=="` {
- replaceNumericCapMatcher(exp, handler, valExp)
- } else if op == `"!="` {
- reverseAssertionFuncLogic(exp)
- replaceNumericCapMatcher(exp, handler, valExp)
- } else {
- return true
- }
-
- reportCapAssertion(exp, handler, reportBuilder)
- return false
- }
- return true
-}
-
-func chooseNumericMatcher(pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, valExp ast.Expr) {
- caller := exp.Args[0].(*ast.CallExpr)
- if isZero(pass, valExp) {
- handler.ReplaceFunction(caller, ast.NewIdent(beEmpty))
- exp.Args[0].(*ast.CallExpr).Args = nil
- } else {
- handler.ReplaceFunction(caller, ast.NewIdent(haveLen))
- exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{valExp}
- }
-}
-
-func replaceNumericCapMatcher(exp *ast.CallExpr, handler gomegahandler.Handler, valExp ast.Expr) {
- caller := exp.Args[0].(*ast.CallExpr)
- handler.ReplaceFunction(caller, ast.NewIdent(haveCap))
- exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{valExp}
-}
-
-func reverseAssertionFuncLogic(exp *ast.CallExpr) {
- assertionFunc := exp.Fun.(*ast.SelectorExpr).Sel
- assertionFunc.Name = reverseassertion.ChangeAssertionLogic(assertionFunc.Name)
-}
-
-func isNegativeAssertion(exp *ast.CallExpr) bool {
- assertionFunc := exp.Fun.(*ast.SelectorExpr).Sel
- return reverseassertion.IsNegativeLogic(assertionFunc.Name)
-}
-
-func handleEqualLenMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
- equalTo, ok := matcher.Args[0].(*ast.BasicLit)
- if ok {
- chooseNumericMatcher(pass, exp, handler, equalTo)
- } else {
- handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveLen))
- exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{matcher.Args[0]}
- }
- reportLengthAssertion(exp, handler, reportBuilder)
-}
-
-func handleEqualCapMatcher(matcher *ast.CallExpr, exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
- handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveCap))
- exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{matcher.Args[0]}
- reportCapAssertion(exp, handler, reportBuilder)
-}
-
-func handleBeZero(exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
- exp.Args[0].(*ast.CallExpr).Args = nil
- handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(beEmpty))
- reportLengthAssertion(exp, handler, reportBuilder)
-}
-
-func handleCapBeZero(exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
- exp.Args[0].(*ast.CallExpr).Args = nil
- handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveCap))
- exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "0"}}
- reportCapAssertion(exp, handler, reportBuilder)
-}
-
-func handleEqualNilMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, reportBuilder *reports.Builder) {
- equalTo, ok := matcher.Args[0].(*ast.Ident)
- if !ok {
- return
- }
-
- if equalTo.Name == "false" {
- reverseAssertionFuncLogic(exp)
- } else if equalTo.Name != "true" {
- return
- }
-
- newFuncName, isItError := handleNilComparisonErr(pass, exp, nilable)
-
- handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(newFuncName))
- exp.Args[0].(*ast.CallExpr).Args = nil
-
- reportNilAssertion(exp, handler, nilable, notEqual, isItError, reportBuilder)
-}
-
-func handleNilBeBoolMatcher(pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, reportBuilder *reports.Builder) {
- newFuncName, isItError := handleNilComparisonErr(pass, exp, nilable)
- handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(newFuncName))
- exp.Args[0].(*ast.CallExpr).Args = nil
-
- reportNilAssertion(exp, handler, nilable, notEqual, isItError, reportBuilder)
-}
-
-func handleNilComparisonErr(pass *analysis.Pass, exp *ast.CallExpr, nilable ast.Expr) (string, bool) {
- newFuncName := beNil
- isItError := isExprError(pass, nilable)
- if isItError {
- if is[*ast.CallExpr](nilable) {
- newFuncName = succeed
- } else {
- reverseAssertionFuncLogic(exp)
- newFuncName = haveOccurred
- }
- }
-
- return newFuncName, isItError
-}
-
-func isAssertionFunc(name string) bool {
- switch name {
- case "To", "ToNot", "NotTo", "Should", "ShouldNot":
- return true
- }
- return false
-}
-
-func reportLengthAssertion(expr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
- actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr))
- replaceLenActualArg(actualExpr, handler)
-
- reportBuilder.AddIssue(true, wrongLengthWarningTemplate)
-}
-
-func reportCapAssertion(expr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
- actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr))
- replaceLenActualArg(actualExpr, handler)
-
- reportBuilder.AddIssue(true, wrongCapWarningTemplate)
-}
-
-func reportNilAssertion(expr *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, isItError bool, reportBuilder *reports.Builder) {
- actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr))
- changed := replaceNilActualArg(actualExpr, handler, nilable)
- if !changed {
- return
- }
-
- if notEqual {
- reverseAssertionFuncLogic(expr)
- }
- template := wrongNilWarningTemplate
- if isItError {
- template = wrongErrWarningTemplate
- }
-
- reportBuilder.AddIssue(true, template)
-}
-
-func reportNewName(pass *analysis.Pass, id *ast.Ident, newName string, messageTemplate, oldExpr string) {
- pass.Report(analysis.Diagnostic{
- Pos: id.Pos(),
- Message: fmt.Sprintf(messageTemplate, newName),
- SuggestedFixes: []analysis.SuggestedFix{
- {
- Message: fmt.Sprintf("should replace %s with %s", oldExpr, newName),
- TextEdits: []analysis.TextEdit{
- {
- Pos: id.Pos(),
- End: id.End(),
- NewText: []byte(newName),
- },
- },
- },
- },
- })
-}
-
-func reportNoFix(pass *analysis.Pass, pos token.Pos, message string, args ...any) {
- if len(args) > 0 {
- message = fmt.Sprintf(message, args...)
- }
-
- pass.Report(analysis.Diagnostic{
- Pos: pos,
- Message: message,
- })
-}
-
-func getNilableFromComparison(actualArg ast.Expr) (ast.Expr, token.Token) {
- bin, ok := actualArg.(*ast.BinaryExpr)
- if !ok {
- return nil, token.ILLEGAL
- }
-
- if bin.Op == token.EQL || bin.Op == token.NEQ {
- if isNil(bin.Y) {
- return bin.X, bin.Op
- } else if isNil(bin.X) {
- return bin.Y, bin.Op
- }
- }
-
- return nil, token.ILLEGAL
-}
-
-func isNil(expr ast.Expr) bool {
- nilObject, ok := expr.(*ast.Ident)
- return ok && nilObject.Name == "nil" && nilObject.Obj == nil
-}
-
-func isComparison(pass *analysis.Pass, actualArg ast.Expr) (ast.Expr, ast.Expr, token.Token, bool) {
- bin, ok := actualArg.(*ast.BinaryExpr)
- if !ok {
- return nil, nil, token.ILLEGAL, false
- }
-
- first, second, op := bin.X, bin.Y, bin.Op
- replace := false
- switch realFirst := first.(type) {
- case *ast.Ident: // check if const
- info, ok := pass.TypesInfo.Types[realFirst]
- if ok {
- if is[*gotypes.Basic](info.Type) && info.Value != nil {
- replace = true
- }
- }
-
- case *ast.BasicLit:
- replace = true
- }
-
- if replace {
- first, second = second, first
- }
-
- switch op {
- case token.EQL:
- case token.NEQ:
- case token.GTR, token.GEQ, token.LSS, token.LEQ:
- if replace {
- op = reverseassertion.ChangeCompareOperator(op)
- }
- default:
- return nil, nil, token.ILLEGAL, false
- }
- return first, second, op, true
-}
-
-func goFmt(fset *token.FileSet, x ast.Expr) string {
- var b bytes.Buffer
- _ = printer.Fprint(&b, fset, x)
- return b.String()
-}
-
-func isExprError(pass *analysis.Pass, expr ast.Expr) bool {
- actualArgType := pass.TypesInfo.TypeOf(expr)
- switch t := actualArgType.(type) {
- case *gotypes.Named:
- if interfaces.ImplementsError(actualArgType) {
- return true
- }
- case *gotypes.Tuple:
- if t.Len() > 0 {
- switch t0 := t.At(0).Type().(type) {
- case *gotypes.Named, *gotypes.Pointer:
- if interfaces.ImplementsError(t0) {
- return true
- }
- }
- }
- }
- return false
-}
-
-func isPointer(pass *analysis.Pass, expr ast.Expr) bool {
- t := pass.TypesInfo.TypeOf(expr)
- return is[*gotypes.Pointer](t)
-}
-
-func isInterface(pass *analysis.Pass, expr ast.Expr) bool {
- t := pass.TypesInfo.TypeOf(expr)
- return gotypes.IsInterface(t)
-}
-
-func isNumeric(pass *analysis.Pass, node ast.Expr) bool {
- t := pass.TypesInfo.TypeOf(node)
-
- switch t.String() {
- case "int", "uint", "int8", "uint8", "int16", "uint16", "int32", "uint32", "int64", "uint64", "float32", "float64":
- return true
- }
- return false
-}
-
-func checkNoAssertion(pass *analysis.Pass, expr *ast.CallExpr, handler gomegahandler.Handler) {
- funcName, ok := handler.GetActualFuncName(expr)
- if ok {
- var allowedFunction string
- switch funcName {
- case expect, expectWithOffset:
- allowedFunction = `"To()", "ToNot()" or "NotTo()"`
- case eventually, eventuallyWithOffset, consistently, consistentlyWithOffset:
- allowedFunction = `"Should()" or "ShouldNot()"`
- case omega:
- allowedFunction = `"Should()", "To()", "ShouldNot()", "ToNot()" or "NotTo()"`
- default:
- return
- }
- reportNoFix(pass, expr.Pos(), missingAssertionMessage, funcName, allowedFunction)
- }
-}
-
-func is[T any](x any) bool {
- _, matchType := x.(T)
- return matchType
+ return timePkg
}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/types/boolean.go b/vendor/github.com/nunnatsa/ginkgolinter/types/boolean.go
deleted file mode 100644
index be510c4e95..0000000000
--- a/vendor/github.com/nunnatsa/ginkgolinter/types/boolean.go
+++ /dev/null
@@ -1,32 +0,0 @@
-package types
-
-import (
- "errors"
- "strings"
-)
-
-// Boolean is a bool, implementing the flag.Value interface, to be used as a flag var.
-type Boolean bool
-
-func (b *Boolean) Set(value string) error {
- if b == nil {
- return errors.New("trying to set nil parameter")
- }
- switch strings.ToLower(value) {
- case "true":
- *b = true
- case "false":
- *b = false
- default:
- return errors.New(value + " is not a Boolean value")
-
- }
- return nil
-}
-
-func (b Boolean) String() string {
- if b {
- return "true"
- }
- return "false"
-}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/types/config.go b/vendor/github.com/nunnatsa/ginkgolinter/types/config.go
index b6838e5244..81a9ebe327 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/types/config.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/types/config.go
@@ -17,21 +17,22 @@ const (
)
type Config struct {
- SuppressLen Boolean
- SuppressNil Boolean
- SuppressErr Boolean
- SuppressCompare Boolean
- SuppressAsync Boolean
- ForbidFocus Boolean
- SuppressTypeCompare Boolean
- AllowHaveLen0 Boolean
- ForceExpectTo Boolean
- ValidateAsyncIntervals Boolean
- ForbidSpecPollution Boolean
+ SuppressLen bool
+ SuppressNil bool
+ SuppressErr bool
+ SuppressCompare bool
+ SuppressAsync bool
+ ForbidFocus bool
+ SuppressTypeCompare bool
+ AllowHaveLen0 bool
+ ForceExpectTo bool
+ ValidateAsyncIntervals bool
+ ForbidSpecPollution bool
+ ForceSucceedForFuncs bool
}
func (s *Config) AllTrue() bool {
- return bool(s.SuppressLen && s.SuppressNil && s.SuppressErr && s.SuppressCompare && s.SuppressAsync && !s.ForbidFocus)
+ return s.SuppressLen && s.SuppressNil && s.SuppressErr && s.SuppressCompare && s.SuppressAsync && !s.ForbidFocus
}
func (s *Config) Clone() Config {
@@ -47,6 +48,7 @@ func (s *Config) Clone() Config {
ForceExpectTo: s.ForceExpectTo,
ValidateAsyncIntervals: s.ValidateAsyncIntervals,
ForbidSpecPollution: s.ForbidSpecPollution,
+ ForceSucceedForFuncs: s.ForceSucceedForFuncs,
}
}
diff --git a/vendor/github.com/pelletier/go-toml/v2/.goreleaser.yaml b/vendor/github.com/pelletier/go-toml/v2/.goreleaser.yaml
index 1d8b69e65e..ec52857a3e 100644
--- a/vendor/github.com/pelletier/go-toml/v2/.goreleaser.yaml
+++ b/vendor/github.com/pelletier/go-toml/v2/.goreleaser.yaml
@@ -1,3 +1,4 @@
+version: 2
before:
hooks:
- go mod tidy
diff --git a/vendor/github.com/pelletier/go-toml/v2/README.md b/vendor/github.com/pelletier/go-toml/v2/README.md
index d964b25fe1..0755e55642 100644
--- a/vendor/github.com/pelletier/go-toml/v2/README.md
+++ b/vendor/github.com/pelletier/go-toml/v2/README.md
@@ -565,7 +565,7 @@ complete solutions exist out there.
## Versioning
-Expect for parts explicitely marked otherwise, go-toml follows [Semantic
+Except for parts explicitly marked otherwise, go-toml follows [Semantic
Versioning](https://semver.org). The supported version of
[TOML](https://github.com/toml-lang/toml) is indicated at the beginning of this
document. The last two major versions of Go are supported (see [Go Release
diff --git a/vendor/github.com/pelletier/go-toml/v2/marshaler.go b/vendor/github.com/pelletier/go-toml/v2/marshaler.go
index 7f4e20c128..161acd9343 100644
--- a/vendor/github.com/pelletier/go-toml/v2/marshaler.go
+++ b/vendor/github.com/pelletier/go-toml/v2/marshaler.go
@@ -8,7 +8,7 @@ import (
"io"
"math"
"reflect"
- "sort"
+ "slices"
"strconv"
"strings"
"time"
@@ -280,7 +280,7 @@ func (enc *Encoder) encode(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, e
}
hasTextMarshaler := v.Type().Implements(textMarshalerType)
- if hasTextMarshaler || (v.CanAddr() && reflect.PtrTo(v.Type()).Implements(textMarshalerType)) {
+ if hasTextMarshaler || (v.CanAddr() && reflect.PointerTo(v.Type()).Implements(textMarshalerType)) {
if !hasTextMarshaler {
v = v.Addr()
}
@@ -631,6 +631,18 @@ func (enc *Encoder) keyToString(k reflect.Value) (string, error) {
return "", fmt.Errorf("toml: error marshalling key %v from text: %w", k, err)
}
return string(keyB), nil
+
+ case keyType.Kind() == reflect.Int || keyType.Kind() == reflect.Int8 || keyType.Kind() == reflect.Int16 || keyType.Kind() == reflect.Int32 || keyType.Kind() == reflect.Int64:
+ return strconv.FormatInt(k.Int(), 10), nil
+
+ case keyType.Kind() == reflect.Uint || keyType.Kind() == reflect.Uint8 || keyType.Kind() == reflect.Uint16 || keyType.Kind() == reflect.Uint32 || keyType.Kind() == reflect.Uint64:
+ return strconv.FormatUint(k.Uint(), 10), nil
+
+ case keyType.Kind() == reflect.Float32:
+ return strconv.FormatFloat(k.Float(), 'f', -1, 32), nil
+
+ case keyType.Kind() == reflect.Float64:
+ return strconv.FormatFloat(k.Float(), 'f', -1, 64), nil
}
return "", fmt.Errorf("toml: type %s is not supported as a map key", keyType.Kind())
}
@@ -668,8 +680,8 @@ func (enc *Encoder) encodeMap(b []byte, ctx encoderCtx, v reflect.Value) ([]byte
}
func sortEntriesByKey(e []entry) {
- sort.Slice(e, func(i, j int) bool {
- return e[i].Key < e[j].Key
+ slices.SortFunc(e, func(a, b entry) int {
+ return strings.Compare(a.Key, b.Key)
})
}
@@ -732,7 +744,7 @@ func walkStruct(ctx encoderCtx, t *table, v reflect.Value) {
if fieldType.Anonymous {
if fieldType.Type.Kind() == reflect.Struct {
walkStruct(ctx, t, f)
- } else if fieldType.Type.Kind() == reflect.Pointer && !f.IsNil() && f.Elem().Kind() == reflect.Struct {
+ } else if fieldType.Type.Kind() == reflect.Ptr && !f.IsNil() && f.Elem().Kind() == reflect.Struct {
walkStruct(ctx, t, f.Elem())
}
continue
@@ -951,7 +963,7 @@ func willConvertToTable(ctx encoderCtx, v reflect.Value) bool {
if !v.IsValid() {
return false
}
- if v.Type() == timeType || v.Type().Implements(textMarshalerType) || (v.Kind() != reflect.Ptr && v.CanAddr() && reflect.PtrTo(v.Type()).Implements(textMarshalerType)) {
+ if v.Type() == timeType || v.Type().Implements(textMarshalerType) || (v.Kind() != reflect.Ptr && v.CanAddr() && reflect.PointerTo(v.Type()).Implements(textMarshalerType)) {
return false
}
diff --git a/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go b/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go
index 98231bae65..c3df8bee1c 100644
--- a/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go
+++ b/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go
@@ -5,9 +5,9 @@ import (
"errors"
"fmt"
"io"
- "io/ioutil"
"math"
"reflect"
+ "strconv"
"strings"
"sync/atomic"
"time"
@@ -21,10 +21,8 @@ import (
//
// It is a shortcut for Decoder.Decode() with the default options.
func Unmarshal(data []byte, v interface{}) error {
- p := unstable.Parser{}
- p.Reset(data)
- d := decoder{p: &p}
-
+ d := decoder{}
+ d.p.Reset(data)
return d.FromParser(v)
}
@@ -117,27 +115,25 @@ func (d *Decoder) EnableUnmarshalerInterface() *Decoder {
// Inline Table -> same as Table
// Array of Tables -> same as Array and Table
func (d *Decoder) Decode(v interface{}) error {
- b, err := ioutil.ReadAll(d.r)
+ b, err := io.ReadAll(d.r)
if err != nil {
return fmt.Errorf("toml: %w", err)
}
- p := unstable.Parser{}
- p.Reset(b)
dec := decoder{
- p: &p,
strict: strict{
Enabled: d.strict,
},
unmarshalerInterface: d.unmarshalerInterface,
}
+ dec.p.Reset(b)
return dec.FromParser(v)
}
type decoder struct {
// Which parser instance in use for this decoding session.
- p *unstable.Parser
+ p unstable.Parser
// Flag indicating that the current expression is stashed.
// If set to true, calling nextExpr will not actually pull a new expression
@@ -1078,12 +1074,39 @@ func (d *decoder) keyFromData(keyType reflect.Type, data []byte) (reflect.Value,
}
return mk, nil
- case reflect.PtrTo(keyType).Implements(textUnmarshalerType):
+ case reflect.PointerTo(keyType).Implements(textUnmarshalerType):
mk := reflect.New(keyType)
if err := mk.Interface().(encoding.TextUnmarshaler).UnmarshalText(data); err != nil {
return reflect.Value{}, fmt.Errorf("toml: error unmarshalling key type %s from text: %w", stringType, err)
}
return mk.Elem(), nil
+
+ case keyType.Kind() == reflect.Int || keyType.Kind() == reflect.Int8 || keyType.Kind() == reflect.Int16 || keyType.Kind() == reflect.Int32 || keyType.Kind() == reflect.Int64:
+ key, err := strconv.ParseInt(string(data), 10, 64)
+ if err != nil {
+ return reflect.Value{}, fmt.Errorf("toml: error parsing key of type %s from integer: %w", stringType, err)
+ }
+ return reflect.ValueOf(key).Convert(keyType), nil
+ case keyType.Kind() == reflect.Uint || keyType.Kind() == reflect.Uint8 || keyType.Kind() == reflect.Uint16 || keyType.Kind() == reflect.Uint32 || keyType.Kind() == reflect.Uint64:
+ key, err := strconv.ParseUint(string(data), 10, 64)
+ if err != nil {
+ return reflect.Value{}, fmt.Errorf("toml: error parsing key of type %s from unsigned integer: %w", stringType, err)
+ }
+ return reflect.ValueOf(key).Convert(keyType), nil
+
+ case keyType.Kind() == reflect.Float32:
+ key, err := strconv.ParseFloat(string(data), 32)
+ if err != nil {
+ return reflect.Value{}, fmt.Errorf("toml: error parsing key of type %s from float: %w", stringType, err)
+ }
+ return reflect.ValueOf(float32(key)), nil
+
+ case keyType.Kind() == reflect.Float64:
+ key, err := strconv.ParseFloat(string(data), 64)
+ if err != nil {
+ return reflect.Value{}, fmt.Errorf("toml: error parsing key of type %s from float: %w", stringType, err)
+ }
+ return reflect.ValueOf(float64(key)), nil
}
return reflect.Value{}, fmt.Errorf("toml: cannot convert map key of type %s to expected type %s", stringType, keyType)
}
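
The two hunks above (keyToString in marshaler.go and keyFromData in unmarshaler.go) add support for integer and float map keys. A minimal round-trip sketch of what that enables, my example rather than anything from the vendored sources:

```go
package main

import (
	"fmt"

	toml "github.com/pelletier/go-toml/v2"
)

func main() {
	// Integer map keys now round-trip: they are formatted with
	// strconv.FormatInt on encode and parsed with strconv.ParseInt on decode.
	in := map[int]string{1: "one", 2: "two"}

	data, err := toml.Marshal(in)
	if err != nil {
		panic(err)
	}

	var out map[int]string
	if err := toml.Unmarshal(data, &out); err != nil {
		panic(err)
	}
	fmt.Println(out[1], out[2]) // one two
}
```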
diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go
index 791e50fdb9..c639af6f37 100644
--- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go
+++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go
@@ -53,10 +53,13 @@ func setDefaultAllowedErrors() {
{Err: "io.EOF", Fun: "(*io.SectionReader).Read"},
{Err: "io.EOF", Fun: "(*io.SectionReader).ReadAt"},
{Err: "io.ErrClosedPipe", Fun: "(*io.PipeWriter).Write"},
+ {Err: "io.EOF", Fun: "io.ReadAtLeast"},
{Err: "io.ErrShortBuffer", Fun: "io.ReadAtLeast"},
{Err: "io.ErrUnexpectedEOF", Fun: "io.ReadAtLeast"},
{Err: "io.EOF", Fun: "io.ReadFull"},
{Err: "io.ErrUnexpectedEOF", Fun: "io.ReadFull"},
+ // pkg/mime
+ {Err: "mime.ErrInvalidMediaParameter", Fun: "mime.ParseMediaType"},
// pkg/net/http
{Err: "net/http.ErrServerClosed", Fun: "(*net/http.Server).ListenAndServe"},
{Err: "net/http.ErrServerClosed", Fun: "(*net/http.Server).ListenAndServeTLS"},
@@ -82,6 +85,7 @@ func setDefaultAllowedErrors() {
{Err: "context.Canceled", Fun: "(context.Context).Err"},
// pkg/encoding/json
{Err: "io.EOF", Fun: "(*encoding/json.Decoder).Decode"},
+ {Err: "io.EOF", Fun: "(*encoding/json.Decoder).Token"},
// pkg/encoding/csv
{Err: "io.EOF", Fun: "(*encoding/csv.Reader).Read"},
// pkg/mime/multipart
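
The `(*encoding/json.Decoder).Token` entry added above whitelists direct `==` comparison against `io.EOF`, which the standard library documents as the sentinel returned at end of input. A small sketch of the pattern that now passes the linter (my example, not from the vendored sources):

```go
package main

import (
	"encoding/json"
	"fmt"
	"io"
	"strings"
)

func main() {
	dec := json.NewDecoder(strings.NewReader(`{"a": 1}`))
	for {
		tok, err := dec.Token()
		if err == io.EOF { // accepted thanks to the allow-list entry above
			break
		}
		if err != nil {
			panic(err)
		}
		fmt.Println(tok)
	}
}
```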
diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go
index 9ac465c650..ed3dd0dc65 100644
--- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go
+++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go
@@ -82,7 +82,7 @@ func LintFmtErrorfCalls(fset *token.FileSet, info types.Info, multipleWraps bool
argIndex++
}
- if verb.format == "w" {
+ if verb.format == "w" || verb.format == "T" {
continue
}
if argIndex-1 >= len(args) {
diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/xtypes/xtypes.go b/vendor/github.com/quasilyte/go-ruleguard/internal/xtypes/xtypes.go
index 4c5c2a2abe..d29573a6fb 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/internal/xtypes/xtypes.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/internal/xtypes/xtypes.go
@@ -221,6 +221,10 @@ func typeIdentical(x, y types.Type, p *ifacePair) bool {
case *typeparams.TypeParam:
// nothing to do (x and y being equal is caught in the very beginning of this function)
+ case *types.Alias:
+ // an alias type is identical to y if the type it aliases is identical to y.
+ return typeIdentical(types.Unalias(x), y, p)
+
case nil:
// avoid a crash in case of nil type
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go
index 4eb90d51b2..cc40506af6 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go
@@ -618,8 +618,7 @@ func (conv *converter) convertFilterExprImpl(e ast.Expr) ir.FilterExpr {
case *ast.UnaryExpr:
x := conv.convertFilterExpr(e.X)
args := []ir.FilterExpr{x}
- switch e.Op {
- case token.NOT:
+ if e.Op == token.NOT {
return ir.FilterExpr{Op: ir.FilterNotOp, Args: args}
}
diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go
index b81fb8f1db..95ca9297ef 100644
--- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go
+++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go
@@ -605,11 +605,12 @@ func (cl *compiler) compileCall(key funcKey, sig *types.Signature, args []ast.Ex
}
var op opcode
- if sig.Results().Len() == 0 {
+ switch {
+ case sig.Results().Len() == 0:
op = opVoidCall
- } else if typeIsInt(sig.Results().At(0).Type()) {
+ case typeIsInt(sig.Results().At(0).Type()):
op = opIntCall
- } else {
+ default:
op = opCall
}
diff --git a/vendor/github.com/raeperd/recvcheck/.gitignore b/vendor/github.com/raeperd/recvcheck/.gitignore
new file mode 100644
index 0000000000..4212673324
--- /dev/null
+++ b/vendor/github.com/raeperd/recvcheck/.gitignore
@@ -0,0 +1,3 @@
+.idea/
+coverage.txt
+/recvcheck
diff --git a/vendor/github.com/raeperd/recvcheck/.golangci.yml b/vendor/github.com/raeperd/recvcheck/.golangci.yml
new file mode 100644
index 0000000000..18692d50b8
--- /dev/null
+++ b/vendor/github.com/raeperd/recvcheck/.golangci.yml
@@ -0,0 +1,10 @@
+linters:
+ enable:
+ - recvcheck
+
+output:
+ show-stats: true
+ sort-results: true
+ sort-order:
+ - linter
+ - file
diff --git a/vendor/github.com/jirfag/go-printf-func-name/LICENSE b/vendor/github.com/raeperd/recvcheck/LICENSE
similarity index 97%
rename from vendor/github.com/jirfag/go-printf-func-name/LICENSE
rename to vendor/github.com/raeperd/recvcheck/LICENSE
index d06a809c26..a46db59be3 100644
--- a/vendor/github.com/jirfag/go-printf-func-name/LICENSE
+++ b/vendor/github.com/raeperd/recvcheck/LICENSE
@@ -1,6 +1,6 @@
MIT License
-Copyright (c) 2020 Isaev Denis
+Copyright (c) 2024 raeperd
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/vendor/github.com/raeperd/recvcheck/Makefile b/vendor/github.com/raeperd/recvcheck/Makefile
new file mode 100644
index 0000000000..d78605a3bd
--- /dev/null
+++ b/vendor/github.com/raeperd/recvcheck/Makefile
@@ -0,0 +1,16 @@
+.PHONY: clean lint test build
+
+default: clean lint test build
+
+clean:
+ rm -rf coverage.txt
+
+build:
+ go build -ldflags "-s -w" -trimpath ./cmd/recvcheck/
+
+test: clean
+ go test -race -coverprofile=coverage.txt .
+
+lint:
+ golangci-lint run
+
diff --git a/vendor/github.com/raeperd/recvcheck/README.md b/vendor/github.com/raeperd/recvcheck/README.md
new file mode 100644
index 0000000000..067aa3c580
--- /dev/null
+++ b/vendor/github.com/raeperd/recvcheck/README.md
@@ -0,0 +1,52 @@
+# recvcheck
+[](https://github.com/raeperd/recvcheck/actions/workflows/build.yaml) [](https://goreportcard.com/report/github.com/raeperd/recvcheck)
+Golang linter that checks for receiver type consistency in methods
+
+## Motivation
+From [Go Wiki: Go Code Review Comments - The Go Programming Language](https://go.dev/wiki/CodeReviewComments#receiver-type)
+> Don’t mix receiver types. Choose either pointers or struct types for all available methods.
+
+The following code from [Dave Cheney](https://dave.cheney.net/2015/11/18/wednesday-pop-quiz-spot-the-race) causes a data race. Can you spot it?
+This linter spots it for you.
+
+```go
+package main
+
+import (
+ "fmt"
+ "time"
+)
+
+type RPC struct {
+ result int
+ done chan struct{}
+}
+
+func (rpc *RPC) compute() {
+ time.Sleep(time.Second) // strenuous computation intensifies
+ rpc.result = 42
+ close(rpc.done)
+}
+
+func (RPC) version() int {
+ return 1 // never going to need to change this
+}
+
+func main() {
+ rpc := &RPC{done: make(chan struct{})}
+
+ go rpc.compute() // kick off computation in the background
+ version := rpc.version() // grab some other information while we're waiting
+ <-rpc.done // wait for computation to finish
+ result := rpc.result
+
+ fmt.Printf("RPC computation complete, result: %d, version: %d\n", result, version)
+}
+```
+
+## References
+- [Is there a way to detect following data race code using golangci-lint or other linter?? · golangci/golangci-lint · Discussion #5006](https://github.com/golangci/golangci-lint/discussions/5006)
+ - [Wednesday pop quiz: spot the race | Dave Cheney](https://dave.cheney.net/2015/11/18/wednesday-pop-quiz-spot-the-race)
+
+
+
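
A minimal sketch of the fix this linter nudges you toward (my addition, not part of the upstream README): give `version()` a pointer receiver as well, so calling it never copies the `RPC` value while `compute()` is writing to it.

```go
// With a pointer receiver, rpc.version() no longer copies rpc.result
// concurrently with the write in compute(), so the race disappears.
func (rpc *RPC) version() int {
	return 1
}
```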
diff --git a/vendor/github.com/raeperd/recvcheck/analyzer.go b/vendor/github.com/raeperd/recvcheck/analyzer.go
new file mode 100644
index 0000000000..11fb38e72e
--- /dev/null
+++ b/vendor/github.com/raeperd/recvcheck/analyzer.go
@@ -0,0 +1,135 @@
+package recvcheck
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+)
+
+// NewAnalyzer returns a new analyzer to check for receiver type consistency.
+func NewAnalyzer(s Settings) *analysis.Analyzer {
+ a := &analyzer{
+ excluded: map[string]struct{}{},
+ }
+
+ if !s.DisableBuiltin {
+ // Default excludes for Marshal/Encode methods https://github.com/raeperd/recvcheck/issues/7
+ a.excluded = map[string]struct{}{
+ "*.MarshalText": {},
+ "*.MarshalJSON": {},
+ "*.MarshalYAML": {},
+ "*.MarshalXML": {},
+ "*.MarshalBinary": {},
+ "*.GobEncode": {},
+ }
+ }
+
+ for _, exclusion := range s.Exclusions {
+ a.excluded[exclusion] = struct{}{}
+ }
+
+ return &analysis.Analyzer{
+ Name: "recvcheck",
+ Doc: "checks for receiver type consistency",
+ Run: a.run,
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ }
+}
+
+// Settings is the configuration for the analyzer.
+type Settings struct {
+ // DisableBuiltin if true, disables the built-in method excludes.
+ // Built-in excluded methods:
+ // - "MarshalText"
+ // - "MarshalJSON"
+ // - "MarshalYAML"
+ // - "MarshalXML"
+ // - "MarshalBinary"
+ // - "GobEncode"
+ DisableBuiltin bool
+
+ // Exclusions format is `struct_name.method_name` (ex: `Foo.MethodName`).
+ // A wildcard `*` can be used as the struct name (ex: `*.MethodName`).
+ Exclusions []string
+}
+
+type analyzer struct {
+ excluded map[string]struct{}
+}
+
+func (r *analyzer) run(pass *analysis.Pass) (any, error) {
+ inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+ structs := map[string]*structType{}
+ inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil)}, func(n ast.Node) {
+ funcDecl, ok := n.(*ast.FuncDecl)
+ if !ok || funcDecl.Recv == nil || len(funcDecl.Recv.List) != 1 {
+ return
+ }
+
+ recv, isStar := recvTypeIdent(funcDecl.Recv.List[0].Type)
+ if recv == nil {
+ return
+ }
+
+ if r.isExcluded(recv, funcDecl) {
+ return
+ }
+
+ st, ok := structs[recv.Name]
+ if !ok {
+ structs[recv.Name] = &structType{}
+ st = structs[recv.Name]
+ }
+
+ if isStar {
+ st.starUsed = true
+ } else {
+ st.typeUsed = true
+ }
+ })
+
+ for recv, st := range structs {
+ if st.starUsed && st.typeUsed {
+ pass.Reportf(pass.Pkg.Scope().Lookup(recv).Pos(), "the methods of %q use pointer receiver and non-pointer receiver.", recv)
+ }
+ }
+
+ return nil, nil
+}
+
+func (r *analyzer) isExcluded(recv *ast.Ident, f *ast.FuncDecl) bool {
+ if f.Name == nil || f.Name.Name == "" {
+ return true
+ }
+
+ _, found := r.excluded[recv.Name+"."+f.Name.Name]
+ if found {
+ return true
+ }
+
+ _, found = r.excluded["*."+f.Name.Name]
+
+ return found
+}
+
+type structType struct {
+ starUsed bool
+ typeUsed bool
+}
+
+func recvTypeIdent(r ast.Expr) (*ast.Ident, bool) {
+ switch n := r.(type) {
+ case *ast.StarExpr:
+ if i, ok := n.X.(*ast.Ident); ok {
+ return i, true
+ }
+
+ case *ast.Ident:
+ return n, false
+ }
+
+ return nil, false
+}
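
For reference, a hedged sketch of wiring `NewAnalyzer` into a standalone driver via `golang.org/x/tools/go/analysis/singlechecker`; the `Foo.String` exclusion is hypothetical and shown only to illustrate the `struct_name.method_name` format.

```go
package main

import (
	"github.com/raeperd/recvcheck"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	// Built-in Marshal*/GobEncode excludes stay active; Foo.String is an
	// extra, purely illustrative exclusion.
	singlechecker.Main(recvcheck.NewAnalyzer(recvcheck.Settings{
		Exclusions: []string{"Foo.String"},
	}))
}
```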
diff --git a/vendor/github.com/shazow/go-diff/LICENSE b/vendor/github.com/rivo/uniseg/LICENSE.txt
similarity index 94%
rename from vendor/github.com/shazow/go-diff/LICENSE
rename to vendor/github.com/rivo/uniseg/LICENSE.txt
index 85e1e4b33a..5040f1ef80 100644
--- a/vendor/github.com/shazow/go-diff/LICENSE
+++ b/vendor/github.com/rivo/uniseg/LICENSE.txt
@@ -1,6 +1,6 @@
-The MIT License (MIT)
+MIT License
-Copyright (c) 2015 Andrey Petrov
+Copyright (c) 2019 Oliver Kuederle
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@@ -19,4 +19,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-
diff --git a/vendor/github.com/rivo/uniseg/README.md b/vendor/github.com/rivo/uniseg/README.md
new file mode 100644
index 0000000000..a8191b8154
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/README.md
@@ -0,0 +1,137 @@
+# Unicode Text Segmentation for Go
+
+[](https://pkg.go.dev/github.com/rivo/uniseg)
+[](https://goreportcard.com/report/github.com/rivo/uniseg)
+
+This Go package implements Unicode Text Segmentation according to [Unicode Standard Annex #29](https://unicode.org/reports/tr29/), Unicode Line Breaking according to [Unicode Standard Annex #14](https://unicode.org/reports/tr14/) (Unicode version 15.0.0), and monospace font string width calculation similar to [wcwidth](https://man7.org/linux/man-pages/man3/wcwidth.3.html).
+
+## Background
+
+### Grapheme Clusters
+
+In Go, [strings are read-only slices of bytes](https://go.dev/blog/strings). They can be turned into Unicode code points using the `for` loop or by casting: `[]rune(str)`. However, multiple code points may be combined into one user-perceived character or what the Unicode specification calls "grapheme cluster". Here are some examples:
+
+|String|Bytes (UTF-8)|Code points (runes)|Grapheme clusters|
+|-|-|-|-|
+|Käse|6 bytes: `4b 61 cc 88 73 65`|5 code points: `4b 61 308 73 65`|4 clusters: `[4b],[61 308],[73],[65]`|
+|🏳️🌈|14 bytes: `f0 9f 8f b3 ef b8 8f e2 80 8d f0 9f 8c 88`|4 code points: `1f3f3 fe0f 200d 1f308`|1 cluster: `[1f3f3 fe0f 200d 1f308]`|
+|🇩🇪|8 bytes: `f0 9f 87 a9 f0 9f 87 aa`|2 code points: `1f1e9 1f1ea`|1 cluster: `[1f1e9 1f1ea]`|
+
+This package provides tools to iterate over these grapheme clusters. This may be used to determine the number of user-perceived characters, to split strings in their intended places, or to extract individual characters which form a unit.
+
+### Word Boundaries
+
+Word boundaries are used in a number of different contexts. The most familiar ones are selection (double-click mouse selection), cursor movement ("move to next word" control-arrow keys), and the dialog option "Whole Word Search" for search and replace. They are also used in database queries, to determine whether elements are within a certain number of words of one another. Searching may also use word boundaries in determining matching items. This package provides tools to determine word boundaries within strings.
+
+### Sentence Boundaries
+
+Sentence boundaries are often used for triple-click or some other method of selecting or iterating through blocks of text that are larger than single words. They are also used to determine whether words occur within the same sentence in database queries. This package provides tools to determine sentence boundaries within strings.
+
+### Line Breaking
+
+Line breaking, also known as word wrapping, is the process of breaking a section of text into lines such that it will fit in the available width of a page, window or other display area. This package provides tools to determine where a string may or may not be broken and where it must be broken (for example after newline characters).
+
+### Monospace Width
+
+Most terminals or text displays / text editors using a monospace font (for example source code editors) use a fixed width for each character. Some characters such as emojis or characters found in Asian and other languages may take up more than one character cell. This package provides tools to determine the number of cells a string will take up when displayed in a monospace font. See [here](https://pkg.go.dev/github.com/rivo/uniseg#hdr-Monospace_Width) for more information.
+
+## Installation
+
+```bash
+go get github.com/rivo/uniseg
+```
+
+## Examples
+
+### Counting Characters in a String
+
+```go
+n := uniseg.GraphemeClusterCount("🇩🇪🏳️🌈")
+fmt.Println(n)
+// 2
+```
+
+### Calculating the Monospace String Width
+
+```go
+width := uniseg.StringWidth("🇩🇪🏳️🌈!")
+fmt.Println(width)
+// 5
+```
+
+### Using the [`Graphemes`](https://pkg.go.dev/github.com/rivo/uniseg#Graphemes) Class
+
+This is the most convenient method of iterating over grapheme clusters:
+
+```go
+gr := uniseg.NewGraphemes("👍🏼!")
+for gr.Next() {
+ fmt.Printf("%x ", gr.Runes())
+}
+// [1f44d 1f3fc] [21]
+```
+
+### Using the [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step) or [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString) Function
+
+This avoids allocating a new `Graphemes` object but it requires the handling of states and boundaries:
+
+```go
+str := "🇩🇪🏳️🌈"
+state := -1
+var c string
+for len(str) > 0 {
+ c, str, _, state = uniseg.StepString(str, state)
+ fmt.Printf("%x ", []rune(c))
+}
+// [1f1e9 1f1ea] [1f3f3 fe0f 200d 1f308]
+```
+
+### Advanced Examples
+
+The [`Graphemes`](https://pkg.go.dev/github.com/rivo/uniseg#Graphemes) class offers the most convenient way to access all functionality of this package. But in some cases, it may be better to use the specialized functions directly. For example, if you're only interested in word segmentation, use [`FirstWord`](https://pkg.go.dev/github.com/rivo/uniseg#FirstWord) or [`FirstWordInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstWordInString):
+
+```go
+str := "Hello, world!"
+state := -1
+var c string
+for len(str) > 0 {
+ c, str, state = uniseg.FirstWordInString(str, state)
+ fmt.Printf("(%s)\n", c)
+}
+// (Hello)
+// (,)
+// ( )
+// (world)
+// (!)
+```
+
+Similarly, use
+
+- [`FirstGraphemeCluster`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeCluster) or [`FirstGraphemeClusterInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeClusterInString) for grapheme cluster determination only,
+- [`FirstSentence`](https://pkg.go.dev/github.com/rivo/uniseg#FirstSentence) or [`FirstSentenceInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstSentenceInString) for sentence segmentation only, and
+- [`FirstLineSegment`](https://pkg.go.dev/github.com/rivo/uniseg#FirstLineSegment) or [`FirstLineSegmentInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstLineSegmentInString) for line breaking / word wrapping (although using [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step) or [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString) is preferred as it will observe grapheme cluster boundaries).
+
+If you're only interested in the width of characters, use [`FirstGraphemeCluster`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeCluster) or [`FirstGraphemeClusterInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeClusterInString). It is much faster than using [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step), [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString), or the [`Graphemes`](https://pkg.go.dev/github.com/rivo/uniseg#Graphemes) class because it does not include the logic for word / sentence / line boundaries.
+
+Finally, if you need to reverse a string while preserving grapheme clusters, use [`ReverseString`](https://pkg.go.dev/github.com/rivo/uniseg#ReverseString):
+
+```go
+fmt.Println(uniseg.ReverseString("🇩🇪🏳️🌈"))
+// 🏳️🌈🇩🇪
+```
+
+## Documentation
+
+Refer to https://pkg.go.dev/github.com/rivo/uniseg for the package's documentation.
+
+## Dependencies
+
+This package does not depend on any packages outside the standard library.
+
+## Sponsor this Project
+
+[Become a Sponsor on GitHub](https://github.com/sponsors/rivo?metadata_source=uniseg_readme) to support this project!
+
+## Your Feedback
+
+Add your issue here on GitHub, preferably before submitting any PRs. Feel free to get in touch if you have any questions.
\ No newline at end of file
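
The README mentions `FirstLineSegmentInString` for line breaking but does not show it; here is a minimal sketch (my addition, assuming the signature documented at pkg.go.dev) of walking a string's break opportunities:

```go
package main

import (
	"fmt"

	"github.com/rivo/uniseg"
)

func main() {
	str := "First line.\nSecond line."
	state := -1
	var (
		segment   string
		mustBreak bool
	)
	for len(str) > 0 {
		// segment ends at a break opportunity; mustBreak reports whether
		// the line has to be broken there (e.g. after "\n").
		segment, str, mustBreak, state = uniseg.FirstLineSegmentInString(str, state)
		fmt.Printf("%q mustBreak=%v\n", segment, mustBreak)
	}
}
```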
diff --git a/vendor/github.com/rivo/uniseg/doc.go b/vendor/github.com/rivo/uniseg/doc.go
new file mode 100644
index 0000000000..11224ae22d
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/doc.go
@@ -0,0 +1,108 @@
+/*
+Package uniseg implements Unicode Text Segmentation, Unicode Line Breaking, and
+string width calculation for monospace fonts. Unicode Text Segmentation conforms
+to Unicode Standard Annex #29 (https://unicode.org/reports/tr29/) and Unicode
+Line Breaking conforms to Unicode Standard Annex #14
+(https://unicode.org/reports/tr14/).
+
+In short, using this package, you can split a string into grapheme clusters
+(what people would usually refer to as a "character"), into words, and into
+sentences. Or, in its simplest case, this package allows you to count the number
+of characters in a string, especially when it contains complex characters such
+as emojis, combining characters, or characters from Asian, Arabic, Hebrew, or
+other languages. Additionally, you can use it to implement line breaking (or
+"word wrapping"), that is, to determine where text can be broken over to the
+next line when the width of the line is not big enough to fit the entire text.
+Finally, you can use it to calculate the display width of a string for monospace
+fonts.
+
+# Getting Started
+
+If you just want to count the number of characters in a string, you can use
+[GraphemeClusterCount]. If you want to determine the display width of a string,
+you can use [StringWidth]. If you want to iterate over a string, you can use
+[Step], [StepString], or the [Graphemes] class (more convenient but less
+performant). This will provide you with all information: grapheme clusters,
+word boundaries, sentence boundaries, line breaks, and monospace character
+widths. The specialized functions [FirstGraphemeCluster],
+[FirstGraphemeClusterInString], [FirstWord], [FirstWordInString],
+[FirstSentence], and [FirstSentenceInString] can be used if only one type of
+information is needed.
+
+# Grapheme Clusters
+
+Consider the rainbow flag emoji: 🏳️🌈. On most modern systems, it appears as one
+character. But its string representation actually has 14 bytes, so counting
+bytes (or using len("🏳️🌈")) will not work as expected. Counting runes won't,
+either: The flag has 4 Unicode code points, thus 4 runes. The stdlib function
+utf8.RuneCountInString("🏳️🌈") and len([]rune("🏳️🌈")) will both return 4.
+
+The [GraphemeClusterCount] function will return 1 for the rainbow flag emoji.
+The Graphemes class and a variety of functions in this package will allow you to
+split strings into its grapheme clusters.
+
+# Word Boundaries
+
+Word boundaries are used in a number of different contexts. The most familiar
+ones are selection (double-click mouse selection), cursor movement ("move to
+next word" control-arrow keys), and the dialog option "Whole Word Search" for
+search and replace. This package provides methods for determining word
+boundaries.
+
+# Sentence Boundaries
+
+Sentence boundaries are often used for triple-click or some other method of
+selecting or iterating through blocks of text that are larger than single words.
+They are also used to determine whether words occur within the same sentence in
+database queries. This package provides methods for determining sentence
+boundaries.
+
+# Line Breaking
+
+Line breaking, also known as word wrapping, is the process of breaking a section
+of text into lines such that it will fit in the available width of a page,
+window or other display area. This package provides methods to determine the
+positions in a string where a line must be broken, may be broken, or must not be
+broken.
+
+# Monospace Width
+
+Monospace width, as referred to in this package, is the width of a string in a
+monospace font. This is commonly used in terminal user interfaces or text
+displays or editors that don't support proportional fonts. A width of 1
+corresponds to a single character cell. The C function [wcswidth()] and its
+implementations in other programming languages are in widespread use for the same
+purpose. However, there is no standard for the calculation of such widths, and
+this package differs from wcswidth() in a number of ways, presumably to generate
+more visually pleasing results.
+
+To start, we assume that every code point has a width of 1, with the following
+exceptions:
+
+ - Code points with grapheme cluster break properties Control, CR, LF, Extend,
+ and ZWJ have a width of 0.
+ - U+2E3A, Two-Em Dash, has a width of 3.
+ - U+2E3B, Three-Em Dash, has a width of 4.
+ - Characters with the East-Asian Width properties "Fullwidth" (F) and "Wide"
+ (W) have a width of 2. (Properties "Ambiguous" (A) and "Neutral" (N) both
+ have a width of 1.)
+ - Code points with grapheme cluster break property Regional Indicator have a
+ width of 2.
+ - Code points with grapheme cluster break property Extended Pictographic have
+ a width of 2, unless their Emoji Presentation flag is "No", in which case
+ the width is 1.
+
+For Hangul grapheme clusters composed of conjoining Jamo and for Regional
+Indicators (flags), all code points except the first one have a width of 0. For
+grapheme clusters starting with an Extended Pictographic, any additional code
+point will force a total width of 2, except if the Variation Selector-15
+(U+FE0E) is included, in which case the total width is always 1. Grapheme
+clusters ending with Variation Selector-16 (U+FE0F) have a width of 2.
+
+Note that whether these widths appear correct depends on your application's
+render engine, to what extent it conforms to the Unicode Standard, and its
+choice of font.
+
+[wcswidth()]: https://man7.org/linux/man-pages/man3/wcswidth.3.html
+*/
+package uniseg
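
A tiny sketch (my addition) of the width rules listed above: an East-Asian Wide character occupies two cells, a plain ASCII letter one.

```go
package main

import (
	"fmt"

	"github.com/rivo/uniseg"
)

func main() {
	// "世" has East-Asian Width "Wide" (2 cells), "a" is narrow (1 cell).
	fmt.Println(uniseg.StringWidth("世a")) // 3
}
```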
diff --git a/vendor/github.com/rivo/uniseg/eastasianwidth.go b/vendor/github.com/rivo/uniseg/eastasianwidth.go
new file mode 100644
index 0000000000..5fc54d9915
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/eastasianwidth.go
@@ -0,0 +1,2588 @@
+// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+
+package uniseg
+
+// eastAsianWidth are taken from
+// https://www.unicode.org/Public/15.0.0/ucd/EastAsianWidth.txt
+// and
+// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
+// ("Extended_Pictographic" only)
+// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode
+// license agreement.
+var eastAsianWidth = [][3]int{
+ {0x0000, 0x001F, prN}, // Cc [32] ..
+ {0x0020, 0x0020, prNa}, // Zs SPACE
+ {0x0021, 0x0023, prNa}, // Po [3] EXCLAMATION MARK..NUMBER SIGN
+ {0x0024, 0x0024, prNa}, // Sc DOLLAR SIGN
+ {0x0025, 0x0027, prNa}, // Po [3] PERCENT SIGN..APOSTROPHE
+ {0x0028, 0x0028, prNa}, // Ps LEFT PARENTHESIS
+ {0x0029, 0x0029, prNa}, // Pe RIGHT PARENTHESIS
+ {0x002A, 0x002A, prNa}, // Po ASTERISK
+ {0x002B, 0x002B, prNa}, // Sm PLUS SIGN
+ {0x002C, 0x002C, prNa}, // Po COMMA
+ {0x002D, 0x002D, prNa}, // Pd HYPHEN-MINUS
+ {0x002E, 0x002F, prNa}, // Po [2] FULL STOP..SOLIDUS
+ {0x0030, 0x0039, prNa}, // Nd [10] DIGIT ZERO..DIGIT NINE
+ {0x003A, 0x003B, prNa}, // Po [2] COLON..SEMICOLON
+ {0x003C, 0x003E, prNa}, // Sm [3] LESS-THAN SIGN..GREATER-THAN SIGN
+ {0x003F, 0x0040, prNa}, // Po [2] QUESTION MARK..COMMERCIAL AT
+ {0x0041, 0x005A, prNa}, // Lu [26] LATIN CAPITAL LETTER A..LATIN CAPITAL LETTER Z
+ {0x005B, 0x005B, prNa}, // Ps LEFT SQUARE BRACKET
+ {0x005C, 0x005C, prNa}, // Po REVERSE SOLIDUS
+ {0x005D, 0x005D, prNa}, // Pe RIGHT SQUARE BRACKET
+ {0x005E, 0x005E, prNa}, // Sk CIRCUMFLEX ACCENT
+ {0x005F, 0x005F, prNa}, // Pc LOW LINE
+ {0x0060, 0x0060, prNa}, // Sk GRAVE ACCENT
+ {0x0061, 0x007A, prNa}, // Ll [26] LATIN SMALL LETTER A..LATIN SMALL LETTER Z
+ {0x007B, 0x007B, prNa}, // Ps LEFT CURLY BRACKET
+ {0x007C, 0x007C, prNa}, // Sm VERTICAL LINE
+ {0x007D, 0x007D, prNa}, // Pe RIGHT CURLY BRACKET
+ {0x007E, 0x007E, prNa}, // Sm TILDE
+ {0x007F, 0x007F, prN}, // Cc
+ {0x0080, 0x009F, prN}, // Cc [32] ..
+ {0x00A0, 0x00A0, prN}, // Zs NO-BREAK SPACE
+ {0x00A1, 0x00A1, prA}, // Po INVERTED EXCLAMATION MARK
+ {0x00A2, 0x00A3, prNa}, // Sc [2] CENT SIGN..POUND SIGN
+ {0x00A4, 0x00A4, prA}, // Sc CURRENCY SIGN
+ {0x00A5, 0x00A5, prNa}, // Sc YEN SIGN
+ {0x00A6, 0x00A6, prNa}, // So BROKEN BAR
+ {0x00A7, 0x00A7, prA}, // Po SECTION SIGN
+ {0x00A8, 0x00A8, prA}, // Sk DIAERESIS
+ {0x00A9, 0x00A9, prN}, // So COPYRIGHT SIGN
+ {0x00AA, 0x00AA, prA}, // Lo FEMININE ORDINAL INDICATOR
+ {0x00AB, 0x00AB, prN}, // Pi LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+ {0x00AC, 0x00AC, prNa}, // Sm NOT SIGN
+ {0x00AD, 0x00AD, prA}, // Cf SOFT HYPHEN
+ {0x00AE, 0x00AE, prA}, // So REGISTERED SIGN
+ {0x00AF, 0x00AF, prNa}, // Sk MACRON
+ {0x00B0, 0x00B0, prA}, // So DEGREE SIGN
+ {0x00B1, 0x00B1, prA}, // Sm PLUS-MINUS SIGN
+ {0x00B2, 0x00B3, prA}, // No [2] SUPERSCRIPT TWO..SUPERSCRIPT THREE
+ {0x00B4, 0x00B4, prA}, // Sk ACUTE ACCENT
+ {0x00B5, 0x00B5, prN}, // Ll MICRO SIGN
+ {0x00B6, 0x00B7, prA}, // Po [2] PILCROW SIGN..MIDDLE DOT
+ {0x00B8, 0x00B8, prA}, // Sk CEDILLA
+ {0x00B9, 0x00B9, prA}, // No SUPERSCRIPT ONE
+ {0x00BA, 0x00BA, prA}, // Lo MASCULINE ORDINAL INDICATOR
+ {0x00BB, 0x00BB, prN}, // Pf RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+ {0x00BC, 0x00BE, prA}, // No [3] VULGAR FRACTION ONE QUARTER..VULGAR FRACTION THREE QUARTERS
+ {0x00BF, 0x00BF, prA}, // Po INVERTED QUESTION MARK
+ {0x00C0, 0x00C5, prN}, // Lu [6] LATIN CAPITAL LETTER A WITH GRAVE..LATIN CAPITAL LETTER A WITH RING ABOVE
+ {0x00C6, 0x00C6, prA}, // Lu LATIN CAPITAL LETTER AE
+ {0x00C7, 0x00CF, prN}, // Lu [9] LATIN CAPITAL LETTER C WITH CEDILLA..LATIN CAPITAL LETTER I WITH DIAERESIS
+ {0x00D0, 0x00D0, prA}, // Lu LATIN CAPITAL LETTER ETH
+ {0x00D1, 0x00D6, prN}, // Lu [6] LATIN CAPITAL LETTER N WITH TILDE..LATIN CAPITAL LETTER O WITH DIAERESIS
+ {0x00D7, 0x00D7, prA}, // Sm MULTIPLICATION SIGN
+ {0x00D8, 0x00D8, prA}, // Lu LATIN CAPITAL LETTER O WITH STROKE
+ {0x00D9, 0x00DD, prN}, // Lu [5] LATIN CAPITAL LETTER U WITH GRAVE..LATIN CAPITAL LETTER Y WITH ACUTE
+ {0x00DE, 0x00E1, prA}, // L& [4] LATIN CAPITAL LETTER THORN..LATIN SMALL LETTER A WITH ACUTE
+ {0x00E2, 0x00E5, prN}, // Ll [4] LATIN SMALL LETTER A WITH CIRCUMFLEX..LATIN SMALL LETTER A WITH RING ABOVE
+ {0x00E6, 0x00E6, prA}, // Ll LATIN SMALL LETTER AE
+ {0x00E7, 0x00E7, prN}, // Ll LATIN SMALL LETTER C WITH CEDILLA
+ {0x00E8, 0x00EA, prA}, // Ll [3] LATIN SMALL LETTER E WITH GRAVE..LATIN SMALL LETTER E WITH CIRCUMFLEX
+ {0x00EB, 0x00EB, prN}, // Ll LATIN SMALL LETTER E WITH DIAERESIS
+ {0x00EC, 0x00ED, prA}, // Ll [2] LATIN SMALL LETTER I WITH GRAVE..LATIN SMALL LETTER I WITH ACUTE
+ {0x00EE, 0x00EF, prN}, // Ll [2] LATIN SMALL LETTER I WITH CIRCUMFLEX..LATIN SMALL LETTER I WITH DIAERESIS
+ {0x00F0, 0x00F0, prA}, // Ll LATIN SMALL LETTER ETH
+ {0x00F1, 0x00F1, prN}, // Ll LATIN SMALL LETTER N WITH TILDE
+ {0x00F2, 0x00F3, prA}, // Ll [2] LATIN SMALL LETTER O WITH GRAVE..LATIN SMALL LETTER O WITH ACUTE
+ {0x00F4, 0x00F6, prN}, // Ll [3] LATIN SMALL LETTER O WITH CIRCUMFLEX..LATIN SMALL LETTER O WITH DIAERESIS
+ {0x00F7, 0x00F7, prA}, // Sm DIVISION SIGN
+ {0x00F8, 0x00FA, prA}, // Ll [3] LATIN SMALL LETTER O WITH STROKE..LATIN SMALL LETTER U WITH ACUTE
+ {0x00FB, 0x00FB, prN}, // Ll LATIN SMALL LETTER U WITH CIRCUMFLEX
+ {0x00FC, 0x00FC, prA}, // Ll LATIN SMALL LETTER U WITH DIAERESIS
+ {0x00FD, 0x00FD, prN}, // Ll LATIN SMALL LETTER Y WITH ACUTE
+ {0x00FE, 0x00FE, prA}, // Ll LATIN SMALL LETTER THORN
+ {0x00FF, 0x00FF, prN}, // Ll LATIN SMALL LETTER Y WITH DIAERESIS
+ {0x0100, 0x0100, prN}, // Lu LATIN CAPITAL LETTER A WITH MACRON
+ {0x0101, 0x0101, prA}, // Ll LATIN SMALL LETTER A WITH MACRON
+ {0x0102, 0x0110, prN}, // L& [15] LATIN CAPITAL LETTER A WITH BREVE..LATIN CAPITAL LETTER D WITH STROKE
+ {0x0111, 0x0111, prA}, // Ll LATIN SMALL LETTER D WITH STROKE
+ {0x0112, 0x0112, prN}, // Lu LATIN CAPITAL LETTER E WITH MACRON
+ {0x0113, 0x0113, prA}, // Ll LATIN SMALL LETTER E WITH MACRON
+ {0x0114, 0x011A, prN}, // L& [7] LATIN CAPITAL LETTER E WITH BREVE..LATIN CAPITAL LETTER E WITH CARON
+ {0x011B, 0x011B, prA}, // Ll LATIN SMALL LETTER E WITH CARON
+ {0x011C, 0x0125, prN}, // L& [10] LATIN CAPITAL LETTER G WITH CIRCUMFLEX..LATIN SMALL LETTER H WITH CIRCUMFLEX
+ {0x0126, 0x0127, prA}, // L& [2] LATIN CAPITAL LETTER H WITH STROKE..LATIN SMALL LETTER H WITH STROKE
+ {0x0128, 0x012A, prN}, // L& [3] LATIN CAPITAL LETTER I WITH TILDE..LATIN CAPITAL LETTER I WITH MACRON
+ {0x012B, 0x012B, prA}, // Ll LATIN SMALL LETTER I WITH MACRON
+ {0x012C, 0x0130, prN}, // L& [5] LATIN CAPITAL LETTER I WITH BREVE..LATIN CAPITAL LETTER I WITH DOT ABOVE
+ {0x0131, 0x0133, prA}, // L& [3] LATIN SMALL LETTER DOTLESS I..LATIN SMALL LIGATURE IJ
+ {0x0134, 0x0137, prN}, // L& [4] LATIN CAPITAL LETTER J WITH CIRCUMFLEX..LATIN SMALL LETTER K WITH CEDILLA
+ {0x0138, 0x0138, prA}, // Ll LATIN SMALL LETTER KRA
+ {0x0139, 0x013E, prN}, // L& [6] LATIN CAPITAL LETTER L WITH ACUTE..LATIN SMALL LETTER L WITH CARON
+ {0x013F, 0x0142, prA}, // L& [4] LATIN CAPITAL LETTER L WITH MIDDLE DOT..LATIN SMALL LETTER L WITH STROKE
+ {0x0143, 0x0143, prN}, // Lu LATIN CAPITAL LETTER N WITH ACUTE
+ {0x0144, 0x0144, prA}, // Ll LATIN SMALL LETTER N WITH ACUTE
+ {0x0145, 0x0147, prN}, // L& [3] LATIN CAPITAL LETTER N WITH CEDILLA..LATIN CAPITAL LETTER N WITH CARON
+ {0x0148, 0x014B, prA}, // L& [4] LATIN SMALL LETTER N WITH CARON..LATIN SMALL LETTER ENG
+ {0x014C, 0x014C, prN}, // Lu LATIN CAPITAL LETTER O WITH MACRON
+ {0x014D, 0x014D, prA}, // Ll LATIN SMALL LETTER O WITH MACRON
+ {0x014E, 0x0151, prN}, // L& [4] LATIN CAPITAL LETTER O WITH BREVE..LATIN SMALL LETTER O WITH DOUBLE ACUTE
+ {0x0152, 0x0153, prA}, // L& [2] LATIN CAPITAL LIGATURE OE..LATIN SMALL LIGATURE OE
+ {0x0154, 0x0165, prN}, // L& [18] LATIN CAPITAL LETTER R WITH ACUTE..LATIN SMALL LETTER T WITH CARON
+ {0x0166, 0x0167, prA}, // L& [2] LATIN CAPITAL LETTER T WITH STROKE..LATIN SMALL LETTER T WITH STROKE
+ {0x0168, 0x016A, prN}, // L& [3] LATIN CAPITAL LETTER U WITH TILDE..LATIN CAPITAL LETTER U WITH MACRON
+ {0x016B, 0x016B, prA}, // Ll LATIN SMALL LETTER U WITH MACRON
+ {0x016C, 0x017F, prN}, // L& [20] LATIN CAPITAL LETTER U WITH BREVE..LATIN SMALL LETTER LONG S
+ {0x0180, 0x01BA, prN}, // L& [59] LATIN SMALL LETTER B WITH STROKE..LATIN SMALL LETTER EZH WITH TAIL
+ {0x01BB, 0x01BB, prN}, // Lo LATIN LETTER TWO WITH STROKE
+ {0x01BC, 0x01BF, prN}, // L& [4] LATIN CAPITAL LETTER TONE FIVE..LATIN LETTER WYNN
+ {0x01C0, 0x01C3, prN}, // Lo [4] LATIN LETTER DENTAL CLICK..LATIN LETTER RETROFLEX CLICK
+ {0x01C4, 0x01CD, prN}, // L& [10] LATIN CAPITAL LETTER DZ WITH CARON..LATIN CAPITAL LETTER A WITH CARON
+ {0x01CE, 0x01CE, prA}, // Ll LATIN SMALL LETTER A WITH CARON
+ {0x01CF, 0x01CF, prN}, // Lu LATIN CAPITAL LETTER I WITH CARON
+ {0x01D0, 0x01D0, prA}, // Ll LATIN SMALL LETTER I WITH CARON
+ {0x01D1, 0x01D1, prN}, // Lu LATIN CAPITAL LETTER O WITH CARON
+ {0x01D2, 0x01D2, prA}, // Ll LATIN SMALL LETTER O WITH CARON
+ {0x01D3, 0x01D3, prN}, // Lu LATIN CAPITAL LETTER U WITH CARON
+ {0x01D4, 0x01D4, prA}, // Ll LATIN SMALL LETTER U WITH CARON
+ {0x01D5, 0x01D5, prN}, // Lu LATIN CAPITAL LETTER U WITH DIAERESIS AND MACRON
+ {0x01D6, 0x01D6, prA}, // Ll LATIN SMALL LETTER U WITH DIAERESIS AND MACRON
+ {0x01D7, 0x01D7, prN}, // Lu LATIN CAPITAL LETTER U WITH DIAERESIS AND ACUTE
+ {0x01D8, 0x01D8, prA}, // Ll LATIN SMALL LETTER U WITH DIAERESIS AND ACUTE
+ {0x01D9, 0x01D9, prN}, // Lu LATIN CAPITAL LETTER U WITH DIAERESIS AND CARON
+ {0x01DA, 0x01DA, prA}, // Ll LATIN SMALL LETTER U WITH DIAERESIS AND CARON
+ {0x01DB, 0x01DB, prN}, // Lu LATIN CAPITAL LETTER U WITH DIAERESIS AND GRAVE
+ {0x01DC, 0x01DC, prA}, // Ll LATIN SMALL LETTER U WITH DIAERESIS AND GRAVE
+ {0x01DD, 0x024F, prN}, // L& [115] LATIN SMALL LETTER TURNED E..LATIN SMALL LETTER Y WITH STROKE
+ {0x0250, 0x0250, prN}, // Ll LATIN SMALL LETTER TURNED A
+ {0x0251, 0x0251, prA}, // Ll LATIN SMALL LETTER ALPHA
+ {0x0252, 0x0260, prN}, // Ll [15] LATIN SMALL LETTER TURNED ALPHA..LATIN SMALL LETTER G WITH HOOK
+ {0x0261, 0x0261, prA}, // Ll LATIN SMALL LETTER SCRIPT G
+ {0x0262, 0x0293, prN}, // Ll [50] LATIN LETTER SMALL CAPITAL G..LATIN SMALL LETTER EZH WITH CURL
+ {0x0294, 0x0294, prN}, // Lo LATIN LETTER GLOTTAL STOP
+ {0x0295, 0x02AF, prN}, // Ll [27] LATIN LETTER PHARYNGEAL VOICED FRICATIVE..LATIN SMALL LETTER TURNED H WITH FISHHOOK AND TAIL
+ {0x02B0, 0x02C1, prN}, // Lm [18] MODIFIER LETTER SMALL H..MODIFIER LETTER REVERSED GLOTTAL STOP
+ {0x02C2, 0x02C3, prN}, // Sk [2] MODIFIER LETTER LEFT ARROWHEAD..MODIFIER LETTER RIGHT ARROWHEAD
+ {0x02C4, 0x02C4, prA}, // Sk MODIFIER LETTER UP ARROWHEAD
+ {0x02C5, 0x02C5, prN}, // Sk MODIFIER LETTER DOWN ARROWHEAD
+ {0x02C6, 0x02C6, prN}, // Lm MODIFIER LETTER CIRCUMFLEX ACCENT
+ {0x02C7, 0x02C7, prA}, // Lm CARON
+ {0x02C8, 0x02C8, prN}, // Lm MODIFIER LETTER VERTICAL LINE
+ {0x02C9, 0x02CB, prA}, // Lm [3] MODIFIER LETTER MACRON..MODIFIER LETTER GRAVE ACCENT
+ {0x02CC, 0x02CC, prN}, // Lm MODIFIER LETTER LOW VERTICAL LINE
+ {0x02CD, 0x02CD, prA}, // Lm MODIFIER LETTER LOW MACRON
+ {0x02CE, 0x02CF, prN}, // Lm [2] MODIFIER LETTER LOW GRAVE ACCENT..MODIFIER LETTER LOW ACUTE ACCENT
+ {0x02D0, 0x02D0, prA}, // Lm MODIFIER LETTER TRIANGULAR COLON
+ {0x02D1, 0x02D1, prN}, // Lm MODIFIER LETTER HALF TRIANGULAR COLON
+ {0x02D2, 0x02D7, prN}, // Sk [6] MODIFIER LETTER CENTRED RIGHT HALF RING..MODIFIER LETTER MINUS SIGN
+ {0x02D8, 0x02DB, prA}, // Sk [4] BREVE..OGONEK
+ {0x02DC, 0x02DC, prN}, // Sk SMALL TILDE
+ {0x02DD, 0x02DD, prA}, // Sk DOUBLE ACUTE ACCENT
+ {0x02DE, 0x02DE, prN}, // Sk MODIFIER LETTER RHOTIC HOOK
+ {0x02DF, 0x02DF, prA}, // Sk MODIFIER LETTER CROSS ACCENT
+ {0x02E0, 0x02E4, prN}, // Lm [5] MODIFIER LETTER SMALL GAMMA..MODIFIER LETTER SMALL REVERSED GLOTTAL STOP
+ {0x02E5, 0x02EB, prN}, // Sk [7] MODIFIER LETTER EXTRA-HIGH TONE BAR..MODIFIER LETTER YANG DEPARTING TONE MARK
+ {0x02EC, 0x02EC, prN}, // Lm MODIFIER LETTER VOICING
+ {0x02ED, 0x02ED, prN}, // Sk MODIFIER LETTER UNASPIRATED
+ {0x02EE, 0x02EE, prN}, // Lm MODIFIER LETTER DOUBLE APOSTROPHE
+ {0x02EF, 0x02FF, prN}, // Sk [17] MODIFIER LETTER LOW DOWN ARROWHEAD..MODIFIER LETTER LOW LEFT ARROW
+ {0x0300, 0x036F, prA}, // Mn [112] COMBINING GRAVE ACCENT..COMBINING LATIN SMALL LETTER X
+ {0x0370, 0x0373, prN}, // L& [4] GREEK CAPITAL LETTER HETA..GREEK SMALL LETTER ARCHAIC SAMPI
+ {0x0374, 0x0374, prN}, // Lm GREEK NUMERAL SIGN
+ {0x0375, 0x0375, prN}, // Sk GREEK LOWER NUMERAL SIGN
+ {0x0376, 0x0377, prN}, // L& [2] GREEK CAPITAL LETTER PAMPHYLIAN DIGAMMA..GREEK SMALL LETTER PAMPHYLIAN DIGAMMA
+ {0x037A, 0x037A, prN}, // Lm GREEK YPOGEGRAMMENI
+ {0x037B, 0x037D, prN}, // Ll [3] GREEK SMALL REVERSED LUNATE SIGMA SYMBOL..GREEK SMALL REVERSED DOTTED LUNATE SIGMA SYMBOL
+ {0x037E, 0x037E, prN}, // Po GREEK QUESTION MARK
+ {0x037F, 0x037F, prN}, // Lu GREEK CAPITAL LETTER YOT
+ {0x0384, 0x0385, prN}, // Sk [2] GREEK TONOS..GREEK DIALYTIKA TONOS
+ {0x0386, 0x0386, prN}, // Lu GREEK CAPITAL LETTER ALPHA WITH TONOS
+ {0x0387, 0x0387, prN}, // Po GREEK ANO TELEIA
+ {0x0388, 0x038A, prN}, // Lu [3] GREEK CAPITAL LETTER EPSILON WITH TONOS..GREEK CAPITAL LETTER IOTA WITH TONOS
+ {0x038C, 0x038C, prN}, // Lu GREEK CAPITAL LETTER OMICRON WITH TONOS
+ {0x038E, 0x0390, prN}, // L& [3] GREEK CAPITAL LETTER UPSILON WITH TONOS..GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
+ {0x0391, 0x03A1, prA}, // Lu [17] GREEK CAPITAL LETTER ALPHA..GREEK CAPITAL LETTER RHO
+ {0x03A3, 0x03A9, prA}, // Lu [7] GREEK CAPITAL LETTER SIGMA..GREEK CAPITAL LETTER OMEGA
+ {0x03AA, 0x03B0, prN}, // L& [7] GREEK CAPITAL LETTER IOTA WITH DIALYTIKA..GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
+ {0x03B1, 0x03C1, prA}, // Ll [17] GREEK SMALL LETTER ALPHA..GREEK SMALL LETTER RHO
+ {0x03C2, 0x03C2, prN}, // Ll GREEK SMALL LETTER FINAL SIGMA
+ {0x03C3, 0x03C9, prA}, // Ll [7] GREEK SMALL LETTER SIGMA..GREEK SMALL LETTER OMEGA
+ {0x03CA, 0x03F5, prN}, // L& [44] GREEK SMALL LETTER IOTA WITH DIALYTIKA..GREEK LUNATE EPSILON SYMBOL
+ {0x03F6, 0x03F6, prN}, // Sm GREEK REVERSED LUNATE EPSILON SYMBOL
+ {0x03F7, 0x03FF, prN}, // L& [9] GREEK CAPITAL LETTER SHO..GREEK CAPITAL REVERSED DOTTED LUNATE SIGMA SYMBOL
+ {0x0400, 0x0400, prN}, // Lu CYRILLIC CAPITAL LETTER IE WITH GRAVE
+ {0x0401, 0x0401, prA}, // Lu CYRILLIC CAPITAL LETTER IO
+ {0x0402, 0x040F, prN}, // Lu [14] CYRILLIC CAPITAL LETTER DJE..CYRILLIC CAPITAL LETTER DZHE
+ {0x0410, 0x044F, prA}, // L& [64] CYRILLIC CAPITAL LETTER A..CYRILLIC SMALL LETTER YA
+ {0x0450, 0x0450, prN}, // Ll CYRILLIC SMALL LETTER IE WITH GRAVE
+ {0x0451, 0x0451, prA}, // Ll CYRILLIC SMALL LETTER IO
+ {0x0452, 0x0481, prN}, // L& [48] CYRILLIC SMALL LETTER DJE..CYRILLIC SMALL LETTER KOPPA
+ {0x0482, 0x0482, prN}, // So CYRILLIC THOUSANDS SIGN
+ {0x0483, 0x0487, prN}, // Mn [5] COMBINING CYRILLIC TITLO..COMBINING CYRILLIC POKRYTIE
+ {0x0488, 0x0489, prN}, // Me [2] COMBINING CYRILLIC HUNDRED THOUSANDS SIGN..COMBINING CYRILLIC MILLIONS SIGN
+ {0x048A, 0x04FF, prN}, // L& [118] CYRILLIC CAPITAL LETTER SHORT I WITH TAIL..CYRILLIC SMALL LETTER HA WITH STROKE
+ {0x0500, 0x052F, prN}, // L& [48] CYRILLIC CAPITAL LETTER KOMI DE..CYRILLIC SMALL LETTER EL WITH DESCENDER
+ {0x0531, 0x0556, prN}, // Lu [38] ARMENIAN CAPITAL LETTER AYB..ARMENIAN CAPITAL LETTER FEH
+ {0x0559, 0x0559, prN}, // Lm ARMENIAN MODIFIER LETTER LEFT HALF RING
+ {0x055A, 0x055F, prN}, // Po [6] ARMENIAN APOSTROPHE..ARMENIAN ABBREVIATION MARK
+ {0x0560, 0x0588, prN}, // Ll [41] ARMENIAN SMALL LETTER TURNED AYB..ARMENIAN SMALL LETTER YI WITH STROKE
+ {0x0589, 0x0589, prN}, // Po ARMENIAN FULL STOP
+ {0x058A, 0x058A, prN}, // Pd ARMENIAN HYPHEN
+ {0x058D, 0x058E, prN}, // So [2] RIGHT-FACING ARMENIAN ETERNITY SIGN..LEFT-FACING ARMENIAN ETERNITY SIGN
+ {0x058F, 0x058F, prN}, // Sc ARMENIAN DRAM SIGN
+ {0x0591, 0x05BD, prN}, // Mn [45] HEBREW ACCENT ETNAHTA..HEBREW POINT METEG
+ {0x05BE, 0x05BE, prN}, // Pd HEBREW PUNCTUATION MAQAF
+ {0x05BF, 0x05BF, prN}, // Mn HEBREW POINT RAFE
+ {0x05C0, 0x05C0, prN}, // Po HEBREW PUNCTUATION PASEQ
+ {0x05C1, 0x05C2, prN}, // Mn [2] HEBREW POINT SHIN DOT..HEBREW POINT SIN DOT
+ {0x05C3, 0x05C3, prN}, // Po HEBREW PUNCTUATION SOF PASUQ
+ {0x05C4, 0x05C5, prN}, // Mn [2] HEBREW MARK UPPER DOT..HEBREW MARK LOWER DOT
+ {0x05C6, 0x05C6, prN}, // Po HEBREW PUNCTUATION NUN HAFUKHA
+ {0x05C7, 0x05C7, prN}, // Mn HEBREW POINT QAMATS QATAN
+ {0x05D0, 0x05EA, prN}, // Lo [27] HEBREW LETTER ALEF..HEBREW LETTER TAV
+ {0x05EF, 0x05F2, prN}, // Lo [4] HEBREW YOD TRIANGLE..HEBREW LIGATURE YIDDISH DOUBLE YOD
+ {0x05F3, 0x05F4, prN}, // Po [2] HEBREW PUNCTUATION GERESH..HEBREW PUNCTUATION GERSHAYIM
+ {0x0600, 0x0605, prN}, // Cf [6] ARABIC NUMBER SIGN..ARABIC NUMBER MARK ABOVE
+ {0x0606, 0x0608, prN}, // Sm [3] ARABIC-INDIC CUBE ROOT..ARABIC RAY
+ {0x0609, 0x060A, prN}, // Po [2] ARABIC-INDIC PER MILLE SIGN..ARABIC-INDIC PER TEN THOUSAND SIGN
+ {0x060B, 0x060B, prN}, // Sc AFGHANI SIGN
+ {0x060C, 0x060D, prN}, // Po [2] ARABIC COMMA..ARABIC DATE SEPARATOR
+ {0x060E, 0x060F, prN}, // So [2] ARABIC POETIC VERSE SIGN..ARABIC SIGN MISRA
+ {0x0610, 0x061A, prN}, // Mn [11] ARABIC SIGN SALLALLAHOU ALAYHE WASSALLAM..ARABIC SMALL KASRA
+ {0x061B, 0x061B, prN}, // Po ARABIC SEMICOLON
+ {0x061C, 0x061C, prN}, // Cf ARABIC LETTER MARK
+ {0x061D, 0x061F, prN}, // Po [3] ARABIC END OF TEXT MARK..ARABIC QUESTION MARK
+ {0x0620, 0x063F, prN}, // Lo [32] ARABIC LETTER KASHMIRI YEH..ARABIC LETTER FARSI YEH WITH THREE DOTS ABOVE
+ {0x0640, 0x0640, prN}, // Lm ARABIC TATWEEL
+ {0x0641, 0x064A, prN}, // Lo [10] ARABIC LETTER FEH..ARABIC LETTER YEH
+ {0x064B, 0x065F, prN}, // Mn [21] ARABIC FATHATAN..ARABIC WAVY HAMZA BELOW
+ {0x0660, 0x0669, prN}, // Nd [10] ARABIC-INDIC DIGIT ZERO..ARABIC-INDIC DIGIT NINE
+ {0x066A, 0x066D, prN}, // Po [4] ARABIC PERCENT SIGN..ARABIC FIVE POINTED STAR
+ {0x066E, 0x066F, prN}, // Lo [2] ARABIC LETTER DOTLESS BEH..ARABIC LETTER DOTLESS QAF
+ {0x0670, 0x0670, prN}, // Mn ARABIC LETTER SUPERSCRIPT ALEF
+ {0x0671, 0x06D3, prN}, // Lo [99] ARABIC LETTER ALEF WASLA..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE
+ {0x06D4, 0x06D4, prN}, // Po ARABIC FULL STOP
+ {0x06D5, 0x06D5, prN}, // Lo ARABIC LETTER AE
+ {0x06D6, 0x06DC, prN}, // Mn [7] ARABIC SMALL HIGH LIGATURE SAD WITH LAM WITH ALEF MAKSURA..ARABIC SMALL HIGH SEEN
+ {0x06DD, 0x06DD, prN}, // Cf ARABIC END OF AYAH
+ {0x06DE, 0x06DE, prN}, // So ARABIC START OF RUB EL HIZB
+ {0x06DF, 0x06E4, prN}, // Mn [6] ARABIC SMALL HIGH ROUNDED ZERO..ARABIC SMALL HIGH MADDA
+ {0x06E5, 0x06E6, prN}, // Lm [2] ARABIC SMALL WAW..ARABIC SMALL YEH
+ {0x06E7, 0x06E8, prN}, // Mn [2] ARABIC SMALL HIGH YEH..ARABIC SMALL HIGH NOON
+ {0x06E9, 0x06E9, prN}, // So ARABIC PLACE OF SAJDAH
+ {0x06EA, 0x06ED, prN}, // Mn [4] ARABIC EMPTY CENTRE LOW STOP..ARABIC SMALL LOW MEEM
+ {0x06EE, 0x06EF, prN}, // Lo [2] ARABIC LETTER DAL WITH INVERTED V..ARABIC LETTER REH WITH INVERTED V
+ {0x06F0, 0x06F9, prN}, // Nd [10] EXTENDED ARABIC-INDIC DIGIT ZERO..EXTENDED ARABIC-INDIC DIGIT NINE
+ {0x06FA, 0x06FC, prN}, // Lo [3] ARABIC LETTER SHEEN WITH DOT BELOW..ARABIC LETTER GHAIN WITH DOT BELOW
+ {0x06FD, 0x06FE, prN}, // So [2] ARABIC SIGN SINDHI AMPERSAND..ARABIC SIGN SINDHI POSTPOSITION MEN
+ {0x06FF, 0x06FF, prN}, // Lo ARABIC LETTER HEH WITH INVERTED V
+ {0x0700, 0x070D, prN}, // Po [14] SYRIAC END OF PARAGRAPH..SYRIAC HARKLEAN ASTERISCUS
+ {0x070F, 0x070F, prN}, // Cf SYRIAC ABBREVIATION MARK
+ {0x0710, 0x0710, prN}, // Lo SYRIAC LETTER ALAPH
+ {0x0711, 0x0711, prN}, // Mn SYRIAC LETTER SUPERSCRIPT ALAPH
+ {0x0712, 0x072F, prN}, // Lo [30] SYRIAC LETTER BETH..SYRIAC LETTER PERSIAN DHALATH
+ {0x0730, 0x074A, prN}, // Mn [27] SYRIAC PTHAHA ABOVE..SYRIAC BARREKH
+ {0x074D, 0x074F, prN}, // Lo [3] SYRIAC LETTER SOGDIAN ZHAIN..SYRIAC LETTER SOGDIAN FE
+ {0x0750, 0x077F, prN}, // Lo [48] ARABIC LETTER BEH WITH THREE DOTS HORIZONTALLY BELOW..ARABIC LETTER KAF WITH TWO DOTS ABOVE
+ {0x0780, 0x07A5, prN}, // Lo [38] THAANA LETTER HAA..THAANA LETTER WAAVU
+ {0x07A6, 0x07B0, prN}, // Mn [11] THAANA ABAFILI..THAANA SUKUN
+ {0x07B1, 0x07B1, prN}, // Lo THAANA LETTER NAA
+ {0x07C0, 0x07C9, prN}, // Nd [10] NKO DIGIT ZERO..NKO DIGIT NINE
+ {0x07CA, 0x07EA, prN}, // Lo [33] NKO LETTER A..NKO LETTER JONA RA
+ {0x07EB, 0x07F3, prN}, // Mn [9] NKO COMBINING SHORT HIGH TONE..NKO COMBINING DOUBLE DOT ABOVE
+ {0x07F4, 0x07F5, prN}, // Lm [2] NKO HIGH TONE APOSTROPHE..NKO LOW TONE APOSTROPHE
+ {0x07F6, 0x07F6, prN}, // So NKO SYMBOL OO DENNEN
+ {0x07F7, 0x07F9, prN}, // Po [3] NKO SYMBOL GBAKURUNEN..NKO EXCLAMATION MARK
+ {0x07FA, 0x07FA, prN}, // Lm NKO LAJANYALAN
+ {0x07FD, 0x07FD, prN}, // Mn NKO DANTAYALAN
+ {0x07FE, 0x07FF, prN}, // Sc [2] NKO DOROME SIGN..NKO TAMAN SIGN
+ {0x0800, 0x0815, prN}, // Lo [22] SAMARITAN LETTER ALAF..SAMARITAN LETTER TAAF
+ {0x0816, 0x0819, prN}, // Mn [4] SAMARITAN MARK IN..SAMARITAN MARK DAGESH
+ {0x081A, 0x081A, prN}, // Lm SAMARITAN MODIFIER LETTER EPENTHETIC YUT
+ {0x081B, 0x0823, prN}, // Mn [9] SAMARITAN MARK EPENTHETIC YUT..SAMARITAN VOWEL SIGN A
+ {0x0824, 0x0824, prN}, // Lm SAMARITAN MODIFIER LETTER SHORT A
+ {0x0825, 0x0827, prN}, // Mn [3] SAMARITAN VOWEL SIGN SHORT A..SAMARITAN VOWEL SIGN U
+ {0x0828, 0x0828, prN}, // Lm SAMARITAN MODIFIER LETTER I
+ {0x0829, 0x082D, prN}, // Mn [5] SAMARITAN VOWEL SIGN LONG I..SAMARITAN MARK NEQUDAA
+ {0x0830, 0x083E, prN}, // Po [15] SAMARITAN PUNCTUATION NEQUDAA..SAMARITAN PUNCTUATION ANNAAU
+ {0x0840, 0x0858, prN}, // Lo [25] MANDAIC LETTER HALQA..MANDAIC LETTER AIN
+ {0x0859, 0x085B, prN}, // Mn [3] MANDAIC AFFRICATION MARK..MANDAIC GEMINATION MARK
+ {0x085E, 0x085E, prN}, // Po MANDAIC PUNCTUATION
+ {0x0860, 0x086A, prN}, // Lo [11] SYRIAC LETTER MALAYALAM NGA..SYRIAC LETTER MALAYALAM SSA
+ {0x0870, 0x0887, prN}, // Lo [24] ARABIC LETTER ALEF WITH ATTACHED FATHA..ARABIC BASELINE ROUND DOT
+ {0x0888, 0x0888, prN}, // Sk ARABIC RAISED ROUND DOT
+ {0x0889, 0x088E, prN}, // Lo [6] ARABIC LETTER NOON WITH INVERTED SMALL V..ARABIC VERTICAL TAIL
+ {0x0890, 0x0891, prN}, // Cf [2] ARABIC POUND MARK ABOVE..ARABIC PIASTRE MARK ABOVE
+ {0x0898, 0x089F, prN}, // Mn [8] ARABIC SMALL HIGH WORD AL-JUZ..ARABIC HALF MADDA OVER MADDA
+ {0x08A0, 0x08C8, prN}, // Lo [41] ARABIC LETTER BEH WITH SMALL V BELOW..ARABIC LETTER GRAF
+ {0x08C9, 0x08C9, prN}, // Lm ARABIC SMALL FARSI YEH
+ {0x08CA, 0x08E1, prN}, // Mn [24] ARABIC SMALL HIGH FARSI YEH..ARABIC SMALL HIGH SIGN SAFHA
+ {0x08E2, 0x08E2, prN}, // Cf ARABIC DISPUTED END OF AYAH
+ {0x08E3, 0x08FF, prN}, // Mn [29] ARABIC TURNED DAMMA BELOW..ARABIC MARK SIDEWAYS NOON GHUNNA
+ {0x0900, 0x0902, prN}, // Mn [3] DEVANAGARI SIGN INVERTED CANDRABINDU..DEVANAGARI SIGN ANUSVARA
+ {0x0903, 0x0903, prN}, // Mc DEVANAGARI SIGN VISARGA
+ {0x0904, 0x0939, prN}, // Lo [54] DEVANAGARI LETTER SHORT A..DEVANAGARI LETTER HA
+ {0x093A, 0x093A, prN}, // Mn DEVANAGARI VOWEL SIGN OE
+ {0x093B, 0x093B, prN}, // Mc DEVANAGARI VOWEL SIGN OOE
+ {0x093C, 0x093C, prN}, // Mn DEVANAGARI SIGN NUKTA
+ {0x093D, 0x093D, prN}, // Lo DEVANAGARI SIGN AVAGRAHA
+ {0x093E, 0x0940, prN}, // Mc [3] DEVANAGARI VOWEL SIGN AA..DEVANAGARI VOWEL SIGN II
+ {0x0941, 0x0948, prN}, // Mn [8] DEVANAGARI VOWEL SIGN U..DEVANAGARI VOWEL SIGN AI
+ {0x0949, 0x094C, prN}, // Mc [4] DEVANAGARI VOWEL SIGN CANDRA O..DEVANAGARI VOWEL SIGN AU
+ {0x094D, 0x094D, prN}, // Mn DEVANAGARI SIGN VIRAMA
+ {0x094E, 0x094F, prN}, // Mc [2] DEVANAGARI VOWEL SIGN PRISHTHAMATRA E..DEVANAGARI VOWEL SIGN AW
+ {0x0950, 0x0950, prN}, // Lo DEVANAGARI OM
+ {0x0951, 0x0957, prN}, // Mn [7] DEVANAGARI STRESS SIGN UDATTA..DEVANAGARI VOWEL SIGN UUE
+ {0x0958, 0x0961, prN}, // Lo [10] DEVANAGARI LETTER QA..DEVANAGARI LETTER VOCALIC LL
+ {0x0962, 0x0963, prN}, // Mn [2] DEVANAGARI VOWEL SIGN VOCALIC L..DEVANAGARI VOWEL SIGN VOCALIC LL
+ {0x0964, 0x0965, prN}, // Po [2] DEVANAGARI DANDA..DEVANAGARI DOUBLE DANDA
+ {0x0966, 0x096F, prN}, // Nd [10] DEVANAGARI DIGIT ZERO..DEVANAGARI DIGIT NINE
+ {0x0970, 0x0970, prN}, // Po DEVANAGARI ABBREVIATION SIGN
+ {0x0971, 0x0971, prN}, // Lm DEVANAGARI SIGN HIGH SPACING DOT
+ {0x0972, 0x097F, prN}, // Lo [14] DEVANAGARI LETTER CANDRA A..DEVANAGARI LETTER BBA
+ {0x0980, 0x0980, prN}, // Lo BENGALI ANJI
+ {0x0981, 0x0981, prN}, // Mn BENGALI SIGN CANDRABINDU
+ {0x0982, 0x0983, prN}, // Mc [2] BENGALI SIGN ANUSVARA..BENGALI SIGN VISARGA
+ {0x0985, 0x098C, prN}, // Lo [8] BENGALI LETTER A..BENGALI LETTER VOCALIC L
+ {0x098F, 0x0990, prN}, // Lo [2] BENGALI LETTER E..BENGALI LETTER AI
+ {0x0993, 0x09A8, prN}, // Lo [22] BENGALI LETTER O..BENGALI LETTER NA
+ {0x09AA, 0x09B0, prN}, // Lo [7] BENGALI LETTER PA..BENGALI LETTER RA
+ {0x09B2, 0x09B2, prN}, // Lo BENGALI LETTER LA
+ {0x09B6, 0x09B9, prN}, // Lo [4] BENGALI LETTER SHA..BENGALI LETTER HA
+ {0x09BC, 0x09BC, prN}, // Mn BENGALI SIGN NUKTA
+ {0x09BD, 0x09BD, prN}, // Lo BENGALI SIGN AVAGRAHA
+ {0x09BE, 0x09C0, prN}, // Mc [3] BENGALI VOWEL SIGN AA..BENGALI VOWEL SIGN II
+ {0x09C1, 0x09C4, prN}, // Mn [4] BENGALI VOWEL SIGN U..BENGALI VOWEL SIGN VOCALIC RR
+ {0x09C7, 0x09C8, prN}, // Mc [2] BENGALI VOWEL SIGN E..BENGALI VOWEL SIGN AI
+ {0x09CB, 0x09CC, prN}, // Mc [2] BENGALI VOWEL SIGN O..BENGALI VOWEL SIGN AU
+ {0x09CD, 0x09CD, prN}, // Mn BENGALI SIGN VIRAMA
+ {0x09CE, 0x09CE, prN}, // Lo BENGALI LETTER KHANDA TA
+ {0x09D7, 0x09D7, prN}, // Mc BENGALI AU LENGTH MARK
+ {0x09DC, 0x09DD, prN}, // Lo [2] BENGALI LETTER RRA..BENGALI LETTER RHA
+ {0x09DF, 0x09E1, prN}, // Lo [3] BENGALI LETTER YYA..BENGALI LETTER VOCALIC LL
+ {0x09E2, 0x09E3, prN}, // Mn [2] BENGALI VOWEL SIGN VOCALIC L..BENGALI VOWEL SIGN VOCALIC LL
+ {0x09E6, 0x09EF, prN}, // Nd [10] BENGALI DIGIT ZERO..BENGALI DIGIT NINE
+ {0x09F0, 0x09F1, prN}, // Lo [2] BENGALI LETTER RA WITH MIDDLE DIAGONAL..BENGALI LETTER RA WITH LOWER DIAGONAL
+ {0x09F2, 0x09F3, prN}, // Sc [2] BENGALI RUPEE MARK..BENGALI RUPEE SIGN
+ {0x09F4, 0x09F9, prN}, // No [6] BENGALI CURRENCY NUMERATOR ONE..BENGALI CURRENCY DENOMINATOR SIXTEEN
+ {0x09FA, 0x09FA, prN}, // So BENGALI ISSHAR
+ {0x09FB, 0x09FB, prN}, // Sc BENGALI GANDA MARK
+ {0x09FC, 0x09FC, prN}, // Lo BENGALI LETTER VEDIC ANUSVARA
+ {0x09FD, 0x09FD, prN}, // Po BENGALI ABBREVIATION SIGN
+ {0x09FE, 0x09FE, prN}, // Mn BENGALI SANDHI MARK
+ {0x0A01, 0x0A02, prN}, // Mn [2] GURMUKHI SIGN ADAK BINDI..GURMUKHI SIGN BINDI
+ {0x0A03, 0x0A03, prN}, // Mc GURMUKHI SIGN VISARGA
+ {0x0A05, 0x0A0A, prN}, // Lo [6] GURMUKHI LETTER A..GURMUKHI LETTER UU
+ {0x0A0F, 0x0A10, prN}, // Lo [2] GURMUKHI LETTER EE..GURMUKHI LETTER AI
+ {0x0A13, 0x0A28, prN}, // Lo [22] GURMUKHI LETTER OO..GURMUKHI LETTER NA
+ {0x0A2A, 0x0A30, prN}, // Lo [7] GURMUKHI LETTER PA..GURMUKHI LETTER RA
+ {0x0A32, 0x0A33, prN}, // Lo [2] GURMUKHI LETTER LA..GURMUKHI LETTER LLA
+ {0x0A35, 0x0A36, prN}, // Lo [2] GURMUKHI LETTER VA..GURMUKHI LETTER SHA
+ {0x0A38, 0x0A39, prN}, // Lo [2] GURMUKHI LETTER SA..GURMUKHI LETTER HA
+ {0x0A3C, 0x0A3C, prN}, // Mn GURMUKHI SIGN NUKTA
+ {0x0A3E, 0x0A40, prN}, // Mc [3] GURMUKHI VOWEL SIGN AA..GURMUKHI VOWEL SIGN II
+ {0x0A41, 0x0A42, prN}, // Mn [2] GURMUKHI VOWEL SIGN U..GURMUKHI VOWEL SIGN UU
+ {0x0A47, 0x0A48, prN}, // Mn [2] GURMUKHI VOWEL SIGN EE..GURMUKHI VOWEL SIGN AI
+ {0x0A4B, 0x0A4D, prN}, // Mn [3] GURMUKHI VOWEL SIGN OO..GURMUKHI SIGN VIRAMA
+ {0x0A51, 0x0A51, prN}, // Mn GURMUKHI SIGN UDAAT
+ {0x0A59, 0x0A5C, prN}, // Lo [4] GURMUKHI LETTER KHHA..GURMUKHI LETTER RRA
+ {0x0A5E, 0x0A5E, prN}, // Lo GURMUKHI LETTER FA
+ {0x0A66, 0x0A6F, prN}, // Nd [10] GURMUKHI DIGIT ZERO..GURMUKHI DIGIT NINE
+ {0x0A70, 0x0A71, prN}, // Mn [2] GURMUKHI TIPPI..GURMUKHI ADDAK
+ {0x0A72, 0x0A74, prN}, // Lo [3] GURMUKHI IRI..GURMUKHI EK ONKAR
+ {0x0A75, 0x0A75, prN}, // Mn GURMUKHI SIGN YAKASH
+ {0x0A76, 0x0A76, prN}, // Po GURMUKHI ABBREVIATION SIGN
+ {0x0A81, 0x0A82, prN}, // Mn [2] GUJARATI SIGN CANDRABINDU..GUJARATI SIGN ANUSVARA
+ {0x0A83, 0x0A83, prN}, // Mc GUJARATI SIGN VISARGA
+ {0x0A85, 0x0A8D, prN}, // Lo [9] GUJARATI LETTER A..GUJARATI VOWEL CANDRA E
+ {0x0A8F, 0x0A91, prN}, // Lo [3] GUJARATI LETTER E..GUJARATI VOWEL CANDRA O
+ {0x0A93, 0x0AA8, prN}, // Lo [22] GUJARATI LETTER O..GUJARATI LETTER NA
+ {0x0AAA, 0x0AB0, prN}, // Lo [7] GUJARATI LETTER PA..GUJARATI LETTER RA
+ {0x0AB2, 0x0AB3, prN}, // Lo [2] GUJARATI LETTER LA..GUJARATI LETTER LLA
+ {0x0AB5, 0x0AB9, prN}, // Lo [5] GUJARATI LETTER VA..GUJARATI LETTER HA
+ {0x0ABC, 0x0ABC, prN}, // Mn GUJARATI SIGN NUKTA
+ {0x0ABD, 0x0ABD, prN}, // Lo GUJARATI SIGN AVAGRAHA
+ {0x0ABE, 0x0AC0, prN}, // Mc [3] GUJARATI VOWEL SIGN AA..GUJARATI VOWEL SIGN II
+ {0x0AC1, 0x0AC5, prN}, // Mn [5] GUJARATI VOWEL SIGN U..GUJARATI VOWEL SIGN CANDRA E
+ {0x0AC7, 0x0AC8, prN}, // Mn [2] GUJARATI VOWEL SIGN E..GUJARATI VOWEL SIGN AI
+ {0x0AC9, 0x0AC9, prN}, // Mc GUJARATI VOWEL SIGN CANDRA O
+ {0x0ACB, 0x0ACC, prN}, // Mc [2] GUJARATI VOWEL SIGN O..GUJARATI VOWEL SIGN AU
+ {0x0ACD, 0x0ACD, prN}, // Mn GUJARATI SIGN VIRAMA
+ {0x0AD0, 0x0AD0, prN}, // Lo GUJARATI OM
+ {0x0AE0, 0x0AE1, prN}, // Lo [2] GUJARATI LETTER VOCALIC RR..GUJARATI LETTER VOCALIC LL
+ {0x0AE2, 0x0AE3, prN}, // Mn [2] GUJARATI VOWEL SIGN VOCALIC L..GUJARATI VOWEL SIGN VOCALIC LL
+ {0x0AE6, 0x0AEF, prN}, // Nd [10] GUJARATI DIGIT ZERO..GUJARATI DIGIT NINE
+ {0x0AF0, 0x0AF0, prN}, // Po GUJARATI ABBREVIATION SIGN
+ {0x0AF1, 0x0AF1, prN}, // Sc GUJARATI RUPEE SIGN
+ {0x0AF9, 0x0AF9, prN}, // Lo GUJARATI LETTER ZHA
+ {0x0AFA, 0x0AFF, prN}, // Mn [6] GUJARATI SIGN SUKUN..GUJARATI SIGN TWO-CIRCLE NUKTA ABOVE
+ {0x0B01, 0x0B01, prN}, // Mn ORIYA SIGN CANDRABINDU
+ {0x0B02, 0x0B03, prN}, // Mc [2] ORIYA SIGN ANUSVARA..ORIYA SIGN VISARGA
+ {0x0B05, 0x0B0C, prN}, // Lo [8] ORIYA LETTER A..ORIYA LETTER VOCALIC L
+ {0x0B0F, 0x0B10, prN}, // Lo [2] ORIYA LETTER E..ORIYA LETTER AI
+ {0x0B13, 0x0B28, prN}, // Lo [22] ORIYA LETTER O..ORIYA LETTER NA
+ {0x0B2A, 0x0B30, prN}, // Lo [7] ORIYA LETTER PA..ORIYA LETTER RA
+ {0x0B32, 0x0B33, prN}, // Lo [2] ORIYA LETTER LA..ORIYA LETTER LLA
+ {0x0B35, 0x0B39, prN}, // Lo [5] ORIYA LETTER VA..ORIYA LETTER HA
+ {0x0B3C, 0x0B3C, prN}, // Mn ORIYA SIGN NUKTA
+ {0x0B3D, 0x0B3D, prN}, // Lo ORIYA SIGN AVAGRAHA
+ {0x0B3E, 0x0B3E, prN}, // Mc ORIYA VOWEL SIGN AA
+ {0x0B3F, 0x0B3F, prN}, // Mn ORIYA VOWEL SIGN I
+ {0x0B40, 0x0B40, prN}, // Mc ORIYA VOWEL SIGN II
+ {0x0B41, 0x0B44, prN}, // Mn [4] ORIYA VOWEL SIGN U..ORIYA VOWEL SIGN VOCALIC RR
+ {0x0B47, 0x0B48, prN}, // Mc [2] ORIYA VOWEL SIGN E..ORIYA VOWEL SIGN AI
+ {0x0B4B, 0x0B4C, prN}, // Mc [2] ORIYA VOWEL SIGN O..ORIYA VOWEL SIGN AU
+ {0x0B4D, 0x0B4D, prN}, // Mn ORIYA SIGN VIRAMA
+ {0x0B55, 0x0B56, prN}, // Mn [2] ORIYA SIGN OVERLINE..ORIYA AI LENGTH MARK
+ {0x0B57, 0x0B57, prN}, // Mc ORIYA AU LENGTH MARK
+ {0x0B5C, 0x0B5D, prN}, // Lo [2] ORIYA LETTER RRA..ORIYA LETTER RHA
+ {0x0B5F, 0x0B61, prN}, // Lo [3] ORIYA LETTER YYA..ORIYA LETTER VOCALIC LL
+ {0x0B62, 0x0B63, prN}, // Mn [2] ORIYA VOWEL SIGN VOCALIC L..ORIYA VOWEL SIGN VOCALIC LL
+ {0x0B66, 0x0B6F, prN}, // Nd [10] ORIYA DIGIT ZERO..ORIYA DIGIT NINE
+ {0x0B70, 0x0B70, prN}, // So ORIYA ISSHAR
+ {0x0B71, 0x0B71, prN}, // Lo ORIYA LETTER WA
+ {0x0B72, 0x0B77, prN}, // No [6] ORIYA FRACTION ONE QUARTER..ORIYA FRACTION THREE SIXTEENTHS
+ {0x0B82, 0x0B82, prN}, // Mn TAMIL SIGN ANUSVARA
+ {0x0B83, 0x0B83, prN}, // Lo TAMIL SIGN VISARGA
+ {0x0B85, 0x0B8A, prN}, // Lo [6] TAMIL LETTER A..TAMIL LETTER UU
+ {0x0B8E, 0x0B90, prN}, // Lo [3] TAMIL LETTER E..TAMIL LETTER AI
+ {0x0B92, 0x0B95, prN}, // Lo [4] TAMIL LETTER O..TAMIL LETTER KA
+ {0x0B99, 0x0B9A, prN}, // Lo [2] TAMIL LETTER NGA..TAMIL LETTER CA
+ {0x0B9C, 0x0B9C, prN}, // Lo TAMIL LETTER JA
+ {0x0B9E, 0x0B9F, prN}, // Lo [2] TAMIL LETTER NYA..TAMIL LETTER TTA
+ {0x0BA3, 0x0BA4, prN}, // Lo [2] TAMIL LETTER NNA..TAMIL LETTER TA
+ {0x0BA8, 0x0BAA, prN}, // Lo [3] TAMIL LETTER NA..TAMIL LETTER PA
+ {0x0BAE, 0x0BB9, prN}, // Lo [12] TAMIL LETTER MA..TAMIL LETTER HA
+ {0x0BBE, 0x0BBF, prN}, // Mc [2] TAMIL VOWEL SIGN AA..TAMIL VOWEL SIGN I
+ {0x0BC0, 0x0BC0, prN}, // Mn TAMIL VOWEL SIGN II
+ {0x0BC1, 0x0BC2, prN}, // Mc [2] TAMIL VOWEL SIGN U..TAMIL VOWEL SIGN UU
+ {0x0BC6, 0x0BC8, prN}, // Mc [3] TAMIL VOWEL SIGN E..TAMIL VOWEL SIGN AI
+ {0x0BCA, 0x0BCC, prN}, // Mc [3] TAMIL VOWEL SIGN O..TAMIL VOWEL SIGN AU
+ {0x0BCD, 0x0BCD, prN}, // Mn TAMIL SIGN VIRAMA
+ {0x0BD0, 0x0BD0, prN}, // Lo TAMIL OM
+ {0x0BD7, 0x0BD7, prN}, // Mc TAMIL AU LENGTH MARK
+ {0x0BE6, 0x0BEF, prN}, // Nd [10] TAMIL DIGIT ZERO..TAMIL DIGIT NINE
+ {0x0BF0, 0x0BF2, prN}, // No [3] TAMIL NUMBER TEN..TAMIL NUMBER ONE THOUSAND
+ {0x0BF3, 0x0BF8, prN}, // So [6] TAMIL DAY SIGN..TAMIL AS ABOVE SIGN
+ {0x0BF9, 0x0BF9, prN}, // Sc TAMIL RUPEE SIGN
+ {0x0BFA, 0x0BFA, prN}, // So TAMIL NUMBER SIGN
+ {0x0C00, 0x0C00, prN}, // Mn TELUGU SIGN COMBINING CANDRABINDU ABOVE
+ {0x0C01, 0x0C03, prN}, // Mc [3] TELUGU SIGN CANDRABINDU..TELUGU SIGN VISARGA
+ {0x0C04, 0x0C04, prN}, // Mn TELUGU SIGN COMBINING ANUSVARA ABOVE
+ {0x0C05, 0x0C0C, prN}, // Lo [8] TELUGU LETTER A..TELUGU LETTER VOCALIC L
+ {0x0C0E, 0x0C10, prN}, // Lo [3] TELUGU LETTER E..TELUGU LETTER AI
+ {0x0C12, 0x0C28, prN}, // Lo [23] TELUGU LETTER O..TELUGU LETTER NA
+ {0x0C2A, 0x0C39, prN}, // Lo [16] TELUGU LETTER PA..TELUGU LETTER HA
+ {0x0C3C, 0x0C3C, prN}, // Mn TELUGU SIGN NUKTA
+ {0x0C3D, 0x0C3D, prN}, // Lo TELUGU SIGN AVAGRAHA
+ {0x0C3E, 0x0C40, prN}, // Mn [3] TELUGU VOWEL SIGN AA..TELUGU VOWEL SIGN II
+ {0x0C41, 0x0C44, prN}, // Mc [4] TELUGU VOWEL SIGN U..TELUGU VOWEL SIGN VOCALIC RR
+ {0x0C46, 0x0C48, prN}, // Mn [3] TELUGU VOWEL SIGN E..TELUGU VOWEL SIGN AI
+ {0x0C4A, 0x0C4D, prN}, // Mn [4] TELUGU VOWEL SIGN O..TELUGU SIGN VIRAMA
+ {0x0C55, 0x0C56, prN}, // Mn [2] TELUGU LENGTH MARK..TELUGU AI LENGTH MARK
+ {0x0C58, 0x0C5A, prN}, // Lo [3] TELUGU LETTER TSA..TELUGU LETTER RRRA
+ {0x0C5D, 0x0C5D, prN}, // Lo TELUGU LETTER NAKAARA POLLU
+ {0x0C60, 0x0C61, prN}, // Lo [2] TELUGU LETTER VOCALIC RR..TELUGU LETTER VOCALIC LL
+ {0x0C62, 0x0C63, prN}, // Mn [2] TELUGU VOWEL SIGN VOCALIC L..TELUGU VOWEL SIGN VOCALIC LL
+ {0x0C66, 0x0C6F, prN}, // Nd [10] TELUGU DIGIT ZERO..TELUGU DIGIT NINE
+ {0x0C77, 0x0C77, prN}, // Po TELUGU SIGN SIDDHAM
+ {0x0C78, 0x0C7E, prN}, // No [7] TELUGU FRACTION DIGIT ZERO FOR ODD POWERS OF FOUR..TELUGU FRACTION DIGIT THREE FOR EVEN POWERS OF FOUR
+ {0x0C7F, 0x0C7F, prN}, // So TELUGU SIGN TUUMU
+ {0x0C80, 0x0C80, prN}, // Lo KANNADA SIGN SPACING CANDRABINDU
+ {0x0C81, 0x0C81, prN}, // Mn KANNADA SIGN CANDRABINDU
+ {0x0C82, 0x0C83, prN}, // Mc [2] KANNADA SIGN ANUSVARA..KANNADA SIGN VISARGA
+ {0x0C84, 0x0C84, prN}, // Po KANNADA SIGN SIDDHAM
+ {0x0C85, 0x0C8C, prN}, // Lo [8] KANNADA LETTER A..KANNADA LETTER VOCALIC L
+ {0x0C8E, 0x0C90, prN}, // Lo [3] KANNADA LETTER E..KANNADA LETTER AI
+ {0x0C92, 0x0CA8, prN}, // Lo [23] KANNADA LETTER O..KANNADA LETTER NA
+ {0x0CAA, 0x0CB3, prN}, // Lo [10] KANNADA LETTER PA..KANNADA LETTER LLA
+ {0x0CB5, 0x0CB9, prN}, // Lo [5] KANNADA LETTER VA..KANNADA LETTER HA
+ {0x0CBC, 0x0CBC, prN}, // Mn KANNADA SIGN NUKTA
+ {0x0CBD, 0x0CBD, prN}, // Lo KANNADA SIGN AVAGRAHA
+ {0x0CBE, 0x0CBE, prN}, // Mc KANNADA VOWEL SIGN AA
+ {0x0CBF, 0x0CBF, prN}, // Mn KANNADA VOWEL SIGN I
+ {0x0CC0, 0x0CC4, prN}, // Mc [5] KANNADA VOWEL SIGN II..KANNADA VOWEL SIGN VOCALIC RR
+ {0x0CC6, 0x0CC6, prN}, // Mn KANNADA VOWEL SIGN E
+ {0x0CC7, 0x0CC8, prN}, // Mc [2] KANNADA VOWEL SIGN EE..KANNADA VOWEL SIGN AI
+ {0x0CCA, 0x0CCB, prN}, // Mc [2] KANNADA VOWEL SIGN O..KANNADA VOWEL SIGN OO
+ {0x0CCC, 0x0CCD, prN}, // Mn [2] KANNADA VOWEL SIGN AU..KANNADA SIGN VIRAMA
+ {0x0CD5, 0x0CD6, prN}, // Mc [2] KANNADA LENGTH MARK..KANNADA AI LENGTH MARK
+ {0x0CDD, 0x0CDE, prN}, // Lo [2] KANNADA LETTER NAKAARA POLLU..KANNADA LETTER FA
+ {0x0CE0, 0x0CE1, prN}, // Lo [2] KANNADA LETTER VOCALIC RR..KANNADA LETTER VOCALIC LL
+ {0x0CE2, 0x0CE3, prN}, // Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL
+ {0x0CE6, 0x0CEF, prN}, // Nd [10] KANNADA DIGIT ZERO..KANNADA DIGIT NINE
+ {0x0CF1, 0x0CF2, prN}, // Lo [2] KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA
+ {0x0CF3, 0x0CF3, prN}, // Mc KANNADA SIGN COMBINING ANUSVARA ABOVE RIGHT
+ {0x0D00, 0x0D01, prN}, // Mn [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU
+ {0x0D02, 0x0D03, prN}, // Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA
+ {0x0D04, 0x0D0C, prN}, // Lo [9] MALAYALAM LETTER VEDIC ANUSVARA..MALAYALAM LETTER VOCALIC L
+ {0x0D0E, 0x0D10, prN}, // Lo [3] MALAYALAM LETTER E..MALAYALAM LETTER AI
+ {0x0D12, 0x0D3A, prN}, // Lo [41] MALAYALAM LETTER O..MALAYALAM LETTER TTTA
+ {0x0D3B, 0x0D3C, prN}, // Mn [2] MALAYALAM SIGN VERTICAL BAR VIRAMA..MALAYALAM SIGN CIRCULAR VIRAMA
+ {0x0D3D, 0x0D3D, prN}, // Lo MALAYALAM SIGN AVAGRAHA
+ {0x0D3E, 0x0D40, prN}, // Mc [3] MALAYALAM VOWEL SIGN AA..MALAYALAM VOWEL SIGN II
+ {0x0D41, 0x0D44, prN}, // Mn [4] MALAYALAM VOWEL SIGN U..MALAYALAM VOWEL SIGN VOCALIC RR
+ {0x0D46, 0x0D48, prN}, // Mc [3] MALAYALAM VOWEL SIGN E..MALAYALAM VOWEL SIGN AI
+ {0x0D4A, 0x0D4C, prN}, // Mc [3] MALAYALAM VOWEL SIGN O..MALAYALAM VOWEL SIGN AU
+ {0x0D4D, 0x0D4D, prN}, // Mn MALAYALAM SIGN VIRAMA
+ {0x0D4E, 0x0D4E, prN}, // Lo MALAYALAM LETTER DOT REPH
+ {0x0D4F, 0x0D4F, prN}, // So MALAYALAM SIGN PARA
+ {0x0D54, 0x0D56, prN}, // Lo [3] MALAYALAM LETTER CHILLU M..MALAYALAM LETTER CHILLU LLL
+ {0x0D57, 0x0D57, prN}, // Mc MALAYALAM AU LENGTH MARK
+ {0x0D58, 0x0D5E, prN}, // No [7] MALAYALAM FRACTION ONE ONE-HUNDRED-AND-SIXTIETH..MALAYALAM FRACTION ONE FIFTH
+ {0x0D5F, 0x0D61, prN}, // Lo [3] MALAYALAM LETTER ARCHAIC II..MALAYALAM LETTER VOCALIC LL
+ {0x0D62, 0x0D63, prN}, // Mn [2] MALAYALAM VOWEL SIGN VOCALIC L..MALAYALAM VOWEL SIGN VOCALIC LL
+ {0x0D66, 0x0D6F, prN}, // Nd [10] MALAYALAM DIGIT ZERO..MALAYALAM DIGIT NINE
+ {0x0D70, 0x0D78, prN}, // No [9] MALAYALAM NUMBER TEN..MALAYALAM FRACTION THREE SIXTEENTHS
+ {0x0D79, 0x0D79, prN}, // So MALAYALAM DATE MARK
+ {0x0D7A, 0x0D7F, prN}, // Lo [6] MALAYALAM LETTER CHILLU NN..MALAYALAM LETTER CHILLU K
+ {0x0D81, 0x0D81, prN}, // Mn SINHALA SIGN CANDRABINDU
+ {0x0D82, 0x0D83, prN}, // Mc [2] SINHALA SIGN ANUSVARAYA..SINHALA SIGN VISARGAYA
+ {0x0D85, 0x0D96, prN}, // Lo [18] SINHALA LETTER AYANNA..SINHALA LETTER AUYANNA
+ {0x0D9A, 0x0DB1, prN}, // Lo [24] SINHALA LETTER ALPAPRAANA KAYANNA..SINHALA LETTER DANTAJA NAYANNA
+ {0x0DB3, 0x0DBB, prN}, // Lo [9] SINHALA LETTER SANYAKA DAYANNA..SINHALA LETTER RAYANNA
+ {0x0DBD, 0x0DBD, prN}, // Lo SINHALA LETTER DANTAJA LAYANNA
+ {0x0DC0, 0x0DC6, prN}, // Lo [7] SINHALA LETTER VAYANNA..SINHALA LETTER FAYANNA
+ {0x0DCA, 0x0DCA, prN}, // Mn SINHALA SIGN AL-LAKUNA
+ {0x0DCF, 0x0DD1, prN}, // Mc [3] SINHALA VOWEL SIGN AELA-PILLA..SINHALA VOWEL SIGN DIGA AEDA-PILLA
+ {0x0DD2, 0x0DD4, prN}, // Mn [3] SINHALA VOWEL SIGN KETTI IS-PILLA..SINHALA VOWEL SIGN KETTI PAA-PILLA
+ {0x0DD6, 0x0DD6, prN}, // Mn SINHALA VOWEL SIGN DIGA PAA-PILLA
+ {0x0DD8, 0x0DDF, prN}, // Mc [8] SINHALA VOWEL SIGN GAETTA-PILLA..SINHALA VOWEL SIGN GAYANUKITTA
+ {0x0DE6, 0x0DEF, prN}, // Nd [10] SINHALA LITH DIGIT ZERO..SINHALA LITH DIGIT NINE
+ {0x0DF2, 0x0DF3, prN}, // Mc [2] SINHALA VOWEL SIGN DIGA GAETTA-PILLA..SINHALA VOWEL SIGN DIGA GAYANUKITTA
+ {0x0DF4, 0x0DF4, prN}, // Po SINHALA PUNCTUATION KUNDDALIYA
+ {0x0E01, 0x0E30, prN}, // Lo [48] THAI CHARACTER KO KAI..THAI CHARACTER SARA A
+ {0x0E31, 0x0E31, prN}, // Mn THAI CHARACTER MAI HAN-AKAT
+ {0x0E32, 0x0E33, prN}, // Lo [2] THAI CHARACTER SARA AA..THAI CHARACTER SARA AM
+ {0x0E34, 0x0E3A, prN}, // Mn [7] THAI CHARACTER SARA I..THAI CHARACTER PHINTHU
+ {0x0E3F, 0x0E3F, prN}, // Sc THAI CURRENCY SYMBOL BAHT
+ {0x0E40, 0x0E45, prN}, // Lo [6] THAI CHARACTER SARA E..THAI CHARACTER LAKKHANGYAO
+ {0x0E46, 0x0E46, prN}, // Lm THAI CHARACTER MAIYAMOK
+ {0x0E47, 0x0E4E, prN}, // Mn [8] THAI CHARACTER MAITAIKHU..THAI CHARACTER YAMAKKAN
+ {0x0E4F, 0x0E4F, prN}, // Po THAI CHARACTER FONGMAN
+ {0x0E50, 0x0E59, prN}, // Nd [10] THAI DIGIT ZERO..THAI DIGIT NINE
+ {0x0E5A, 0x0E5B, prN}, // Po [2] THAI CHARACTER ANGKHANKHU..THAI CHARACTER KHOMUT
+ {0x0E81, 0x0E82, prN}, // Lo [2] LAO LETTER KO..LAO LETTER KHO SUNG
+ {0x0E84, 0x0E84, prN}, // Lo LAO LETTER KHO TAM
+ {0x0E86, 0x0E8A, prN}, // Lo [5] LAO LETTER PALI GHA..LAO LETTER SO TAM
+ {0x0E8C, 0x0EA3, prN}, // Lo [24] LAO LETTER PALI JHA..LAO LETTER LO LING
+ {0x0EA5, 0x0EA5, prN}, // Lo LAO LETTER LO LOOT
+ {0x0EA7, 0x0EB0, prN}, // Lo [10] LAO LETTER WO..LAO VOWEL SIGN A
+ {0x0EB1, 0x0EB1, prN}, // Mn LAO VOWEL SIGN MAI KAN
+ {0x0EB2, 0x0EB3, prN}, // Lo [2] LAO VOWEL SIGN AA..LAO VOWEL SIGN AM
+ {0x0EB4, 0x0EBC, prN}, // Mn [9] LAO VOWEL SIGN I..LAO SEMIVOWEL SIGN LO
+ {0x0EBD, 0x0EBD, prN}, // Lo LAO SEMIVOWEL SIGN NYO
+ {0x0EC0, 0x0EC4, prN}, // Lo [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI
+ {0x0EC6, 0x0EC6, prN}, // Lm LAO KO LA
+ {0x0EC8, 0x0ECE, prN}, // Mn [7] LAO TONE MAI EK..LAO YAMAKKAN
+ {0x0ED0, 0x0ED9, prN}, // Nd [10] LAO DIGIT ZERO..LAO DIGIT NINE
+ {0x0EDC, 0x0EDF, prN}, // Lo [4] LAO HO NO..LAO LETTER KHMU NYO
+ {0x0F00, 0x0F00, prN}, // Lo TIBETAN SYLLABLE OM
+ {0x0F01, 0x0F03, prN}, // So [3] TIBETAN MARK GTER YIG MGO TRUNCATED A..TIBETAN MARK GTER YIG MGO -UM GTER TSHEG MA
+ {0x0F04, 0x0F12, prN}, // Po [15] TIBETAN MARK INITIAL YIG MGO MDUN MA..TIBETAN MARK RGYA GRAM SHAD
+ {0x0F13, 0x0F13, prN}, // So TIBETAN MARK CARET -DZUD RTAGS ME LONG CAN
+ {0x0F14, 0x0F14, prN}, // Po TIBETAN MARK GTER TSHEG
+ {0x0F15, 0x0F17, prN}, // So [3] TIBETAN LOGOTYPE SIGN CHAD RTAGS..TIBETAN ASTROLOGICAL SIGN SGRA GCAN -CHAR RTAGS
+ {0x0F18, 0x0F19, prN}, // Mn [2] TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS
+ {0x0F1A, 0x0F1F, prN}, // So [6] TIBETAN SIGN RDEL DKAR GCIG..TIBETAN SIGN RDEL DKAR RDEL NAG
+ {0x0F20, 0x0F29, prN}, // Nd [10] TIBETAN DIGIT ZERO..TIBETAN DIGIT NINE
+ {0x0F2A, 0x0F33, prN}, // No [10] TIBETAN DIGIT HALF ONE..TIBETAN DIGIT HALF ZERO
+ {0x0F34, 0x0F34, prN}, // So TIBETAN MARK BSDUS RTAGS
+ {0x0F35, 0x0F35, prN}, // Mn TIBETAN MARK NGAS BZUNG NYI ZLA
+ {0x0F36, 0x0F36, prN}, // So TIBETAN MARK CARET -DZUD RTAGS BZHI MIG CAN
+ {0x0F37, 0x0F37, prN}, // Mn TIBETAN MARK NGAS BZUNG SGOR RTAGS
+ {0x0F38, 0x0F38, prN}, // So TIBETAN MARK CHE MGO
+ {0x0F39, 0x0F39, prN}, // Mn TIBETAN MARK TSA -PHRU
+ {0x0F3A, 0x0F3A, prN}, // Ps TIBETAN MARK GUG RTAGS GYON
+ {0x0F3B, 0x0F3B, prN}, // Pe TIBETAN MARK GUG RTAGS GYAS
+ {0x0F3C, 0x0F3C, prN}, // Ps TIBETAN MARK ANG KHANG GYON
+ {0x0F3D, 0x0F3D, prN}, // Pe TIBETAN MARK ANG KHANG GYAS
+ {0x0F3E, 0x0F3F, prN}, // Mc [2] TIBETAN SIGN YAR TSHES..TIBETAN SIGN MAR TSHES
+ {0x0F40, 0x0F47, prN}, // Lo [8] TIBETAN LETTER KA..TIBETAN LETTER JA
+ {0x0F49, 0x0F6C, prN}, // Lo [36] TIBETAN LETTER NYA..TIBETAN LETTER RRA
+ {0x0F71, 0x0F7E, prN}, // Mn [14] TIBETAN VOWEL SIGN AA..TIBETAN SIGN RJES SU NGA RO
+ {0x0F7F, 0x0F7F, prN}, // Mc TIBETAN SIGN RNAM BCAD
+ {0x0F80, 0x0F84, prN}, // Mn [5] TIBETAN VOWEL SIGN REVERSED I..TIBETAN MARK HALANTA
+ {0x0F85, 0x0F85, prN}, // Po TIBETAN MARK PALUTA
+ {0x0F86, 0x0F87, prN}, // Mn [2] TIBETAN SIGN LCI RTAGS..TIBETAN SIGN YANG RTAGS
+ {0x0F88, 0x0F8C, prN}, // Lo [5] TIBETAN SIGN LCE TSA CAN..TIBETAN SIGN INVERTED MCHU CAN
+ {0x0F8D, 0x0F97, prN}, // Mn [11] TIBETAN SUBJOINED SIGN LCE TSA CAN..TIBETAN SUBJOINED LETTER JA
+ {0x0F99, 0x0FBC, prN}, // Mn [36] TIBETAN SUBJOINED LETTER NYA..TIBETAN SUBJOINED LETTER FIXED-FORM RA
+ {0x0FBE, 0x0FC5, prN}, // So [8] TIBETAN KU RU KHA..TIBETAN SYMBOL RDO RJE
+ {0x0FC6, 0x0FC6, prN}, // Mn TIBETAN SYMBOL PADMA GDAN
+ {0x0FC7, 0x0FCC, prN}, // So [6] TIBETAN SYMBOL RDO RJE RGYA GRAM..TIBETAN SYMBOL NOR BU BZHI -KHYIL
+ {0x0FCE, 0x0FCF, prN}, // So [2] TIBETAN SIGN RDEL NAG RDEL DKAR..TIBETAN SIGN RDEL NAG GSUM
+ {0x0FD0, 0x0FD4, prN}, // Po [5] TIBETAN MARK BSKA- SHOG GI MGO RGYAN..TIBETAN MARK CLOSING BRDA RNYING YIG MGO SGAB MA
+ {0x0FD5, 0x0FD8, prN}, // So [4] RIGHT-FACING SVASTI SIGN..LEFT-FACING SVASTI SIGN WITH DOTS
+ {0x0FD9, 0x0FDA, prN}, // Po [2] TIBETAN MARK LEADING MCHAN RTAGS..TIBETAN MARK TRAILING MCHAN RTAGS
+ {0x1000, 0x102A, prN}, // Lo [43] MYANMAR LETTER KA..MYANMAR LETTER AU
+ {0x102B, 0x102C, prN}, // Mc [2] MYANMAR VOWEL SIGN TALL AA..MYANMAR VOWEL SIGN AA
+ {0x102D, 0x1030, prN}, // Mn [4] MYANMAR VOWEL SIGN I..MYANMAR VOWEL SIGN UU
+ {0x1031, 0x1031, prN}, // Mc MYANMAR VOWEL SIGN E
+ {0x1032, 0x1037, prN}, // Mn [6] MYANMAR VOWEL SIGN AI..MYANMAR SIGN DOT BELOW
+ {0x1038, 0x1038, prN}, // Mc MYANMAR SIGN VISARGA
+ {0x1039, 0x103A, prN}, // Mn [2] MYANMAR SIGN VIRAMA..MYANMAR SIGN ASAT
+ {0x103B, 0x103C, prN}, // Mc [2] MYANMAR CONSONANT SIGN MEDIAL YA..MYANMAR CONSONANT SIGN MEDIAL RA
+ {0x103D, 0x103E, prN}, // Mn [2] MYANMAR CONSONANT SIGN MEDIAL WA..MYANMAR CONSONANT SIGN MEDIAL HA
+ {0x103F, 0x103F, prN}, // Lo MYANMAR LETTER GREAT SA
+ {0x1040, 0x1049, prN}, // Nd [10] MYANMAR DIGIT ZERO..MYANMAR DIGIT NINE
+ {0x104A, 0x104F, prN}, // Po [6] MYANMAR SIGN LITTLE SECTION..MYANMAR SYMBOL GENITIVE
+ {0x1050, 0x1055, prN}, // Lo [6] MYANMAR LETTER SHA..MYANMAR LETTER VOCALIC LL
+ {0x1056, 0x1057, prN}, // Mc [2] MYANMAR VOWEL SIGN VOCALIC R..MYANMAR VOWEL SIGN VOCALIC RR
+ {0x1058, 0x1059, prN}, // Mn [2] MYANMAR VOWEL SIGN VOCALIC L..MYANMAR VOWEL SIGN VOCALIC LL
+ {0x105A, 0x105D, prN}, // Lo [4] MYANMAR LETTER MON NGA..MYANMAR LETTER MON BBE
+ {0x105E, 0x1060, prN}, // Mn [3] MYANMAR CONSONANT SIGN MON MEDIAL NA..MYANMAR CONSONANT SIGN MON MEDIAL LA
+ {0x1061, 0x1061, prN}, // Lo MYANMAR LETTER SGAW KAREN SHA
+ {0x1062, 0x1064, prN}, // Mc [3] MYANMAR VOWEL SIGN SGAW KAREN EU..MYANMAR TONE MARK SGAW KAREN KE PHO
+ {0x1065, 0x1066, prN}, // Lo [2] MYANMAR LETTER WESTERN PWO KAREN THA..MYANMAR LETTER WESTERN PWO KAREN PWA
+ {0x1067, 0x106D, prN}, // Mc [7] MYANMAR VOWEL SIGN WESTERN PWO KAREN EU..MYANMAR SIGN WESTERN PWO KAREN TONE-5
+ {0x106E, 0x1070, prN}, // Lo [3] MYANMAR LETTER EASTERN PWO KAREN NNA..MYANMAR LETTER EASTERN PWO KAREN GHWA
+ {0x1071, 0x1074, prN}, // Mn [4] MYANMAR VOWEL SIGN GEBA KAREN I..MYANMAR VOWEL SIGN KAYAH EE
+ {0x1075, 0x1081, prN}, // Lo [13] MYANMAR LETTER SHAN KA..MYANMAR LETTER SHAN HA
+ {0x1082, 0x1082, prN}, // Mn MYANMAR CONSONANT SIGN SHAN MEDIAL WA
+ {0x1083, 0x1084, prN}, // Mc [2] MYANMAR VOWEL SIGN SHAN AA..MYANMAR VOWEL SIGN SHAN E
+ {0x1085, 0x1086, prN}, // Mn [2] MYANMAR VOWEL SIGN SHAN E ABOVE..MYANMAR VOWEL SIGN SHAN FINAL Y
+ {0x1087, 0x108C, prN}, // Mc [6] MYANMAR SIGN SHAN TONE-2..MYANMAR SIGN SHAN COUNCIL TONE-3
+ {0x108D, 0x108D, prN}, // Mn MYANMAR SIGN SHAN COUNCIL EMPHATIC TONE
+ {0x108E, 0x108E, prN}, // Lo MYANMAR LETTER RUMAI PALAUNG FA
+ {0x108F, 0x108F, prN}, // Mc MYANMAR SIGN RUMAI PALAUNG TONE-5
+ {0x1090, 0x1099, prN}, // Nd [10] MYANMAR SHAN DIGIT ZERO..MYANMAR SHAN DIGIT NINE
+ {0x109A, 0x109C, prN}, // Mc [3] MYANMAR SIGN KHAMTI TONE-1..MYANMAR VOWEL SIGN AITON A
+ {0x109D, 0x109D, prN}, // Mn MYANMAR VOWEL SIGN AITON AI
+ {0x109E, 0x109F, prN}, // So [2] MYANMAR SYMBOL SHAN ONE..MYANMAR SYMBOL SHAN EXCLAMATION
+ {0x10A0, 0x10C5, prN}, // Lu [38] GEORGIAN CAPITAL LETTER AN..GEORGIAN CAPITAL LETTER HOE
+ {0x10C7, 0x10C7, prN}, // Lu GEORGIAN CAPITAL LETTER YN
+ {0x10CD, 0x10CD, prN}, // Lu GEORGIAN CAPITAL LETTER AEN
+ {0x10D0, 0x10FA, prN}, // Ll [43] GEORGIAN LETTER AN..GEORGIAN LETTER AIN
+ {0x10FB, 0x10FB, prN}, // Po GEORGIAN PARAGRAPH SEPARATOR
+ {0x10FC, 0x10FC, prN}, // Lm MODIFIER LETTER GEORGIAN NAR
+ {0x10FD, 0x10FF, prN}, // Ll [3] GEORGIAN LETTER AEN..GEORGIAN LETTER LABIAL SIGN
+ {0x1100, 0x115F, prW}, // Lo [96] HANGUL CHOSEONG KIYEOK..HANGUL CHOSEONG FILLER
+ {0x1160, 0x11FF, prN}, // Lo [160] HANGUL JUNGSEONG FILLER..HANGUL JONGSEONG SSANGNIEUN
+ {0x1200, 0x1248, prN}, // Lo [73] ETHIOPIC SYLLABLE HA..ETHIOPIC SYLLABLE QWA
+ {0x124A, 0x124D, prN}, // Lo [4] ETHIOPIC SYLLABLE QWI..ETHIOPIC SYLLABLE QWE
+ {0x1250, 0x1256, prN}, // Lo [7] ETHIOPIC SYLLABLE QHA..ETHIOPIC SYLLABLE QHO
+ {0x1258, 0x1258, prN}, // Lo ETHIOPIC SYLLABLE QHWA
+ {0x125A, 0x125D, prN}, // Lo [4] ETHIOPIC SYLLABLE QHWI..ETHIOPIC SYLLABLE QHWE
+ {0x1260, 0x1288, prN}, // Lo [41] ETHIOPIC SYLLABLE BA..ETHIOPIC SYLLABLE XWA
+ {0x128A, 0x128D, prN}, // Lo [4] ETHIOPIC SYLLABLE XWI..ETHIOPIC SYLLABLE XWE
+ {0x1290, 0x12B0, prN}, // Lo [33] ETHIOPIC SYLLABLE NA..ETHIOPIC SYLLABLE KWA
+ {0x12B2, 0x12B5, prN}, // Lo [4] ETHIOPIC SYLLABLE KWI..ETHIOPIC SYLLABLE KWE
+ {0x12B8, 0x12BE, prN}, // Lo [7] ETHIOPIC SYLLABLE KXA..ETHIOPIC SYLLABLE KXO
+ {0x12C0, 0x12C0, prN}, // Lo ETHIOPIC SYLLABLE KXWA
+ {0x12C2, 0x12C5, prN}, // Lo [4] ETHIOPIC SYLLABLE KXWI..ETHIOPIC SYLLABLE KXWE
+ {0x12C8, 0x12D6, prN}, // Lo [15] ETHIOPIC SYLLABLE WA..ETHIOPIC SYLLABLE PHARYNGEAL O
+ {0x12D8, 0x1310, prN}, // Lo [57] ETHIOPIC SYLLABLE ZA..ETHIOPIC SYLLABLE GWA
+ {0x1312, 0x1315, prN}, // Lo [4] ETHIOPIC SYLLABLE GWI..ETHIOPIC SYLLABLE GWE
+ {0x1318, 0x135A, prN}, // Lo [67] ETHIOPIC SYLLABLE GGA..ETHIOPIC SYLLABLE FYA
+ {0x135D, 0x135F, prN}, // Mn [3] ETHIOPIC COMBINING GEMINATION AND VOWEL LENGTH MARK..ETHIOPIC COMBINING GEMINATION MARK
+ {0x1360, 0x1368, prN}, // Po [9] ETHIOPIC SECTION MARK..ETHIOPIC PARAGRAPH SEPARATOR
+ {0x1369, 0x137C, prN}, // No [20] ETHIOPIC DIGIT ONE..ETHIOPIC NUMBER TEN THOUSAND
+ {0x1380, 0x138F, prN}, // Lo [16] ETHIOPIC SYLLABLE SEBATBEIT MWA..ETHIOPIC SYLLABLE PWE
+ {0x1390, 0x1399, prN}, // So [10] ETHIOPIC TONAL MARK YIZET..ETHIOPIC TONAL MARK KURT
+ {0x13A0, 0x13F5, prN}, // Lu [86] CHEROKEE LETTER A..CHEROKEE LETTER MV
+ {0x13F8, 0x13FD, prN}, // Ll [6] CHEROKEE SMALL LETTER YE..CHEROKEE SMALL LETTER MV
+ {0x1400, 0x1400, prN}, // Pd CANADIAN SYLLABICS HYPHEN
+ {0x1401, 0x166C, prN}, // Lo [620] CANADIAN SYLLABICS E..CANADIAN SYLLABICS CARRIER TTSA
+ {0x166D, 0x166D, prN}, // So CANADIAN SYLLABICS CHI SIGN
+ {0x166E, 0x166E, prN}, // Po CANADIAN SYLLABICS FULL STOP
+ {0x166F, 0x167F, prN}, // Lo [17] CANADIAN SYLLABICS QAI..CANADIAN SYLLABICS BLACKFOOT W
+ {0x1680, 0x1680, prN}, // Zs OGHAM SPACE MARK
+ {0x1681, 0x169A, prN}, // Lo [26] OGHAM LETTER BEITH..OGHAM LETTER PEITH
+ {0x169B, 0x169B, prN}, // Ps OGHAM FEATHER MARK
+ {0x169C, 0x169C, prN}, // Pe OGHAM REVERSED FEATHER MARK
+ {0x16A0, 0x16EA, prN}, // Lo [75] RUNIC LETTER FEHU FEOH FE F..RUNIC LETTER X
+ {0x16EB, 0x16ED, prN}, // Po [3] RUNIC SINGLE PUNCTUATION..RUNIC CROSS PUNCTUATION
+ {0x16EE, 0x16F0, prN}, // Nl [3] RUNIC ARLAUG SYMBOL..RUNIC BELGTHOR SYMBOL
+ {0x16F1, 0x16F8, prN}, // Lo [8] RUNIC LETTER K..RUNIC LETTER FRANKS CASKET AESC
+ {0x1700, 0x1711, prN}, // Lo [18] TAGALOG LETTER A..TAGALOG LETTER HA
+ {0x1712, 0x1714, prN}, // Mn [3] TAGALOG VOWEL SIGN I..TAGALOG SIGN VIRAMA
+ {0x1715, 0x1715, prN}, // Mc TAGALOG SIGN PAMUDPOD
+ {0x171F, 0x171F, prN}, // Lo TAGALOG LETTER ARCHAIC RA
+ {0x1720, 0x1731, prN}, // Lo [18] HANUNOO LETTER A..HANUNOO LETTER HA
+ {0x1732, 0x1733, prN}, // Mn [2] HANUNOO VOWEL SIGN I..HANUNOO VOWEL SIGN U
+ {0x1734, 0x1734, prN}, // Mc HANUNOO SIGN PAMUDPOD
+ {0x1735, 0x1736, prN}, // Po [2] PHILIPPINE SINGLE PUNCTUATION..PHILIPPINE DOUBLE PUNCTUATION
+ {0x1740, 0x1751, prN}, // Lo [18] BUHID LETTER A..BUHID LETTER HA
+ {0x1752, 0x1753, prN}, // Mn [2] BUHID VOWEL SIGN I..BUHID VOWEL SIGN U
+ {0x1760, 0x176C, prN}, // Lo [13] TAGBANWA LETTER A..TAGBANWA LETTER YA
+ {0x176E, 0x1770, prN}, // Lo [3] TAGBANWA LETTER LA..TAGBANWA LETTER SA
+ {0x1772, 0x1773, prN}, // Mn [2] TAGBANWA VOWEL SIGN I..TAGBANWA VOWEL SIGN U
+ {0x1780, 0x17B3, prN}, // Lo [52] KHMER LETTER KA..KHMER INDEPENDENT VOWEL QAU
+ {0x17B4, 0x17B5, prN}, // Mn [2] KHMER VOWEL INHERENT AQ..KHMER VOWEL INHERENT AA
+ {0x17B6, 0x17B6, prN}, // Mc KHMER VOWEL SIGN AA
+ {0x17B7, 0x17BD, prN}, // Mn [7] KHMER VOWEL SIGN I..KHMER VOWEL SIGN UA
+ {0x17BE, 0x17C5, prN}, // Mc [8] KHMER VOWEL SIGN OE..KHMER VOWEL SIGN AU
+ {0x17C6, 0x17C6, prN}, // Mn KHMER SIGN NIKAHIT
+ {0x17C7, 0x17C8, prN}, // Mc [2] KHMER SIGN REAHMUK..KHMER SIGN YUUKALEAPINTU
+ {0x17C9, 0x17D3, prN}, // Mn [11] KHMER SIGN MUUSIKATOAN..KHMER SIGN BATHAMASAT
+ {0x17D4, 0x17D6, prN}, // Po [3] KHMER SIGN KHAN..KHMER SIGN CAMNUC PII KUUH
+ {0x17D7, 0x17D7, prN}, // Lm KHMER SIGN LEK TOO
+ {0x17D8, 0x17DA, prN}, // Po [3] KHMER SIGN BEYYAL..KHMER SIGN KOOMUUT
+ {0x17DB, 0x17DB, prN}, // Sc KHMER CURRENCY SYMBOL RIEL
+ {0x17DC, 0x17DC, prN}, // Lo KHMER SIGN AVAKRAHASANYA
+ {0x17DD, 0x17DD, prN}, // Mn KHMER SIGN ATTHACAN
+ {0x17E0, 0x17E9, prN}, // Nd [10] KHMER DIGIT ZERO..KHMER DIGIT NINE
+ {0x17F0, 0x17F9, prN}, // No [10] KHMER SYMBOL LEK ATTAK SON..KHMER SYMBOL LEK ATTAK PRAM-BUON
+ {0x1800, 0x1805, prN}, // Po [6] MONGOLIAN BIRGA..MONGOLIAN FOUR DOTS
+ {0x1806, 0x1806, prN}, // Pd MONGOLIAN TODO SOFT HYPHEN
+ {0x1807, 0x180A, prN}, // Po [4] MONGOLIAN SIBE SYLLABLE BOUNDARY MARKER..MONGOLIAN NIRUGU
+ {0x180B, 0x180D, prN}, // Mn [3] MONGOLIAN FREE VARIATION SELECTOR ONE..MONGOLIAN FREE VARIATION SELECTOR THREE
+ {0x180E, 0x180E, prN}, // Cf MONGOLIAN VOWEL SEPARATOR
+ {0x180F, 0x180F, prN}, // Mn MONGOLIAN FREE VARIATION SELECTOR FOUR
+ {0x1810, 0x1819, prN}, // Nd [10] MONGOLIAN DIGIT ZERO..MONGOLIAN DIGIT NINE
+ {0x1820, 0x1842, prN}, // Lo [35] MONGOLIAN LETTER A..MONGOLIAN LETTER CHI
+ {0x1843, 0x1843, prN}, // Lm MONGOLIAN LETTER TODO LONG VOWEL SIGN
+ {0x1844, 0x1878, prN}, // Lo [53] MONGOLIAN LETTER TODO E..MONGOLIAN LETTER CHA WITH TWO DOTS
+ {0x1880, 0x1884, prN}, // Lo [5] MONGOLIAN LETTER ALI GALI ANUSVARA ONE..MONGOLIAN LETTER ALI GALI INVERTED UBADAMA
+ {0x1885, 0x1886, prN}, // Mn [2] MONGOLIAN LETTER ALI GALI BALUDA..MONGOLIAN LETTER ALI GALI THREE BALUDA
+ {0x1887, 0x18A8, prN}, // Lo [34] MONGOLIAN LETTER ALI GALI A..MONGOLIAN LETTER MANCHU ALI GALI BHA
+ {0x18A9, 0x18A9, prN}, // Mn MONGOLIAN LETTER ALI GALI DAGALGA
+ {0x18AA, 0x18AA, prN}, // Lo MONGOLIAN LETTER MANCHU ALI GALI LHA
+ {0x18B0, 0x18F5, prN}, // Lo [70] CANADIAN SYLLABICS OY..CANADIAN SYLLABICS CARRIER DENTAL S
+ {0x1900, 0x191E, prN}, // Lo [31] LIMBU VOWEL-CARRIER LETTER..LIMBU LETTER TRA
+ {0x1920, 0x1922, prN}, // Mn [3] LIMBU VOWEL SIGN A..LIMBU VOWEL SIGN U
+ {0x1923, 0x1926, prN}, // Mc [4] LIMBU VOWEL SIGN EE..LIMBU VOWEL SIGN AU
+ {0x1927, 0x1928, prN}, // Mn [2] LIMBU VOWEL SIGN E..LIMBU VOWEL SIGN O
+ {0x1929, 0x192B, prN}, // Mc [3] LIMBU SUBJOINED LETTER YA..LIMBU SUBJOINED LETTER WA
+ {0x1930, 0x1931, prN}, // Mc [2] LIMBU SMALL LETTER KA..LIMBU SMALL LETTER NGA
+ {0x1932, 0x1932, prN}, // Mn LIMBU SMALL LETTER ANUSVARA
+ {0x1933, 0x1938, prN}, // Mc [6] LIMBU SMALL LETTER TA..LIMBU SMALL LETTER LA
+ {0x1939, 0x193B, prN}, // Mn [3] LIMBU SIGN MUKPHRENG..LIMBU SIGN SA-I
+ {0x1940, 0x1940, prN}, // So LIMBU SIGN LOO
+ {0x1944, 0x1945, prN}, // Po [2] LIMBU EXCLAMATION MARK..LIMBU QUESTION MARK
+ {0x1946, 0x194F, prN}, // Nd [10] LIMBU DIGIT ZERO..LIMBU DIGIT NINE
+ {0x1950, 0x196D, prN}, // Lo [30] TAI LE LETTER KA..TAI LE LETTER AI
+ {0x1970, 0x1974, prN}, // Lo [5] TAI LE LETTER TONE-2..TAI LE LETTER TONE-6
+ {0x1980, 0x19AB, prN}, // Lo [44] NEW TAI LUE LETTER HIGH QA..NEW TAI LUE LETTER LOW SUA
+ {0x19B0, 0x19C9, prN}, // Lo [26] NEW TAI LUE VOWEL SIGN VOWEL SHORTENER..NEW TAI LUE TONE MARK-2
+ {0x19D0, 0x19D9, prN}, // Nd [10] NEW TAI LUE DIGIT ZERO..NEW TAI LUE DIGIT NINE
+ {0x19DA, 0x19DA, prN}, // No NEW TAI LUE THAM DIGIT ONE
+ {0x19DE, 0x19DF, prN}, // So [2] NEW TAI LUE SIGN LAE..NEW TAI LUE SIGN LAEV
+ {0x19E0, 0x19FF, prN}, // So [32] KHMER SYMBOL PATHAMASAT..KHMER SYMBOL DAP-PRAM ROC
+ {0x1A00, 0x1A16, prN}, // Lo [23] BUGINESE LETTER KA..BUGINESE LETTER HA
+ {0x1A17, 0x1A18, prN}, // Mn [2] BUGINESE VOWEL SIGN I..BUGINESE VOWEL SIGN U
+ {0x1A19, 0x1A1A, prN}, // Mc [2] BUGINESE VOWEL SIGN E..BUGINESE VOWEL SIGN O
+ {0x1A1B, 0x1A1B, prN}, // Mn BUGINESE VOWEL SIGN AE
+ {0x1A1E, 0x1A1F, prN}, // Po [2] BUGINESE PALLAWA..BUGINESE END OF SECTION
+ {0x1A20, 0x1A54, prN}, // Lo [53] TAI THAM LETTER HIGH KA..TAI THAM LETTER GREAT SA
+ {0x1A55, 0x1A55, prN}, // Mc TAI THAM CONSONANT SIGN MEDIAL RA
+ {0x1A56, 0x1A56, prN}, // Mn TAI THAM CONSONANT SIGN MEDIAL LA
+ {0x1A57, 0x1A57, prN}, // Mc TAI THAM CONSONANT SIGN LA TANG LAI
+ {0x1A58, 0x1A5E, prN}, // Mn [7] TAI THAM SIGN MAI KANG LAI..TAI THAM CONSONANT SIGN SA
+ {0x1A60, 0x1A60, prN}, // Mn TAI THAM SIGN SAKOT
+ {0x1A61, 0x1A61, prN}, // Mc TAI THAM VOWEL SIGN A
+ {0x1A62, 0x1A62, prN}, // Mn TAI THAM VOWEL SIGN MAI SAT
+ {0x1A63, 0x1A64, prN}, // Mc [2] TAI THAM VOWEL SIGN AA..TAI THAM VOWEL SIGN TALL AA
+ {0x1A65, 0x1A6C, prN}, // Mn [8] TAI THAM VOWEL SIGN I..TAI THAM VOWEL SIGN OA BELOW
+ {0x1A6D, 0x1A72, prN}, // Mc [6] TAI THAM VOWEL SIGN OY..TAI THAM VOWEL SIGN THAM AI
+ {0x1A73, 0x1A7C, prN}, // Mn [10] TAI THAM VOWEL SIGN OA ABOVE..TAI THAM SIGN KHUEN-LUE KARAN
+ {0x1A7F, 0x1A7F, prN}, // Mn TAI THAM COMBINING CRYPTOGRAMMIC DOT
+ {0x1A80, 0x1A89, prN}, // Nd [10] TAI THAM HORA DIGIT ZERO..TAI THAM HORA DIGIT NINE
+ {0x1A90, 0x1A99, prN}, // Nd [10] TAI THAM THAM DIGIT ZERO..TAI THAM THAM DIGIT NINE
+ {0x1AA0, 0x1AA6, prN}, // Po [7] TAI THAM SIGN WIANG..TAI THAM SIGN REVERSED ROTATED RANA
+ {0x1AA7, 0x1AA7, prN}, // Lm TAI THAM SIGN MAI YAMOK
+ {0x1AA8, 0x1AAD, prN}, // Po [6] TAI THAM SIGN KAAN..TAI THAM SIGN CAANG
+ {0x1AB0, 0x1ABD, prN}, // Mn [14] COMBINING DOUBLED CIRCUMFLEX ACCENT..COMBINING PARENTHESES BELOW
+ {0x1ABE, 0x1ABE, prN}, // Me COMBINING PARENTHESES OVERLAY
+ {0x1ABF, 0x1ACE, prN}, // Mn [16] COMBINING LATIN SMALL LETTER W BELOW..COMBINING LATIN SMALL LETTER INSULAR T
+ {0x1B00, 0x1B03, prN}, // Mn [4] BALINESE SIGN ULU RICEM..BALINESE SIGN SURANG
+ {0x1B04, 0x1B04, prN}, // Mc BALINESE SIGN BISAH
+ {0x1B05, 0x1B33, prN}, // Lo [47] BALINESE LETTER AKARA..BALINESE LETTER HA
+ {0x1B34, 0x1B34, prN}, // Mn BALINESE SIGN REREKAN
+ {0x1B35, 0x1B35, prN}, // Mc BALINESE VOWEL SIGN TEDUNG
+ {0x1B36, 0x1B3A, prN}, // Mn [5] BALINESE VOWEL SIGN ULU..BALINESE VOWEL SIGN RA REPA
+ {0x1B3B, 0x1B3B, prN}, // Mc BALINESE VOWEL SIGN RA REPA TEDUNG
+ {0x1B3C, 0x1B3C, prN}, // Mn BALINESE VOWEL SIGN LA LENGA
+ {0x1B3D, 0x1B41, prN}, // Mc [5] BALINESE VOWEL SIGN LA LENGA TEDUNG..BALINESE VOWEL SIGN TALING REPA TEDUNG
+ {0x1B42, 0x1B42, prN}, // Mn BALINESE VOWEL SIGN PEPET
+ {0x1B43, 0x1B44, prN}, // Mc [2] BALINESE VOWEL SIGN PEPET TEDUNG..BALINESE ADEG ADEG
+ {0x1B45, 0x1B4C, prN}, // Lo [8] BALINESE LETTER KAF SASAK..BALINESE LETTER ARCHAIC JNYA
+ {0x1B50, 0x1B59, prN}, // Nd [10] BALINESE DIGIT ZERO..BALINESE DIGIT NINE
+ {0x1B5A, 0x1B60, prN}, // Po [7] BALINESE PANTI..BALINESE PAMENENG
+ {0x1B61, 0x1B6A, prN}, // So [10] BALINESE MUSICAL SYMBOL DONG..BALINESE MUSICAL SYMBOL DANG GEDE
+ {0x1B6B, 0x1B73, prN}, // Mn [9] BALINESE MUSICAL SYMBOL COMBINING TEGEH..BALINESE MUSICAL SYMBOL COMBINING GONG
+ {0x1B74, 0x1B7C, prN}, // So [9] BALINESE MUSICAL SYMBOL RIGHT-HAND OPEN DUG..BALINESE MUSICAL SYMBOL LEFT-HAND OPEN PING
+ {0x1B7D, 0x1B7E, prN}, // Po [2] BALINESE PANTI LANTANG..BALINESE PAMADA LANTANG
+ {0x1B80, 0x1B81, prN}, // Mn [2] SUNDANESE SIGN PANYECEK..SUNDANESE SIGN PANGLAYAR
+ {0x1B82, 0x1B82, prN}, // Mc SUNDANESE SIGN PANGWISAD
+ {0x1B83, 0x1BA0, prN}, // Lo [30] SUNDANESE LETTER A..SUNDANESE LETTER HA
+ {0x1BA1, 0x1BA1, prN}, // Mc SUNDANESE CONSONANT SIGN PAMINGKAL
+ {0x1BA2, 0x1BA5, prN}, // Mn [4] SUNDANESE CONSONANT SIGN PANYAKRA..SUNDANESE VOWEL SIGN PANYUKU
+ {0x1BA6, 0x1BA7, prN}, // Mc [2] SUNDANESE VOWEL SIGN PANAELAENG..SUNDANESE VOWEL SIGN PANOLONG
+ {0x1BA8, 0x1BA9, prN}, // Mn [2] SUNDANESE VOWEL SIGN PAMEPET..SUNDANESE VOWEL SIGN PANEULEUNG
+ {0x1BAA, 0x1BAA, prN}, // Mc SUNDANESE SIGN PAMAAEH
+ {0x1BAB, 0x1BAD, prN}, // Mn [3] SUNDANESE SIGN VIRAMA..SUNDANESE CONSONANT SIGN PASANGAN WA
+ {0x1BAE, 0x1BAF, prN}, // Lo [2] SUNDANESE LETTER KHA..SUNDANESE LETTER SYA
+ {0x1BB0, 0x1BB9, prN}, // Nd [10] SUNDANESE DIGIT ZERO..SUNDANESE DIGIT NINE
+ {0x1BBA, 0x1BBF, prN}, // Lo [6] SUNDANESE AVAGRAHA..SUNDANESE LETTER FINAL M
+ {0x1BC0, 0x1BE5, prN}, // Lo [38] BATAK LETTER A..BATAK LETTER U
+ {0x1BE6, 0x1BE6, prN}, // Mn BATAK SIGN TOMPI
+ {0x1BE7, 0x1BE7, prN}, // Mc BATAK VOWEL SIGN E
+ {0x1BE8, 0x1BE9, prN}, // Mn [2] BATAK VOWEL SIGN PAKPAK E..BATAK VOWEL SIGN EE
+ {0x1BEA, 0x1BEC, prN}, // Mc [3] BATAK VOWEL SIGN I..BATAK VOWEL SIGN O
+ {0x1BED, 0x1BED, prN}, // Mn BATAK VOWEL SIGN KARO O
+ {0x1BEE, 0x1BEE, prN}, // Mc BATAK VOWEL SIGN U
+ {0x1BEF, 0x1BF1, prN}, // Mn [3] BATAK VOWEL SIGN U FOR SIMALUNGUN SA..BATAK CONSONANT SIGN H
+ {0x1BF2, 0x1BF3, prN}, // Mc [2] BATAK PANGOLAT..BATAK PANONGONAN
+ {0x1BFC, 0x1BFF, prN}, // Po [4] BATAK SYMBOL BINDU NA METEK..BATAK SYMBOL BINDU PANGOLAT
+ {0x1C00, 0x1C23, prN}, // Lo [36] LEPCHA LETTER KA..LEPCHA LETTER A
+ {0x1C24, 0x1C2B, prN}, // Mc [8] LEPCHA SUBJOINED LETTER YA..LEPCHA VOWEL SIGN UU
+ {0x1C2C, 0x1C33, prN}, // Mn [8] LEPCHA VOWEL SIGN E..LEPCHA CONSONANT SIGN T
+ {0x1C34, 0x1C35, prN}, // Mc [2] LEPCHA CONSONANT SIGN NYIN-DO..LEPCHA CONSONANT SIGN KANG
+ {0x1C36, 0x1C37, prN}, // Mn [2] LEPCHA SIGN RAN..LEPCHA SIGN NUKTA
+ {0x1C3B, 0x1C3F, prN}, // Po [5] LEPCHA PUNCTUATION TA-ROL..LEPCHA PUNCTUATION TSHOOK
+ {0x1C40, 0x1C49, prN}, // Nd [10] LEPCHA DIGIT ZERO..LEPCHA DIGIT NINE
+ {0x1C4D, 0x1C4F, prN}, // Lo [3] LEPCHA LETTER TTA..LEPCHA LETTER DDA
+ {0x1C50, 0x1C59, prN}, // Nd [10] OL CHIKI DIGIT ZERO..OL CHIKI DIGIT NINE
+ {0x1C5A, 0x1C77, prN}, // Lo [30] OL CHIKI LETTER LA..OL CHIKI LETTER OH
+ {0x1C78, 0x1C7D, prN}, // Lm [6] OL CHIKI MU TTUDDAG..OL CHIKI AHAD
+ {0x1C7E, 0x1C7F, prN}, // Po [2] OL CHIKI PUNCTUATION MUCAAD..OL CHIKI PUNCTUATION DOUBLE MUCAAD
+ {0x1C80, 0x1C88, prN}, // Ll [9] CYRILLIC SMALL LETTER ROUNDED VE..CYRILLIC SMALL LETTER UNBLENDED UK
+ {0x1C90, 0x1CBA, prN}, // Lu [43] GEORGIAN MTAVRULI CAPITAL LETTER AN..GEORGIAN MTAVRULI CAPITAL LETTER AIN
+ {0x1CBD, 0x1CBF, prN}, // Lu [3] GEORGIAN MTAVRULI CAPITAL LETTER AEN..GEORGIAN MTAVRULI CAPITAL LETTER LABIAL SIGN
+ {0x1CC0, 0x1CC7, prN}, // Po [8] SUNDANESE PUNCTUATION BINDU SURYA..SUNDANESE PUNCTUATION BINDU BA SATANGA
+ {0x1CD0, 0x1CD2, prN}, // Mn [3] VEDIC TONE KARSHANA..VEDIC TONE PRENKHA
+ {0x1CD3, 0x1CD3, prN}, // Po VEDIC SIGN NIHSHVASA
+ {0x1CD4, 0x1CE0, prN}, // Mn [13] VEDIC SIGN YAJURVEDIC MIDLINE SVARITA..VEDIC TONE RIGVEDIC KASHMIRI INDEPENDENT SVARITA
+ {0x1CE1, 0x1CE1, prN}, // Mc VEDIC TONE ATHARVAVEDIC INDEPENDENT SVARITA
+ {0x1CE2, 0x1CE8, prN}, // Mn [7] VEDIC SIGN VISARGA SVARITA..VEDIC SIGN VISARGA ANUDATTA WITH TAIL
+ {0x1CE9, 0x1CEC, prN}, // Lo [4] VEDIC SIGN ANUSVARA ANTARGOMUKHA..VEDIC SIGN ANUSVARA VAMAGOMUKHA WITH TAIL
+ {0x1CED, 0x1CED, prN}, // Mn VEDIC SIGN TIRYAK
+ {0x1CEE, 0x1CF3, prN}, // Lo [6] VEDIC SIGN HEXIFORM LONG ANUSVARA..VEDIC SIGN ROTATED ARDHAVISARGA
+ {0x1CF4, 0x1CF4, prN}, // Mn VEDIC TONE CANDRA ABOVE
+ {0x1CF5, 0x1CF6, prN}, // Lo [2] VEDIC SIGN JIHVAMULIYA..VEDIC SIGN UPADHMANIYA
+ {0x1CF7, 0x1CF7, prN}, // Mc VEDIC SIGN ATIKRAMA
+ {0x1CF8, 0x1CF9, prN}, // Mn [2] VEDIC TONE RING ABOVE..VEDIC TONE DOUBLE RING ABOVE
+ {0x1CFA, 0x1CFA, prN}, // Lo VEDIC SIGN DOUBLE ANUSVARA ANTARGOMUKHA
+ {0x1D00, 0x1D2B, prN}, // Ll [44] LATIN LETTER SMALL CAPITAL A..CYRILLIC LETTER SMALL CAPITAL EL
+ {0x1D2C, 0x1D6A, prN}, // Lm [63] MODIFIER LETTER CAPITAL A..GREEK SUBSCRIPT SMALL LETTER CHI
+ {0x1D6B, 0x1D77, prN}, // Ll [13] LATIN SMALL LETTER UE..LATIN SMALL LETTER TURNED G
+ {0x1D78, 0x1D78, prN}, // Lm MODIFIER LETTER CYRILLIC EN
+ {0x1D79, 0x1D7F, prN}, // Ll [7] LATIN SMALL LETTER INSULAR G..LATIN SMALL LETTER UPSILON WITH STROKE
+ {0x1D80, 0x1D9A, prN}, // Ll [27] LATIN SMALL LETTER B WITH PALATAL HOOK..LATIN SMALL LETTER EZH WITH RETROFLEX HOOK
+ {0x1D9B, 0x1DBF, prN}, // Lm [37] MODIFIER LETTER SMALL TURNED ALPHA..MODIFIER LETTER SMALL THETA
+ {0x1DC0, 0x1DFF, prN}, // Mn [64] COMBINING DOTTED GRAVE ACCENT..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW
+ {0x1E00, 0x1EFF, prN}, // L& [256] LATIN CAPITAL LETTER A WITH RING BELOW..LATIN SMALL LETTER Y WITH LOOP
+ {0x1F00, 0x1F15, prN}, // L& [22] GREEK SMALL LETTER ALPHA WITH PSILI..GREEK SMALL LETTER EPSILON WITH DASIA AND OXIA
+ {0x1F18, 0x1F1D, prN}, // Lu [6] GREEK CAPITAL LETTER EPSILON WITH PSILI..GREEK CAPITAL LETTER EPSILON WITH DASIA AND OXIA
+ {0x1F20, 0x1F45, prN}, // L& [38] GREEK SMALL LETTER ETA WITH PSILI..GREEK SMALL LETTER OMICRON WITH DASIA AND OXIA
+ {0x1F48, 0x1F4D, prN}, // Lu [6] GREEK CAPITAL LETTER OMICRON WITH PSILI..GREEK CAPITAL LETTER OMICRON WITH DASIA AND OXIA
+ {0x1F50, 0x1F57, prN}, // Ll [8] GREEK SMALL LETTER UPSILON WITH PSILI..GREEK SMALL LETTER UPSILON WITH DASIA AND PERISPOMENI
+ {0x1F59, 0x1F59, prN}, // Lu GREEK CAPITAL LETTER UPSILON WITH DASIA
+ {0x1F5B, 0x1F5B, prN}, // Lu GREEK CAPITAL LETTER UPSILON WITH DASIA AND VARIA
+ {0x1F5D, 0x1F5D, prN}, // Lu GREEK CAPITAL LETTER UPSILON WITH DASIA AND OXIA
+ {0x1F5F, 0x1F7D, prN}, // L& [31] GREEK CAPITAL LETTER UPSILON WITH DASIA AND PERISPOMENI..GREEK SMALL LETTER OMEGA WITH OXIA
+ {0x1F80, 0x1FB4, prN}, // L& [53] GREEK SMALL LETTER ALPHA WITH PSILI AND YPOGEGRAMMENI..GREEK SMALL LETTER ALPHA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FB6, 0x1FBC, prN}, // L& [7] GREEK SMALL LETTER ALPHA WITH PERISPOMENI..GREEK CAPITAL LETTER ALPHA WITH PROSGEGRAMMENI
+ {0x1FBD, 0x1FBD, prN}, // Sk GREEK KORONIS
+ {0x1FBE, 0x1FBE, prN}, // Ll GREEK PROSGEGRAMMENI
+ {0x1FBF, 0x1FC1, prN}, // Sk [3] GREEK PSILI..GREEK DIALYTIKA AND PERISPOMENI
+ {0x1FC2, 0x1FC4, prN}, // Ll [3] GREEK SMALL LETTER ETA WITH VARIA AND YPOGEGRAMMENI..GREEK SMALL LETTER ETA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FC6, 0x1FCC, prN}, // L& [7] GREEK SMALL LETTER ETA WITH PERISPOMENI..GREEK CAPITAL LETTER ETA WITH PROSGEGRAMMENI
+ {0x1FCD, 0x1FCF, prN}, // Sk [3] GREEK PSILI AND VARIA..GREEK PSILI AND PERISPOMENI
+ {0x1FD0, 0x1FD3, prN}, // Ll [4] GREEK SMALL LETTER IOTA WITH VRACHY..GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA
+ {0x1FD6, 0x1FDB, prN}, // L& [6] GREEK SMALL LETTER IOTA WITH PERISPOMENI..GREEK CAPITAL LETTER IOTA WITH OXIA
+ {0x1FDD, 0x1FDF, prN}, // Sk [3] GREEK DASIA AND VARIA..GREEK DASIA AND PERISPOMENI
+ {0x1FE0, 0x1FEC, prN}, // L& [13] GREEK SMALL LETTER UPSILON WITH VRACHY..GREEK CAPITAL LETTER RHO WITH DASIA
+ {0x1FED, 0x1FEF, prN}, // Sk [3] GREEK DIALYTIKA AND VARIA..GREEK VARIA
+ {0x1FF2, 0x1FF4, prN}, // Ll [3] GREEK SMALL LETTER OMEGA WITH VARIA AND YPOGEGRAMMENI..GREEK SMALL LETTER OMEGA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FF6, 0x1FFC, prN}, // L& [7] GREEK SMALL LETTER OMEGA WITH PERISPOMENI..GREEK CAPITAL LETTER OMEGA WITH PROSGEGRAMMENI
+ {0x1FFD, 0x1FFE, prN}, // Sk [2] GREEK OXIA..GREEK DASIA
+ {0x2000, 0x200A, prN}, // Zs [11] EN QUAD..HAIR SPACE
+ {0x200B, 0x200F, prN}, // Cf [5] ZERO WIDTH SPACE..RIGHT-TO-LEFT MARK
+ {0x2010, 0x2010, prA}, // Pd HYPHEN
+ {0x2011, 0x2012, prN}, // Pd [2] NON-BREAKING HYPHEN..FIGURE DASH
+ {0x2013, 0x2015, prA}, // Pd [3] EN DASH..HORIZONTAL BAR
+ {0x2016, 0x2016, prA}, // Po DOUBLE VERTICAL LINE
+ {0x2017, 0x2017, prN}, // Po DOUBLE LOW LINE
+ {0x2018, 0x2018, prA}, // Pi LEFT SINGLE QUOTATION MARK
+ {0x2019, 0x2019, prA}, // Pf RIGHT SINGLE QUOTATION MARK
+ {0x201A, 0x201A, prN}, // Ps SINGLE LOW-9 QUOTATION MARK
+ {0x201B, 0x201B, prN}, // Pi SINGLE HIGH-REVERSED-9 QUOTATION MARK
+ {0x201C, 0x201C, prA}, // Pi LEFT DOUBLE QUOTATION MARK
+ {0x201D, 0x201D, prA}, // Pf RIGHT DOUBLE QUOTATION MARK
+ {0x201E, 0x201E, prN}, // Ps DOUBLE LOW-9 QUOTATION MARK
+ {0x201F, 0x201F, prN}, // Pi DOUBLE HIGH-REVERSED-9 QUOTATION MARK
+ {0x2020, 0x2022, prA}, // Po [3] DAGGER..BULLET
+ {0x2023, 0x2023, prN}, // Po TRIANGULAR BULLET
+ {0x2024, 0x2027, prA}, // Po [4] ONE DOT LEADER..HYPHENATION POINT
+ {0x2028, 0x2028, prN}, // Zl LINE SEPARATOR
+ {0x2029, 0x2029, prN}, // Zp PARAGRAPH SEPARATOR
+ {0x202A, 0x202E, prN}, // Cf [5] LEFT-TO-RIGHT EMBEDDING..RIGHT-TO-LEFT OVERRIDE
+ {0x202F, 0x202F, prN}, // Zs NARROW NO-BREAK SPACE
+ {0x2030, 0x2030, prA}, // Po PER MILLE SIGN
+ {0x2031, 0x2031, prN}, // Po PER TEN THOUSAND SIGN
+ {0x2032, 0x2033, prA}, // Po [2] PRIME..DOUBLE PRIME
+ {0x2034, 0x2034, prN}, // Po TRIPLE PRIME
+ {0x2035, 0x2035, prA}, // Po REVERSED PRIME
+ {0x2036, 0x2038, prN}, // Po [3] REVERSED DOUBLE PRIME..CARET
+ {0x2039, 0x2039, prN}, // Pi SINGLE LEFT-POINTING ANGLE QUOTATION MARK
+ {0x203A, 0x203A, prN}, // Pf SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
+ {0x203B, 0x203B, prA}, // Po REFERENCE MARK
+ {0x203C, 0x203D, prN}, // Po [2] DOUBLE EXCLAMATION MARK..INTERROBANG
+ {0x203E, 0x203E, prA}, // Po OVERLINE
+ {0x203F, 0x2040, prN}, // Pc [2] UNDERTIE..CHARACTER TIE
+ {0x2041, 0x2043, prN}, // Po [3] CARET INSERTION POINT..HYPHEN BULLET
+ {0x2044, 0x2044, prN}, // Sm FRACTION SLASH
+ {0x2045, 0x2045, prN}, // Ps LEFT SQUARE BRACKET WITH QUILL
+ {0x2046, 0x2046, prN}, // Pe RIGHT SQUARE BRACKET WITH QUILL
+ {0x2047, 0x2051, prN}, // Po [11] DOUBLE QUESTION MARK..TWO ASTERISKS ALIGNED VERTICALLY
+ {0x2052, 0x2052, prN}, // Sm COMMERCIAL MINUS SIGN
+ {0x2053, 0x2053, prN}, // Po SWUNG DASH
+ {0x2054, 0x2054, prN}, // Pc INVERTED UNDERTIE
+ {0x2055, 0x205E, prN}, // Po [10] FLOWER PUNCTUATION MARK..VERTICAL FOUR DOTS
+ {0x205F, 0x205F, prN}, // Zs MEDIUM MATHEMATICAL SPACE
+ {0x2060, 0x2064, prN}, // Cf [5] WORD JOINER..INVISIBLE PLUS
+ {0x2066, 0x206F, prN}, // Cf [10] LEFT-TO-RIGHT ISOLATE..NOMINAL DIGIT SHAPES
+ {0x2070, 0x2070, prN}, // No SUPERSCRIPT ZERO
+ {0x2071, 0x2071, prN}, // Lm SUPERSCRIPT LATIN SMALL LETTER I
+ {0x2074, 0x2074, prA}, // No SUPERSCRIPT FOUR
+ {0x2075, 0x2079, prN}, // No [5] SUPERSCRIPT FIVE..SUPERSCRIPT NINE
+ {0x207A, 0x207C, prN}, // Sm [3] SUPERSCRIPT PLUS SIGN..SUPERSCRIPT EQUALS SIGN
+ {0x207D, 0x207D, prN}, // Ps SUPERSCRIPT LEFT PARENTHESIS
+ {0x207E, 0x207E, prN}, // Pe SUPERSCRIPT RIGHT PARENTHESIS
+ {0x207F, 0x207F, prA}, // Lm SUPERSCRIPT LATIN SMALL LETTER N
+ {0x2080, 0x2080, prN}, // No SUBSCRIPT ZERO
+ {0x2081, 0x2084, prA}, // No [4] SUBSCRIPT ONE..SUBSCRIPT FOUR
+ {0x2085, 0x2089, prN}, // No [5] SUBSCRIPT FIVE..SUBSCRIPT NINE
+ {0x208A, 0x208C, prN}, // Sm [3] SUBSCRIPT PLUS SIGN..SUBSCRIPT EQUALS SIGN
+ {0x208D, 0x208D, prN}, // Ps SUBSCRIPT LEFT PARENTHESIS
+ {0x208E, 0x208E, prN}, // Pe SUBSCRIPT RIGHT PARENTHESIS
+ {0x2090, 0x209C, prN}, // Lm [13] LATIN SUBSCRIPT SMALL LETTER A..LATIN SUBSCRIPT SMALL LETTER T
+ {0x20A0, 0x20A8, prN}, // Sc [9] EURO-CURRENCY SIGN..RUPEE SIGN
+ {0x20A9, 0x20A9, prH}, // Sc WON SIGN
+ {0x20AA, 0x20AB, prN}, // Sc [2] NEW SHEQEL SIGN..DONG SIGN
+ {0x20AC, 0x20AC, prA}, // Sc EURO SIGN
+ {0x20AD, 0x20C0, prN}, // Sc [20] KIP SIGN..SOM SIGN
+ {0x20D0, 0x20DC, prN}, // Mn [13] COMBINING LEFT HARPOON ABOVE..COMBINING FOUR DOTS ABOVE
+ {0x20DD, 0x20E0, prN}, // Me [4] COMBINING ENCLOSING CIRCLE..COMBINING ENCLOSING CIRCLE BACKSLASH
+ {0x20E1, 0x20E1, prN}, // Mn COMBINING LEFT RIGHT ARROW ABOVE
+ {0x20E2, 0x20E4, prN}, // Me [3] COMBINING ENCLOSING SCREEN..COMBINING ENCLOSING UPWARD POINTING TRIANGLE
+ {0x20E5, 0x20F0, prN}, // Mn [12] COMBINING REVERSE SOLIDUS OVERLAY..COMBINING ASTERISK ABOVE
+ {0x2100, 0x2101, prN}, // So [2] ACCOUNT OF..ADDRESSED TO THE SUBJECT
+ {0x2102, 0x2102, prN}, // Lu DOUBLE-STRUCK CAPITAL C
+ {0x2103, 0x2103, prA}, // So DEGREE CELSIUS
+ {0x2104, 0x2104, prN}, // So CENTRE LINE SYMBOL
+ {0x2105, 0x2105, prA}, // So CARE OF
+ {0x2106, 0x2106, prN}, // So CADA UNA
+ {0x2107, 0x2107, prN}, // Lu EULER CONSTANT
+ {0x2108, 0x2108, prN}, // So SCRUPLE
+ {0x2109, 0x2109, prA}, // So DEGREE FAHRENHEIT
+ {0x210A, 0x2112, prN}, // L& [9] SCRIPT SMALL G..SCRIPT CAPITAL L
+ {0x2113, 0x2113, prA}, // Ll SCRIPT SMALL L
+ {0x2114, 0x2114, prN}, // So L B BAR SYMBOL
+ {0x2115, 0x2115, prN}, // Lu DOUBLE-STRUCK CAPITAL N
+ {0x2116, 0x2116, prA}, // So NUMERO SIGN
+ {0x2117, 0x2117, prN}, // So SOUND RECORDING COPYRIGHT
+ {0x2118, 0x2118, prN}, // Sm SCRIPT CAPITAL P
+ {0x2119, 0x211D, prN}, // Lu [5] DOUBLE-STRUCK CAPITAL P..DOUBLE-STRUCK CAPITAL R
+ {0x211E, 0x2120, prN}, // So [3] PRESCRIPTION TAKE..SERVICE MARK
+ {0x2121, 0x2122, prA}, // So [2] TELEPHONE SIGN..TRADE MARK SIGN
+ {0x2123, 0x2123, prN}, // So VERSICLE
+ {0x2124, 0x2124, prN}, // Lu DOUBLE-STRUCK CAPITAL Z
+ {0x2125, 0x2125, prN}, // So OUNCE SIGN
+ {0x2126, 0x2126, prA}, // Lu OHM SIGN
+ {0x2127, 0x2127, prN}, // So INVERTED OHM SIGN
+ {0x2128, 0x2128, prN}, // Lu BLACK-LETTER CAPITAL Z
+ {0x2129, 0x2129, prN}, // So TURNED GREEK SMALL LETTER IOTA
+ {0x212A, 0x212A, prN}, // Lu KELVIN SIGN
+ {0x212B, 0x212B, prA}, // Lu ANGSTROM SIGN
+ {0x212C, 0x212D, prN}, // Lu [2] SCRIPT CAPITAL B..BLACK-LETTER CAPITAL C
+ {0x212E, 0x212E, prN}, // So ESTIMATED SYMBOL
+ {0x212F, 0x2134, prN}, // L& [6] SCRIPT SMALL E..SCRIPT SMALL O
+ {0x2135, 0x2138, prN}, // Lo [4] ALEF SYMBOL..DALET SYMBOL
+ {0x2139, 0x2139, prN}, // Ll INFORMATION SOURCE
+ {0x213A, 0x213B, prN}, // So [2] ROTATED CAPITAL Q..FACSIMILE SIGN
+ {0x213C, 0x213F, prN}, // L& [4] DOUBLE-STRUCK SMALL PI..DOUBLE-STRUCK CAPITAL PI
+ {0x2140, 0x2144, prN}, // Sm [5] DOUBLE-STRUCK N-ARY SUMMATION..TURNED SANS-SERIF CAPITAL Y
+ {0x2145, 0x2149, prN}, // L& [5] DOUBLE-STRUCK ITALIC CAPITAL D..DOUBLE-STRUCK ITALIC SMALL J
+ {0x214A, 0x214A, prN}, // So PROPERTY LINE
+ {0x214B, 0x214B, prN}, // Sm TURNED AMPERSAND
+ {0x214C, 0x214D, prN}, // So [2] PER SIGN..AKTIESELSKAB
+ {0x214E, 0x214E, prN}, // Ll TURNED SMALL F
+ {0x214F, 0x214F, prN}, // So SYMBOL FOR SAMARITAN SOURCE
+ {0x2150, 0x2152, prN}, // No [3] VULGAR FRACTION ONE SEVENTH..VULGAR FRACTION ONE TENTH
+ {0x2153, 0x2154, prA}, // No [2] VULGAR FRACTION ONE THIRD..VULGAR FRACTION TWO THIRDS
+ {0x2155, 0x215A, prN}, // No [6] VULGAR FRACTION ONE FIFTH..VULGAR FRACTION FIVE SIXTHS
+ {0x215B, 0x215E, prA}, // No [4] VULGAR FRACTION ONE EIGHTH..VULGAR FRACTION SEVEN EIGHTHS
+ {0x215F, 0x215F, prN}, // No FRACTION NUMERATOR ONE
+ {0x2160, 0x216B, prA}, // Nl [12] ROMAN NUMERAL ONE..ROMAN NUMERAL TWELVE
+ {0x216C, 0x216F, prN}, // Nl [4] ROMAN NUMERAL FIFTY..ROMAN NUMERAL ONE THOUSAND
+ {0x2170, 0x2179, prA}, // Nl [10] SMALL ROMAN NUMERAL ONE..SMALL ROMAN NUMERAL TEN
+ {0x217A, 0x2182, prN}, // Nl [9] SMALL ROMAN NUMERAL ELEVEN..ROMAN NUMERAL TEN THOUSAND
+ {0x2183, 0x2184, prN}, // L& [2] ROMAN NUMERAL REVERSED ONE HUNDRED..LATIN SMALL LETTER REVERSED C
+ {0x2185, 0x2188, prN}, // Nl [4] ROMAN NUMERAL SIX LATE FORM..ROMAN NUMERAL ONE HUNDRED THOUSAND
+ {0x2189, 0x2189, prA}, // No VULGAR FRACTION ZERO THIRDS
+ {0x218A, 0x218B, prN}, // So [2] TURNED DIGIT TWO..TURNED DIGIT THREE
+ {0x2190, 0x2194, prA}, // Sm [5] LEFTWARDS ARROW..LEFT RIGHT ARROW
+ {0x2195, 0x2199, prA}, // So [5] UP DOWN ARROW..SOUTH WEST ARROW
+ {0x219A, 0x219B, prN}, // Sm [2] LEFTWARDS ARROW WITH STROKE..RIGHTWARDS ARROW WITH STROKE
+ {0x219C, 0x219F, prN}, // So [4] LEFTWARDS WAVE ARROW..UPWARDS TWO HEADED ARROW
+ {0x21A0, 0x21A0, prN}, // Sm RIGHTWARDS TWO HEADED ARROW
+ {0x21A1, 0x21A2, prN}, // So [2] DOWNWARDS TWO HEADED ARROW..LEFTWARDS ARROW WITH TAIL
+ {0x21A3, 0x21A3, prN}, // Sm RIGHTWARDS ARROW WITH TAIL
+ {0x21A4, 0x21A5, prN}, // So [2] LEFTWARDS ARROW FROM BAR..UPWARDS ARROW FROM BAR
+ {0x21A6, 0x21A6, prN}, // Sm RIGHTWARDS ARROW FROM BAR
+ {0x21A7, 0x21AD, prN}, // So [7] DOWNWARDS ARROW FROM BAR..LEFT RIGHT WAVE ARROW
+ {0x21AE, 0x21AE, prN}, // Sm LEFT RIGHT ARROW WITH STROKE
+ {0x21AF, 0x21B7, prN}, // So [9] DOWNWARDS ZIGZAG ARROW..CLOCKWISE TOP SEMICIRCLE ARROW
+ {0x21B8, 0x21B9, prA}, // So [2] NORTH WEST ARROW TO LONG BAR..LEFTWARDS ARROW TO BAR OVER RIGHTWARDS ARROW TO BAR
+ {0x21BA, 0x21CD, prN}, // So [20] ANTICLOCKWISE OPEN CIRCLE ARROW..LEFTWARDS DOUBLE ARROW WITH STROKE
+ {0x21CE, 0x21CF, prN}, // Sm [2] LEFT RIGHT DOUBLE ARROW WITH STROKE..RIGHTWARDS DOUBLE ARROW WITH STROKE
+ {0x21D0, 0x21D1, prN}, // So [2] LEFTWARDS DOUBLE ARROW..UPWARDS DOUBLE ARROW
+ {0x21D2, 0x21D2, prA}, // Sm RIGHTWARDS DOUBLE ARROW
+ {0x21D3, 0x21D3, prN}, // So DOWNWARDS DOUBLE ARROW
+ {0x21D4, 0x21D4, prA}, // Sm LEFT RIGHT DOUBLE ARROW
+ {0x21D5, 0x21E6, prN}, // So [18] UP DOWN DOUBLE ARROW..LEFTWARDS WHITE ARROW
+ {0x21E7, 0x21E7, prA}, // So UPWARDS WHITE ARROW
+ {0x21E8, 0x21F3, prN}, // So [12] RIGHTWARDS WHITE ARROW..UP DOWN WHITE ARROW
+ {0x21F4, 0x21FF, prN}, // Sm [12] RIGHT ARROW WITH SMALL CIRCLE..LEFT RIGHT OPEN-HEADED ARROW
+ {0x2200, 0x2200, prA}, // Sm FOR ALL
+ {0x2201, 0x2201, prN}, // Sm COMPLEMENT
+ {0x2202, 0x2203, prA}, // Sm [2] PARTIAL DIFFERENTIAL..THERE EXISTS
+ {0x2204, 0x2206, prN}, // Sm [3] THERE DOES NOT EXIST..INCREMENT
+ {0x2207, 0x2208, prA}, // Sm [2] NABLA..ELEMENT OF
+ {0x2209, 0x220A, prN}, // Sm [2] NOT AN ELEMENT OF..SMALL ELEMENT OF
+ {0x220B, 0x220B, prA}, // Sm CONTAINS AS MEMBER
+ {0x220C, 0x220E, prN}, // Sm [3] DOES NOT CONTAIN AS MEMBER..END OF PROOF
+ {0x220F, 0x220F, prA}, // Sm N-ARY PRODUCT
+ {0x2210, 0x2210, prN}, // Sm N-ARY COPRODUCT
+ {0x2211, 0x2211, prA}, // Sm N-ARY SUMMATION
+ {0x2212, 0x2214, prN}, // Sm [3] MINUS SIGN..DOT PLUS
+ {0x2215, 0x2215, prA}, // Sm DIVISION SLASH
+ {0x2216, 0x2219, prN}, // Sm [4] SET MINUS..BULLET OPERATOR
+ {0x221A, 0x221A, prA}, // Sm SQUARE ROOT
+ {0x221B, 0x221C, prN}, // Sm [2] CUBE ROOT..FOURTH ROOT
+ {0x221D, 0x2220, prA}, // Sm [4] PROPORTIONAL TO..ANGLE
+ {0x2221, 0x2222, prN}, // Sm [2] MEASURED ANGLE..SPHERICAL ANGLE
+ {0x2223, 0x2223, prA}, // Sm DIVIDES
+ {0x2224, 0x2224, prN}, // Sm DOES NOT DIVIDE
+ {0x2225, 0x2225, prA}, // Sm PARALLEL TO
+ {0x2226, 0x2226, prN}, // Sm NOT PARALLEL TO
+ {0x2227, 0x222C, prA}, // Sm [6] LOGICAL AND..DOUBLE INTEGRAL
+ {0x222D, 0x222D, prN}, // Sm TRIPLE INTEGRAL
+ {0x222E, 0x222E, prA}, // Sm CONTOUR INTEGRAL
+ {0x222F, 0x2233, prN}, // Sm [5] SURFACE INTEGRAL..ANTICLOCKWISE CONTOUR INTEGRAL
+ {0x2234, 0x2237, prA}, // Sm [4] THEREFORE..PROPORTION
+ {0x2238, 0x223B, prN}, // Sm [4] DOT MINUS..HOMOTHETIC
+ {0x223C, 0x223D, prA}, // Sm [2] TILDE OPERATOR..REVERSED TILDE
+ {0x223E, 0x2247, prN}, // Sm [10] INVERTED LAZY S..NEITHER APPROXIMATELY NOR ACTUALLY EQUAL TO
+ {0x2248, 0x2248, prA}, // Sm ALMOST EQUAL TO
+ {0x2249, 0x224B, prN}, // Sm [3] NOT ALMOST EQUAL TO..TRIPLE TILDE
+ {0x224C, 0x224C, prA}, // Sm ALL EQUAL TO
+ {0x224D, 0x2251, prN}, // Sm [5] EQUIVALENT TO..GEOMETRICALLY EQUAL TO
+ {0x2252, 0x2252, prA}, // Sm APPROXIMATELY EQUAL TO OR THE IMAGE OF
+ {0x2253, 0x225F, prN}, // Sm [13] IMAGE OF OR APPROXIMATELY EQUAL TO..QUESTIONED EQUAL TO
+ {0x2260, 0x2261, prA}, // Sm [2] NOT EQUAL TO..IDENTICAL TO
+ {0x2262, 0x2263, prN}, // Sm [2] NOT IDENTICAL TO..STRICTLY EQUIVALENT TO
+ {0x2264, 0x2267, prA}, // Sm [4] LESS-THAN OR EQUAL TO..GREATER-THAN OVER EQUAL TO
+ {0x2268, 0x2269, prN}, // Sm [2] LESS-THAN BUT NOT EQUAL TO..GREATER-THAN BUT NOT EQUAL TO
+ {0x226A, 0x226B, prA}, // Sm [2] MUCH LESS-THAN..MUCH GREATER-THAN
+ {0x226C, 0x226D, prN}, // Sm [2] BETWEEN..NOT EQUIVALENT TO
+ {0x226E, 0x226F, prA}, // Sm [2] NOT LESS-THAN..NOT GREATER-THAN
+ {0x2270, 0x2281, prN}, // Sm [18] NEITHER LESS-THAN NOR EQUAL TO..DOES NOT SUCCEED
+ {0x2282, 0x2283, prA}, // Sm [2] SUBSET OF..SUPERSET OF
+ {0x2284, 0x2285, prN}, // Sm [2] NOT A SUBSET OF..NOT A SUPERSET OF
+ {0x2286, 0x2287, prA}, // Sm [2] SUBSET OF OR EQUAL TO..SUPERSET OF OR EQUAL TO
+ {0x2288, 0x2294, prN}, // Sm [13] NEITHER A SUBSET OF NOR EQUAL TO..SQUARE CUP
+ {0x2295, 0x2295, prA}, // Sm CIRCLED PLUS
+ {0x2296, 0x2298, prN}, // Sm [3] CIRCLED MINUS..CIRCLED DIVISION SLASH
+ {0x2299, 0x2299, prA}, // Sm CIRCLED DOT OPERATOR
+ {0x229A, 0x22A4, prN}, // Sm [11] CIRCLED RING OPERATOR..DOWN TACK
+ {0x22A5, 0x22A5, prA}, // Sm UP TACK
+ {0x22A6, 0x22BE, prN}, // Sm [25] ASSERTION..RIGHT ANGLE WITH ARC
+ {0x22BF, 0x22BF, prA}, // Sm RIGHT TRIANGLE
+ {0x22C0, 0x22FF, prN}, // Sm [64] N-ARY LOGICAL AND..Z NOTATION BAG MEMBERSHIP
+ {0x2300, 0x2307, prN}, // So [8] DIAMETER SIGN..WAVY LINE
+ {0x2308, 0x2308, prN}, // Ps LEFT CEILING
+ {0x2309, 0x2309, prN}, // Pe RIGHT CEILING
+ {0x230A, 0x230A, prN}, // Ps LEFT FLOOR
+ {0x230B, 0x230B, prN}, // Pe RIGHT FLOOR
+ {0x230C, 0x2311, prN}, // So [6] BOTTOM RIGHT CROP..SQUARE LOZENGE
+ {0x2312, 0x2312, prA}, // So ARC
+ {0x2313, 0x2319, prN}, // So [7] SEGMENT..TURNED NOT SIGN
+ {0x231A, 0x231B, prW}, // So [2] WATCH..HOURGLASS
+ {0x231C, 0x231F, prN}, // So [4] TOP LEFT CORNER..BOTTOM RIGHT CORNER
+ {0x2320, 0x2321, prN}, // Sm [2] TOP HALF INTEGRAL..BOTTOM HALF INTEGRAL
+ {0x2322, 0x2328, prN}, // So [7] FROWN..KEYBOARD
+ {0x2329, 0x2329, prW}, // Ps LEFT-POINTING ANGLE BRACKET
+ {0x232A, 0x232A, prW}, // Pe RIGHT-POINTING ANGLE BRACKET
+ {0x232B, 0x237B, prN}, // So [81] ERASE TO THE LEFT..NOT CHECK MARK
+ {0x237C, 0x237C, prN}, // Sm RIGHT ANGLE WITH DOWNWARDS ZIGZAG ARROW
+ {0x237D, 0x239A, prN}, // So [30] SHOULDERED OPEN BOX..CLEAR SCREEN SYMBOL
+ {0x239B, 0x23B3, prN}, // Sm [25] LEFT PARENTHESIS UPPER HOOK..SUMMATION BOTTOM
+ {0x23B4, 0x23DB, prN}, // So [40] TOP SQUARE BRACKET..FUSE
+ {0x23DC, 0x23E1, prN}, // Sm [6] TOP PARENTHESIS..BOTTOM TORTOISE SHELL BRACKET
+ {0x23E2, 0x23E8, prN}, // So [7] WHITE TRAPEZIUM..DECIMAL EXPONENT SYMBOL
+ {0x23E9, 0x23EC, prW}, // So [4] BLACK RIGHT-POINTING DOUBLE TRIANGLE..BLACK DOWN-POINTING DOUBLE TRIANGLE
+ {0x23ED, 0x23EF, prN}, // So [3] BLACK RIGHT-POINTING DOUBLE TRIANGLE WITH VERTICAL BAR..BLACK RIGHT-POINTING TRIANGLE WITH DOUBLE VERTICAL BAR
+ {0x23F0, 0x23F0, prW}, // So ALARM CLOCK
+ {0x23F1, 0x23F2, prN}, // So [2] STOPWATCH..TIMER CLOCK
+ {0x23F3, 0x23F3, prW}, // So HOURGLASS WITH FLOWING SAND
+ {0x23F4, 0x23FF, prN}, // So [12] BLACK MEDIUM LEFT-POINTING TRIANGLE..OBSERVER EYE SYMBOL
+ {0x2400, 0x2426, prN}, // So [39] SYMBOL FOR NULL..SYMBOL FOR SUBSTITUTE FORM TWO
+ {0x2440, 0x244A, prN}, // So [11] OCR HOOK..OCR DOUBLE BACKSLASH
+ {0x2460, 0x249B, prA}, // No [60] CIRCLED DIGIT ONE..NUMBER TWENTY FULL STOP
+ {0x249C, 0x24E9, prA}, // So [78] PARENTHESIZED LATIN SMALL LETTER A..CIRCLED LATIN SMALL LETTER Z
+ {0x24EA, 0x24EA, prN}, // No CIRCLED DIGIT ZERO
+ {0x24EB, 0x24FF, prA}, // No [21] NEGATIVE CIRCLED NUMBER ELEVEN..NEGATIVE CIRCLED DIGIT ZERO
+ {0x2500, 0x254B, prA}, // So [76] BOX DRAWINGS LIGHT HORIZONTAL..BOX DRAWINGS HEAVY VERTICAL AND HORIZONTAL
+ {0x254C, 0x254F, prN}, // So [4] BOX DRAWINGS LIGHT DOUBLE DASH HORIZONTAL..BOX DRAWINGS HEAVY DOUBLE DASH VERTICAL
+ {0x2550, 0x2573, prA}, // So [36] BOX DRAWINGS DOUBLE HORIZONTAL..BOX DRAWINGS LIGHT DIAGONAL CROSS
+ {0x2574, 0x257F, prN}, // So [12] BOX DRAWINGS LIGHT LEFT..BOX DRAWINGS HEAVY UP AND LIGHT DOWN
+ {0x2580, 0x258F, prA}, // So [16] UPPER HALF BLOCK..LEFT ONE EIGHTH BLOCK
+ {0x2590, 0x2591, prN}, // So [2] RIGHT HALF BLOCK..LIGHT SHADE
+ {0x2592, 0x2595, prA}, // So [4] MEDIUM SHADE..RIGHT ONE EIGHTH BLOCK
+ {0x2596, 0x259F, prN}, // So [10] QUADRANT LOWER LEFT..QUADRANT UPPER RIGHT AND LOWER LEFT AND LOWER RIGHT
+ {0x25A0, 0x25A1, prA}, // So [2] BLACK SQUARE..WHITE SQUARE
+ {0x25A2, 0x25A2, prN}, // So WHITE SQUARE WITH ROUNDED CORNERS
+ {0x25A3, 0x25A9, prA}, // So [7] WHITE SQUARE CONTAINING BLACK SMALL SQUARE..SQUARE WITH DIAGONAL CROSSHATCH FILL
+ {0x25AA, 0x25B1, prN}, // So [8] BLACK SMALL SQUARE..WHITE PARALLELOGRAM
+ {0x25B2, 0x25B3, prA}, // So [2] BLACK UP-POINTING TRIANGLE..WHITE UP-POINTING TRIANGLE
+ {0x25B4, 0x25B5, prN}, // So [2] BLACK UP-POINTING SMALL TRIANGLE..WHITE UP-POINTING SMALL TRIANGLE
+ {0x25B6, 0x25B6, prA}, // So BLACK RIGHT-POINTING TRIANGLE
+ {0x25B7, 0x25B7, prA}, // Sm WHITE RIGHT-POINTING TRIANGLE
+ {0x25B8, 0x25BB, prN}, // So [4] BLACK RIGHT-POINTING SMALL TRIANGLE..WHITE RIGHT-POINTING POINTER
+ {0x25BC, 0x25BD, prA}, // So [2] BLACK DOWN-POINTING TRIANGLE..WHITE DOWN-POINTING TRIANGLE
+ {0x25BE, 0x25BF, prN}, // So [2] BLACK DOWN-POINTING SMALL TRIANGLE..WHITE DOWN-POINTING SMALL TRIANGLE
+ {0x25C0, 0x25C0, prA}, // So BLACK LEFT-POINTING TRIANGLE
+ {0x25C1, 0x25C1, prA}, // Sm WHITE LEFT-POINTING TRIANGLE
+ {0x25C2, 0x25C5, prN}, // So [4] BLACK LEFT-POINTING SMALL TRIANGLE..WHITE LEFT-POINTING POINTER
+ {0x25C6, 0x25C8, prA}, // So [3] BLACK DIAMOND..WHITE DIAMOND CONTAINING BLACK SMALL DIAMOND
+ {0x25C9, 0x25CA, prN}, // So [2] FISHEYE..LOZENGE
+ {0x25CB, 0x25CB, prA}, // So WHITE CIRCLE
+ {0x25CC, 0x25CD, prN}, // So [2] DOTTED CIRCLE..CIRCLE WITH VERTICAL FILL
+ {0x25CE, 0x25D1, prA}, // So [4] BULLSEYE..CIRCLE WITH RIGHT HALF BLACK
+ {0x25D2, 0x25E1, prN}, // So [16] CIRCLE WITH LOWER HALF BLACK..LOWER HALF CIRCLE
+ {0x25E2, 0x25E5, prA}, // So [4] BLACK LOWER RIGHT TRIANGLE..BLACK UPPER RIGHT TRIANGLE
+ {0x25E6, 0x25EE, prN}, // So [9] WHITE BULLET..UP-POINTING TRIANGLE WITH RIGHT HALF BLACK
+ {0x25EF, 0x25EF, prA}, // So LARGE CIRCLE
+ {0x25F0, 0x25F7, prN}, // So [8] WHITE SQUARE WITH UPPER LEFT QUADRANT..WHITE CIRCLE WITH UPPER RIGHT QUADRANT
+ {0x25F8, 0x25FC, prN}, // Sm [5] UPPER LEFT TRIANGLE..BLACK MEDIUM SQUARE
+ {0x25FD, 0x25FE, prW}, // Sm [2] WHITE MEDIUM SMALL SQUARE..BLACK MEDIUM SMALL SQUARE
+ {0x25FF, 0x25FF, prN}, // Sm LOWER RIGHT TRIANGLE
+ {0x2600, 0x2604, prN}, // So [5] BLACK SUN WITH RAYS..COMET
+ {0x2605, 0x2606, prA}, // So [2] BLACK STAR..WHITE STAR
+ {0x2607, 0x2608, prN}, // So [2] LIGHTNING..THUNDERSTORM
+ {0x2609, 0x2609, prA}, // So SUN
+ {0x260A, 0x260D, prN}, // So [4] ASCENDING NODE..OPPOSITION
+ {0x260E, 0x260F, prA}, // So [2] BLACK TELEPHONE..WHITE TELEPHONE
+ {0x2610, 0x2613, prN}, // So [4] BALLOT BOX..SALTIRE
+ {0x2614, 0x2615, prW}, // So [2] UMBRELLA WITH RAIN DROPS..HOT BEVERAGE
+ {0x2616, 0x261B, prN}, // So [6] WHITE SHOGI PIECE..BLACK RIGHT POINTING INDEX
+ {0x261C, 0x261C, prA}, // So WHITE LEFT POINTING INDEX
+ {0x261D, 0x261D, prN}, // So WHITE UP POINTING INDEX
+ {0x261E, 0x261E, prA}, // So WHITE RIGHT POINTING INDEX
+ {0x261F, 0x263F, prN}, // So [33] WHITE DOWN POINTING INDEX..MERCURY
+ {0x2640, 0x2640, prA}, // So FEMALE SIGN
+ {0x2641, 0x2641, prN}, // So EARTH
+ {0x2642, 0x2642, prA}, // So MALE SIGN
+ {0x2643, 0x2647, prN}, // So [5] JUPITER..PLUTO
+ {0x2648, 0x2653, prW}, // So [12] ARIES..PISCES
+ {0x2654, 0x265F, prN}, // So [12] WHITE CHESS KING..BLACK CHESS PAWN
+ {0x2660, 0x2661, prA}, // So [2] BLACK SPADE SUIT..WHITE HEART SUIT
+ {0x2662, 0x2662, prN}, // So WHITE DIAMOND SUIT
+ {0x2663, 0x2665, prA}, // So [3] BLACK CLUB SUIT..BLACK HEART SUIT
+ {0x2666, 0x2666, prN}, // So BLACK DIAMOND SUIT
+ {0x2667, 0x266A, prA}, // So [4] WHITE CLUB SUIT..EIGHTH NOTE
+ {0x266B, 0x266B, prN}, // So BEAMED EIGHTH NOTES
+ {0x266C, 0x266D, prA}, // So [2] BEAMED SIXTEENTH NOTES..MUSIC FLAT SIGN
+ {0x266E, 0x266E, prN}, // So MUSIC NATURAL SIGN
+ {0x266F, 0x266F, prA}, // Sm MUSIC SHARP SIGN
+ {0x2670, 0x267E, prN}, // So [15] WEST SYRIAC CROSS..PERMANENT PAPER SIGN
+ {0x267F, 0x267F, prW}, // So WHEELCHAIR SYMBOL
+ {0x2680, 0x2692, prN}, // So [19] DIE FACE-1..HAMMER AND PICK
+ {0x2693, 0x2693, prW}, // So ANCHOR
+ {0x2694, 0x269D, prN}, // So [10] CROSSED SWORDS..OUTLINED WHITE STAR
+ {0x269E, 0x269F, prA}, // So [2] THREE LINES CONVERGING RIGHT..THREE LINES CONVERGING LEFT
+ {0x26A0, 0x26A0, prN}, // So WARNING SIGN
+ {0x26A1, 0x26A1, prW}, // So HIGH VOLTAGE SIGN
+ {0x26A2, 0x26A9, prN}, // So [8] DOUBLED FEMALE SIGN..HORIZONTAL MALE WITH STROKE SIGN
+ {0x26AA, 0x26AB, prW}, // So [2] MEDIUM WHITE CIRCLE..MEDIUM BLACK CIRCLE
+ {0x26AC, 0x26BC, prN}, // So [17] MEDIUM SMALL WHITE CIRCLE..SESQUIQUADRATE
+ {0x26BD, 0x26BE, prW}, // So [2] SOCCER BALL..BASEBALL
+ {0x26BF, 0x26BF, prA}, // So SQUARED KEY
+ {0x26C0, 0x26C3, prN}, // So [4] WHITE DRAUGHTS MAN..BLACK DRAUGHTS KING
+ {0x26C4, 0x26C5, prW}, // So [2] SNOWMAN WITHOUT SNOW..SUN BEHIND CLOUD
+ {0x26C6, 0x26CD, prA}, // So [8] RAIN..DISABLED CAR
+ {0x26CE, 0x26CE, prW}, // So OPHIUCHUS
+ {0x26CF, 0x26D3, prA}, // So [5] PICK..CHAINS
+ {0x26D4, 0x26D4, prW}, // So NO ENTRY
+ {0x26D5, 0x26E1, prA}, // So [13] ALTERNATE ONE-WAY LEFT WAY TRAFFIC..RESTRICTED LEFT ENTRY-2
+ {0x26E2, 0x26E2, prN}, // So ASTRONOMICAL SYMBOL FOR URANUS
+ {0x26E3, 0x26E3, prA}, // So HEAVY CIRCLE WITH STROKE AND TWO DOTS ABOVE
+ {0x26E4, 0x26E7, prN}, // So [4] PENTAGRAM..INVERTED PENTAGRAM
+ {0x26E8, 0x26E9, prA}, // So [2] BLACK CROSS ON SHIELD..SHINTO SHRINE
+ {0x26EA, 0x26EA, prW}, // So CHURCH
+ {0x26EB, 0x26F1, prA}, // So [7] CASTLE..UMBRELLA ON GROUND
+ {0x26F2, 0x26F3, prW}, // So [2] FOUNTAIN..FLAG IN HOLE
+ {0x26F4, 0x26F4, prA}, // So FERRY
+ {0x26F5, 0x26F5, prW}, // So SAILBOAT
+ {0x26F6, 0x26F9, prA}, // So [4] SQUARE FOUR CORNERS..PERSON WITH BALL
+ {0x26FA, 0x26FA, prW}, // So TENT
+ {0x26FB, 0x26FC, prA}, // So [2] JAPANESE BANK SYMBOL..HEADSTONE GRAVEYARD SYMBOL
+ {0x26FD, 0x26FD, prW}, // So FUEL PUMP
+ {0x26FE, 0x26FF, prA}, // So [2] CUP ON BLACK SQUARE..WHITE FLAG WITH HORIZONTAL MIDDLE BLACK STRIPE
+ {0x2700, 0x2704, prN}, // So [5] BLACK SAFETY SCISSORS..WHITE SCISSORS
+ {0x2705, 0x2705, prW}, // So WHITE HEAVY CHECK MARK
+ {0x2706, 0x2709, prN}, // So [4] TELEPHONE LOCATION SIGN..ENVELOPE
+ {0x270A, 0x270B, prW}, // So [2] RAISED FIST..RAISED HAND
+ {0x270C, 0x2727, prN}, // So [28] VICTORY HAND..WHITE FOUR POINTED STAR
+ {0x2728, 0x2728, prW}, // So SPARKLES
+ {0x2729, 0x273C, prN}, // So [20] STRESS OUTLINED WHITE STAR..OPEN CENTRE TEARDROP-SPOKED ASTERISK
+ {0x273D, 0x273D, prA}, // So HEAVY TEARDROP-SPOKED ASTERISK
+ {0x273E, 0x274B, prN}, // So [14] SIX PETALLED BLACK AND WHITE FLORETTE..HEAVY EIGHT TEARDROP-SPOKED PROPELLER ASTERISK
+ {0x274C, 0x274C, prW}, // So CROSS MARK
+ {0x274D, 0x274D, prN}, // So SHADOWED WHITE CIRCLE
+ {0x274E, 0x274E, prW}, // So NEGATIVE SQUARED CROSS MARK
+ {0x274F, 0x2752, prN}, // So [4] LOWER RIGHT DROP-SHADOWED WHITE SQUARE..UPPER RIGHT SHADOWED WHITE SQUARE
+ {0x2753, 0x2755, prW}, // So [3] BLACK QUESTION MARK ORNAMENT..WHITE EXCLAMATION MARK ORNAMENT
+ {0x2756, 0x2756, prN}, // So BLACK DIAMOND MINUS WHITE X
+ {0x2757, 0x2757, prW}, // So HEAVY EXCLAMATION MARK SYMBOL
+ {0x2758, 0x2767, prN}, // So [16] LIGHT VERTICAL BAR..ROTATED FLORAL HEART BULLET
+ {0x2768, 0x2768, prN}, // Ps MEDIUM LEFT PARENTHESIS ORNAMENT
+ {0x2769, 0x2769, prN}, // Pe MEDIUM RIGHT PARENTHESIS ORNAMENT
+ {0x276A, 0x276A, prN}, // Ps MEDIUM FLATTENED LEFT PARENTHESIS ORNAMENT
+ {0x276B, 0x276B, prN}, // Pe MEDIUM FLATTENED RIGHT PARENTHESIS ORNAMENT
+ {0x276C, 0x276C, prN}, // Ps MEDIUM LEFT-POINTING ANGLE BRACKET ORNAMENT
+ {0x276D, 0x276D, prN}, // Pe MEDIUM RIGHT-POINTING ANGLE BRACKET ORNAMENT
+ {0x276E, 0x276E, prN}, // Ps HEAVY LEFT-POINTING ANGLE QUOTATION MARK ORNAMENT
+ {0x276F, 0x276F, prN}, // Pe HEAVY RIGHT-POINTING ANGLE QUOTATION MARK ORNAMENT
+ {0x2770, 0x2770, prN}, // Ps HEAVY LEFT-POINTING ANGLE BRACKET ORNAMENT
+ {0x2771, 0x2771, prN}, // Pe HEAVY RIGHT-POINTING ANGLE BRACKET ORNAMENT
+ {0x2772, 0x2772, prN}, // Ps LIGHT LEFT TORTOISE SHELL BRACKET ORNAMENT
+ {0x2773, 0x2773, prN}, // Pe LIGHT RIGHT TORTOISE SHELL BRACKET ORNAMENT
+ {0x2774, 0x2774, prN}, // Ps MEDIUM LEFT CURLY BRACKET ORNAMENT
+ {0x2775, 0x2775, prN}, // Pe MEDIUM RIGHT CURLY BRACKET ORNAMENT
+ {0x2776, 0x277F, prA}, // No [10] DINGBAT NEGATIVE CIRCLED DIGIT ONE..DINGBAT NEGATIVE CIRCLED NUMBER TEN
+ {0x2780, 0x2793, prN}, // No [20] DINGBAT CIRCLED SANS-SERIF DIGIT ONE..DINGBAT NEGATIVE CIRCLED SANS-SERIF NUMBER TEN
+ {0x2794, 0x2794, prN}, // So HEAVY WIDE-HEADED RIGHTWARDS ARROW
+ {0x2795, 0x2797, prW}, // So [3] HEAVY PLUS SIGN..HEAVY DIVISION SIGN
+ {0x2798, 0x27AF, prN}, // So [24] HEAVY SOUTH EAST ARROW..NOTCHED LOWER RIGHT-SHADOWED WHITE RIGHTWARDS ARROW
+ {0x27B0, 0x27B0, prW}, // So CURLY LOOP
+ {0x27B1, 0x27BE, prN}, // So [14] NOTCHED UPPER RIGHT-SHADOWED WHITE RIGHTWARDS ARROW..OPEN-OUTLINED RIGHTWARDS ARROW
+ {0x27BF, 0x27BF, prW}, // So DOUBLE CURLY LOOP
+ {0x27C0, 0x27C4, prN}, // Sm [5] THREE DIMENSIONAL ANGLE..OPEN SUPERSET
+ {0x27C5, 0x27C5, prN}, // Ps LEFT S-SHAPED BAG DELIMITER
+ {0x27C6, 0x27C6, prN}, // Pe RIGHT S-SHAPED BAG DELIMITER
+ {0x27C7, 0x27E5, prN}, // Sm [31] OR WITH DOT INSIDE..WHITE SQUARE WITH RIGHTWARDS TICK
+ {0x27E6, 0x27E6, prNa}, // Ps MATHEMATICAL LEFT WHITE SQUARE BRACKET
+ {0x27E7, 0x27E7, prNa}, // Pe MATHEMATICAL RIGHT WHITE SQUARE BRACKET
+ {0x27E8, 0x27E8, prNa}, // Ps MATHEMATICAL LEFT ANGLE BRACKET
+ {0x27E9, 0x27E9, prNa}, // Pe MATHEMATICAL RIGHT ANGLE BRACKET
+ {0x27EA, 0x27EA, prNa}, // Ps MATHEMATICAL LEFT DOUBLE ANGLE BRACKET
+ {0x27EB, 0x27EB, prNa}, // Pe MATHEMATICAL RIGHT DOUBLE ANGLE BRACKET
+ {0x27EC, 0x27EC, prNa}, // Ps MATHEMATICAL LEFT WHITE TORTOISE SHELL BRACKET
+ {0x27ED, 0x27ED, prNa}, // Pe MATHEMATICAL RIGHT WHITE TORTOISE SHELL BRACKET
+ {0x27EE, 0x27EE, prN}, // Ps MATHEMATICAL LEFT FLATTENED PARENTHESIS
+ {0x27EF, 0x27EF, prN}, // Pe MATHEMATICAL RIGHT FLATTENED PARENTHESIS
+ {0x27F0, 0x27FF, prN}, // Sm [16] UPWARDS QUADRUPLE ARROW..LONG RIGHTWARDS SQUIGGLE ARROW
+ {0x2800, 0x28FF, prN}, // So [256] BRAILLE PATTERN BLANK..BRAILLE PATTERN DOTS-12345678
+ {0x2900, 0x297F, prN}, // Sm [128] RIGHTWARDS TWO-HEADED ARROW WITH VERTICAL STROKE..DOWN FISH TAIL
+ {0x2980, 0x2982, prN}, // Sm [3] TRIPLE VERTICAL BAR DELIMITER..Z NOTATION TYPE COLON
+ {0x2983, 0x2983, prN}, // Ps LEFT WHITE CURLY BRACKET
+ {0x2984, 0x2984, prN}, // Pe RIGHT WHITE CURLY BRACKET
+ {0x2985, 0x2985, prNa}, // Ps LEFT WHITE PARENTHESIS
+ {0x2986, 0x2986, prNa}, // Pe RIGHT WHITE PARENTHESIS
+ {0x2987, 0x2987, prN}, // Ps Z NOTATION LEFT IMAGE BRACKET
+ {0x2988, 0x2988, prN}, // Pe Z NOTATION RIGHT IMAGE BRACKET
+ {0x2989, 0x2989, prN}, // Ps Z NOTATION LEFT BINDING BRACKET
+ {0x298A, 0x298A, prN}, // Pe Z NOTATION RIGHT BINDING BRACKET
+ {0x298B, 0x298B, prN}, // Ps LEFT SQUARE BRACKET WITH UNDERBAR
+ {0x298C, 0x298C, prN}, // Pe RIGHT SQUARE BRACKET WITH UNDERBAR
+ {0x298D, 0x298D, prN}, // Ps LEFT SQUARE BRACKET WITH TICK IN TOP CORNER
+ {0x298E, 0x298E, prN}, // Pe RIGHT SQUARE BRACKET WITH TICK IN BOTTOM CORNER
+ {0x298F, 0x298F, prN}, // Ps LEFT SQUARE BRACKET WITH TICK IN BOTTOM CORNER
+ {0x2990, 0x2990, prN}, // Pe RIGHT SQUARE BRACKET WITH TICK IN TOP CORNER
+ {0x2991, 0x2991, prN}, // Ps LEFT ANGLE BRACKET WITH DOT
+ {0x2992, 0x2992, prN}, // Pe RIGHT ANGLE BRACKET WITH DOT
+ {0x2993, 0x2993, prN}, // Ps LEFT ARC LESS-THAN BRACKET
+ {0x2994, 0x2994, prN}, // Pe RIGHT ARC GREATER-THAN BRACKET
+ {0x2995, 0x2995, prN}, // Ps DOUBLE LEFT ARC GREATER-THAN BRACKET
+ {0x2996, 0x2996, prN}, // Pe DOUBLE RIGHT ARC LESS-THAN BRACKET
+ {0x2997, 0x2997, prN}, // Ps LEFT BLACK TORTOISE SHELL BRACKET
+ {0x2998, 0x2998, prN}, // Pe RIGHT BLACK TORTOISE SHELL BRACKET
+ {0x2999, 0x29D7, prN}, // Sm [63] DOTTED FENCE..BLACK HOURGLASS
+ {0x29D8, 0x29D8, prN}, // Ps LEFT WIGGLY FENCE
+ {0x29D9, 0x29D9, prN}, // Pe RIGHT WIGGLY FENCE
+ {0x29DA, 0x29DA, prN}, // Ps LEFT DOUBLE WIGGLY FENCE
+ {0x29DB, 0x29DB, prN}, // Pe RIGHT DOUBLE WIGGLY FENCE
+ {0x29DC, 0x29FB, prN}, // Sm [32] INCOMPLETE INFINITY..TRIPLE PLUS
+ {0x29FC, 0x29FC, prN}, // Ps LEFT-POINTING CURVED ANGLE BRACKET
+ {0x29FD, 0x29FD, prN}, // Pe RIGHT-POINTING CURVED ANGLE BRACKET
+ {0x29FE, 0x29FF, prN}, // Sm [2] TINY..MINY
+ {0x2A00, 0x2AFF, prN}, // Sm [256] N-ARY CIRCLED DOT OPERATOR..N-ARY WHITE VERTICAL BAR
+ {0x2B00, 0x2B1A, prN}, // So [27] NORTH EAST WHITE ARROW..DOTTED SQUARE
+ {0x2B1B, 0x2B1C, prW}, // So [2] BLACK LARGE SQUARE..WHITE LARGE SQUARE
+ {0x2B1D, 0x2B2F, prN}, // So [19] BLACK VERY SMALL SQUARE..WHITE VERTICAL ELLIPSE
+ {0x2B30, 0x2B44, prN}, // Sm [21] LEFT ARROW WITH SMALL CIRCLE..RIGHTWARDS ARROW THROUGH SUPERSET
+ {0x2B45, 0x2B46, prN}, // So [2] LEFTWARDS QUADRUPLE ARROW..RIGHTWARDS QUADRUPLE ARROW
+ {0x2B47, 0x2B4C, prN}, // Sm [6] REVERSE TILDE OPERATOR ABOVE RIGHTWARDS ARROW..RIGHTWARDS ARROW ABOVE REVERSE TILDE OPERATOR
+ {0x2B4D, 0x2B4F, prN}, // So [3] DOWNWARDS TRIANGLE-HEADED ZIGZAG ARROW..SHORT BACKSLANTED SOUTH ARROW
+ {0x2B50, 0x2B50, prW}, // So WHITE MEDIUM STAR
+ {0x2B51, 0x2B54, prN}, // So [4] BLACK SMALL STAR..WHITE RIGHT-POINTING PENTAGON
+ {0x2B55, 0x2B55, prW}, // So HEAVY LARGE CIRCLE
+ {0x2B56, 0x2B59, prA}, // So [4] HEAVY OVAL WITH OVAL INSIDE..HEAVY CIRCLED SALTIRE
+ {0x2B5A, 0x2B73, prN}, // So [26] SLANTED NORTH ARROW WITH HOOKED HEAD..DOWNWARDS TRIANGLE-HEADED ARROW TO BAR
+ {0x2B76, 0x2B95, prN}, // So [32] NORTH WEST TRIANGLE-HEADED ARROW TO BAR..RIGHTWARDS BLACK ARROW
+ {0x2B97, 0x2BFF, prN}, // So [105] SYMBOL FOR TYPE A ELECTRONICS..HELLSCHREIBER PAUSE SYMBOL
+ {0x2C00, 0x2C5F, prN}, // L& [96] GLAGOLITIC CAPITAL LETTER AZU..GLAGOLITIC SMALL LETTER CAUDATE CHRIVI
+ {0x2C60, 0x2C7B, prN}, // L& [28] LATIN CAPITAL LETTER L WITH DOUBLE BAR..LATIN LETTER SMALL CAPITAL TURNED E
+ {0x2C7C, 0x2C7D, prN}, // Lm [2] LATIN SUBSCRIPT SMALL LETTER J..MODIFIER LETTER CAPITAL V
+ {0x2C7E, 0x2C7F, prN}, // Lu [2] LATIN CAPITAL LETTER S WITH SWASH TAIL..LATIN CAPITAL LETTER Z WITH SWASH TAIL
+ {0x2C80, 0x2CE4, prN}, // L& [101] COPTIC CAPITAL LETTER ALFA..COPTIC SYMBOL KAI
+ {0x2CE5, 0x2CEA, prN}, // So [6] COPTIC SYMBOL MI RO..COPTIC SYMBOL SHIMA SIMA
+ {0x2CEB, 0x2CEE, prN}, // L& [4] COPTIC CAPITAL LETTER CRYPTOGRAMMIC SHEI..COPTIC SMALL LETTER CRYPTOGRAMMIC GANGIA
+ {0x2CEF, 0x2CF1, prN}, // Mn [3] COPTIC COMBINING NI ABOVE..COPTIC COMBINING SPIRITUS LENIS
+ {0x2CF2, 0x2CF3, prN}, // L& [2] COPTIC CAPITAL LETTER BOHAIRIC KHEI..COPTIC SMALL LETTER BOHAIRIC KHEI
+ {0x2CF9, 0x2CFC, prN}, // Po [4] COPTIC OLD NUBIAN FULL STOP..COPTIC OLD NUBIAN VERSE DIVIDER
+ {0x2CFD, 0x2CFD, prN}, // No COPTIC FRACTION ONE HALF
+ {0x2CFE, 0x2CFF, prN}, // Po [2] COPTIC FULL STOP..COPTIC MORPHOLOGICAL DIVIDER
+ {0x2D00, 0x2D25, prN}, // Ll [38] GEORGIAN SMALL LETTER AN..GEORGIAN SMALL LETTER HOE
+ {0x2D27, 0x2D27, prN}, // Ll GEORGIAN SMALL LETTER YN
+ {0x2D2D, 0x2D2D, prN}, // Ll GEORGIAN SMALL LETTER AEN
+ {0x2D30, 0x2D67, prN}, // Lo [56] TIFINAGH LETTER YA..TIFINAGH LETTER YO
+ {0x2D6F, 0x2D6F, prN}, // Lm TIFINAGH MODIFIER LETTER LABIALIZATION MARK
+ {0x2D70, 0x2D70, prN}, // Po TIFINAGH SEPARATOR MARK
+ {0x2D7F, 0x2D7F, prN}, // Mn TIFINAGH CONSONANT JOINER
+ {0x2D80, 0x2D96, prN}, // Lo [23] ETHIOPIC SYLLABLE LOA..ETHIOPIC SYLLABLE GGWE
+ {0x2DA0, 0x2DA6, prN}, // Lo [7] ETHIOPIC SYLLABLE SSA..ETHIOPIC SYLLABLE SSO
+ {0x2DA8, 0x2DAE, prN}, // Lo [7] ETHIOPIC SYLLABLE CCA..ETHIOPIC SYLLABLE CCO
+ {0x2DB0, 0x2DB6, prN}, // Lo [7] ETHIOPIC SYLLABLE ZZA..ETHIOPIC SYLLABLE ZZO
+ {0x2DB8, 0x2DBE, prN}, // Lo [7] ETHIOPIC SYLLABLE CCHA..ETHIOPIC SYLLABLE CCHO
+ {0x2DC0, 0x2DC6, prN}, // Lo [7] ETHIOPIC SYLLABLE QYA..ETHIOPIC SYLLABLE QYO
+ {0x2DC8, 0x2DCE, prN}, // Lo [7] ETHIOPIC SYLLABLE KYA..ETHIOPIC SYLLABLE KYO
+ {0x2DD0, 0x2DD6, prN}, // Lo [7] ETHIOPIC SYLLABLE XYA..ETHIOPIC SYLLABLE XYO
+ {0x2DD8, 0x2DDE, prN}, // Lo [7] ETHIOPIC SYLLABLE GYA..ETHIOPIC SYLLABLE GYO
+ {0x2DE0, 0x2DFF, prN}, // Mn [32] COMBINING CYRILLIC LETTER BE..COMBINING CYRILLIC LETTER IOTIFIED BIG YUS
+ {0x2E00, 0x2E01, prN}, // Po [2] RIGHT ANGLE SUBSTITUTION MARKER..RIGHT ANGLE DOTTED SUBSTITUTION MARKER
+ {0x2E02, 0x2E02, prN}, // Pi LEFT SUBSTITUTION BRACKET
+ {0x2E03, 0x2E03, prN}, // Pf RIGHT SUBSTITUTION BRACKET
+ {0x2E04, 0x2E04, prN}, // Pi LEFT DOTTED SUBSTITUTION BRACKET
+ {0x2E05, 0x2E05, prN}, // Pf RIGHT DOTTED SUBSTITUTION BRACKET
+ {0x2E06, 0x2E08, prN}, // Po [3] RAISED INTERPOLATION MARKER..DOTTED TRANSPOSITION MARKER
+ {0x2E09, 0x2E09, prN}, // Pi LEFT TRANSPOSITION BRACKET
+ {0x2E0A, 0x2E0A, prN}, // Pf RIGHT TRANSPOSITION BRACKET
+ {0x2E0B, 0x2E0B, prN}, // Po RAISED SQUARE
+ {0x2E0C, 0x2E0C, prN}, // Pi LEFT RAISED OMISSION BRACKET
+ {0x2E0D, 0x2E0D, prN}, // Pf RIGHT RAISED OMISSION BRACKET
+ {0x2E0E, 0x2E16, prN}, // Po [9] EDITORIAL CORONIS..DOTTED RIGHT-POINTING ANGLE
+ {0x2E17, 0x2E17, prN}, // Pd DOUBLE OBLIQUE HYPHEN
+ {0x2E18, 0x2E19, prN}, // Po [2] INVERTED INTERROBANG..PALM BRANCH
+ {0x2E1A, 0x2E1A, prN}, // Pd HYPHEN WITH DIAERESIS
+ {0x2E1B, 0x2E1B, prN}, // Po TILDE WITH RING ABOVE
+ {0x2E1C, 0x2E1C, prN}, // Pi LEFT LOW PARAPHRASE BRACKET
+ {0x2E1D, 0x2E1D, prN}, // Pf RIGHT LOW PARAPHRASE BRACKET
+ {0x2E1E, 0x2E1F, prN}, // Po [2] TILDE WITH DOT ABOVE..TILDE WITH DOT BELOW
+ {0x2E20, 0x2E20, prN}, // Pi LEFT VERTICAL BAR WITH QUILL
+ {0x2E21, 0x2E21, prN}, // Pf RIGHT VERTICAL BAR WITH QUILL
+ {0x2E22, 0x2E22, prN}, // Ps TOP LEFT HALF BRACKET
+ {0x2E23, 0x2E23, prN}, // Pe TOP RIGHT HALF BRACKET
+ {0x2E24, 0x2E24, prN}, // Ps BOTTOM LEFT HALF BRACKET
+ {0x2E25, 0x2E25, prN}, // Pe BOTTOM RIGHT HALF BRACKET
+ {0x2E26, 0x2E26, prN}, // Ps LEFT SIDEWAYS U BRACKET
+ {0x2E27, 0x2E27, prN}, // Pe RIGHT SIDEWAYS U BRACKET
+ {0x2E28, 0x2E28, prN}, // Ps LEFT DOUBLE PARENTHESIS
+ {0x2E29, 0x2E29, prN}, // Pe RIGHT DOUBLE PARENTHESIS
+ {0x2E2A, 0x2E2E, prN}, // Po [5] TWO DOTS OVER ONE DOT PUNCTUATION..REVERSED QUESTION MARK
+ {0x2E2F, 0x2E2F, prN}, // Lm VERTICAL TILDE
+ {0x2E30, 0x2E39, prN}, // Po [10] RING POINT..TOP HALF SECTION SIGN
+ {0x2E3A, 0x2E3B, prN}, // Pd [2] TWO-EM DASH..THREE-EM DASH
+ {0x2E3C, 0x2E3F, prN}, // Po [4] STENOGRAPHIC FULL STOP..CAPITULUM
+ {0x2E40, 0x2E40, prN}, // Pd DOUBLE HYPHEN
+ {0x2E41, 0x2E41, prN}, // Po REVERSED COMMA
+ {0x2E42, 0x2E42, prN}, // Ps DOUBLE LOW-REVERSED-9 QUOTATION MARK
+ {0x2E43, 0x2E4F, prN}, // Po [13] DASH WITH LEFT UPTURN..CORNISH VERSE DIVIDER
+ {0x2E50, 0x2E51, prN}, // So [2] CROSS PATTY WITH RIGHT CROSSBAR..CROSS PATTY WITH LEFT CROSSBAR
+ {0x2E52, 0x2E54, prN}, // Po [3] TIRONIAN SIGN CAPITAL ET..MEDIEVAL QUESTION MARK
+ {0x2E55, 0x2E55, prN}, // Ps LEFT SQUARE BRACKET WITH STROKE
+ {0x2E56, 0x2E56, prN}, // Pe RIGHT SQUARE BRACKET WITH STROKE
+ {0x2E57, 0x2E57, prN}, // Ps LEFT SQUARE BRACKET WITH DOUBLE STROKE
+ {0x2E58, 0x2E58, prN}, // Pe RIGHT SQUARE BRACKET WITH DOUBLE STROKE
+ {0x2E59, 0x2E59, prN}, // Ps TOP HALF LEFT PARENTHESIS
+ {0x2E5A, 0x2E5A, prN}, // Pe TOP HALF RIGHT PARENTHESIS
+ {0x2E5B, 0x2E5B, prN}, // Ps BOTTOM HALF LEFT PARENTHESIS
+ {0x2E5C, 0x2E5C, prN}, // Pe BOTTOM HALF RIGHT PARENTHESIS
+ {0x2E5D, 0x2E5D, prN}, // Pd OBLIQUE HYPHEN
+ {0x2E80, 0x2E99, prW}, // So [26] CJK RADICAL REPEAT..CJK RADICAL RAP
+ {0x2E9B, 0x2EF3, prW}, // So [89] CJK RADICAL CHOKE..CJK RADICAL C-SIMPLIFIED TURTLE
+ {0x2F00, 0x2FD5, prW}, // So [214] KANGXI RADICAL ONE..KANGXI RADICAL FLUTE
+ {0x2FF0, 0x2FFB, prW}, // So [12] IDEOGRAPHIC DESCRIPTION CHARACTER LEFT TO RIGHT..IDEOGRAPHIC DESCRIPTION CHARACTER OVERLAID
+ {0x3000, 0x3000, prF}, // Zs IDEOGRAPHIC SPACE
+ {0x3001, 0x3003, prW}, // Po [3] IDEOGRAPHIC COMMA..DITTO MARK
+ {0x3004, 0x3004, prW}, // So JAPANESE INDUSTRIAL STANDARD SYMBOL
+ {0x3005, 0x3005, prW}, // Lm IDEOGRAPHIC ITERATION MARK
+ {0x3006, 0x3006, prW}, // Lo IDEOGRAPHIC CLOSING MARK
+ {0x3007, 0x3007, prW}, // Nl IDEOGRAPHIC NUMBER ZERO
+ {0x3008, 0x3008, prW}, // Ps LEFT ANGLE BRACKET
+ {0x3009, 0x3009, prW}, // Pe RIGHT ANGLE BRACKET
+ {0x300A, 0x300A, prW}, // Ps LEFT DOUBLE ANGLE BRACKET
+ {0x300B, 0x300B, prW}, // Pe RIGHT DOUBLE ANGLE BRACKET
+ {0x300C, 0x300C, prW}, // Ps LEFT CORNER BRACKET
+ {0x300D, 0x300D, prW}, // Pe RIGHT CORNER BRACKET
+ {0x300E, 0x300E, prW}, // Ps LEFT WHITE CORNER BRACKET
+ {0x300F, 0x300F, prW}, // Pe RIGHT WHITE CORNER BRACKET
+ {0x3010, 0x3010, prW}, // Ps LEFT BLACK LENTICULAR BRACKET
+ {0x3011, 0x3011, prW}, // Pe RIGHT BLACK LENTICULAR BRACKET
+ {0x3012, 0x3013, prW}, // So [2] POSTAL MARK..GETA MARK
+ {0x3014, 0x3014, prW}, // Ps LEFT TORTOISE SHELL BRACKET
+ {0x3015, 0x3015, prW}, // Pe RIGHT TORTOISE SHELL BRACKET
+ {0x3016, 0x3016, prW}, // Ps LEFT WHITE LENTICULAR BRACKET
+ {0x3017, 0x3017, prW}, // Pe RIGHT WHITE LENTICULAR BRACKET
+ {0x3018, 0x3018, prW}, // Ps LEFT WHITE TORTOISE SHELL BRACKET
+ {0x3019, 0x3019, prW}, // Pe RIGHT WHITE TORTOISE SHELL BRACKET
+ {0x301A, 0x301A, prW}, // Ps LEFT WHITE SQUARE BRACKET
+ {0x301B, 0x301B, prW}, // Pe RIGHT WHITE SQUARE BRACKET
+ {0x301C, 0x301C, prW}, // Pd WAVE DASH
+ {0x301D, 0x301D, prW}, // Ps REVERSED DOUBLE PRIME QUOTATION MARK
+ {0x301E, 0x301F, prW}, // Pe [2] DOUBLE PRIME QUOTATION MARK..LOW DOUBLE PRIME QUOTATION MARK
+ {0x3020, 0x3020, prW}, // So POSTAL MARK FACE
+ {0x3021, 0x3029, prW}, // Nl [9] HANGZHOU NUMERAL ONE..HANGZHOU NUMERAL NINE
+ {0x302A, 0x302D, prW}, // Mn [4] IDEOGRAPHIC LEVEL TONE MARK..IDEOGRAPHIC ENTERING TONE MARK
+ {0x302E, 0x302F, prW}, // Mc [2] HANGUL SINGLE DOT TONE MARK..HANGUL DOUBLE DOT TONE MARK
+ {0x3030, 0x3030, prW}, // Pd WAVY DASH
+ {0x3031, 0x3035, prW}, // Lm [5] VERTICAL KANA REPEAT MARK..VERTICAL KANA REPEAT MARK LOWER HALF
+ {0x3036, 0x3037, prW}, // So [2] CIRCLED POSTAL MARK..IDEOGRAPHIC TELEGRAPH LINE FEED SEPARATOR SYMBOL
+ {0x3038, 0x303A, prW}, // Nl [3] HANGZHOU NUMERAL TEN..HANGZHOU NUMERAL THIRTY
+ {0x303B, 0x303B, prW}, // Lm VERTICAL IDEOGRAPHIC ITERATION MARK
+ {0x303C, 0x303C, prW}, // Lo MASU MARK
+ {0x303D, 0x303D, prW}, // Po PART ALTERNATION MARK
+ {0x303E, 0x303E, prW}, // So IDEOGRAPHIC VARIATION INDICATOR
+ {0x303F, 0x303F, prN}, // So IDEOGRAPHIC HALF FILL SPACE
+ {0x3041, 0x3096, prW}, // Lo [86] HIRAGANA LETTER SMALL A..HIRAGANA LETTER SMALL KE
+ {0x3099, 0x309A, prW}, // Mn [2] COMBINING KATAKANA-HIRAGANA VOICED SOUND MARK..COMBINING KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK
+ {0x309B, 0x309C, prW}, // Sk [2] KATAKANA-HIRAGANA VOICED SOUND MARK..KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK
+ {0x309D, 0x309E, prW}, // Lm [2] HIRAGANA ITERATION MARK..HIRAGANA VOICED ITERATION MARK
+ {0x309F, 0x309F, prW}, // Lo HIRAGANA DIGRAPH YORI
+ {0x30A0, 0x30A0, prW}, // Pd KATAKANA-HIRAGANA DOUBLE HYPHEN
+ {0x30A1, 0x30FA, prW}, // Lo [90] KATAKANA LETTER SMALL A..KATAKANA LETTER VO
+ {0x30FB, 0x30FB, prW}, // Po KATAKANA MIDDLE DOT
+ {0x30FC, 0x30FE, prW}, // Lm [3] KATAKANA-HIRAGANA PROLONGED SOUND MARK..KATAKANA VOICED ITERATION MARK
+ {0x30FF, 0x30FF, prW}, // Lo KATAKANA DIGRAPH KOTO
+ {0x3105, 0x312F, prW}, // Lo [43] BOPOMOFO LETTER B..BOPOMOFO LETTER NN
+ {0x3131, 0x318E, prW}, // Lo [94] HANGUL LETTER KIYEOK..HANGUL LETTER ARAEAE
+ {0x3190, 0x3191, prW}, // So [2] IDEOGRAPHIC ANNOTATION LINKING MARK..IDEOGRAPHIC ANNOTATION REVERSE MARK
+ {0x3192, 0x3195, prW}, // No [4] IDEOGRAPHIC ANNOTATION ONE MARK..IDEOGRAPHIC ANNOTATION FOUR MARK
+ {0x3196, 0x319F, prW}, // So [10] IDEOGRAPHIC ANNOTATION TOP MARK..IDEOGRAPHIC ANNOTATION MAN MARK
+ {0x31A0, 0x31BF, prW}, // Lo [32] BOPOMOFO LETTER BU..BOPOMOFO LETTER AH
+ {0x31C0, 0x31E3, prW}, // So [36] CJK STROKE T..CJK STROKE Q
+ {0x31F0, 0x31FF, prW}, // Lo [16] KATAKANA LETTER SMALL KU..KATAKANA LETTER SMALL RO
+ {0x3200, 0x321E, prW}, // So [31] PARENTHESIZED HANGUL KIYEOK..PARENTHESIZED KOREAN CHARACTER O HU
+ {0x3220, 0x3229, prW}, // No [10] PARENTHESIZED IDEOGRAPH ONE..PARENTHESIZED IDEOGRAPH TEN
+ {0x322A, 0x3247, prW}, // So [30] PARENTHESIZED IDEOGRAPH MOON..CIRCLED IDEOGRAPH KOTO
+ {0x3248, 0x324F, prA}, // No [8] CIRCLED NUMBER TEN ON BLACK SQUARE..CIRCLED NUMBER EIGHTY ON BLACK SQUARE
+ {0x3250, 0x3250, prW}, // So PARTNERSHIP SIGN
+ {0x3251, 0x325F, prW}, // No [15] CIRCLED NUMBER TWENTY ONE..CIRCLED NUMBER THIRTY FIVE
+ {0x3260, 0x327F, prW}, // So [32] CIRCLED HANGUL KIYEOK..KOREAN STANDARD SYMBOL
+ {0x3280, 0x3289, prW}, // No [10] CIRCLED IDEOGRAPH ONE..CIRCLED IDEOGRAPH TEN
+ {0x328A, 0x32B0, prW}, // So [39] CIRCLED IDEOGRAPH MOON..CIRCLED IDEOGRAPH NIGHT
+ {0x32B1, 0x32BF, prW}, // No [15] CIRCLED NUMBER THIRTY SIX..CIRCLED NUMBER FIFTY
+ {0x32C0, 0x32FF, prW}, // So [64] IDEOGRAPHIC TELEGRAPH SYMBOL FOR JANUARY..SQUARE ERA NAME REIWA
+ {0x3300, 0x33FF, prW}, // So [256] SQUARE APAATO..SQUARE GAL
+ {0x3400, 0x4DBF, prW}, // Lo [6592] CJK UNIFIED IDEOGRAPH-3400..CJK UNIFIED IDEOGRAPH-4DBF
+ {0x4DC0, 0x4DFF, prN}, // So [64] HEXAGRAM FOR THE CREATIVE HEAVEN..HEXAGRAM FOR BEFORE COMPLETION
+ {0x4E00, 0x9FFF, prW}, // Lo [20992] CJK UNIFIED IDEOGRAPH-4E00..CJK UNIFIED IDEOGRAPH-9FFF
+ {0xA000, 0xA014, prW}, // Lo [21] YI SYLLABLE IT..YI SYLLABLE E
+ {0xA015, 0xA015, prW}, // Lm YI SYLLABLE WU
+ {0xA016, 0xA48C, prW}, // Lo [1143] YI SYLLABLE BIT..YI SYLLABLE YYR
+ {0xA490, 0xA4C6, prW}, // So [55] YI RADICAL QOT..YI RADICAL KE
+ {0xA4D0, 0xA4F7, prN}, // Lo [40] LISU LETTER BA..LISU LETTER OE
+ {0xA4F8, 0xA4FD, prN}, // Lm [6] LISU LETTER TONE MYA TI..LISU LETTER TONE MYA JEU
+ {0xA4FE, 0xA4FF, prN}, // Po [2] LISU PUNCTUATION COMMA..LISU PUNCTUATION FULL STOP
+ {0xA500, 0xA60B, prN}, // Lo [268] VAI SYLLABLE EE..VAI SYLLABLE NG
+ {0xA60C, 0xA60C, prN}, // Lm VAI SYLLABLE LENGTHENER
+ {0xA60D, 0xA60F, prN}, // Po [3] VAI COMMA..VAI QUESTION MARK
+ {0xA610, 0xA61F, prN}, // Lo [16] VAI SYLLABLE NDOLE FA..VAI SYMBOL JONG
+ {0xA620, 0xA629, prN}, // Nd [10] VAI DIGIT ZERO..VAI DIGIT NINE
+ {0xA62A, 0xA62B, prN}, // Lo [2] VAI SYLLABLE NDOLE MA..VAI SYLLABLE NDOLE DO
+ {0xA640, 0xA66D, prN}, // L& [46] CYRILLIC CAPITAL LETTER ZEMLYA..CYRILLIC SMALL LETTER DOUBLE MONOCULAR O
+ {0xA66E, 0xA66E, prN}, // Lo CYRILLIC LETTER MULTIOCULAR O
+ {0xA66F, 0xA66F, prN}, // Mn COMBINING CYRILLIC VZMET
+ {0xA670, 0xA672, prN}, // Me [3] COMBINING CYRILLIC TEN MILLIONS SIGN..COMBINING CYRILLIC THOUSAND MILLIONS SIGN
+ {0xA673, 0xA673, prN}, // Po SLAVONIC ASTERISK
+ {0xA674, 0xA67D, prN}, // Mn [10] COMBINING CYRILLIC LETTER UKRAINIAN IE..COMBINING CYRILLIC PAYEROK
+ {0xA67E, 0xA67E, prN}, // Po CYRILLIC KAVYKA
+ {0xA67F, 0xA67F, prN}, // Lm CYRILLIC PAYEROK
+ {0xA680, 0xA69B, prN}, // L& [28] CYRILLIC CAPITAL LETTER DWE..CYRILLIC SMALL LETTER CROSSED O
+ {0xA69C, 0xA69D, prN}, // Lm [2] MODIFIER LETTER CYRILLIC HARD SIGN..MODIFIER LETTER CYRILLIC SOFT SIGN
+ {0xA69E, 0xA69F, prN}, // Mn [2] COMBINING CYRILLIC LETTER EF..COMBINING CYRILLIC LETTER IOTIFIED E
+ {0xA6A0, 0xA6E5, prN}, // Lo [70] BAMUM LETTER A..BAMUM LETTER KI
+ {0xA6E6, 0xA6EF, prN}, // Nl [10] BAMUM LETTER MO..BAMUM LETTER KOGHOM
+ {0xA6F0, 0xA6F1, prN}, // Mn [2] BAMUM COMBINING MARK KOQNDON..BAMUM COMBINING MARK TUKWENTIS
+ {0xA6F2, 0xA6F7, prN}, // Po [6] BAMUM NJAEMLI..BAMUM QUESTION MARK
+ {0xA700, 0xA716, prN}, // Sk [23] MODIFIER LETTER CHINESE TONE YIN PING..MODIFIER LETTER EXTRA-LOW LEFT-STEM TONE BAR
+ {0xA717, 0xA71F, prN}, // Lm [9] MODIFIER LETTER DOT VERTICAL BAR..MODIFIER LETTER LOW INVERTED EXCLAMATION MARK
+ {0xA720, 0xA721, prN}, // Sk [2] MODIFIER LETTER STRESS AND HIGH TONE..MODIFIER LETTER STRESS AND LOW TONE
+ {0xA722, 0xA76F, prN}, // L& [78] LATIN CAPITAL LETTER EGYPTOLOGICAL ALEF..LATIN SMALL LETTER CON
+ {0xA770, 0xA770, prN}, // Lm MODIFIER LETTER US
+ {0xA771, 0xA787, prN}, // L& [23] LATIN SMALL LETTER DUM..LATIN SMALL LETTER INSULAR T
+ {0xA788, 0xA788, prN}, // Lm MODIFIER LETTER LOW CIRCUMFLEX ACCENT
+ {0xA789, 0xA78A, prN}, // Sk [2] MODIFIER LETTER COLON..MODIFIER LETTER SHORT EQUALS SIGN
+ {0xA78B, 0xA78E, prN}, // L& [4] LATIN CAPITAL LETTER SALTILLO..LATIN SMALL LETTER L WITH RETROFLEX HOOK AND BELT
+ {0xA78F, 0xA78F, prN}, // Lo LATIN LETTER SINOLOGICAL DOT
+ {0xA790, 0xA7CA, prN}, // L& [59] LATIN CAPITAL LETTER N WITH DESCENDER..LATIN SMALL LETTER S WITH SHORT STROKE OVERLAY
+ {0xA7D0, 0xA7D1, prN}, // L& [2] LATIN CAPITAL LETTER CLOSED INSULAR G..LATIN SMALL LETTER CLOSED INSULAR G
+ {0xA7D3, 0xA7D3, prN}, // Ll LATIN SMALL LETTER DOUBLE THORN
+ {0xA7D5, 0xA7D9, prN}, // L& [5] LATIN SMALL LETTER DOUBLE WYNN..LATIN SMALL LETTER SIGMOID S
+ {0xA7F2, 0xA7F4, prN}, // Lm [3] MODIFIER LETTER CAPITAL C..MODIFIER LETTER CAPITAL Q
+ {0xA7F5, 0xA7F6, prN}, // L& [2] LATIN CAPITAL LETTER REVERSED HALF H..LATIN SMALL LETTER REVERSED HALF H
+ {0xA7F7, 0xA7F7, prN}, // Lo LATIN EPIGRAPHIC LETTER SIDEWAYS I
+ {0xA7F8, 0xA7F9, prN}, // Lm [2] MODIFIER LETTER CAPITAL H WITH STROKE..MODIFIER LETTER SMALL LIGATURE OE
+ {0xA7FA, 0xA7FA, prN}, // Ll LATIN LETTER SMALL CAPITAL TURNED M
+ {0xA7FB, 0xA7FF, prN}, // Lo [5] LATIN EPIGRAPHIC LETTER REVERSED F..LATIN EPIGRAPHIC LETTER ARCHAIC M
+ {0xA800, 0xA801, prN}, // Lo [2] SYLOTI NAGRI LETTER A..SYLOTI NAGRI LETTER I
+ {0xA802, 0xA802, prN}, // Mn SYLOTI NAGRI SIGN DVISVARA
+ {0xA803, 0xA805, prN}, // Lo [3] SYLOTI NAGRI LETTER U..SYLOTI NAGRI LETTER O
+ {0xA806, 0xA806, prN}, // Mn SYLOTI NAGRI SIGN HASANTA
+ {0xA807, 0xA80A, prN}, // Lo [4] SYLOTI NAGRI LETTER KO..SYLOTI NAGRI LETTER GHO
+ {0xA80B, 0xA80B, prN}, // Mn SYLOTI NAGRI SIGN ANUSVARA
+ {0xA80C, 0xA822, prN}, // Lo [23] SYLOTI NAGRI LETTER CO..SYLOTI NAGRI LETTER HO
+ {0xA823, 0xA824, prN}, // Mc [2] SYLOTI NAGRI VOWEL SIGN A..SYLOTI NAGRI VOWEL SIGN I
+ {0xA825, 0xA826, prN}, // Mn [2] SYLOTI NAGRI VOWEL SIGN U..SYLOTI NAGRI VOWEL SIGN E
+ {0xA827, 0xA827, prN}, // Mc SYLOTI NAGRI VOWEL SIGN OO
+ {0xA828, 0xA82B, prN}, // So [4] SYLOTI NAGRI POETRY MARK-1..SYLOTI NAGRI POETRY MARK-4
+ {0xA82C, 0xA82C, prN}, // Mn SYLOTI NAGRI SIGN ALTERNATE HASANTA
+ {0xA830, 0xA835, prN}, // No [6] NORTH INDIC FRACTION ONE QUARTER..NORTH INDIC FRACTION THREE SIXTEENTHS
+ {0xA836, 0xA837, prN}, // So [2] NORTH INDIC QUARTER MARK..NORTH INDIC PLACEHOLDER MARK
+ {0xA838, 0xA838, prN}, // Sc NORTH INDIC RUPEE MARK
+ {0xA839, 0xA839, prN}, // So NORTH INDIC QUANTITY MARK
+ {0xA840, 0xA873, prN}, // Lo [52] PHAGS-PA LETTER KA..PHAGS-PA LETTER CANDRABINDU
+ {0xA874, 0xA877, prN}, // Po [4] PHAGS-PA SINGLE HEAD MARK..PHAGS-PA MARK DOUBLE SHAD
+ {0xA880, 0xA881, prN}, // Mc [2] SAURASHTRA SIGN ANUSVARA..SAURASHTRA SIGN VISARGA
+ {0xA882, 0xA8B3, prN}, // Lo [50] SAURASHTRA LETTER A..SAURASHTRA LETTER LLA
+ {0xA8B4, 0xA8C3, prN}, // Mc [16] SAURASHTRA CONSONANT SIGN HAARU..SAURASHTRA VOWEL SIGN AU
+ {0xA8C4, 0xA8C5, prN}, // Mn [2] SAURASHTRA SIGN VIRAMA..SAURASHTRA SIGN CANDRABINDU
+ {0xA8CE, 0xA8CF, prN}, // Po [2] SAURASHTRA DANDA..SAURASHTRA DOUBLE DANDA
+ {0xA8D0, 0xA8D9, prN}, // Nd [10] SAURASHTRA DIGIT ZERO..SAURASHTRA DIGIT NINE
+ {0xA8E0, 0xA8F1, prN}, // Mn [18] COMBINING DEVANAGARI DIGIT ZERO..COMBINING DEVANAGARI SIGN AVAGRAHA
+ {0xA8F2, 0xA8F7, prN}, // Lo [6] DEVANAGARI SIGN SPACING CANDRABINDU..DEVANAGARI SIGN CANDRABINDU AVAGRAHA
+ {0xA8F8, 0xA8FA, prN}, // Po [3] DEVANAGARI SIGN PUSHPIKA..DEVANAGARI CARET
+ {0xA8FB, 0xA8FB, prN}, // Lo DEVANAGARI HEADSTROKE
+ {0xA8FC, 0xA8FC, prN}, // Po DEVANAGARI SIGN SIDDHAM
+ {0xA8FD, 0xA8FE, prN}, // Lo [2] DEVANAGARI JAIN OM..DEVANAGARI LETTER AY
+ {0xA8FF, 0xA8FF, prN}, // Mn DEVANAGARI VOWEL SIGN AY
+ {0xA900, 0xA909, prN}, // Nd [10] KAYAH LI DIGIT ZERO..KAYAH LI DIGIT NINE
+ {0xA90A, 0xA925, prN}, // Lo [28] KAYAH LI LETTER KA..KAYAH LI LETTER OO
+ {0xA926, 0xA92D, prN}, // Mn [8] KAYAH LI VOWEL UE..KAYAH LI TONE CALYA PLOPHU
+ {0xA92E, 0xA92F, prN}, // Po [2] KAYAH LI SIGN CWI..KAYAH LI SIGN SHYA
+ {0xA930, 0xA946, prN}, // Lo [23] REJANG LETTER KA..REJANG LETTER A
+ {0xA947, 0xA951, prN}, // Mn [11] REJANG VOWEL SIGN I..REJANG CONSONANT SIGN R
+ {0xA952, 0xA953, prN}, // Mc [2] REJANG CONSONANT SIGN H..REJANG VIRAMA
+ {0xA95F, 0xA95F, prN}, // Po REJANG SECTION MARK
+ {0xA960, 0xA97C, prW}, // Lo [29] HANGUL CHOSEONG TIKEUT-MIEUM..HANGUL CHOSEONG SSANGYEORINHIEUH
+ {0xA980, 0xA982, prN}, // Mn [3] JAVANESE SIGN PANYANGGA..JAVANESE SIGN LAYAR
+ {0xA983, 0xA983, prN}, // Mc JAVANESE SIGN WIGNYAN
+ {0xA984, 0xA9B2, prN}, // Lo [47] JAVANESE LETTER A..JAVANESE LETTER HA
+ {0xA9B3, 0xA9B3, prN}, // Mn JAVANESE SIGN CECAK TELU
+ {0xA9B4, 0xA9B5, prN}, // Mc [2] JAVANESE VOWEL SIGN TARUNG..JAVANESE VOWEL SIGN TOLONG
+ {0xA9B6, 0xA9B9, prN}, // Mn [4] JAVANESE VOWEL SIGN WULU..JAVANESE VOWEL SIGN SUKU MENDUT
+ {0xA9BA, 0xA9BB, prN}, // Mc [2] JAVANESE VOWEL SIGN TALING..JAVANESE VOWEL SIGN DIRGA MURE
+ {0xA9BC, 0xA9BD, prN}, // Mn [2] JAVANESE VOWEL SIGN PEPET..JAVANESE CONSONANT SIGN KERET
+ {0xA9BE, 0xA9C0, prN}, // Mc [3] JAVANESE CONSONANT SIGN PENGKAL..JAVANESE PANGKON
+ {0xA9C1, 0xA9CD, prN}, // Po [13] JAVANESE LEFT RERENGGAN..JAVANESE TURNED PADA PISELEH
+ {0xA9CF, 0xA9CF, prN}, // Lm JAVANESE PANGRANGKEP
+ {0xA9D0, 0xA9D9, prN}, // Nd [10] JAVANESE DIGIT ZERO..JAVANESE DIGIT NINE
+ {0xA9DE, 0xA9DF, prN}, // Po [2] JAVANESE PADA TIRTA TUMETES..JAVANESE PADA ISEN-ISEN
+ {0xA9E0, 0xA9E4, prN}, // Lo [5] MYANMAR LETTER SHAN GHA..MYANMAR LETTER SHAN BHA
+ {0xA9E5, 0xA9E5, prN}, // Mn MYANMAR SIGN SHAN SAW
+ {0xA9E6, 0xA9E6, prN}, // Lm MYANMAR MODIFIER LETTER SHAN REDUPLICATION
+ {0xA9E7, 0xA9EF, prN}, // Lo [9] MYANMAR LETTER TAI LAING NYA..MYANMAR LETTER TAI LAING NNA
+ {0xA9F0, 0xA9F9, prN}, // Nd [10] MYANMAR TAI LAING DIGIT ZERO..MYANMAR TAI LAING DIGIT NINE
+ {0xA9FA, 0xA9FE, prN}, // Lo [5] MYANMAR LETTER TAI LAING LLA..MYANMAR LETTER TAI LAING BHA
+ {0xAA00, 0xAA28, prN}, // Lo [41] CHAM LETTER A..CHAM LETTER HA
+ {0xAA29, 0xAA2E, prN}, // Mn [6] CHAM VOWEL SIGN AA..CHAM VOWEL SIGN OE
+ {0xAA2F, 0xAA30, prN}, // Mc [2] CHAM VOWEL SIGN O..CHAM VOWEL SIGN AI
+ {0xAA31, 0xAA32, prN}, // Mn [2] CHAM VOWEL SIGN AU..CHAM VOWEL SIGN UE
+ {0xAA33, 0xAA34, prN}, // Mc [2] CHAM CONSONANT SIGN YA..CHAM CONSONANT SIGN RA
+ {0xAA35, 0xAA36, prN}, // Mn [2] CHAM CONSONANT SIGN LA..CHAM CONSONANT SIGN WA
+ {0xAA40, 0xAA42, prN}, // Lo [3] CHAM LETTER FINAL K..CHAM LETTER FINAL NG
+ {0xAA43, 0xAA43, prN}, // Mn CHAM CONSONANT SIGN FINAL NG
+ {0xAA44, 0xAA4B, prN}, // Lo [8] CHAM LETTER FINAL CH..CHAM LETTER FINAL SS
+ {0xAA4C, 0xAA4C, prN}, // Mn CHAM CONSONANT SIGN FINAL M
+ {0xAA4D, 0xAA4D, prN}, // Mc CHAM CONSONANT SIGN FINAL H
+ {0xAA50, 0xAA59, prN}, // Nd [10] CHAM DIGIT ZERO..CHAM DIGIT NINE
+ {0xAA5C, 0xAA5F, prN}, // Po [4] CHAM PUNCTUATION SPIRAL..CHAM PUNCTUATION TRIPLE DANDA
+ {0xAA60, 0xAA6F, prN}, // Lo [16] MYANMAR LETTER KHAMTI GA..MYANMAR LETTER KHAMTI FA
+ {0xAA70, 0xAA70, prN}, // Lm MYANMAR MODIFIER LETTER KHAMTI REDUPLICATION
+ {0xAA71, 0xAA76, prN}, // Lo [6] MYANMAR LETTER KHAMTI XA..MYANMAR LOGOGRAM KHAMTI HM
+ {0xAA77, 0xAA79, prN}, // So [3] MYANMAR SYMBOL AITON EXCLAMATION..MYANMAR SYMBOL AITON TWO
+ {0xAA7A, 0xAA7A, prN}, // Lo MYANMAR LETTER AITON RA
+ {0xAA7B, 0xAA7B, prN}, // Mc MYANMAR SIGN PAO KAREN TONE
+ {0xAA7C, 0xAA7C, prN}, // Mn MYANMAR SIGN TAI LAING TONE-2
+ {0xAA7D, 0xAA7D, prN}, // Mc MYANMAR SIGN TAI LAING TONE-5
+ {0xAA7E, 0xAA7F, prN}, // Lo [2] MYANMAR LETTER SHWE PALAUNG CHA..MYANMAR LETTER SHWE PALAUNG SHA
+ {0xAA80, 0xAAAF, prN}, // Lo [48] TAI VIET LETTER LOW KO..TAI VIET LETTER HIGH O
+ {0xAAB0, 0xAAB0, prN}, // Mn TAI VIET MAI KANG
+ {0xAAB1, 0xAAB1, prN}, // Lo TAI VIET VOWEL AA
+ {0xAAB2, 0xAAB4, prN}, // Mn [3] TAI VIET VOWEL I..TAI VIET VOWEL U
+ {0xAAB5, 0xAAB6, prN}, // Lo [2] TAI VIET VOWEL E..TAI VIET VOWEL O
+ {0xAAB7, 0xAAB8, prN}, // Mn [2] TAI VIET MAI KHIT..TAI VIET VOWEL IA
+ {0xAAB9, 0xAABD, prN}, // Lo [5] TAI VIET VOWEL UEA..TAI VIET VOWEL AN
+ {0xAABE, 0xAABF, prN}, // Mn [2] TAI VIET VOWEL AM..TAI VIET TONE MAI EK
+ {0xAAC0, 0xAAC0, prN}, // Lo TAI VIET TONE MAI NUENG
+ {0xAAC1, 0xAAC1, prN}, // Mn TAI VIET TONE MAI THO
+ {0xAAC2, 0xAAC2, prN}, // Lo TAI VIET TONE MAI SONG
+ {0xAADB, 0xAADC, prN}, // Lo [2] TAI VIET SYMBOL KON..TAI VIET SYMBOL NUENG
+ {0xAADD, 0xAADD, prN}, // Lm TAI VIET SYMBOL SAM
+ {0xAADE, 0xAADF, prN}, // Po [2] TAI VIET SYMBOL HO HOI..TAI VIET SYMBOL KOI KOI
+ {0xAAE0, 0xAAEA, prN}, // Lo [11] MEETEI MAYEK LETTER E..MEETEI MAYEK LETTER SSA
+ {0xAAEB, 0xAAEB, prN}, // Mc MEETEI MAYEK VOWEL SIGN II
+ {0xAAEC, 0xAAED, prN}, // Mn [2] MEETEI MAYEK VOWEL SIGN UU..MEETEI MAYEK VOWEL SIGN AAI
+ {0xAAEE, 0xAAEF, prN}, // Mc [2] MEETEI MAYEK VOWEL SIGN AU..MEETEI MAYEK VOWEL SIGN AAU
+ {0xAAF0, 0xAAF1, prN}, // Po [2] MEETEI MAYEK CHEIKHAN..MEETEI MAYEK AHANG KHUDAM
+ {0xAAF2, 0xAAF2, prN}, // Lo MEETEI MAYEK ANJI
+ {0xAAF3, 0xAAF4, prN}, // Lm [2] MEETEI MAYEK SYLLABLE REPETITION MARK..MEETEI MAYEK WORD REPETITION MARK
+ {0xAAF5, 0xAAF5, prN}, // Mc MEETEI MAYEK VOWEL SIGN VISARGA
+ {0xAAF6, 0xAAF6, prN}, // Mn MEETEI MAYEK VIRAMA
+ {0xAB01, 0xAB06, prN}, // Lo [6] ETHIOPIC SYLLABLE TTHU..ETHIOPIC SYLLABLE TTHO
+ {0xAB09, 0xAB0E, prN}, // Lo [6] ETHIOPIC SYLLABLE DDHU..ETHIOPIC SYLLABLE DDHO
+ {0xAB11, 0xAB16, prN}, // Lo [6] ETHIOPIC SYLLABLE DZU..ETHIOPIC SYLLABLE DZO
+ {0xAB20, 0xAB26, prN}, // Lo [7] ETHIOPIC SYLLABLE CCHHA..ETHIOPIC SYLLABLE CCHHO
+ {0xAB28, 0xAB2E, prN}, // Lo [7] ETHIOPIC SYLLABLE BBA..ETHIOPIC SYLLABLE BBO
+ {0xAB30, 0xAB5A, prN}, // Ll [43] LATIN SMALL LETTER BARRED ALPHA..LATIN SMALL LETTER Y WITH SHORT RIGHT LEG
+ {0xAB5B, 0xAB5B, prN}, // Sk MODIFIER BREVE WITH INVERTED BREVE
+ {0xAB5C, 0xAB5F, prN}, // Lm [4] MODIFIER LETTER SMALL HENG..MODIFIER LETTER SMALL U WITH LEFT HOOK
+ {0xAB60, 0xAB68, prN}, // Ll [9] LATIN SMALL LETTER SAKHA YAT..LATIN SMALL LETTER TURNED R WITH MIDDLE TILDE
+ {0xAB69, 0xAB69, prN}, // Lm MODIFIER LETTER SMALL TURNED W
+ {0xAB6A, 0xAB6B, prN}, // Sk [2] MODIFIER LETTER LEFT TACK..MODIFIER LETTER RIGHT TACK
+ {0xAB70, 0xABBF, prN}, // Ll [80] CHEROKEE SMALL LETTER A..CHEROKEE SMALL LETTER YA
+ {0xABC0, 0xABE2, prN}, // Lo [35] MEETEI MAYEK LETTER KOK..MEETEI MAYEK LETTER I LONSUM
+ {0xABE3, 0xABE4, prN}, // Mc [2] MEETEI MAYEK VOWEL SIGN ONAP..MEETEI MAYEK VOWEL SIGN INAP
+ {0xABE5, 0xABE5, prN}, // Mn MEETEI MAYEK VOWEL SIGN ANAP
+ {0xABE6, 0xABE7, prN}, // Mc [2] MEETEI MAYEK VOWEL SIGN YENAP..MEETEI MAYEK VOWEL SIGN SOUNAP
+ {0xABE8, 0xABE8, prN}, // Mn MEETEI MAYEK VOWEL SIGN UNAP
+ {0xABE9, 0xABEA, prN}, // Mc [2] MEETEI MAYEK VOWEL SIGN CHEINAP..MEETEI MAYEK VOWEL SIGN NUNG
+ {0xABEB, 0xABEB, prN}, // Po MEETEI MAYEK CHEIKHEI
+ {0xABEC, 0xABEC, prN}, // Mc MEETEI MAYEK LUM IYEK
+ {0xABED, 0xABED, prN}, // Mn MEETEI MAYEK APUN IYEK
+ {0xABF0, 0xABF9, prN}, // Nd [10] MEETEI MAYEK DIGIT ZERO..MEETEI MAYEK DIGIT NINE
+ {0xAC00, 0xD7A3, prW}, // Lo [11172] HANGUL SYLLABLE GA..HANGUL SYLLABLE HIH
+ {0xD7B0, 0xD7C6, prN}, // Lo [23] HANGUL JUNGSEONG O-YEO..HANGUL JUNGSEONG ARAEA-E
+ {0xD7CB, 0xD7FB, prN}, // Lo [49] HANGUL JONGSEONG NIEUN-RIEUL..HANGUL JONGSEONG PHIEUPH-THIEUTH
+ {0xD800, 0xDB7F, prN}, // Cs [896] ..
+ {0xDB80, 0xDBFF, prN}, // Cs [128] ..
+ {0xDC00, 0xDFFF, prN}, // Cs [1024] ..
+ {0xE000, 0xF8FF, prA}, // Co [6400] ..
+ {0xF900, 0xFA6D, prW}, // Lo [366] CJK COMPATIBILITY IDEOGRAPH-F900..CJK COMPATIBILITY IDEOGRAPH-FA6D
+ {0xFA6E, 0xFA6F, prW}, // Cn [2] ..
+ {0xFA70, 0xFAD9, prW}, // Lo [106] CJK COMPATIBILITY IDEOGRAPH-FA70..CJK COMPATIBILITY IDEOGRAPH-FAD9
+ {0xFADA, 0xFAFF, prW}, // Cn [38] ..
+ {0xFB00, 0xFB06, prN}, // Ll [7] LATIN SMALL LIGATURE FF..LATIN SMALL LIGATURE ST
+ {0xFB13, 0xFB17, prN}, // Ll [5] ARMENIAN SMALL LIGATURE MEN NOW..ARMENIAN SMALL LIGATURE MEN XEH
+ {0xFB1D, 0xFB1D, prN}, // Lo HEBREW LETTER YOD WITH HIRIQ
+ {0xFB1E, 0xFB1E, prN}, // Mn HEBREW POINT JUDEO-SPANISH VARIKA
+ {0xFB1F, 0xFB28, prN}, // Lo [10] HEBREW LIGATURE YIDDISH YOD YOD PATAH..HEBREW LETTER WIDE TAV
+ {0xFB29, 0xFB29, prN}, // Sm HEBREW LETTER ALTERNATIVE PLUS SIGN
+ {0xFB2A, 0xFB36, prN}, // Lo [13] HEBREW LETTER SHIN WITH SHIN DOT..HEBREW LETTER ZAYIN WITH DAGESH
+ {0xFB38, 0xFB3C, prN}, // Lo [5] HEBREW LETTER TET WITH DAGESH..HEBREW LETTER LAMED WITH DAGESH
+ {0xFB3E, 0xFB3E, prN}, // Lo HEBREW LETTER MEM WITH DAGESH
+ {0xFB40, 0xFB41, prN}, // Lo [2] HEBREW LETTER NUN WITH DAGESH..HEBREW LETTER SAMEKH WITH DAGESH
+ {0xFB43, 0xFB44, prN}, // Lo [2] HEBREW LETTER FINAL PE WITH DAGESH..HEBREW LETTER PE WITH DAGESH
+ {0xFB46, 0xFB4F, prN}, // Lo [10] HEBREW LETTER TSADI WITH DAGESH..HEBREW LIGATURE ALEF LAMED
+ {0xFB50, 0xFBB1, prN}, // Lo [98] ARABIC LETTER ALEF WASLA ISOLATED FORM..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE FINAL FORM
+ {0xFBB2, 0xFBC2, prN}, // Sk [17] ARABIC SYMBOL DOT ABOVE..ARABIC SYMBOL WASLA ABOVE
+ {0xFBD3, 0xFD3D, prN}, // Lo [363] ARABIC LETTER NG ISOLATED FORM..ARABIC LIGATURE ALEF WITH FATHATAN ISOLATED FORM
+ {0xFD3E, 0xFD3E, prN}, // Pe ORNATE LEFT PARENTHESIS
+ {0xFD3F, 0xFD3F, prN}, // Ps ORNATE RIGHT PARENTHESIS
+ {0xFD40, 0xFD4F, prN}, // So [16] ARABIC LIGATURE RAHIMAHU ALLAAH..ARABIC LIGATURE RAHIMAHUM ALLAAH
+ {0xFD50, 0xFD8F, prN}, // Lo [64] ARABIC LIGATURE TEH WITH JEEM WITH MEEM INITIAL FORM..ARABIC LIGATURE MEEM WITH KHAH WITH MEEM INITIAL FORM
+ {0xFD92, 0xFDC7, prN}, // Lo [54] ARABIC LIGATURE MEEM WITH JEEM WITH KHAH INITIAL FORM..ARABIC LIGATURE NOON WITH JEEM WITH YEH FINAL FORM
+ {0xFDCF, 0xFDCF, prN}, // So ARABIC LIGATURE SALAAMUHU ALAYNAA
+ {0xFDF0, 0xFDFB, prN}, // Lo [12] ARABIC LIGATURE SALLA USED AS KORANIC STOP SIGN ISOLATED FORM..ARABIC LIGATURE JALLAJALALOUHOU
+ {0xFDFC, 0xFDFC, prN}, // Sc RIAL SIGN
+ {0xFDFD, 0xFDFF, prN}, // So [3] ARABIC LIGATURE BISMILLAH AR-RAHMAN AR-RAHEEM..ARABIC LIGATURE AZZA WA JALL
+ {0xFE00, 0xFE0F, prA}, // Mn [16] VARIATION SELECTOR-1..VARIATION SELECTOR-16
+ {0xFE10, 0xFE16, prW}, // Po [7] PRESENTATION FORM FOR VERTICAL COMMA..PRESENTATION FORM FOR VERTICAL QUESTION MARK
+ {0xFE17, 0xFE17, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT WHITE LENTICULAR BRACKET
+ {0xFE18, 0xFE18, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT WHITE LENTICULAR BRAKCET
+ {0xFE19, 0xFE19, prW}, // Po PRESENTATION FORM FOR VERTICAL HORIZONTAL ELLIPSIS
+ {0xFE20, 0xFE2F, prN}, // Mn [16] COMBINING LIGATURE LEFT HALF..COMBINING CYRILLIC TITLO RIGHT HALF
+ {0xFE30, 0xFE30, prW}, // Po PRESENTATION FORM FOR VERTICAL TWO DOT LEADER
+ {0xFE31, 0xFE32, prW}, // Pd [2] PRESENTATION FORM FOR VERTICAL EM DASH..PRESENTATION FORM FOR VERTICAL EN DASH
+ {0xFE33, 0xFE34, prW}, // Pc [2] PRESENTATION FORM FOR VERTICAL LOW LINE..PRESENTATION FORM FOR VERTICAL WAVY LOW LINE
+ {0xFE35, 0xFE35, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT PARENTHESIS
+ {0xFE36, 0xFE36, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT PARENTHESIS
+ {0xFE37, 0xFE37, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT CURLY BRACKET
+ {0xFE38, 0xFE38, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT CURLY BRACKET
+ {0xFE39, 0xFE39, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT TORTOISE SHELL BRACKET
+ {0xFE3A, 0xFE3A, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT TORTOISE SHELL BRACKET
+ {0xFE3B, 0xFE3B, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT BLACK LENTICULAR BRACKET
+ {0xFE3C, 0xFE3C, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT BLACK LENTICULAR BRACKET
+ {0xFE3D, 0xFE3D, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT DOUBLE ANGLE BRACKET
+ {0xFE3E, 0xFE3E, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT DOUBLE ANGLE BRACKET
+ {0xFE3F, 0xFE3F, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT ANGLE BRACKET
+ {0xFE40, 0xFE40, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT ANGLE BRACKET
+ {0xFE41, 0xFE41, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT CORNER BRACKET
+ {0xFE42, 0xFE42, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT CORNER BRACKET
+ {0xFE43, 0xFE43, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT WHITE CORNER BRACKET
+ {0xFE44, 0xFE44, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT WHITE CORNER BRACKET
+ {0xFE45, 0xFE46, prW}, // Po [2] SESAME DOT..WHITE SESAME DOT
+ {0xFE47, 0xFE47, prW}, // Ps PRESENTATION FORM FOR VERTICAL LEFT SQUARE BRACKET
+ {0xFE48, 0xFE48, prW}, // Pe PRESENTATION FORM FOR VERTICAL RIGHT SQUARE BRACKET
+ {0xFE49, 0xFE4C, prW}, // Po [4] DASHED OVERLINE..DOUBLE WAVY OVERLINE
+ {0xFE4D, 0xFE4F, prW}, // Pc [3] DASHED LOW LINE..WAVY LOW LINE
+ {0xFE50, 0xFE52, prW}, // Po [3] SMALL COMMA..SMALL FULL STOP
+ {0xFE54, 0xFE57, prW}, // Po [4] SMALL SEMICOLON..SMALL EXCLAMATION MARK
+ {0xFE58, 0xFE58, prW}, // Pd SMALL EM DASH
+ {0xFE59, 0xFE59, prW}, // Ps SMALL LEFT PARENTHESIS
+ {0xFE5A, 0xFE5A, prW}, // Pe SMALL RIGHT PARENTHESIS
+ {0xFE5B, 0xFE5B, prW}, // Ps SMALL LEFT CURLY BRACKET
+ {0xFE5C, 0xFE5C, prW}, // Pe SMALL RIGHT CURLY BRACKET
+ {0xFE5D, 0xFE5D, prW}, // Ps SMALL LEFT TORTOISE SHELL BRACKET
+ {0xFE5E, 0xFE5E, prW}, // Pe SMALL RIGHT TORTOISE SHELL BRACKET
+ {0xFE5F, 0xFE61, prW}, // Po [3] SMALL NUMBER SIGN..SMALL ASTERISK
+ {0xFE62, 0xFE62, prW}, // Sm SMALL PLUS SIGN
+ {0xFE63, 0xFE63, prW}, // Pd SMALL HYPHEN-MINUS
+ {0xFE64, 0xFE66, prW}, // Sm [3] SMALL LESS-THAN SIGN..SMALL EQUALS SIGN
+ {0xFE68, 0xFE68, prW}, // Po SMALL REVERSE SOLIDUS
+ {0xFE69, 0xFE69, prW}, // Sc SMALL DOLLAR SIGN
+ {0xFE6A, 0xFE6B, prW}, // Po [2] SMALL PERCENT SIGN..SMALL COMMERCIAL AT
+ {0xFE70, 0xFE74, prN}, // Lo [5] ARABIC FATHATAN ISOLATED FORM..ARABIC KASRATAN ISOLATED FORM
+ {0xFE76, 0xFEFC, prN}, // Lo [135] ARABIC FATHA ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF FINAL FORM
+ {0xFEFF, 0xFEFF, prN}, // Cf ZERO WIDTH NO-BREAK SPACE
+ {0xFF01, 0xFF03, prF}, // Po [3] FULLWIDTH EXCLAMATION MARK..FULLWIDTH NUMBER SIGN
+ {0xFF04, 0xFF04, prF}, // Sc FULLWIDTH DOLLAR SIGN
+ {0xFF05, 0xFF07, prF}, // Po [3] FULLWIDTH PERCENT SIGN..FULLWIDTH APOSTROPHE
+ {0xFF08, 0xFF08, prF}, // Ps FULLWIDTH LEFT PARENTHESIS
+ {0xFF09, 0xFF09, prF}, // Pe FULLWIDTH RIGHT PARENTHESIS
+ {0xFF0A, 0xFF0A, prF}, // Po FULLWIDTH ASTERISK
+ {0xFF0B, 0xFF0B, prF}, // Sm FULLWIDTH PLUS SIGN
+ {0xFF0C, 0xFF0C, prF}, // Po FULLWIDTH COMMA
+ {0xFF0D, 0xFF0D, prF}, // Pd FULLWIDTH HYPHEN-MINUS
+ {0xFF0E, 0xFF0F, prF}, // Po [2] FULLWIDTH FULL STOP..FULLWIDTH SOLIDUS
+ {0xFF10, 0xFF19, prF}, // Nd [10] FULLWIDTH DIGIT ZERO..FULLWIDTH DIGIT NINE
+ {0xFF1A, 0xFF1B, prF}, // Po [2] FULLWIDTH COLON..FULLWIDTH SEMICOLON
+ {0xFF1C, 0xFF1E, prF}, // Sm [3] FULLWIDTH LESS-THAN SIGN..FULLWIDTH GREATER-THAN SIGN
+ {0xFF1F, 0xFF20, prF}, // Po [2] FULLWIDTH QUESTION MARK..FULLWIDTH COMMERCIAL AT
+ {0xFF21, 0xFF3A, prF}, // Lu [26] FULLWIDTH LATIN CAPITAL LETTER A..FULLWIDTH LATIN CAPITAL LETTER Z
+ {0xFF3B, 0xFF3B, prF}, // Ps FULLWIDTH LEFT SQUARE BRACKET
+ {0xFF3C, 0xFF3C, prF}, // Po FULLWIDTH REVERSE SOLIDUS
+ {0xFF3D, 0xFF3D, prF}, // Pe FULLWIDTH RIGHT SQUARE BRACKET
+ {0xFF3E, 0xFF3E, prF}, // Sk FULLWIDTH CIRCUMFLEX ACCENT
+ {0xFF3F, 0xFF3F, prF}, // Pc FULLWIDTH LOW LINE
+ {0xFF40, 0xFF40, prF}, // Sk FULLWIDTH GRAVE ACCENT
+ {0xFF41, 0xFF5A, prF}, // Ll [26] FULLWIDTH LATIN SMALL LETTER A..FULLWIDTH LATIN SMALL LETTER Z
+ {0xFF5B, 0xFF5B, prF}, // Ps FULLWIDTH LEFT CURLY BRACKET
+ {0xFF5C, 0xFF5C, prF}, // Sm FULLWIDTH VERTICAL LINE
+ {0xFF5D, 0xFF5D, prF}, // Pe FULLWIDTH RIGHT CURLY BRACKET
+ {0xFF5E, 0xFF5E, prF}, // Sm FULLWIDTH TILDE
+ {0xFF5F, 0xFF5F, prF}, // Ps FULLWIDTH LEFT WHITE PARENTHESIS
+ {0xFF60, 0xFF60, prF}, // Pe FULLWIDTH RIGHT WHITE PARENTHESIS
+ {0xFF61, 0xFF61, prH}, // Po HALFWIDTH IDEOGRAPHIC FULL STOP
+ {0xFF62, 0xFF62, prH}, // Ps HALFWIDTH LEFT CORNER BRACKET
+ {0xFF63, 0xFF63, prH}, // Pe HALFWIDTH RIGHT CORNER BRACKET
+ {0xFF64, 0xFF65, prH}, // Po [2] HALFWIDTH IDEOGRAPHIC COMMA..HALFWIDTH KATAKANA MIDDLE DOT
+ {0xFF66, 0xFF6F, prH}, // Lo [10] HALFWIDTH KATAKANA LETTER WO..HALFWIDTH KATAKANA LETTER SMALL TU
+ {0xFF70, 0xFF70, prH}, // Lm HALFWIDTH KATAKANA-HIRAGANA PROLONGED SOUND MARK
+ {0xFF71, 0xFF9D, prH}, // Lo [45] HALFWIDTH KATAKANA LETTER A..HALFWIDTH KATAKANA LETTER N
+ {0xFF9E, 0xFF9F, prH}, // Lm [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK
+ {0xFFA0, 0xFFBE, prH}, // Lo [31] HALFWIDTH HANGUL FILLER..HALFWIDTH HANGUL LETTER HIEUH
+ {0xFFC2, 0xFFC7, prH}, // Lo [6] HALFWIDTH HANGUL LETTER A..HALFWIDTH HANGUL LETTER E
+ {0xFFCA, 0xFFCF, prH}, // Lo [6] HALFWIDTH HANGUL LETTER YEO..HALFWIDTH HANGUL LETTER OE
+ {0xFFD2, 0xFFD7, prH}, // Lo [6] HALFWIDTH HANGUL LETTER YO..HALFWIDTH HANGUL LETTER YU
+ {0xFFDA, 0xFFDC, prH}, // Lo [3] HALFWIDTH HANGUL LETTER EU..HALFWIDTH HANGUL LETTER I
+ {0xFFE0, 0xFFE1, prF}, // Sc [2] FULLWIDTH CENT SIGN..FULLWIDTH POUND SIGN
+ {0xFFE2, 0xFFE2, prF}, // Sm FULLWIDTH NOT SIGN
+ {0xFFE3, 0xFFE3, prF}, // Sk FULLWIDTH MACRON
+ {0xFFE4, 0xFFE4, prF}, // So FULLWIDTH BROKEN BAR
+ {0xFFE5, 0xFFE6, prF}, // Sc [2] FULLWIDTH YEN SIGN..FULLWIDTH WON SIGN
+ {0xFFE8, 0xFFE8, prH}, // So HALFWIDTH FORMS LIGHT VERTICAL
+ {0xFFE9, 0xFFEC, prH}, // Sm [4] HALFWIDTH LEFTWARDS ARROW..HALFWIDTH DOWNWARDS ARROW
+ {0xFFED, 0xFFEE, prH}, // So [2] HALFWIDTH BLACK SQUARE..HALFWIDTH WHITE CIRCLE
+ {0xFFF9, 0xFFFB, prN}, // Cf [3] INTERLINEAR ANNOTATION ANCHOR..INTERLINEAR ANNOTATION TERMINATOR
+ {0xFFFC, 0xFFFC, prN}, // So OBJECT REPLACEMENT CHARACTER
+ {0xFFFD, 0xFFFD, prA}, // So REPLACEMENT CHARACTER
+ {0x10000, 0x1000B, prN}, // Lo [12] LINEAR B SYLLABLE B008 A..LINEAR B SYLLABLE B046 JE
+ {0x1000D, 0x10026, prN}, // Lo [26] LINEAR B SYLLABLE B036 JO..LINEAR B SYLLABLE B032 QO
+ {0x10028, 0x1003A, prN}, // Lo [19] LINEAR B SYLLABLE B060 RA..LINEAR B SYLLABLE B042 WO
+ {0x1003C, 0x1003D, prN}, // Lo [2] LINEAR B SYLLABLE B017 ZA..LINEAR B SYLLABLE B074 ZE
+ {0x1003F, 0x1004D, prN}, // Lo [15] LINEAR B SYLLABLE B020 ZO..LINEAR B SYLLABLE B091 TWO
+ {0x10050, 0x1005D, prN}, // Lo [14] LINEAR B SYMBOL B018..LINEAR B SYMBOL B089
+ {0x10080, 0x100FA, prN}, // Lo [123] LINEAR B IDEOGRAM B100 MAN..LINEAR B IDEOGRAM VESSEL B305
+ {0x10100, 0x10102, prN}, // Po [3] AEGEAN WORD SEPARATOR LINE..AEGEAN CHECK MARK
+ {0x10107, 0x10133, prN}, // No [45] AEGEAN NUMBER ONE..AEGEAN NUMBER NINETY THOUSAND
+ {0x10137, 0x1013F, prN}, // So [9] AEGEAN WEIGHT BASE UNIT..AEGEAN MEASURE THIRD SUBUNIT
+ {0x10140, 0x10174, prN}, // Nl [53] GREEK ACROPHONIC ATTIC ONE QUARTER..GREEK ACROPHONIC STRATIAN FIFTY MNAS
+ {0x10175, 0x10178, prN}, // No [4] GREEK ONE HALF SIGN..GREEK THREE QUARTERS SIGN
+ {0x10179, 0x10189, prN}, // So [17] GREEK YEAR SIGN..GREEK TRYBLION BASE SIGN
+ {0x1018A, 0x1018B, prN}, // No [2] GREEK ZERO SIGN..GREEK ONE QUARTER SIGN
+ {0x1018C, 0x1018E, prN}, // So [3] GREEK SINUSOID SIGN..NOMISMA SIGN
+ {0x10190, 0x1019C, prN}, // So [13] ROMAN SEXTANS SIGN..ASCIA SYMBOL
+ {0x101A0, 0x101A0, prN}, // So GREEK SYMBOL TAU RHO
+ {0x101D0, 0x101FC, prN}, // So [45] PHAISTOS DISC SIGN PEDESTRIAN..PHAISTOS DISC SIGN WAVY BAND
+ {0x101FD, 0x101FD, prN}, // Mn PHAISTOS DISC SIGN COMBINING OBLIQUE STROKE
+ {0x10280, 0x1029C, prN}, // Lo [29] LYCIAN LETTER A..LYCIAN LETTER X
+ {0x102A0, 0x102D0, prN}, // Lo [49] CARIAN LETTER A..CARIAN LETTER UUU3
+ {0x102E0, 0x102E0, prN}, // Mn COPTIC EPACT THOUSANDS MARK
+ {0x102E1, 0x102FB, prN}, // No [27] COPTIC EPACT DIGIT ONE..COPTIC EPACT NUMBER NINE HUNDRED
+ {0x10300, 0x1031F, prN}, // Lo [32] OLD ITALIC LETTER A..OLD ITALIC LETTER ESS
+ {0x10320, 0x10323, prN}, // No [4] OLD ITALIC NUMERAL ONE..OLD ITALIC NUMERAL FIFTY
+ {0x1032D, 0x1032F, prN}, // Lo [3] OLD ITALIC LETTER YE..OLD ITALIC LETTER SOUTHERN TSE
+ {0x10330, 0x10340, prN}, // Lo [17] GOTHIC LETTER AHSA..GOTHIC LETTER PAIRTHRA
+ {0x10341, 0x10341, prN}, // Nl GOTHIC LETTER NINETY
+ {0x10342, 0x10349, prN}, // Lo [8] GOTHIC LETTER RAIDA..GOTHIC LETTER OTHAL
+ {0x1034A, 0x1034A, prN}, // Nl GOTHIC LETTER NINE HUNDRED
+ {0x10350, 0x10375, prN}, // Lo [38] OLD PERMIC LETTER AN..OLD PERMIC LETTER IA
+ {0x10376, 0x1037A, prN}, // Mn [5] COMBINING OLD PERMIC LETTER AN..COMBINING OLD PERMIC LETTER SII
+ {0x10380, 0x1039D, prN}, // Lo [30] UGARITIC LETTER ALPA..UGARITIC LETTER SSU
+ {0x1039F, 0x1039F, prN}, // Po UGARITIC WORD DIVIDER
+ {0x103A0, 0x103C3, prN}, // Lo [36] OLD PERSIAN SIGN A..OLD PERSIAN SIGN HA
+ {0x103C8, 0x103CF, prN}, // Lo [8] OLD PERSIAN SIGN AURAMAZDAA..OLD PERSIAN SIGN BUUMISH
+ {0x103D0, 0x103D0, prN}, // Po OLD PERSIAN WORD DIVIDER
+ {0x103D1, 0x103D5, prN}, // Nl [5] OLD PERSIAN NUMBER ONE..OLD PERSIAN NUMBER HUNDRED
+ {0x10400, 0x1044F, prN}, // L& [80] DESERET CAPITAL LETTER LONG I..DESERET SMALL LETTER EW
+ {0x10450, 0x1047F, prN}, // Lo [48] SHAVIAN LETTER PEEP..SHAVIAN LETTER YEW
+ {0x10480, 0x1049D, prN}, // Lo [30] OSMANYA LETTER ALEF..OSMANYA LETTER OO
+ {0x104A0, 0x104A9, prN}, // Nd [10] OSMANYA DIGIT ZERO..OSMANYA DIGIT NINE
+ {0x104B0, 0x104D3, prN}, // Lu [36] OSAGE CAPITAL LETTER A..OSAGE CAPITAL LETTER ZHA
+ {0x104D8, 0x104FB, prN}, // Ll [36] OSAGE SMALL LETTER A..OSAGE SMALL LETTER ZHA
+ {0x10500, 0x10527, prN}, // Lo [40] ELBASAN LETTER A..ELBASAN LETTER KHE
+ {0x10530, 0x10563, prN}, // Lo [52] CAUCASIAN ALBANIAN LETTER ALT..CAUCASIAN ALBANIAN LETTER KIW
+ {0x1056F, 0x1056F, prN}, // Po CAUCASIAN ALBANIAN CITATION MARK
+ {0x10570, 0x1057A, prN}, // Lu [11] VITHKUQI CAPITAL LETTER A..VITHKUQI CAPITAL LETTER GA
+ {0x1057C, 0x1058A, prN}, // Lu [15] VITHKUQI CAPITAL LETTER HA..VITHKUQI CAPITAL LETTER RE
+ {0x1058C, 0x10592, prN}, // Lu [7] VITHKUQI CAPITAL LETTER SE..VITHKUQI CAPITAL LETTER XE
+ {0x10594, 0x10595, prN}, // Lu [2] VITHKUQI CAPITAL LETTER Y..VITHKUQI CAPITAL LETTER ZE
+ {0x10597, 0x105A1, prN}, // Ll [11] VITHKUQI SMALL LETTER A..VITHKUQI SMALL LETTER GA
+ {0x105A3, 0x105B1, prN}, // Ll [15] VITHKUQI SMALL LETTER HA..VITHKUQI SMALL LETTER RE
+ {0x105B3, 0x105B9, prN}, // Ll [7] VITHKUQI SMALL LETTER SE..VITHKUQI SMALL LETTER XE
+ {0x105BB, 0x105BC, prN}, // Ll [2] VITHKUQI SMALL LETTER Y..VITHKUQI SMALL LETTER ZE
+ {0x10600, 0x10736, prN}, // Lo [311] LINEAR A SIGN AB001..LINEAR A SIGN A664
+ {0x10740, 0x10755, prN}, // Lo [22] LINEAR A SIGN A701 A..LINEAR A SIGN A732 JE
+ {0x10760, 0x10767, prN}, // Lo [8] LINEAR A SIGN A800..LINEAR A SIGN A807
+ {0x10780, 0x10785, prN}, // Lm [6] MODIFIER LETTER SMALL CAPITAL AA..MODIFIER LETTER SMALL B WITH HOOK
+ {0x10787, 0x107B0, prN}, // Lm [42] MODIFIER LETTER SMALL DZ DIGRAPH..MODIFIER LETTER SMALL V WITH RIGHT HOOK
+ {0x107B2, 0x107BA, prN}, // Lm [9] MODIFIER LETTER SMALL CAPITAL Y..MODIFIER LETTER SMALL S WITH CURL
+ {0x10800, 0x10805, prN}, // Lo [6] CYPRIOT SYLLABLE A..CYPRIOT SYLLABLE JA
+ {0x10808, 0x10808, prN}, // Lo CYPRIOT SYLLABLE JO
+ {0x1080A, 0x10835, prN}, // Lo [44] CYPRIOT SYLLABLE KA..CYPRIOT SYLLABLE WO
+ {0x10837, 0x10838, prN}, // Lo [2] CYPRIOT SYLLABLE XA..CYPRIOT SYLLABLE XE
+ {0x1083C, 0x1083C, prN}, // Lo CYPRIOT SYLLABLE ZA
+ {0x1083F, 0x1083F, prN}, // Lo CYPRIOT SYLLABLE ZO
+ {0x10840, 0x10855, prN}, // Lo [22] IMPERIAL ARAMAIC LETTER ALEPH..IMPERIAL ARAMAIC LETTER TAW
+ {0x10857, 0x10857, prN}, // Po IMPERIAL ARAMAIC SECTION SIGN
+ {0x10858, 0x1085F, prN}, // No [8] IMPERIAL ARAMAIC NUMBER ONE..IMPERIAL ARAMAIC NUMBER TEN THOUSAND
+ {0x10860, 0x10876, prN}, // Lo [23] PALMYRENE LETTER ALEPH..PALMYRENE LETTER TAW
+ {0x10877, 0x10878, prN}, // So [2] PALMYRENE LEFT-POINTING FLEURON..PALMYRENE RIGHT-POINTING FLEURON
+ {0x10879, 0x1087F, prN}, // No [7] PALMYRENE NUMBER ONE..PALMYRENE NUMBER TWENTY
+ {0x10880, 0x1089E, prN}, // Lo [31] NABATAEAN LETTER FINAL ALEPH..NABATAEAN LETTER TAW
+ {0x108A7, 0x108AF, prN}, // No [9] NABATAEAN NUMBER ONE..NABATAEAN NUMBER ONE HUNDRED
+ {0x108E0, 0x108F2, prN}, // Lo [19] HATRAN LETTER ALEPH..HATRAN LETTER QOPH
+ {0x108F4, 0x108F5, prN}, // Lo [2] HATRAN LETTER SHIN..HATRAN LETTER TAW
+ {0x108FB, 0x108FF, prN}, // No [5] HATRAN NUMBER ONE..HATRAN NUMBER ONE HUNDRED
+ {0x10900, 0x10915, prN}, // Lo [22] PHOENICIAN LETTER ALF..PHOENICIAN LETTER TAU
+ {0x10916, 0x1091B, prN}, // No [6] PHOENICIAN NUMBER ONE..PHOENICIAN NUMBER THREE
+ {0x1091F, 0x1091F, prN}, // Po PHOENICIAN WORD SEPARATOR
+ {0x10920, 0x10939, prN}, // Lo [26] LYDIAN LETTER A..LYDIAN LETTER C
+ {0x1093F, 0x1093F, prN}, // Po LYDIAN TRIANGULAR MARK
+ {0x10980, 0x1099F, prN}, // Lo [32] MEROITIC HIEROGLYPHIC LETTER A..MEROITIC HIEROGLYPHIC SYMBOL VIDJ-2
+ {0x109A0, 0x109B7, prN}, // Lo [24] MEROITIC CURSIVE LETTER A..MEROITIC CURSIVE LETTER DA
+ {0x109BC, 0x109BD, prN}, // No [2] MEROITIC CURSIVE FRACTION ELEVEN TWELFTHS..MEROITIC CURSIVE FRACTION ONE HALF
+ {0x109BE, 0x109BF, prN}, // Lo [2] MEROITIC CURSIVE LOGOGRAM RMT..MEROITIC CURSIVE LOGOGRAM IMN
+ {0x109C0, 0x109CF, prN}, // No [16] MEROITIC CURSIVE NUMBER ONE..MEROITIC CURSIVE NUMBER SEVENTY
+ {0x109D2, 0x109FF, prN}, // No [46] MEROITIC CURSIVE NUMBER ONE HUNDRED..MEROITIC CURSIVE FRACTION TEN TWELFTHS
+ {0x10A00, 0x10A00, prN}, // Lo KHAROSHTHI LETTER A
+ {0x10A01, 0x10A03, prN}, // Mn [3] KHAROSHTHI VOWEL SIGN I..KHAROSHTHI VOWEL SIGN VOCALIC R
+ {0x10A05, 0x10A06, prN}, // Mn [2] KHAROSHTHI VOWEL SIGN E..KHAROSHTHI VOWEL SIGN O
+ {0x10A0C, 0x10A0F, prN}, // Mn [4] KHAROSHTHI VOWEL LENGTH MARK..KHAROSHTHI SIGN VISARGA
+ {0x10A10, 0x10A13, prN}, // Lo [4] KHAROSHTHI LETTER KA..KHAROSHTHI LETTER GHA
+ {0x10A15, 0x10A17, prN}, // Lo [3] KHAROSHTHI LETTER CA..KHAROSHTHI LETTER JA
+ {0x10A19, 0x10A35, prN}, // Lo [29] KHAROSHTHI LETTER NYA..KHAROSHTHI LETTER VHA
+ {0x10A38, 0x10A3A, prN}, // Mn [3] KHAROSHTHI SIGN BAR ABOVE..KHAROSHTHI SIGN DOT BELOW
+ {0x10A3F, 0x10A3F, prN}, // Mn KHAROSHTHI VIRAMA
+ {0x10A40, 0x10A48, prN}, // No [9] KHAROSHTHI DIGIT ONE..KHAROSHTHI FRACTION ONE HALF
+ {0x10A50, 0x10A58, prN}, // Po [9] KHAROSHTHI PUNCTUATION DOT..KHAROSHTHI PUNCTUATION LINES
+ {0x10A60, 0x10A7C, prN}, // Lo [29] OLD SOUTH ARABIAN LETTER HE..OLD SOUTH ARABIAN LETTER THETH
+ {0x10A7D, 0x10A7E, prN}, // No [2] OLD SOUTH ARABIAN NUMBER ONE..OLD SOUTH ARABIAN NUMBER FIFTY
+ {0x10A7F, 0x10A7F, prN}, // Po OLD SOUTH ARABIAN NUMERIC INDICATOR
+ {0x10A80, 0x10A9C, prN}, // Lo [29] OLD NORTH ARABIAN LETTER HEH..OLD NORTH ARABIAN LETTER ZAH
+ {0x10A9D, 0x10A9F, prN}, // No [3] OLD NORTH ARABIAN NUMBER ONE..OLD NORTH ARABIAN NUMBER TWENTY
+ {0x10AC0, 0x10AC7, prN}, // Lo [8] MANICHAEAN LETTER ALEPH..MANICHAEAN LETTER WAW
+ {0x10AC8, 0x10AC8, prN}, // So MANICHAEAN SIGN UD
+ {0x10AC9, 0x10AE4, prN}, // Lo [28] MANICHAEAN LETTER ZAYIN..MANICHAEAN LETTER TAW
+ {0x10AE5, 0x10AE6, prN}, // Mn [2] MANICHAEAN ABBREVIATION MARK ABOVE..MANICHAEAN ABBREVIATION MARK BELOW
+ {0x10AEB, 0x10AEF, prN}, // No [5] MANICHAEAN NUMBER ONE..MANICHAEAN NUMBER ONE HUNDRED
+ {0x10AF0, 0x10AF6, prN}, // Po [7] MANICHAEAN PUNCTUATION STAR..MANICHAEAN PUNCTUATION LINE FILLER
+ {0x10B00, 0x10B35, prN}, // Lo [54] AVESTAN LETTER A..AVESTAN LETTER HE
+ {0x10B39, 0x10B3F, prN}, // Po [7] AVESTAN ABBREVIATION MARK..LARGE ONE RING OVER TWO RINGS PUNCTUATION
+ {0x10B40, 0x10B55, prN}, // Lo [22] INSCRIPTIONAL PARTHIAN LETTER ALEPH..INSCRIPTIONAL PARTHIAN LETTER TAW
+ {0x10B58, 0x10B5F, prN}, // No [8] INSCRIPTIONAL PARTHIAN NUMBER ONE..INSCRIPTIONAL PARTHIAN NUMBER ONE THOUSAND
+ {0x10B60, 0x10B72, prN}, // Lo [19] INSCRIPTIONAL PAHLAVI LETTER ALEPH..INSCRIPTIONAL PAHLAVI LETTER TAW
+ {0x10B78, 0x10B7F, prN}, // No [8] INSCRIPTIONAL PAHLAVI NUMBER ONE..INSCRIPTIONAL PAHLAVI NUMBER ONE THOUSAND
+ {0x10B80, 0x10B91, prN}, // Lo [18] PSALTER PAHLAVI LETTER ALEPH..PSALTER PAHLAVI LETTER TAW
+ {0x10B99, 0x10B9C, prN}, // Po [4] PSALTER PAHLAVI SECTION MARK..PSALTER PAHLAVI FOUR DOTS WITH DOT
+ {0x10BA9, 0x10BAF, prN}, // No [7] PSALTER PAHLAVI NUMBER ONE..PSALTER PAHLAVI NUMBER ONE HUNDRED
+ {0x10C00, 0x10C48, prN}, // Lo [73] OLD TURKIC LETTER ORKHON A..OLD TURKIC LETTER ORKHON BASH
+ {0x10C80, 0x10CB2, prN}, // Lu [51] OLD HUNGARIAN CAPITAL LETTER A..OLD HUNGARIAN CAPITAL LETTER US
+ {0x10CC0, 0x10CF2, prN}, // Ll [51] OLD HUNGARIAN SMALL LETTER A..OLD HUNGARIAN SMALL LETTER US
+ {0x10CFA, 0x10CFF, prN}, // No [6] OLD HUNGARIAN NUMBER ONE..OLD HUNGARIAN NUMBER ONE THOUSAND
+ {0x10D00, 0x10D23, prN}, // Lo [36] HANIFI ROHINGYA LETTER A..HANIFI ROHINGYA MARK NA KHONNA
+ {0x10D24, 0x10D27, prN}, // Mn [4] HANIFI ROHINGYA SIGN HARBAHAY..HANIFI ROHINGYA SIGN TASSI
+ {0x10D30, 0x10D39, prN}, // Nd [10] HANIFI ROHINGYA DIGIT ZERO..HANIFI ROHINGYA DIGIT NINE
+ {0x10E60, 0x10E7E, prN}, // No [31] RUMI DIGIT ONE..RUMI FRACTION TWO THIRDS
+ {0x10E80, 0x10EA9, prN}, // Lo [42] YEZIDI LETTER ELIF..YEZIDI LETTER ET
+ {0x10EAB, 0x10EAC, prN}, // Mn [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK
+ {0x10EAD, 0x10EAD, prN}, // Pd YEZIDI HYPHENATION MARK
+ {0x10EB0, 0x10EB1, prN}, // Lo [2] YEZIDI LETTER LAM WITH DOT ABOVE..YEZIDI LETTER YOT WITH CIRCUMFLEX ABOVE
+ {0x10EFD, 0x10EFF, prN}, // Mn [3] ARABIC SMALL LOW WORD SAKTA..ARABIC SMALL LOW WORD MADDA
+ {0x10F00, 0x10F1C, prN}, // Lo [29] OLD SOGDIAN LETTER ALEPH..OLD SOGDIAN LETTER FINAL TAW WITH VERTICAL TAIL
+ {0x10F1D, 0x10F26, prN}, // No [10] OLD SOGDIAN NUMBER ONE..OLD SOGDIAN FRACTION ONE HALF
+ {0x10F27, 0x10F27, prN}, // Lo OLD SOGDIAN LIGATURE AYIN-DALETH
+ {0x10F30, 0x10F45, prN}, // Lo [22] SOGDIAN LETTER ALEPH..SOGDIAN INDEPENDENT SHIN
+ {0x10F46, 0x10F50, prN}, // Mn [11] SOGDIAN COMBINING DOT BELOW..SOGDIAN COMBINING STROKE BELOW
+ {0x10F51, 0x10F54, prN}, // No [4] SOGDIAN NUMBER ONE..SOGDIAN NUMBER ONE HUNDRED
+ {0x10F55, 0x10F59, prN}, // Po [5] SOGDIAN PUNCTUATION TWO VERTICAL BARS..SOGDIAN PUNCTUATION HALF CIRCLE WITH DOT
+ {0x10F70, 0x10F81, prN}, // Lo [18] OLD UYGHUR LETTER ALEPH..OLD UYGHUR LETTER LESH
+ {0x10F82, 0x10F85, prN}, // Mn [4] OLD UYGHUR COMBINING DOT ABOVE..OLD UYGHUR COMBINING TWO DOTS BELOW
+ {0x10F86, 0x10F89, prN}, // Po [4] OLD UYGHUR PUNCTUATION BAR..OLD UYGHUR PUNCTUATION FOUR DOTS
+ {0x10FB0, 0x10FC4, prN}, // Lo [21] CHORASMIAN LETTER ALEPH..CHORASMIAN LETTER TAW
+ {0x10FC5, 0x10FCB, prN}, // No [7] CHORASMIAN NUMBER ONE..CHORASMIAN NUMBER ONE HUNDRED
+ {0x10FE0, 0x10FF6, prN}, // Lo [23] ELYMAIC LETTER ALEPH..ELYMAIC LIGATURE ZAYIN-YODH
+ {0x11000, 0x11000, prN}, // Mc BRAHMI SIGN CANDRABINDU
+ {0x11001, 0x11001, prN}, // Mn BRAHMI SIGN ANUSVARA
+ {0x11002, 0x11002, prN}, // Mc BRAHMI SIGN VISARGA
+ {0x11003, 0x11037, prN}, // Lo [53] BRAHMI SIGN JIHVAMULIYA..BRAHMI LETTER OLD TAMIL NNNA
+ {0x11038, 0x11046, prN}, // Mn [15] BRAHMI VOWEL SIGN AA..BRAHMI VIRAMA
+ {0x11047, 0x1104D, prN}, // Po [7] BRAHMI DANDA..BRAHMI PUNCTUATION LOTUS
+ {0x11052, 0x11065, prN}, // No [20] BRAHMI NUMBER ONE..BRAHMI NUMBER ONE THOUSAND
+ {0x11066, 0x1106F, prN}, // Nd [10] BRAHMI DIGIT ZERO..BRAHMI DIGIT NINE
+ {0x11070, 0x11070, prN}, // Mn BRAHMI SIGN OLD TAMIL VIRAMA
+ {0x11071, 0x11072, prN}, // Lo [2] BRAHMI LETTER OLD TAMIL SHORT E..BRAHMI LETTER OLD TAMIL SHORT O
+ {0x11073, 0x11074, prN}, // Mn [2] BRAHMI VOWEL SIGN OLD TAMIL SHORT E..BRAHMI VOWEL SIGN OLD TAMIL SHORT O
+ {0x11075, 0x11075, prN}, // Lo BRAHMI LETTER OLD TAMIL LLA
+ {0x1107F, 0x1107F, prN}, // Mn BRAHMI NUMBER JOINER
+ {0x11080, 0x11081, prN}, // Mn [2] KAITHI SIGN CANDRABINDU..KAITHI SIGN ANUSVARA
+ {0x11082, 0x11082, prN}, // Mc KAITHI SIGN VISARGA
+ {0x11083, 0x110AF, prN}, // Lo [45] KAITHI LETTER A..KAITHI LETTER HA
+ {0x110B0, 0x110B2, prN}, // Mc [3] KAITHI VOWEL SIGN AA..KAITHI VOWEL SIGN II
+ {0x110B3, 0x110B6, prN}, // Mn [4] KAITHI VOWEL SIGN U..KAITHI VOWEL SIGN AI
+ {0x110B7, 0x110B8, prN}, // Mc [2] KAITHI VOWEL SIGN O..KAITHI VOWEL SIGN AU
+ {0x110B9, 0x110BA, prN}, // Mn [2] KAITHI SIGN VIRAMA..KAITHI SIGN NUKTA
+ {0x110BB, 0x110BC, prN}, // Po [2] KAITHI ABBREVIATION SIGN..KAITHI ENUMERATION SIGN
+ {0x110BD, 0x110BD, prN}, // Cf KAITHI NUMBER SIGN
+ {0x110BE, 0x110C1, prN}, // Po [4] KAITHI SECTION MARK..KAITHI DOUBLE DANDA
+ {0x110C2, 0x110C2, prN}, // Mn KAITHI VOWEL SIGN VOCALIC R
+ {0x110CD, 0x110CD, prN}, // Cf KAITHI NUMBER SIGN ABOVE
+ {0x110D0, 0x110E8, prN}, // Lo [25] SORA SOMPENG LETTER SAH..SORA SOMPENG LETTER MAE
+ {0x110F0, 0x110F9, prN}, // Nd [10] SORA SOMPENG DIGIT ZERO..SORA SOMPENG DIGIT NINE
+ {0x11100, 0x11102, prN}, // Mn [3] CHAKMA SIGN CANDRABINDU..CHAKMA SIGN VISARGA
+ {0x11103, 0x11126, prN}, // Lo [36] CHAKMA LETTER AA..CHAKMA LETTER HAA
+ {0x11127, 0x1112B, prN}, // Mn [5] CHAKMA VOWEL SIGN A..CHAKMA VOWEL SIGN UU
+ {0x1112C, 0x1112C, prN}, // Mc CHAKMA VOWEL SIGN E
+ {0x1112D, 0x11134, prN}, // Mn [8] CHAKMA VOWEL SIGN AI..CHAKMA MAAYYAA
+ {0x11136, 0x1113F, prN}, // Nd [10] CHAKMA DIGIT ZERO..CHAKMA DIGIT NINE
+ {0x11140, 0x11143, prN}, // Po [4] CHAKMA SECTION MARK..CHAKMA QUESTION MARK
+ {0x11144, 0x11144, prN}, // Lo CHAKMA LETTER LHAA
+ {0x11145, 0x11146, prN}, // Mc [2] CHAKMA VOWEL SIGN AA..CHAKMA VOWEL SIGN EI
+ {0x11147, 0x11147, prN}, // Lo CHAKMA LETTER VAA
+ {0x11150, 0x11172, prN}, // Lo [35] MAHAJANI LETTER A..MAHAJANI LETTER RRA
+ {0x11173, 0x11173, prN}, // Mn MAHAJANI SIGN NUKTA
+ {0x11174, 0x11175, prN}, // Po [2] MAHAJANI ABBREVIATION SIGN..MAHAJANI SECTION MARK
+ {0x11176, 0x11176, prN}, // Lo MAHAJANI LIGATURE SHRI
+ {0x11180, 0x11181, prN}, // Mn [2] SHARADA SIGN CANDRABINDU..SHARADA SIGN ANUSVARA
+ {0x11182, 0x11182, prN}, // Mc SHARADA SIGN VISARGA
+ {0x11183, 0x111B2, prN}, // Lo [48] SHARADA LETTER A..SHARADA LETTER HA
+ {0x111B3, 0x111B5, prN}, // Mc [3] SHARADA VOWEL SIGN AA..SHARADA VOWEL SIGN II
+ {0x111B6, 0x111BE, prN}, // Mn [9] SHARADA VOWEL SIGN U..SHARADA VOWEL SIGN O
+ {0x111BF, 0x111C0, prN}, // Mc [2] SHARADA VOWEL SIGN AU..SHARADA SIGN VIRAMA
+ {0x111C1, 0x111C4, prN}, // Lo [4] SHARADA SIGN AVAGRAHA..SHARADA OM
+ {0x111C5, 0x111C8, prN}, // Po [4] SHARADA DANDA..SHARADA SEPARATOR
+ {0x111C9, 0x111CC, prN}, // Mn [4] SHARADA SANDHI MARK..SHARADA EXTRA SHORT VOWEL MARK
+ {0x111CD, 0x111CD, prN}, // Po SHARADA SUTRA MARK
+ {0x111CE, 0x111CE, prN}, // Mc SHARADA VOWEL SIGN PRISHTHAMATRA E
+ {0x111CF, 0x111CF, prN}, // Mn SHARADA SIGN INVERTED CANDRABINDU
+ {0x111D0, 0x111D9, prN}, // Nd [10] SHARADA DIGIT ZERO..SHARADA DIGIT NINE
+ {0x111DA, 0x111DA, prN}, // Lo SHARADA EKAM
+ {0x111DB, 0x111DB, prN}, // Po SHARADA SIGN SIDDHAM
+ {0x111DC, 0x111DC, prN}, // Lo SHARADA HEADSTROKE
+ {0x111DD, 0x111DF, prN}, // Po [3] SHARADA CONTINUATION SIGN..SHARADA SECTION MARK-2
+ {0x111E1, 0x111F4, prN}, // No [20] SINHALA ARCHAIC DIGIT ONE..SINHALA ARCHAIC NUMBER ONE THOUSAND
+ {0x11200, 0x11211, prN}, // Lo [18] KHOJKI LETTER A..KHOJKI LETTER JJA
+ {0x11213, 0x1122B, prN}, // Lo [25] KHOJKI LETTER NYA..KHOJKI LETTER LLA
+ {0x1122C, 0x1122E, prN}, // Mc [3] KHOJKI VOWEL SIGN AA..KHOJKI VOWEL SIGN II
+ {0x1122F, 0x11231, prN}, // Mn [3] KHOJKI VOWEL SIGN U..KHOJKI VOWEL SIGN AI
+ {0x11232, 0x11233, prN}, // Mc [2] KHOJKI VOWEL SIGN O..KHOJKI VOWEL SIGN AU
+ {0x11234, 0x11234, prN}, // Mn KHOJKI SIGN ANUSVARA
+ {0x11235, 0x11235, prN}, // Mc KHOJKI SIGN VIRAMA
+ {0x11236, 0x11237, prN}, // Mn [2] KHOJKI SIGN NUKTA..KHOJKI SIGN SHADDA
+ {0x11238, 0x1123D, prN}, // Po [6] KHOJKI DANDA..KHOJKI ABBREVIATION SIGN
+ {0x1123E, 0x1123E, prN}, // Mn KHOJKI SIGN SUKUN
+ {0x1123F, 0x11240, prN}, // Lo [2] KHOJKI LETTER QA..KHOJKI LETTER SHORT I
+ {0x11241, 0x11241, prN}, // Mn KHOJKI VOWEL SIGN VOCALIC R
+ {0x11280, 0x11286, prN}, // Lo [7] MULTANI LETTER A..MULTANI LETTER GA
+ {0x11288, 0x11288, prN}, // Lo MULTANI LETTER GHA
+ {0x1128A, 0x1128D, prN}, // Lo [4] MULTANI LETTER CA..MULTANI LETTER JJA
+ {0x1128F, 0x1129D, prN}, // Lo [15] MULTANI LETTER NYA..MULTANI LETTER BA
+ {0x1129F, 0x112A8, prN}, // Lo [10] MULTANI LETTER BHA..MULTANI LETTER RHA
+ {0x112A9, 0x112A9, prN}, // Po MULTANI SECTION MARK
+ {0x112B0, 0x112DE, prN}, // Lo [47] KHUDAWADI LETTER A..KHUDAWADI LETTER HA
+ {0x112DF, 0x112DF, prN}, // Mn KHUDAWADI SIGN ANUSVARA
+ {0x112E0, 0x112E2, prN}, // Mc [3] KHUDAWADI VOWEL SIGN AA..KHUDAWADI VOWEL SIGN II
+ {0x112E3, 0x112EA, prN}, // Mn [8] KHUDAWADI VOWEL SIGN U..KHUDAWADI SIGN VIRAMA
+ {0x112F0, 0x112F9, prN}, // Nd [10] KHUDAWADI DIGIT ZERO..KHUDAWADI DIGIT NINE
+ {0x11300, 0x11301, prN}, // Mn [2] GRANTHA SIGN COMBINING ANUSVARA ABOVE..GRANTHA SIGN CANDRABINDU
+ {0x11302, 0x11303, prN}, // Mc [2] GRANTHA SIGN ANUSVARA..GRANTHA SIGN VISARGA
+ {0x11305, 0x1130C, prN}, // Lo [8] GRANTHA LETTER A..GRANTHA LETTER VOCALIC L
+ {0x1130F, 0x11310, prN}, // Lo [2] GRANTHA LETTER EE..GRANTHA LETTER AI
+ {0x11313, 0x11328, prN}, // Lo [22] GRANTHA LETTER OO..GRANTHA LETTER NA
+ {0x1132A, 0x11330, prN}, // Lo [7] GRANTHA LETTER PA..GRANTHA LETTER RA
+ {0x11332, 0x11333, prN}, // Lo [2] GRANTHA LETTER LA..GRANTHA LETTER LLA
+ {0x11335, 0x11339, prN}, // Lo [5] GRANTHA LETTER VA..GRANTHA LETTER HA
+ {0x1133B, 0x1133C, prN}, // Mn [2] COMBINING BINDU BELOW..GRANTHA SIGN NUKTA
+ {0x1133D, 0x1133D, prN}, // Lo GRANTHA SIGN AVAGRAHA
+ {0x1133E, 0x1133F, prN}, // Mc [2] GRANTHA VOWEL SIGN AA..GRANTHA VOWEL SIGN I
+ {0x11340, 0x11340, prN}, // Mn GRANTHA VOWEL SIGN II
+ {0x11341, 0x11344, prN}, // Mc [4] GRANTHA VOWEL SIGN U..GRANTHA VOWEL SIGN VOCALIC RR
+ {0x11347, 0x11348, prN}, // Mc [2] GRANTHA VOWEL SIGN EE..GRANTHA VOWEL SIGN AI
+ {0x1134B, 0x1134D, prN}, // Mc [3] GRANTHA VOWEL SIGN OO..GRANTHA SIGN VIRAMA
+ {0x11350, 0x11350, prN}, // Lo GRANTHA OM
+ {0x11357, 0x11357, prN}, // Mc GRANTHA AU LENGTH MARK
+ {0x1135D, 0x11361, prN}, // Lo [5] GRANTHA SIGN PLUTA..GRANTHA LETTER VOCALIC LL
+ {0x11362, 0x11363, prN}, // Mc [2] GRANTHA VOWEL SIGN VOCALIC L..GRANTHA VOWEL SIGN VOCALIC LL
+ {0x11366, 0x1136C, prN}, // Mn [7] COMBINING GRANTHA DIGIT ZERO..COMBINING GRANTHA DIGIT SIX
+ {0x11370, 0x11374, prN}, // Mn [5] COMBINING GRANTHA LETTER A..COMBINING GRANTHA LETTER PA
+ {0x11400, 0x11434, prN}, // Lo [53] NEWA LETTER A..NEWA LETTER HA
+ {0x11435, 0x11437, prN}, // Mc [3] NEWA VOWEL SIGN AA..NEWA VOWEL SIGN II
+ {0x11438, 0x1143F, prN}, // Mn [8] NEWA VOWEL SIGN U..NEWA VOWEL SIGN AI
+ {0x11440, 0x11441, prN}, // Mc [2] NEWA VOWEL SIGN O..NEWA VOWEL SIGN AU
+ {0x11442, 0x11444, prN}, // Mn [3] NEWA SIGN VIRAMA..NEWA SIGN ANUSVARA
+ {0x11445, 0x11445, prN}, // Mc NEWA SIGN VISARGA
+ {0x11446, 0x11446, prN}, // Mn NEWA SIGN NUKTA
+ {0x11447, 0x1144A, prN}, // Lo [4] NEWA SIGN AVAGRAHA..NEWA SIDDHI
+ {0x1144B, 0x1144F, prN}, // Po [5] NEWA DANDA..NEWA ABBREVIATION SIGN
+ {0x11450, 0x11459, prN}, // Nd [10] NEWA DIGIT ZERO..NEWA DIGIT NINE
+ {0x1145A, 0x1145B, prN}, // Po [2] NEWA DOUBLE COMMA..NEWA PLACEHOLDER MARK
+ {0x1145D, 0x1145D, prN}, // Po NEWA INSERTION SIGN
+ {0x1145E, 0x1145E, prN}, // Mn NEWA SANDHI MARK
+ {0x1145F, 0x11461, prN}, // Lo [3] NEWA LETTER VEDIC ANUSVARA..NEWA SIGN UPADHMANIYA
+ {0x11480, 0x114AF, prN}, // Lo [48] TIRHUTA ANJI..TIRHUTA LETTER HA
+ {0x114B0, 0x114B2, prN}, // Mc [3] TIRHUTA VOWEL SIGN AA..TIRHUTA VOWEL SIGN II
+ {0x114B3, 0x114B8, prN}, // Mn [6] TIRHUTA VOWEL SIGN U..TIRHUTA VOWEL SIGN VOCALIC LL
+ {0x114B9, 0x114B9, prN}, // Mc TIRHUTA VOWEL SIGN E
+ {0x114BA, 0x114BA, prN}, // Mn TIRHUTA VOWEL SIGN SHORT E
+ {0x114BB, 0x114BE, prN}, // Mc [4] TIRHUTA VOWEL SIGN AI..TIRHUTA VOWEL SIGN AU
+ {0x114BF, 0x114C0, prN}, // Mn [2] TIRHUTA SIGN CANDRABINDU..TIRHUTA SIGN ANUSVARA
+ {0x114C1, 0x114C1, prN}, // Mc TIRHUTA SIGN VISARGA
+ {0x114C2, 0x114C3, prN}, // Mn [2] TIRHUTA SIGN VIRAMA..TIRHUTA SIGN NUKTA
+ {0x114C4, 0x114C5, prN}, // Lo [2] TIRHUTA SIGN AVAGRAHA..TIRHUTA GVANG
+ {0x114C6, 0x114C6, prN}, // Po TIRHUTA ABBREVIATION SIGN
+ {0x114C7, 0x114C7, prN}, // Lo TIRHUTA OM
+ {0x114D0, 0x114D9, prN}, // Nd [10] TIRHUTA DIGIT ZERO..TIRHUTA DIGIT NINE
+ {0x11580, 0x115AE, prN}, // Lo [47] SIDDHAM LETTER A..SIDDHAM LETTER HA
+ {0x115AF, 0x115B1, prN}, // Mc [3] SIDDHAM VOWEL SIGN AA..SIDDHAM VOWEL SIGN II
+ {0x115B2, 0x115B5, prN}, // Mn [4] SIDDHAM VOWEL SIGN U..SIDDHAM VOWEL SIGN VOCALIC RR
+ {0x115B8, 0x115BB, prN}, // Mc [4] SIDDHAM VOWEL SIGN E..SIDDHAM VOWEL SIGN AU
+ {0x115BC, 0x115BD, prN}, // Mn [2] SIDDHAM SIGN CANDRABINDU..SIDDHAM SIGN ANUSVARA
+ {0x115BE, 0x115BE, prN}, // Mc SIDDHAM SIGN VISARGA
+ {0x115BF, 0x115C0, prN}, // Mn [2] SIDDHAM SIGN VIRAMA..SIDDHAM SIGN NUKTA
+ {0x115C1, 0x115D7, prN}, // Po [23] SIDDHAM SIGN SIDDHAM..SIDDHAM SECTION MARK WITH CIRCLES AND FOUR ENCLOSURES
+ {0x115D8, 0x115DB, prN}, // Lo [4] SIDDHAM LETTER THREE-CIRCLE ALTERNATE I..SIDDHAM LETTER ALTERNATE U
+ {0x115DC, 0x115DD, prN}, // Mn [2] SIDDHAM VOWEL SIGN ALTERNATE U..SIDDHAM VOWEL SIGN ALTERNATE UU
+ {0x11600, 0x1162F, prN}, // Lo [48] MODI LETTER A..MODI LETTER LLA
+ {0x11630, 0x11632, prN}, // Mc [3] MODI VOWEL SIGN AA..MODI VOWEL SIGN II
+ {0x11633, 0x1163A, prN}, // Mn [8] MODI VOWEL SIGN U..MODI VOWEL SIGN AI
+ {0x1163B, 0x1163C, prN}, // Mc [2] MODI VOWEL SIGN O..MODI VOWEL SIGN AU
+ {0x1163D, 0x1163D, prN}, // Mn MODI SIGN ANUSVARA
+ {0x1163E, 0x1163E, prN}, // Mc MODI SIGN VISARGA
+ {0x1163F, 0x11640, prN}, // Mn [2] MODI SIGN VIRAMA..MODI SIGN ARDHACANDRA
+ {0x11641, 0x11643, prN}, // Po [3] MODI DANDA..MODI ABBREVIATION SIGN
+ {0x11644, 0x11644, prN}, // Lo MODI SIGN HUVA
+ {0x11650, 0x11659, prN}, // Nd [10] MODI DIGIT ZERO..MODI DIGIT NINE
+ {0x11660, 0x1166C, prN}, // Po [13] MONGOLIAN BIRGA WITH ORNAMENT..MONGOLIAN TURNED SWIRL BIRGA WITH DOUBLE ORNAMENT
+ {0x11680, 0x116AA, prN}, // Lo [43] TAKRI LETTER A..TAKRI LETTER RRA
+ {0x116AB, 0x116AB, prN}, // Mn TAKRI SIGN ANUSVARA
+ {0x116AC, 0x116AC, prN}, // Mc TAKRI SIGN VISARGA
+ {0x116AD, 0x116AD, prN}, // Mn TAKRI VOWEL SIGN AA
+ {0x116AE, 0x116AF, prN}, // Mc [2] TAKRI VOWEL SIGN I..TAKRI VOWEL SIGN II
+ {0x116B0, 0x116B5, prN}, // Mn [6] TAKRI VOWEL SIGN U..TAKRI VOWEL SIGN AU
+ {0x116B6, 0x116B6, prN}, // Mc TAKRI SIGN VIRAMA
+ {0x116B7, 0x116B7, prN}, // Mn TAKRI SIGN NUKTA
+ {0x116B8, 0x116B8, prN}, // Lo TAKRI LETTER ARCHAIC KHA
+ {0x116B9, 0x116B9, prN}, // Po TAKRI ABBREVIATION SIGN
+ {0x116C0, 0x116C9, prN}, // Nd [10] TAKRI DIGIT ZERO..TAKRI DIGIT NINE
+ {0x11700, 0x1171A, prN}, // Lo [27] AHOM LETTER KA..AHOM LETTER ALTERNATE BA
+ {0x1171D, 0x1171F, prN}, // Mn [3] AHOM CONSONANT SIGN MEDIAL LA..AHOM CONSONANT SIGN MEDIAL LIGATING RA
+ {0x11720, 0x11721, prN}, // Mc [2] AHOM VOWEL SIGN A..AHOM VOWEL SIGN AA
+ {0x11722, 0x11725, prN}, // Mn [4] AHOM VOWEL SIGN I..AHOM VOWEL SIGN UU
+ {0x11726, 0x11726, prN}, // Mc AHOM VOWEL SIGN E
+ {0x11727, 0x1172B, prN}, // Mn [5] AHOM VOWEL SIGN AW..AHOM SIGN KILLER
+ {0x11730, 0x11739, prN}, // Nd [10] AHOM DIGIT ZERO..AHOM DIGIT NINE
+ {0x1173A, 0x1173B, prN}, // No [2] AHOM NUMBER TEN..AHOM NUMBER TWENTY
+ {0x1173C, 0x1173E, prN}, // Po [3] AHOM SIGN SMALL SECTION..AHOM SIGN RULAI
+ {0x1173F, 0x1173F, prN}, // So AHOM SYMBOL VI
+ {0x11740, 0x11746, prN}, // Lo [7] AHOM LETTER CA..AHOM LETTER LLA
+ {0x11800, 0x1182B, prN}, // Lo [44] DOGRA LETTER A..DOGRA LETTER RRA
+ {0x1182C, 0x1182E, prN}, // Mc [3] DOGRA VOWEL SIGN AA..DOGRA VOWEL SIGN II
+ {0x1182F, 0x11837, prN}, // Mn [9] DOGRA VOWEL SIGN U..DOGRA SIGN ANUSVARA
+ {0x11838, 0x11838, prN}, // Mc DOGRA SIGN VISARGA
+ {0x11839, 0x1183A, prN}, // Mn [2] DOGRA SIGN VIRAMA..DOGRA SIGN NUKTA
+ {0x1183B, 0x1183B, prN}, // Po DOGRA ABBREVIATION SIGN
+ {0x118A0, 0x118DF, prN}, // L& [64] WARANG CITI CAPITAL LETTER NGAA..WARANG CITI SMALL LETTER VIYO
+ {0x118E0, 0x118E9, prN}, // Nd [10] WARANG CITI DIGIT ZERO..WARANG CITI DIGIT NINE
+ {0x118EA, 0x118F2, prN}, // No [9] WARANG CITI NUMBER TEN..WARANG CITI NUMBER NINETY
+ {0x118FF, 0x118FF, prN}, // Lo WARANG CITI OM
+ {0x11900, 0x11906, prN}, // Lo [7] DIVES AKURU LETTER A..DIVES AKURU LETTER E
+ {0x11909, 0x11909, prN}, // Lo DIVES AKURU LETTER O
+ {0x1190C, 0x11913, prN}, // Lo [8] DIVES AKURU LETTER KA..DIVES AKURU LETTER JA
+ {0x11915, 0x11916, prN}, // Lo [2] DIVES AKURU LETTER NYA..DIVES AKURU LETTER TTA
+ {0x11918, 0x1192F, prN}, // Lo [24] DIVES AKURU LETTER DDA..DIVES AKURU LETTER ZA
+ {0x11930, 0x11935, prN}, // Mc [6] DIVES AKURU VOWEL SIGN AA..DIVES AKURU VOWEL SIGN E
+ {0x11937, 0x11938, prN}, // Mc [2] DIVES AKURU VOWEL SIGN AI..DIVES AKURU VOWEL SIGN O
+ {0x1193B, 0x1193C, prN}, // Mn [2] DIVES AKURU SIGN ANUSVARA..DIVES AKURU SIGN CANDRABINDU
+ {0x1193D, 0x1193D, prN}, // Mc DIVES AKURU SIGN HALANTA
+ {0x1193E, 0x1193E, prN}, // Mn DIVES AKURU VIRAMA
+ {0x1193F, 0x1193F, prN}, // Lo DIVES AKURU PREFIXED NASAL SIGN
+ {0x11940, 0x11940, prN}, // Mc DIVES AKURU MEDIAL YA
+ {0x11941, 0x11941, prN}, // Lo DIVES AKURU INITIAL RA
+ {0x11942, 0x11942, prN}, // Mc DIVES AKURU MEDIAL RA
+ {0x11943, 0x11943, prN}, // Mn DIVES AKURU SIGN NUKTA
+ {0x11944, 0x11946, prN}, // Po [3] DIVES AKURU DOUBLE DANDA..DIVES AKURU END OF TEXT MARK
+ {0x11950, 0x11959, prN}, // Nd [10] DIVES AKURU DIGIT ZERO..DIVES AKURU DIGIT NINE
+ {0x119A0, 0x119A7, prN}, // Lo [8] NANDINAGARI LETTER A..NANDINAGARI LETTER VOCALIC RR
+ {0x119AA, 0x119D0, prN}, // Lo [39] NANDINAGARI LETTER E..NANDINAGARI LETTER RRA
+ {0x119D1, 0x119D3, prN}, // Mc [3] NANDINAGARI VOWEL SIGN AA..NANDINAGARI VOWEL SIGN II
+ {0x119D4, 0x119D7, prN}, // Mn [4] NANDINAGARI VOWEL SIGN U..NANDINAGARI VOWEL SIGN VOCALIC RR
+ {0x119DA, 0x119DB, prN}, // Mn [2] NANDINAGARI VOWEL SIGN E..NANDINAGARI VOWEL SIGN AI
+ {0x119DC, 0x119DF, prN}, // Mc [4] NANDINAGARI VOWEL SIGN O..NANDINAGARI SIGN VISARGA
+ {0x119E0, 0x119E0, prN}, // Mn NANDINAGARI SIGN VIRAMA
+ {0x119E1, 0x119E1, prN}, // Lo NANDINAGARI SIGN AVAGRAHA
+ {0x119E2, 0x119E2, prN}, // Po NANDINAGARI SIGN SIDDHAM
+ {0x119E3, 0x119E3, prN}, // Lo NANDINAGARI HEADSTROKE
+ {0x119E4, 0x119E4, prN}, // Mc NANDINAGARI VOWEL SIGN PRISHTHAMATRA E
+ {0x11A00, 0x11A00, prN}, // Lo ZANABAZAR SQUARE LETTER A
+ {0x11A01, 0x11A0A, prN}, // Mn [10] ZANABAZAR SQUARE VOWEL SIGN I..ZANABAZAR SQUARE VOWEL LENGTH MARK
+ {0x11A0B, 0x11A32, prN}, // Lo [40] ZANABAZAR SQUARE LETTER KA..ZANABAZAR SQUARE LETTER KSSA
+ {0x11A33, 0x11A38, prN}, // Mn [6] ZANABAZAR SQUARE FINAL CONSONANT MARK..ZANABAZAR SQUARE SIGN ANUSVARA
+ {0x11A39, 0x11A39, prN}, // Mc ZANABAZAR SQUARE SIGN VISARGA
+ {0x11A3A, 0x11A3A, prN}, // Lo ZANABAZAR SQUARE CLUSTER-INITIAL LETTER RA
+ {0x11A3B, 0x11A3E, prN}, // Mn [4] ZANABAZAR SQUARE CLUSTER-FINAL LETTER YA..ZANABAZAR SQUARE CLUSTER-FINAL LETTER VA
+ {0x11A3F, 0x11A46, prN}, // Po [8] ZANABAZAR SQUARE INITIAL HEAD MARK..ZANABAZAR SQUARE CLOSING DOUBLE-LINED HEAD MARK
+ {0x11A47, 0x11A47, prN}, // Mn ZANABAZAR SQUARE SUBJOINER
+ {0x11A50, 0x11A50, prN}, // Lo SOYOMBO LETTER A
+ {0x11A51, 0x11A56, prN}, // Mn [6] SOYOMBO VOWEL SIGN I..SOYOMBO VOWEL SIGN OE
+ {0x11A57, 0x11A58, prN}, // Mc [2] SOYOMBO VOWEL SIGN AI..SOYOMBO VOWEL SIGN AU
+ {0x11A59, 0x11A5B, prN}, // Mn [3] SOYOMBO VOWEL SIGN VOCALIC R..SOYOMBO VOWEL LENGTH MARK
+ {0x11A5C, 0x11A89, prN}, // Lo [46] SOYOMBO LETTER KA..SOYOMBO CLUSTER-INITIAL LETTER SA
+ {0x11A8A, 0x11A96, prN}, // Mn [13] SOYOMBO FINAL CONSONANT SIGN G..SOYOMBO SIGN ANUSVARA
+ {0x11A97, 0x11A97, prN}, // Mc SOYOMBO SIGN VISARGA
+ {0x11A98, 0x11A99, prN}, // Mn [2] SOYOMBO GEMINATION MARK..SOYOMBO SUBJOINER
+ {0x11A9A, 0x11A9C, prN}, // Po [3] SOYOMBO MARK TSHEG..SOYOMBO MARK DOUBLE SHAD
+ {0x11A9D, 0x11A9D, prN}, // Lo SOYOMBO MARK PLUTA
+ {0x11A9E, 0x11AA2, prN}, // Po [5] SOYOMBO HEAD MARK WITH MOON AND SUN AND TRIPLE FLAME..SOYOMBO TERMINAL MARK-2
+ {0x11AB0, 0x11ABF, prN}, // Lo [16] CANADIAN SYLLABICS NATTILIK HI..CANADIAN SYLLABICS SPA
+ {0x11AC0, 0x11AF8, prN}, // Lo [57] PAU CIN HAU LETTER PA..PAU CIN HAU GLOTTAL STOP FINAL
+ {0x11B00, 0x11B09, prN}, // Po [10] DEVANAGARI HEAD MARK..DEVANAGARI SIGN MINDU
+ {0x11C00, 0x11C08, prN}, // Lo [9] BHAIKSUKI LETTER A..BHAIKSUKI LETTER VOCALIC L
+ {0x11C0A, 0x11C2E, prN}, // Lo [37] BHAIKSUKI LETTER E..BHAIKSUKI LETTER HA
+ {0x11C2F, 0x11C2F, prN}, // Mc BHAIKSUKI VOWEL SIGN AA
+ {0x11C30, 0x11C36, prN}, // Mn [7] BHAIKSUKI VOWEL SIGN I..BHAIKSUKI VOWEL SIGN VOCALIC L
+ {0x11C38, 0x11C3D, prN}, // Mn [6] BHAIKSUKI VOWEL SIGN E..BHAIKSUKI SIGN ANUSVARA
+ {0x11C3E, 0x11C3E, prN}, // Mc BHAIKSUKI SIGN VISARGA
+ {0x11C3F, 0x11C3F, prN}, // Mn BHAIKSUKI SIGN VIRAMA
+ {0x11C40, 0x11C40, prN}, // Lo BHAIKSUKI SIGN AVAGRAHA
+ {0x11C41, 0x11C45, prN}, // Po [5] BHAIKSUKI DANDA..BHAIKSUKI GAP FILLER-2
+ {0x11C50, 0x11C59, prN}, // Nd [10] BHAIKSUKI DIGIT ZERO..BHAIKSUKI DIGIT NINE
+ {0x11C5A, 0x11C6C, prN}, // No [19] BHAIKSUKI NUMBER ONE..BHAIKSUKI HUNDREDS UNIT MARK
+ {0x11C70, 0x11C71, prN}, // Po [2] MARCHEN HEAD MARK..MARCHEN MARK SHAD
+ {0x11C72, 0x11C8F, prN}, // Lo [30] MARCHEN LETTER KA..MARCHEN LETTER A
+ {0x11C92, 0x11CA7, prN}, // Mn [22] MARCHEN SUBJOINED LETTER KA..MARCHEN SUBJOINED LETTER ZA
+ {0x11CA9, 0x11CA9, prN}, // Mc MARCHEN SUBJOINED LETTER YA
+ {0x11CAA, 0x11CB0, prN}, // Mn [7] MARCHEN SUBJOINED LETTER RA..MARCHEN VOWEL SIGN AA
+ {0x11CB1, 0x11CB1, prN}, // Mc MARCHEN VOWEL SIGN I
+ {0x11CB2, 0x11CB3, prN}, // Mn [2] MARCHEN VOWEL SIGN U..MARCHEN VOWEL SIGN E
+ {0x11CB4, 0x11CB4, prN}, // Mc MARCHEN VOWEL SIGN O
+ {0x11CB5, 0x11CB6, prN}, // Mn [2] MARCHEN SIGN ANUSVARA..MARCHEN SIGN CANDRABINDU
+ {0x11D00, 0x11D06, prN}, // Lo [7] MASARAM GONDI LETTER A..MASARAM GONDI LETTER E
+ {0x11D08, 0x11D09, prN}, // Lo [2] MASARAM GONDI LETTER AI..MASARAM GONDI LETTER O
+ {0x11D0B, 0x11D30, prN}, // Lo [38] MASARAM GONDI LETTER AU..MASARAM GONDI LETTER TRA
+ {0x11D31, 0x11D36, prN}, // Mn [6] MASARAM GONDI VOWEL SIGN AA..MASARAM GONDI VOWEL SIGN VOCALIC R
+ {0x11D3A, 0x11D3A, prN}, // Mn MASARAM GONDI VOWEL SIGN E
+ {0x11D3C, 0x11D3D, prN}, // Mn [2] MASARAM GONDI VOWEL SIGN AI..MASARAM GONDI VOWEL SIGN O
+ {0x11D3F, 0x11D45, prN}, // Mn [7] MASARAM GONDI VOWEL SIGN AU..MASARAM GONDI VIRAMA
+ {0x11D46, 0x11D46, prN}, // Lo MASARAM GONDI REPHA
+ {0x11D47, 0x11D47, prN}, // Mn MASARAM GONDI RA-KARA
+ {0x11D50, 0x11D59, prN}, // Nd [10] MASARAM GONDI DIGIT ZERO..MASARAM GONDI DIGIT NINE
+ {0x11D60, 0x11D65, prN}, // Lo [6] GUNJALA GONDI LETTER A..GUNJALA GONDI LETTER UU
+ {0x11D67, 0x11D68, prN}, // Lo [2] GUNJALA GONDI LETTER EE..GUNJALA GONDI LETTER AI
+ {0x11D6A, 0x11D89, prN}, // Lo [32] GUNJALA GONDI LETTER OO..GUNJALA GONDI LETTER SA
+ {0x11D8A, 0x11D8E, prN}, // Mc [5] GUNJALA GONDI VOWEL SIGN AA..GUNJALA GONDI VOWEL SIGN UU
+ {0x11D90, 0x11D91, prN}, // Mn [2] GUNJALA GONDI VOWEL SIGN EE..GUNJALA GONDI VOWEL SIGN AI
+ {0x11D93, 0x11D94, prN}, // Mc [2] GUNJALA GONDI VOWEL SIGN OO..GUNJALA GONDI VOWEL SIGN AU
+ {0x11D95, 0x11D95, prN}, // Mn GUNJALA GONDI SIGN ANUSVARA
+ {0x11D96, 0x11D96, prN}, // Mc GUNJALA GONDI SIGN VISARGA
+ {0x11D97, 0x11D97, prN}, // Mn GUNJALA GONDI VIRAMA
+ {0x11D98, 0x11D98, prN}, // Lo GUNJALA GONDI OM
+ {0x11DA0, 0x11DA9, prN}, // Nd [10] GUNJALA GONDI DIGIT ZERO..GUNJALA GONDI DIGIT NINE
+ {0x11EE0, 0x11EF2, prN}, // Lo [19] MAKASAR LETTER KA..MAKASAR ANGKA
+ {0x11EF3, 0x11EF4, prN}, // Mn [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U
+ {0x11EF5, 0x11EF6, prN}, // Mc [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O
+ {0x11EF7, 0x11EF8, prN}, // Po [2] MAKASAR PASSIMBANG..MAKASAR END OF SECTION
+ {0x11F00, 0x11F01, prN}, // Mn [2] KAWI SIGN CANDRABINDU..KAWI SIGN ANUSVARA
+ {0x11F02, 0x11F02, prN}, // Lo KAWI SIGN REPHA
+ {0x11F03, 0x11F03, prN}, // Mc KAWI SIGN VISARGA
+ {0x11F04, 0x11F10, prN}, // Lo [13] KAWI LETTER A..KAWI LETTER O
+ {0x11F12, 0x11F33, prN}, // Lo [34] KAWI LETTER KA..KAWI LETTER JNYA
+ {0x11F34, 0x11F35, prN}, // Mc [2] KAWI VOWEL SIGN AA..KAWI VOWEL SIGN ALTERNATE AA
+ {0x11F36, 0x11F3A, prN}, // Mn [5] KAWI VOWEL SIGN I..KAWI VOWEL SIGN VOCALIC R
+ {0x11F3E, 0x11F3F, prN}, // Mc [2] KAWI VOWEL SIGN E..KAWI VOWEL SIGN AI
+ {0x11F40, 0x11F40, prN}, // Mn KAWI VOWEL SIGN EU
+ {0x11F41, 0x11F41, prN}, // Mc KAWI SIGN KILLER
+ {0x11F42, 0x11F42, prN}, // Mn KAWI CONJOINER
+ {0x11F43, 0x11F4F, prN}, // Po [13] KAWI DANDA..KAWI PUNCTUATION CLOSING SPIRAL
+ {0x11F50, 0x11F59, prN}, // Nd [10] KAWI DIGIT ZERO..KAWI DIGIT NINE
+ {0x11FB0, 0x11FB0, prN}, // Lo LISU LETTER YHA
+ {0x11FC0, 0x11FD4, prN}, // No [21] TAMIL FRACTION ONE THREE-HUNDRED-AND-TWENTIETH..TAMIL FRACTION DOWNSCALING FACTOR KIIZH
+ {0x11FD5, 0x11FDC, prN}, // So [8] TAMIL SIGN NEL..TAMIL SIGN MUKKURUNI
+ {0x11FDD, 0x11FE0, prN}, // Sc [4] TAMIL SIGN KAACU..TAMIL SIGN VARAAKAN
+ {0x11FE1, 0x11FF1, prN}, // So [17] TAMIL SIGN PAARAM..TAMIL SIGN VAKAIYARAA
+ {0x11FFF, 0x11FFF, prN}, // Po TAMIL PUNCTUATION END OF TEXT
+ {0x12000, 0x12399, prN}, // Lo [922] CUNEIFORM SIGN A..CUNEIFORM SIGN U U
+ {0x12400, 0x1246E, prN}, // Nl [111] CUNEIFORM NUMERIC SIGN TWO ASH..CUNEIFORM NUMERIC SIGN NINE U VARIANT FORM
+ {0x12470, 0x12474, prN}, // Po [5] CUNEIFORM PUNCTUATION SIGN OLD ASSYRIAN WORD DIVIDER..CUNEIFORM PUNCTUATION SIGN DIAGONAL QUADCOLON
+ {0x12480, 0x12543, prN}, // Lo [196] CUNEIFORM SIGN AB TIMES NUN TENU..CUNEIFORM SIGN ZU5 TIMES THREE DISH TENU
+ {0x12F90, 0x12FF0, prN}, // Lo [97] CYPRO-MINOAN SIGN CM001..CYPRO-MINOAN SIGN CM114
+ {0x12FF1, 0x12FF2, prN}, // Po [2] CYPRO-MINOAN SIGN CM301..CYPRO-MINOAN SIGN CM302
+ {0x13000, 0x1342F, prN}, // Lo [1072] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH V011D
+ {0x13430, 0x1343F, prN}, // Cf [16] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END WALLED ENCLOSURE
+ {0x13440, 0x13440, prN}, // Mn EGYPTIAN HIEROGLYPH MIRROR HORIZONTALLY
+ {0x13441, 0x13446, prN}, // Lo [6] EGYPTIAN HIEROGLYPH FULL BLANK..EGYPTIAN HIEROGLYPH WIDE LOST SIGN
+ {0x13447, 0x13455, prN}, // Mn [15] EGYPTIAN HIEROGLYPH MODIFIER DAMAGED AT TOP START..EGYPTIAN HIEROGLYPH MODIFIER DAMAGED
+ {0x14400, 0x14646, prN}, // Lo [583] ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A530
+ {0x16800, 0x16A38, prN}, // Lo [569] BAMUM LETTER PHASE-A NGKUE MFON..BAMUM LETTER PHASE-F VUEQ
+ {0x16A40, 0x16A5E, prN}, // Lo [31] MRO LETTER TA..MRO LETTER TEK
+ {0x16A60, 0x16A69, prN}, // Nd [10] MRO DIGIT ZERO..MRO DIGIT NINE
+ {0x16A6E, 0x16A6F, prN}, // Po [2] MRO DANDA..MRO DOUBLE DANDA
+ {0x16A70, 0x16ABE, prN}, // Lo [79] TANGSA LETTER OZ..TANGSA LETTER ZA
+ {0x16AC0, 0x16AC9, prN}, // Nd [10] TANGSA DIGIT ZERO..TANGSA DIGIT NINE
+ {0x16AD0, 0x16AED, prN}, // Lo [30] BASSA VAH LETTER ENNI..BASSA VAH LETTER I
+ {0x16AF0, 0x16AF4, prN}, // Mn [5] BASSA VAH COMBINING HIGH TONE..BASSA VAH COMBINING HIGH-LOW TONE
+ {0x16AF5, 0x16AF5, prN}, // Po BASSA VAH FULL STOP
+ {0x16B00, 0x16B2F, prN}, // Lo [48] PAHAWH HMONG VOWEL KEEB..PAHAWH HMONG CONSONANT CAU
+ {0x16B30, 0x16B36, prN}, // Mn [7] PAHAWH HMONG MARK CIM TUB..PAHAWH HMONG MARK CIM TAUM
+ {0x16B37, 0x16B3B, prN}, // Po [5] PAHAWH HMONG SIGN VOS THOM..PAHAWH HMONG SIGN VOS FEEM
+ {0x16B3C, 0x16B3F, prN}, // So [4] PAHAWH HMONG SIGN XYEEM NTXIV..PAHAWH HMONG SIGN XYEEM FAIB
+ {0x16B40, 0x16B43, prN}, // Lm [4] PAHAWH HMONG SIGN VOS SEEV..PAHAWH HMONG SIGN IB YAM
+ {0x16B44, 0x16B44, prN}, // Po PAHAWH HMONG SIGN XAUS
+ {0x16B45, 0x16B45, prN}, // So PAHAWH HMONG SIGN CIM TSOV ROG
+ {0x16B50, 0x16B59, prN}, // Nd [10] PAHAWH HMONG DIGIT ZERO..PAHAWH HMONG DIGIT NINE
+ {0x16B5B, 0x16B61, prN}, // No [7] PAHAWH HMONG NUMBER TENS..PAHAWH HMONG NUMBER TRILLIONS
+ {0x16B63, 0x16B77, prN}, // Lo [21] PAHAWH HMONG SIGN VOS LUB..PAHAWH HMONG SIGN CIM NRES TOS
+ {0x16B7D, 0x16B8F, prN}, // Lo [19] PAHAWH HMONG CLAN SIGN TSHEEJ..PAHAWH HMONG CLAN SIGN VWJ
+ {0x16E40, 0x16E7F, prN}, // L& [64] MEDEFAIDRIN CAPITAL LETTER M..MEDEFAIDRIN SMALL LETTER Y
+ {0x16E80, 0x16E96, prN}, // No [23] MEDEFAIDRIN DIGIT ZERO..MEDEFAIDRIN DIGIT THREE ALTERNATE FORM
+ {0x16E97, 0x16E9A, prN}, // Po [4] MEDEFAIDRIN COMMA..MEDEFAIDRIN EXCLAMATION OH
+ {0x16F00, 0x16F4A, prN}, // Lo [75] MIAO LETTER PA..MIAO LETTER RTE
+ {0x16F4F, 0x16F4F, prN}, // Mn MIAO SIGN CONSONANT MODIFIER BAR
+ {0x16F50, 0x16F50, prN}, // Lo MIAO LETTER NASALIZATION
+ {0x16F51, 0x16F87, prN}, // Mc [55] MIAO SIGN ASPIRATION..MIAO VOWEL SIGN UI
+ {0x16F8F, 0x16F92, prN}, // Mn [4] MIAO TONE RIGHT..MIAO TONE BELOW
+ {0x16F93, 0x16F9F, prN}, // Lm [13] MIAO LETTER TONE-2..MIAO LETTER REFORMED TONE-8
+ {0x16FE0, 0x16FE1, prW}, // Lm [2] TANGUT ITERATION MARK..NUSHU ITERATION MARK
+ {0x16FE2, 0x16FE2, prW}, // Po OLD CHINESE HOOK MARK
+ {0x16FE3, 0x16FE3, prW}, // Lm OLD CHINESE ITERATION MARK
+ {0x16FE4, 0x16FE4, prW}, // Mn KHITAN SMALL SCRIPT FILLER
+ {0x16FF0, 0x16FF1, prW}, // Mc [2] VIETNAMESE ALTERNATE READING MARK CA..VIETNAMESE ALTERNATE READING MARK NHAY
+ {0x17000, 0x187F7, prW}, // Lo [6136] TANGUT IDEOGRAPH-17000..TANGUT IDEOGRAPH-187F7
+ {0x18800, 0x18AFF, prW}, // Lo [768] TANGUT COMPONENT-001..TANGUT COMPONENT-768
+ {0x18B00, 0x18CD5, prW}, // Lo [470] KHITAN SMALL SCRIPT CHARACTER-18B00..KHITAN SMALL SCRIPT CHARACTER-18CD5
+ {0x18D00, 0x18D08, prW}, // Lo [9] TANGUT IDEOGRAPH-18D00..TANGUT IDEOGRAPH-18D08
+ {0x1AFF0, 0x1AFF3, prW}, // Lm [4] KATAKANA LETTER MINNAN TONE-2..KATAKANA LETTER MINNAN TONE-5
+ {0x1AFF5, 0x1AFFB, prW}, // Lm [7] KATAKANA LETTER MINNAN TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-5
+ {0x1AFFD, 0x1AFFE, prW}, // Lm [2] KATAKANA LETTER MINNAN NASALIZED TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-8
+ {0x1B000, 0x1B0FF, prW}, // Lo [256] KATAKANA LETTER ARCHAIC E..HENTAIGANA LETTER RE-2
+ {0x1B100, 0x1B122, prW}, // Lo [35] HENTAIGANA LETTER RE-3..KATAKANA LETTER ARCHAIC WU
+ {0x1B132, 0x1B132, prW}, // Lo HIRAGANA LETTER SMALL KO
+ {0x1B150, 0x1B152, prW}, // Lo [3] HIRAGANA LETTER SMALL WI..HIRAGANA LETTER SMALL WO
+ {0x1B155, 0x1B155, prW}, // Lo KATAKANA LETTER SMALL KO
+ {0x1B164, 0x1B167, prW}, // Lo [4] KATAKANA LETTER SMALL WI..KATAKANA LETTER SMALL N
+ {0x1B170, 0x1B2FB, prW}, // Lo [396] NUSHU CHARACTER-1B170..NUSHU CHARACTER-1B2FB
+ {0x1BC00, 0x1BC6A, prN}, // Lo [107] DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M
+ {0x1BC70, 0x1BC7C, prN}, // Lo [13] DUPLOYAN AFFIX LEFT HORIZONTAL SECANT..DUPLOYAN AFFIX ATTACHED TANGENT HOOK
+ {0x1BC80, 0x1BC88, prN}, // Lo [9] DUPLOYAN AFFIX HIGH ACUTE..DUPLOYAN AFFIX HIGH VERTICAL
+ {0x1BC90, 0x1BC99, prN}, // Lo [10] DUPLOYAN AFFIX LOW ACUTE..DUPLOYAN AFFIX LOW ARROW
+ {0x1BC9C, 0x1BC9C, prN}, // So DUPLOYAN SIGN O WITH CROSS
+ {0x1BC9D, 0x1BC9E, prN}, // Mn [2] DUPLOYAN THICK LETTER SELECTOR..DUPLOYAN DOUBLE MARK
+ {0x1BC9F, 0x1BC9F, prN}, // Po DUPLOYAN PUNCTUATION CHINOOK FULL STOP
+ {0x1BCA0, 0x1BCA3, prN}, // Cf [4] SHORTHAND FORMAT LETTER OVERLAP..SHORTHAND FORMAT UP STEP
+ {0x1CF00, 0x1CF2D, prN}, // Mn [46] ZNAMENNY COMBINING MARK GORAZDO NIZKO S KRYZHEM ON LEFT..ZNAMENNY COMBINING MARK KRYZH ON LEFT
+ {0x1CF30, 0x1CF46, prN}, // Mn [23] ZNAMENNY COMBINING TONAL RANGE MARK MRACHNO..ZNAMENNY PRIZNAK MODIFIER ROG
+ {0x1CF50, 0x1CFC3, prN}, // So [116] ZNAMENNY NEUME KRYUK..ZNAMENNY NEUME PAUK
+ {0x1D000, 0x1D0F5, prN}, // So [246] BYZANTINE MUSICAL SYMBOL PSILI..BYZANTINE MUSICAL SYMBOL GORGON NEO KATO
+ {0x1D100, 0x1D126, prN}, // So [39] MUSICAL SYMBOL SINGLE BARLINE..MUSICAL SYMBOL DRUM CLEF-2
+ {0x1D129, 0x1D164, prN}, // So [60] MUSICAL SYMBOL MULTIPLE MEASURE REST..MUSICAL SYMBOL ONE HUNDRED TWENTY-EIGHTH NOTE
+ {0x1D165, 0x1D166, prN}, // Mc [2] MUSICAL SYMBOL COMBINING STEM..MUSICAL SYMBOL COMBINING SPRECHGESANG STEM
+ {0x1D167, 0x1D169, prN}, // Mn [3] MUSICAL SYMBOL COMBINING TREMOLO-1..MUSICAL SYMBOL COMBINING TREMOLO-3
+ {0x1D16A, 0x1D16C, prN}, // So [3] MUSICAL SYMBOL FINGERED TREMOLO-1..MUSICAL SYMBOL FINGERED TREMOLO-3
+ {0x1D16D, 0x1D172, prN}, // Mc [6] MUSICAL SYMBOL COMBINING AUGMENTATION DOT..MUSICAL SYMBOL COMBINING FLAG-5
+ {0x1D173, 0x1D17A, prN}, // Cf [8] MUSICAL SYMBOL BEGIN BEAM..MUSICAL SYMBOL END PHRASE
+ {0x1D17B, 0x1D182, prN}, // Mn [8] MUSICAL SYMBOL COMBINING ACCENT..MUSICAL SYMBOL COMBINING LOURE
+ {0x1D183, 0x1D184, prN}, // So [2] MUSICAL SYMBOL ARPEGGIATO UP..MUSICAL SYMBOL ARPEGGIATO DOWN
+ {0x1D185, 0x1D18B, prN}, // Mn [7] MUSICAL SYMBOL COMBINING DOIT..MUSICAL SYMBOL COMBINING TRIPLE TONGUE
+ {0x1D18C, 0x1D1A9, prN}, // So [30] MUSICAL SYMBOL RINFORZANDO..MUSICAL SYMBOL DEGREE SLASH
+ {0x1D1AA, 0x1D1AD, prN}, // Mn [4] MUSICAL SYMBOL COMBINING DOWN BOW..MUSICAL SYMBOL COMBINING SNAP PIZZICATO
+ {0x1D1AE, 0x1D1EA, prN}, // So [61] MUSICAL SYMBOL PEDAL MARK..MUSICAL SYMBOL KORON
+ {0x1D200, 0x1D241, prN}, // So [66] GREEK VOCAL NOTATION SYMBOL-1..GREEK INSTRUMENTAL NOTATION SYMBOL-54
+ {0x1D242, 0x1D244, prN}, // Mn [3] COMBINING GREEK MUSICAL TRISEME..COMBINING GREEK MUSICAL PENTASEME
+ {0x1D245, 0x1D245, prN}, // So GREEK MUSICAL LEIMMA
+ {0x1D2C0, 0x1D2D3, prN}, // No [20] KAKTOVIK NUMERAL ZERO..KAKTOVIK NUMERAL NINETEEN
+ {0x1D2E0, 0x1D2F3, prN}, // No [20] MAYAN NUMERAL ZERO..MAYAN NUMERAL NINETEEN
+ {0x1D300, 0x1D356, prN}, // So [87] MONOGRAM FOR EARTH..TETRAGRAM FOR FOSTERING
+ {0x1D360, 0x1D378, prN}, // No [25] COUNTING ROD UNIT DIGIT ONE..TALLY MARK FIVE
+ {0x1D400, 0x1D454, prN}, // L& [85] MATHEMATICAL BOLD CAPITAL A..MATHEMATICAL ITALIC SMALL G
+ {0x1D456, 0x1D49C, prN}, // L& [71] MATHEMATICAL ITALIC SMALL I..MATHEMATICAL SCRIPT CAPITAL A
+ {0x1D49E, 0x1D49F, prN}, // Lu [2] MATHEMATICAL SCRIPT CAPITAL C..MATHEMATICAL SCRIPT CAPITAL D
+ {0x1D4A2, 0x1D4A2, prN}, // Lu MATHEMATICAL SCRIPT CAPITAL G
+ {0x1D4A5, 0x1D4A6, prN}, // Lu [2] MATHEMATICAL SCRIPT CAPITAL J..MATHEMATICAL SCRIPT CAPITAL K
+ {0x1D4A9, 0x1D4AC, prN}, // Lu [4] MATHEMATICAL SCRIPT CAPITAL N..MATHEMATICAL SCRIPT CAPITAL Q
+ {0x1D4AE, 0x1D4B9, prN}, // L& [12] MATHEMATICAL SCRIPT CAPITAL S..MATHEMATICAL SCRIPT SMALL D
+ {0x1D4BB, 0x1D4BB, prN}, // Ll MATHEMATICAL SCRIPT SMALL F
+ {0x1D4BD, 0x1D4C3, prN}, // Ll [7] MATHEMATICAL SCRIPT SMALL H..MATHEMATICAL SCRIPT SMALL N
+ {0x1D4C5, 0x1D505, prN}, // L& [65] MATHEMATICAL SCRIPT SMALL P..MATHEMATICAL FRAKTUR CAPITAL B
+ {0x1D507, 0x1D50A, prN}, // Lu [4] MATHEMATICAL FRAKTUR CAPITAL D..MATHEMATICAL FRAKTUR CAPITAL G
+ {0x1D50D, 0x1D514, prN}, // Lu [8] MATHEMATICAL FRAKTUR CAPITAL J..MATHEMATICAL FRAKTUR CAPITAL Q
+ {0x1D516, 0x1D51C, prN}, // Lu [7] MATHEMATICAL FRAKTUR CAPITAL S..MATHEMATICAL FRAKTUR CAPITAL Y
+ {0x1D51E, 0x1D539, prN}, // L& [28] MATHEMATICAL FRAKTUR SMALL A..MATHEMATICAL DOUBLE-STRUCK CAPITAL B
+ {0x1D53B, 0x1D53E, prN}, // Lu [4] MATHEMATICAL DOUBLE-STRUCK CAPITAL D..MATHEMATICAL DOUBLE-STRUCK CAPITAL G
+ {0x1D540, 0x1D544, prN}, // Lu [5] MATHEMATICAL DOUBLE-STRUCK CAPITAL I..MATHEMATICAL DOUBLE-STRUCK CAPITAL M
+ {0x1D546, 0x1D546, prN}, // Lu MATHEMATICAL DOUBLE-STRUCK CAPITAL O
+ {0x1D54A, 0x1D550, prN}, // Lu [7] MATHEMATICAL DOUBLE-STRUCK CAPITAL S..MATHEMATICAL DOUBLE-STRUCK CAPITAL Y
+ {0x1D552, 0x1D6A5, prN}, // L& [340] MATHEMATICAL DOUBLE-STRUCK SMALL A..MATHEMATICAL ITALIC SMALL DOTLESS J
+ {0x1D6A8, 0x1D6C0, prN}, // Lu [25] MATHEMATICAL BOLD CAPITAL ALPHA..MATHEMATICAL BOLD CAPITAL OMEGA
+ {0x1D6C1, 0x1D6C1, prN}, // Sm MATHEMATICAL BOLD NABLA
+ {0x1D6C2, 0x1D6DA, prN}, // Ll [25] MATHEMATICAL BOLD SMALL ALPHA..MATHEMATICAL BOLD SMALL OMEGA
+ {0x1D6DB, 0x1D6DB, prN}, // Sm MATHEMATICAL BOLD PARTIAL DIFFERENTIAL
+ {0x1D6DC, 0x1D6FA, prN}, // L& [31] MATHEMATICAL BOLD EPSILON SYMBOL..MATHEMATICAL ITALIC CAPITAL OMEGA
+ {0x1D6FB, 0x1D6FB, prN}, // Sm MATHEMATICAL ITALIC NABLA
+ {0x1D6FC, 0x1D714, prN}, // Ll [25] MATHEMATICAL ITALIC SMALL ALPHA..MATHEMATICAL ITALIC SMALL OMEGA
+ {0x1D715, 0x1D715, prN}, // Sm MATHEMATICAL ITALIC PARTIAL DIFFERENTIAL
+ {0x1D716, 0x1D734, prN}, // L& [31] MATHEMATICAL ITALIC EPSILON SYMBOL..MATHEMATICAL BOLD ITALIC CAPITAL OMEGA
+ {0x1D735, 0x1D735, prN}, // Sm MATHEMATICAL BOLD ITALIC NABLA
+ {0x1D736, 0x1D74E, prN}, // Ll [25] MATHEMATICAL BOLD ITALIC SMALL ALPHA..MATHEMATICAL BOLD ITALIC SMALL OMEGA
+ {0x1D74F, 0x1D74F, prN}, // Sm MATHEMATICAL BOLD ITALIC PARTIAL DIFFERENTIAL
+ {0x1D750, 0x1D76E, prN}, // L& [31] MATHEMATICAL BOLD ITALIC EPSILON SYMBOL..MATHEMATICAL SANS-SERIF BOLD CAPITAL OMEGA
+ {0x1D76F, 0x1D76F, prN}, // Sm MATHEMATICAL SANS-SERIF BOLD NABLA
+ {0x1D770, 0x1D788, prN}, // Ll [25] MATHEMATICAL SANS-SERIF BOLD SMALL ALPHA..MATHEMATICAL SANS-SERIF BOLD SMALL OMEGA
+ {0x1D789, 0x1D789, prN}, // Sm MATHEMATICAL SANS-SERIF BOLD PARTIAL DIFFERENTIAL
+ {0x1D78A, 0x1D7A8, prN}, // L& [31] MATHEMATICAL SANS-SERIF BOLD EPSILON SYMBOL..MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL OMEGA
+ {0x1D7A9, 0x1D7A9, prN}, // Sm MATHEMATICAL SANS-SERIF BOLD ITALIC NABLA
+ {0x1D7AA, 0x1D7C2, prN}, // Ll [25] MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ALPHA..MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL OMEGA
+ {0x1D7C3, 0x1D7C3, prN}, // Sm MATHEMATICAL SANS-SERIF BOLD ITALIC PARTIAL DIFFERENTIAL
+ {0x1D7C4, 0x1D7CB, prN}, // L& [8] MATHEMATICAL SANS-SERIF BOLD ITALIC EPSILON SYMBOL..MATHEMATICAL BOLD SMALL DIGAMMA
+ {0x1D7CE, 0x1D7FF, prN}, // Nd [50] MATHEMATICAL BOLD DIGIT ZERO..MATHEMATICAL MONOSPACE DIGIT NINE
+ {0x1D800, 0x1D9FF, prN}, // So [512] SIGNWRITING HAND-FIST INDEX..SIGNWRITING HEAD
+ {0x1DA00, 0x1DA36, prN}, // Mn [55] SIGNWRITING HEAD RIM..SIGNWRITING AIR SUCKING IN
+ {0x1DA37, 0x1DA3A, prN}, // So [4] SIGNWRITING AIR BLOW SMALL ROTATIONS..SIGNWRITING BREATH EXHALE
+ {0x1DA3B, 0x1DA6C, prN}, // Mn [50] SIGNWRITING MOUTH CLOSED NEUTRAL..SIGNWRITING EXCITEMENT
+ {0x1DA6D, 0x1DA74, prN}, // So [8] SIGNWRITING SHOULDER HIP SPINE..SIGNWRITING TORSO-FLOORPLANE TWISTING
+ {0x1DA75, 0x1DA75, prN}, // Mn SIGNWRITING UPPER BODY TILTING FROM HIP JOINTS
+ {0x1DA76, 0x1DA83, prN}, // So [14] SIGNWRITING LIMB COMBINATION..SIGNWRITING LOCATION DEPTH
+ {0x1DA84, 0x1DA84, prN}, // Mn SIGNWRITING LOCATION HEAD NECK
+ {0x1DA85, 0x1DA86, prN}, // So [2] SIGNWRITING LOCATION TORSO..SIGNWRITING LOCATION LIMBS DIGITS
+ {0x1DA87, 0x1DA8B, prN}, // Po [5] SIGNWRITING COMMA..SIGNWRITING PARENTHESIS
+ {0x1DA9B, 0x1DA9F, prN}, // Mn [5] SIGNWRITING FILL MODIFIER-2..SIGNWRITING FILL MODIFIER-6
+ {0x1DAA1, 0x1DAAF, prN}, // Mn [15] SIGNWRITING ROTATION MODIFIER-2..SIGNWRITING ROTATION MODIFIER-16
+ {0x1DF00, 0x1DF09, prN}, // Ll [10] LATIN SMALL LETTER FENG DIGRAPH WITH TRILL..LATIN SMALL LETTER T WITH HOOK AND RETROFLEX HOOK
+ {0x1DF0A, 0x1DF0A, prN}, // Lo LATIN LETTER RETROFLEX CLICK WITH RETROFLEX HOOK
+ {0x1DF0B, 0x1DF1E, prN}, // Ll [20] LATIN SMALL LETTER ESH WITH DOUBLE BAR..LATIN SMALL LETTER S WITH CURL
+ {0x1DF25, 0x1DF2A, prN}, // Ll [6] LATIN SMALL LETTER D WITH MID-HEIGHT LEFT HOOK..LATIN SMALL LETTER T WITH MID-HEIGHT LEFT HOOK
+ {0x1E000, 0x1E006, prN}, // Mn [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE
+ {0x1E008, 0x1E018, prN}, // Mn [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU
+ {0x1E01B, 0x1E021, prN}, // Mn [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI
+ {0x1E023, 0x1E024, prN}, // Mn [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS
+ {0x1E026, 0x1E02A, prN}, // Mn [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA
+ {0x1E030, 0x1E06D, prN}, // Lm [62] MODIFIER LETTER CYRILLIC SMALL A..MODIFIER LETTER CYRILLIC SMALL STRAIGHT U WITH STROKE
+ {0x1E08F, 0x1E08F, prN}, // Mn COMBINING CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
+ {0x1E100, 0x1E12C, prN}, // Lo [45] NYIAKENG PUACHUE HMONG LETTER MA..NYIAKENG PUACHUE HMONG LETTER W
+ {0x1E130, 0x1E136, prN}, // Mn [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D
+ {0x1E137, 0x1E13D, prN}, // Lm [7] NYIAKENG PUACHUE HMONG SIGN FOR PERSON..NYIAKENG PUACHUE HMONG SYLLABLE LENGTHENER
+ {0x1E140, 0x1E149, prN}, // Nd [10] NYIAKENG PUACHUE HMONG DIGIT ZERO..NYIAKENG PUACHUE HMONG DIGIT NINE
+ {0x1E14E, 0x1E14E, prN}, // Lo NYIAKENG PUACHUE HMONG LOGOGRAM NYAJ
+ {0x1E14F, 0x1E14F, prN}, // So NYIAKENG PUACHUE HMONG CIRCLED CA
+ {0x1E290, 0x1E2AD, prN}, // Lo [30] TOTO LETTER PA..TOTO LETTER A
+ {0x1E2AE, 0x1E2AE, prN}, // Mn TOTO SIGN RISING TONE
+ {0x1E2C0, 0x1E2EB, prN}, // Lo [44] WANCHO LETTER AA..WANCHO LETTER YIH
+ {0x1E2EC, 0x1E2EF, prN}, // Mn [4] WANCHO TONE TUP..WANCHO TONE KOINI
+ {0x1E2F0, 0x1E2F9, prN}, // Nd [10] WANCHO DIGIT ZERO..WANCHO DIGIT NINE
+ {0x1E2FF, 0x1E2FF, prN}, // Sc WANCHO NGUN SIGN
+ {0x1E4D0, 0x1E4EA, prN}, // Lo [27] NAG MUNDARI LETTER O..NAG MUNDARI LETTER ELL
+ {0x1E4EB, 0x1E4EB, prN}, // Lm NAG MUNDARI SIGN OJOD
+ {0x1E4EC, 0x1E4EF, prN}, // Mn [4] NAG MUNDARI SIGN MUHOR..NAG MUNDARI SIGN SUTUH
+ {0x1E4F0, 0x1E4F9, prN}, // Nd [10] NAG MUNDARI DIGIT ZERO..NAG MUNDARI DIGIT NINE
+ {0x1E7E0, 0x1E7E6, prN}, // Lo [7] ETHIOPIC SYLLABLE HHYA..ETHIOPIC SYLLABLE HHYO
+ {0x1E7E8, 0x1E7EB, prN}, // Lo [4] ETHIOPIC SYLLABLE GURAGE HHWA..ETHIOPIC SYLLABLE HHWE
+ {0x1E7ED, 0x1E7EE, prN}, // Lo [2] ETHIOPIC SYLLABLE GURAGE MWI..ETHIOPIC SYLLABLE GURAGE MWEE
+ {0x1E7F0, 0x1E7FE, prN}, // Lo [15] ETHIOPIC SYLLABLE GURAGE QWI..ETHIOPIC SYLLABLE GURAGE PWEE
+ {0x1E800, 0x1E8C4, prN}, // Lo [197] MENDE KIKAKUI SYLLABLE M001 KI..MENDE KIKAKUI SYLLABLE M060 NYON
+ {0x1E8C7, 0x1E8CF, prN}, // No [9] MENDE KIKAKUI DIGIT ONE..MENDE KIKAKUI DIGIT NINE
+ {0x1E8D0, 0x1E8D6, prN}, // Mn [7] MENDE KIKAKUI COMBINING NUMBER TEENS..MENDE KIKAKUI COMBINING NUMBER MILLIONS
+ {0x1E900, 0x1E943, prN}, // L& [68] ADLAM CAPITAL LETTER ALIF..ADLAM SMALL LETTER SHA
+ {0x1E944, 0x1E94A, prN}, // Mn [7] ADLAM ALIF LENGTHENER..ADLAM NUKTA
+ {0x1E94B, 0x1E94B, prN}, // Lm ADLAM NASALIZATION MARK
+ {0x1E950, 0x1E959, prN}, // Nd [10] ADLAM DIGIT ZERO..ADLAM DIGIT NINE
+ {0x1E95E, 0x1E95F, prN}, // Po [2] ADLAM INITIAL EXCLAMATION MARK..ADLAM INITIAL QUESTION MARK
+ {0x1EC71, 0x1ECAB, prN}, // No [59] INDIC SIYAQ NUMBER ONE..INDIC SIYAQ NUMBER PREFIXED NINE
+ {0x1ECAC, 0x1ECAC, prN}, // So INDIC SIYAQ PLACEHOLDER
+ {0x1ECAD, 0x1ECAF, prN}, // No [3] INDIC SIYAQ FRACTION ONE QUARTER..INDIC SIYAQ FRACTION THREE QUARTERS
+ {0x1ECB0, 0x1ECB0, prN}, // Sc INDIC SIYAQ RUPEE MARK
+ {0x1ECB1, 0x1ECB4, prN}, // No [4] INDIC SIYAQ NUMBER ALTERNATE ONE..INDIC SIYAQ ALTERNATE LAKH MARK
+ {0x1ED01, 0x1ED2D, prN}, // No [45] OTTOMAN SIYAQ NUMBER ONE..OTTOMAN SIYAQ NUMBER NINETY THOUSAND
+ {0x1ED2E, 0x1ED2E, prN}, // So OTTOMAN SIYAQ MARRATAN
+ {0x1ED2F, 0x1ED3D, prN}, // No [15] OTTOMAN SIYAQ ALTERNATE NUMBER TWO..OTTOMAN SIYAQ FRACTION ONE SIXTH
+ {0x1EE00, 0x1EE03, prN}, // Lo [4] ARABIC MATHEMATICAL ALEF..ARABIC MATHEMATICAL DAL
+ {0x1EE05, 0x1EE1F, prN}, // Lo [27] ARABIC MATHEMATICAL WAW..ARABIC MATHEMATICAL DOTLESS QAF
+ {0x1EE21, 0x1EE22, prN}, // Lo [2] ARABIC MATHEMATICAL INITIAL BEH..ARABIC MATHEMATICAL INITIAL JEEM
+ {0x1EE24, 0x1EE24, prN}, // Lo ARABIC MATHEMATICAL INITIAL HEH
+ {0x1EE27, 0x1EE27, prN}, // Lo ARABIC MATHEMATICAL INITIAL HAH
+ {0x1EE29, 0x1EE32, prN}, // Lo [10] ARABIC MATHEMATICAL INITIAL YEH..ARABIC MATHEMATICAL INITIAL QAF
+ {0x1EE34, 0x1EE37, prN}, // Lo [4] ARABIC MATHEMATICAL INITIAL SHEEN..ARABIC MATHEMATICAL INITIAL KHAH
+ {0x1EE39, 0x1EE39, prN}, // Lo ARABIC MATHEMATICAL INITIAL DAD
+ {0x1EE3B, 0x1EE3B, prN}, // Lo ARABIC MATHEMATICAL INITIAL GHAIN
+ {0x1EE42, 0x1EE42, prN}, // Lo ARABIC MATHEMATICAL TAILED JEEM
+ {0x1EE47, 0x1EE47, prN}, // Lo ARABIC MATHEMATICAL TAILED HAH
+ {0x1EE49, 0x1EE49, prN}, // Lo ARABIC MATHEMATICAL TAILED YEH
+ {0x1EE4B, 0x1EE4B, prN}, // Lo ARABIC MATHEMATICAL TAILED LAM
+ {0x1EE4D, 0x1EE4F, prN}, // Lo [3] ARABIC MATHEMATICAL TAILED NOON..ARABIC MATHEMATICAL TAILED AIN
+ {0x1EE51, 0x1EE52, prN}, // Lo [2] ARABIC MATHEMATICAL TAILED SAD..ARABIC MATHEMATICAL TAILED QAF
+ {0x1EE54, 0x1EE54, prN}, // Lo ARABIC MATHEMATICAL TAILED SHEEN
+ {0x1EE57, 0x1EE57, prN}, // Lo ARABIC MATHEMATICAL TAILED KHAH
+ {0x1EE59, 0x1EE59, prN}, // Lo ARABIC MATHEMATICAL TAILED DAD
+ {0x1EE5B, 0x1EE5B, prN}, // Lo ARABIC MATHEMATICAL TAILED GHAIN
+ {0x1EE5D, 0x1EE5D, prN}, // Lo ARABIC MATHEMATICAL TAILED DOTLESS NOON
+ {0x1EE5F, 0x1EE5F, prN}, // Lo ARABIC MATHEMATICAL TAILED DOTLESS QAF
+ {0x1EE61, 0x1EE62, prN}, // Lo [2] ARABIC MATHEMATICAL STRETCHED BEH..ARABIC MATHEMATICAL STRETCHED JEEM
+ {0x1EE64, 0x1EE64, prN}, // Lo ARABIC MATHEMATICAL STRETCHED HEH
+ {0x1EE67, 0x1EE6A, prN}, // Lo [4] ARABIC MATHEMATICAL STRETCHED HAH..ARABIC MATHEMATICAL STRETCHED KAF
+ {0x1EE6C, 0x1EE72, prN}, // Lo [7] ARABIC MATHEMATICAL STRETCHED MEEM..ARABIC MATHEMATICAL STRETCHED QAF
+ {0x1EE74, 0x1EE77, prN}, // Lo [4] ARABIC MATHEMATICAL STRETCHED SHEEN..ARABIC MATHEMATICAL STRETCHED KHAH
+ {0x1EE79, 0x1EE7C, prN}, // Lo [4] ARABIC MATHEMATICAL STRETCHED DAD..ARABIC MATHEMATICAL STRETCHED DOTLESS BEH
+ {0x1EE7E, 0x1EE7E, prN}, // Lo ARABIC MATHEMATICAL STRETCHED DOTLESS FEH
+ {0x1EE80, 0x1EE89, prN}, // Lo [10] ARABIC MATHEMATICAL LOOPED ALEF..ARABIC MATHEMATICAL LOOPED YEH
+ {0x1EE8B, 0x1EE9B, prN}, // Lo [17] ARABIC MATHEMATICAL LOOPED LAM..ARABIC MATHEMATICAL LOOPED GHAIN
+ {0x1EEA1, 0x1EEA3, prN}, // Lo [3] ARABIC MATHEMATICAL DOUBLE-STRUCK BEH..ARABIC MATHEMATICAL DOUBLE-STRUCK DAL
+ {0x1EEA5, 0x1EEA9, prN}, // Lo [5] ARABIC MATHEMATICAL DOUBLE-STRUCK WAW..ARABIC MATHEMATICAL DOUBLE-STRUCK YEH
+ {0x1EEAB, 0x1EEBB, prN}, // Lo [17] ARABIC MATHEMATICAL DOUBLE-STRUCK LAM..ARABIC MATHEMATICAL DOUBLE-STRUCK GHAIN
+ {0x1EEF0, 0x1EEF1, prN}, // Sm [2] ARABIC MATHEMATICAL OPERATOR MEEM WITH HAH WITH TATWEEL..ARABIC MATHEMATICAL OPERATOR HAH WITH DAL
+ {0x1F000, 0x1F003, prN}, // So [4] MAHJONG TILE EAST WIND..MAHJONG TILE NORTH WIND
+ {0x1F004, 0x1F004, prW}, // So MAHJONG TILE RED DRAGON
+ {0x1F005, 0x1F02B, prN}, // So [39] MAHJONG TILE GREEN DRAGON..MAHJONG TILE BACK
+ {0x1F030, 0x1F093, prN}, // So [100] DOMINO TILE HORIZONTAL BACK..DOMINO TILE VERTICAL-06-06
+ {0x1F0A0, 0x1F0AE, prN}, // So [15] PLAYING CARD BACK..PLAYING CARD KING OF SPADES
+ {0x1F0B1, 0x1F0BF, prN}, // So [15] PLAYING CARD ACE OF HEARTS..PLAYING CARD RED JOKER
+ {0x1F0C1, 0x1F0CE, prN}, // So [14] PLAYING CARD ACE OF DIAMONDS..PLAYING CARD KING OF DIAMONDS
+ {0x1F0CF, 0x1F0CF, prW}, // So PLAYING CARD BLACK JOKER
+ {0x1F0D1, 0x1F0F5, prN}, // So [37] PLAYING CARD ACE OF CLUBS..PLAYING CARD TRUMP-21
+ {0x1F100, 0x1F10A, prA}, // No [11] DIGIT ZERO FULL STOP..DIGIT NINE COMMA
+ {0x1F10B, 0x1F10C, prN}, // No [2] DINGBAT CIRCLED SANS-SERIF DIGIT ZERO..DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT ZERO
+ {0x1F10D, 0x1F10F, prN}, // So [3] CIRCLED ZERO WITH SLASH..CIRCLED DOLLAR SIGN WITH OVERLAID BACKSLASH
+ {0x1F110, 0x1F12D, prA}, // So [30] PARENTHESIZED LATIN CAPITAL LETTER A..CIRCLED CD
+ {0x1F12E, 0x1F12F, prN}, // So [2] CIRCLED WZ..COPYLEFT SYMBOL
+ {0x1F130, 0x1F169, prA}, // So [58] SQUARED LATIN CAPITAL LETTER A..NEGATIVE CIRCLED LATIN CAPITAL LETTER Z
+ {0x1F16A, 0x1F16F, prN}, // So [6] RAISED MC SIGN..CIRCLED HUMAN FIGURE
+ {0x1F170, 0x1F18D, prA}, // So [30] NEGATIVE SQUARED LATIN CAPITAL LETTER A..NEGATIVE SQUARED SA
+ {0x1F18E, 0x1F18E, prW}, // So NEGATIVE SQUARED AB
+ {0x1F18F, 0x1F190, prA}, // So [2] NEGATIVE SQUARED WC..SQUARE DJ
+ {0x1F191, 0x1F19A, prW}, // So [10] SQUARED CL..SQUARED VS
+ {0x1F19B, 0x1F1AC, prA}, // So [18] SQUARED THREE D..SQUARED VOD
+ {0x1F1AD, 0x1F1AD, prN}, // So MASK WORK SYMBOL
+ {0x1F1E6, 0x1F1FF, prN}, // So [26] REGIONAL INDICATOR SYMBOL LETTER A..REGIONAL INDICATOR SYMBOL LETTER Z
+ {0x1F200, 0x1F202, prW}, // So [3] SQUARE HIRAGANA HOKA..SQUARED KATAKANA SA
+ {0x1F210, 0x1F23B, prW}, // So [44] SQUARED CJK UNIFIED IDEOGRAPH-624B..SQUARED CJK UNIFIED IDEOGRAPH-914D
+ {0x1F240, 0x1F248, prW}, // So [9] TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-672C..TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-6557
+ {0x1F250, 0x1F251, prW}, // So [2] CIRCLED IDEOGRAPH ADVANTAGE..CIRCLED IDEOGRAPH ACCEPT
+ {0x1F260, 0x1F265, prW}, // So [6] ROUNDED SYMBOL FOR FU..ROUNDED SYMBOL FOR CAI
+ {0x1F300, 0x1F320, prW}, // So [33] CYCLONE..SHOOTING STAR
+ {0x1F321, 0x1F32C, prN}, // So [12] THERMOMETER..WIND BLOWING FACE
+ {0x1F32D, 0x1F335, prW}, // So [9] HOT DOG..CACTUS
+ {0x1F336, 0x1F336, prN}, // So HOT PEPPER
+ {0x1F337, 0x1F37C, prW}, // So [70] TULIP..BABY BOTTLE
+ {0x1F37D, 0x1F37D, prN}, // So FORK AND KNIFE WITH PLATE
+ {0x1F37E, 0x1F393, prW}, // So [22] BOTTLE WITH POPPING CORK..GRADUATION CAP
+ {0x1F394, 0x1F39F, prN}, // So [12] HEART WITH TIP ON THE LEFT..ADMISSION TICKETS
+ {0x1F3A0, 0x1F3CA, prW}, // So [43] CAROUSEL HORSE..SWIMMER
+ {0x1F3CB, 0x1F3CE, prN}, // So [4] WEIGHT LIFTER..RACING CAR
+ {0x1F3CF, 0x1F3D3, prW}, // So [5] CRICKET BAT AND BALL..TABLE TENNIS PADDLE AND BALL
+ {0x1F3D4, 0x1F3DF, prN}, // So [12] SNOW CAPPED MOUNTAIN..STADIUM
+ {0x1F3E0, 0x1F3F0, prW}, // So [17] HOUSE BUILDING..EUROPEAN CASTLE
+ {0x1F3F1, 0x1F3F3, prN}, // So [3] WHITE PENNANT..WAVING WHITE FLAG
+ {0x1F3F4, 0x1F3F4, prW}, // So WAVING BLACK FLAG
+ {0x1F3F5, 0x1F3F7, prN}, // So [3] ROSETTE..LABEL
+ {0x1F3F8, 0x1F3FA, prW}, // So [3] BADMINTON RACQUET AND SHUTTLECOCK..AMPHORA
+ {0x1F3FB, 0x1F3FF, prW}, // Sk [5] EMOJI MODIFIER FITZPATRICK TYPE-1-2..EMOJI MODIFIER FITZPATRICK TYPE-6
+ {0x1F400, 0x1F43E, prW}, // So [63] RAT..PAW PRINTS
+ {0x1F43F, 0x1F43F, prN}, // So CHIPMUNK
+ {0x1F440, 0x1F440, prW}, // So EYES
+ {0x1F441, 0x1F441, prN}, // So EYE
+ {0x1F442, 0x1F4FC, prW}, // So [187] EAR..VIDEOCASSETTE
+ {0x1F4FD, 0x1F4FE, prN}, // So [2] FILM PROJECTOR..PORTABLE STEREO
+ {0x1F4FF, 0x1F53D, prW}, // So [63] PRAYER BEADS..DOWN-POINTING SMALL RED TRIANGLE
+ {0x1F53E, 0x1F54A, prN}, // So [13] LOWER RIGHT SHADOWED WHITE CIRCLE..DOVE OF PEACE
+ {0x1F54B, 0x1F54E, prW}, // So [4] KAABA..MENORAH WITH NINE BRANCHES
+ {0x1F54F, 0x1F54F, prN}, // So BOWL OF HYGIEIA
+ {0x1F550, 0x1F567, prW}, // So [24] CLOCK FACE ONE OCLOCK..CLOCK FACE TWELVE-THIRTY
+ {0x1F568, 0x1F579, prN}, // So [18] RIGHT SPEAKER..JOYSTICK
+ {0x1F57A, 0x1F57A, prW}, // So MAN DANCING
+ {0x1F57B, 0x1F594, prN}, // So [26] LEFT HAND TELEPHONE RECEIVER..REVERSED VICTORY HAND
+ {0x1F595, 0x1F596, prW}, // So [2] REVERSED HAND WITH MIDDLE FINGER EXTENDED..RAISED HAND WITH PART BETWEEN MIDDLE AND RING FINGERS
+ {0x1F597, 0x1F5A3, prN}, // So [13] WHITE DOWN POINTING LEFT HAND INDEX..BLACK DOWN POINTING BACKHAND INDEX
+ {0x1F5A4, 0x1F5A4, prW}, // So BLACK HEART
+ {0x1F5A5, 0x1F5FA, prN}, // So [86] DESKTOP COMPUTER..WORLD MAP
+ {0x1F5FB, 0x1F5FF, prW}, // So [5] MOUNT FUJI..MOYAI
+ {0x1F600, 0x1F64F, prW}, // So [80] GRINNING FACE..PERSON WITH FOLDED HANDS
+ {0x1F650, 0x1F67F, prN}, // So [48] NORTH WEST POINTING LEAF..REVERSE CHECKER BOARD
+ {0x1F680, 0x1F6C5, prW}, // So [70] ROCKET..LEFT LUGGAGE
+ {0x1F6C6, 0x1F6CB, prN}, // So [6] TRIANGLE WITH ROUNDED CORNERS..COUCH AND LAMP
+ {0x1F6CC, 0x1F6CC, prW}, // So SLEEPING ACCOMMODATION
+ {0x1F6CD, 0x1F6CF, prN}, // So [3] SHOPPING BAGS..BED
+ {0x1F6D0, 0x1F6D2, prW}, // So [3] PLACE OF WORSHIP..SHOPPING TROLLEY
+ {0x1F6D3, 0x1F6D4, prN}, // So [2] STUPA..PAGODA
+ {0x1F6D5, 0x1F6D7, prW}, // So [3] HINDU TEMPLE..ELEVATOR
+ {0x1F6DC, 0x1F6DF, prW}, // So [4] WIRELESS..RING BUOY
+ {0x1F6E0, 0x1F6EA, prN}, // So [11] HAMMER AND WRENCH..NORTHEAST-POINTING AIRPLANE
+ {0x1F6EB, 0x1F6EC, prW}, // So [2] AIRPLANE DEPARTURE..AIRPLANE ARRIVING
+ {0x1F6F0, 0x1F6F3, prN}, // So [4] SATELLITE..PASSENGER SHIP
+ {0x1F6F4, 0x1F6FC, prW}, // So [9] SCOOTER..ROLLER SKATE
+ {0x1F700, 0x1F776, prN}, // So [119] ALCHEMICAL SYMBOL FOR QUINTESSENCE..LUNAR ECLIPSE
+ {0x1F77B, 0x1F77F, prN}, // So [5] HAUMEA..ORCUS
+ {0x1F780, 0x1F7D9, prN}, // So [90] BLACK LEFT-POINTING ISOSCELES RIGHT TRIANGLE..NINE POINTED WHITE STAR
+ {0x1F7E0, 0x1F7EB, prW}, // So [12] LARGE ORANGE CIRCLE..LARGE BROWN SQUARE
+ {0x1F7F0, 0x1F7F0, prW}, // So HEAVY EQUALS SIGN
+ {0x1F800, 0x1F80B, prN}, // So [12] LEFTWARDS ARROW WITH SMALL TRIANGLE ARROWHEAD..DOWNWARDS ARROW WITH LARGE TRIANGLE ARROWHEAD
+ {0x1F810, 0x1F847, prN}, // So [56] LEFTWARDS ARROW WITH SMALL EQUILATERAL ARROWHEAD..DOWNWARDS HEAVY ARROW
+ {0x1F850, 0x1F859, prN}, // So [10] LEFTWARDS SANS-SERIF ARROW..UP DOWN SANS-SERIF ARROW
+ {0x1F860, 0x1F887, prN}, // So [40] WIDE-HEADED LEFTWARDS LIGHT BARB ARROW..WIDE-HEADED SOUTH WEST VERY HEAVY BARB ARROW
+ {0x1F890, 0x1F8AD, prN}, // So [30] LEFTWARDS TRIANGLE ARROWHEAD..WHITE ARROW SHAFT WIDTH TWO THIRDS
+ {0x1F8B0, 0x1F8B1, prN}, // So [2] ARROW POINTING UPWARDS THEN NORTH WEST..ARROW POINTING RIGHTWARDS THEN CURVING SOUTH WEST
+ {0x1F900, 0x1F90B, prN}, // So [12] CIRCLED CROSS FORMEE WITH FOUR DOTS..DOWNWARD FACING NOTCHED HOOK WITH DOT
+ {0x1F90C, 0x1F93A, prW}, // So [47] PINCHED FINGERS..FENCER
+ {0x1F93B, 0x1F93B, prN}, // So MODERN PENTATHLON
+ {0x1F93C, 0x1F945, prW}, // So [10] WRESTLERS..GOAL NET
+ {0x1F946, 0x1F946, prN}, // So RIFLE
+ {0x1F947, 0x1F9FF, prW}, // So [185] FIRST PLACE MEDAL..NAZAR AMULET
+ {0x1FA00, 0x1FA53, prN}, // So [84] NEUTRAL CHESS KING..BLACK CHESS KNIGHT-BISHOP
+ {0x1FA60, 0x1FA6D, prN}, // So [14] XIANGQI RED GENERAL..XIANGQI BLACK SOLDIER
+ {0x1FA70, 0x1FA7C, prW}, // So [13] BALLET SHOES..CRUTCH
+ {0x1FA80, 0x1FA88, prW}, // So [9] YO-YO..FLUTE
+ {0x1FA90, 0x1FABD, prW}, // So [46] RINGED PLANET..WING
+ {0x1FABF, 0x1FAC5, prW}, // So [7] GOOSE..PERSON WITH CROWN
+ {0x1FACE, 0x1FADB, prW}, // So [14] MOOSE..PEA POD
+ {0x1FAE0, 0x1FAE8, prW}, // So [9] MELTING FACE..SHAKING FACE
+ {0x1FAF0, 0x1FAF8, prW}, // So [9] HAND WITH INDEX FINGER AND THUMB CROSSED..RIGHTWARDS PUSHING HAND
+ {0x1FB00, 0x1FB92, prN}, // So [147] BLOCK SEXTANT-1..UPPER HALF INVERSE MEDIUM SHADE AND LOWER HALF BLOCK
+ {0x1FB94, 0x1FBCA, prN}, // So [55] LEFT HALF INVERSE MEDIUM SHADE AND RIGHT HALF BLOCK..WHITE UP-POINTING CHEVRON
+ {0x1FBF0, 0x1FBF9, prN}, // Nd [10] SEGMENTED DIGIT ZERO..SEGMENTED DIGIT NINE
+ {0x20000, 0x2A6DF, prW}, // Lo [42720] CJK UNIFIED IDEOGRAPH-20000..CJK UNIFIED IDEOGRAPH-2A6DF
+ {0x2A6E0, 0x2A6FF, prW}, // Cn [32] ..
+ {0x2A700, 0x2B739, prW}, // Lo [4154] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B739
+ {0x2B73A, 0x2B73F, prW}, // Cn [6] ..
+ {0x2B740, 0x2B81D, prW}, // Lo [222] CJK UNIFIED IDEOGRAPH-2B740..CJK UNIFIED IDEOGRAPH-2B81D
+ {0x2B81E, 0x2B81F, prW}, // Cn [2] ..
+ {0x2B820, 0x2CEA1, prW}, // Lo [5762] CJK UNIFIED IDEOGRAPH-2B820..CJK UNIFIED IDEOGRAPH-2CEA1
+ {0x2CEA2, 0x2CEAF, prW}, // Cn [14] ..
+ {0x2CEB0, 0x2EBE0, prW}, // Lo [7473] CJK UNIFIED IDEOGRAPH-2CEB0..CJK UNIFIED IDEOGRAPH-2EBE0
+ {0x2EBE1, 0x2F7FF, prW}, // Cn [3103] ..
+ {0x2F800, 0x2FA1D, prW}, // Lo [542] CJK COMPATIBILITY IDEOGRAPH-2F800..CJK COMPATIBILITY IDEOGRAPH-2FA1D
+ {0x2FA1E, 0x2FA1F, prW}, // Cn [2] ..
+ {0x2FA20, 0x2FFFD, prW}, // Cn [1502] ..
+ {0x30000, 0x3134A, prW}, // Lo [4939] CJK UNIFIED IDEOGRAPH-30000..CJK UNIFIED IDEOGRAPH-3134A
+ {0x3134B, 0x3134F, prW}, // Cn [5] ..
+ {0x31350, 0x323AF, prW}, // Lo [4192] CJK UNIFIED IDEOGRAPH-31350..CJK UNIFIED IDEOGRAPH-323AF
+ {0x323B0, 0x3FFFD, prW}, // Cn [56398] ..
+ {0xE0001, 0xE0001, prN}, // Cf LANGUAGE TAG
+ {0xE0020, 0xE007F, prN}, // Cf [96] TAG SPACE..CANCEL TAG
+ {0xE0100, 0xE01EF, prA}, // Mn [240] VARIATION SELECTOR-17..VARIATION SELECTOR-256
+ {0xF0000, 0xFFFFD, prA}, // Co [65534] ..
+ {0x100000, 0x10FFFD, prA}, // Co [65534] ..
+}
diff --git a/vendor/github.com/rivo/uniseg/emojipresentation.go b/vendor/github.com/rivo/uniseg/emojipresentation.go
new file mode 100644
index 0000000000..9b5f499c4a
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/emojipresentation.go
@@ -0,0 +1,295 @@
+// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+
+package uniseg
+
+// emojiPresentation are taken from
+// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
+// ("Emoji_Presentation" only)
+// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode
+// license agreement.
+var emojiPresentation = [][3]int{
+ {0x231A, 0x231B, prEmojiPresentation}, // E0.6 [2] (⌚..⌛) watch..hourglass done
+ {0x23E9, 0x23EC, prEmojiPresentation}, // E0.6 [4] (⏩..⏬) fast-forward button..fast down button
+ {0x23F0, 0x23F0, prEmojiPresentation}, // E0.6 [1] (⏰) alarm clock
+ {0x23F3, 0x23F3, prEmojiPresentation}, // E0.6 [1] (⏳) hourglass not done
+ {0x25FD, 0x25FE, prEmojiPresentation}, // E0.6 [2] (◽..◾) white medium-small square..black medium-small square
+ {0x2614, 0x2615, prEmojiPresentation}, // E0.6 [2] (☔..☕) umbrella with rain drops..hot beverage
+ {0x2648, 0x2653, prEmojiPresentation}, // E0.6 [12] (♈..♓) Aries..Pisces
+ {0x267F, 0x267F, prEmojiPresentation}, // E0.6 [1] (♿) wheelchair symbol
+ {0x2693, 0x2693, prEmojiPresentation}, // E0.6 [1] (⚓) anchor
+ {0x26A1, 0x26A1, prEmojiPresentation}, // E0.6 [1] (⚡) high voltage
+ {0x26AA, 0x26AB, prEmojiPresentation}, // E0.6 [2] (⚪..⚫) white circle..black circle
+ {0x26BD, 0x26BE, prEmojiPresentation}, // E0.6 [2] (⚽..⚾) soccer ball..baseball
+ {0x26C4, 0x26C5, prEmojiPresentation}, // E0.6 [2] (⛄..⛅) snowman without snow..sun behind cloud
+ {0x26CE, 0x26CE, prEmojiPresentation}, // E0.6 [1] (⛎) Ophiuchus
+ {0x26D4, 0x26D4, prEmojiPresentation}, // E0.6 [1] (⛔) no entry
+ {0x26EA, 0x26EA, prEmojiPresentation}, // E0.6 [1] (⛪) church
+ {0x26F2, 0x26F3, prEmojiPresentation}, // E0.6 [2] (⛲..⛳) fountain..flag in hole
+ {0x26F5, 0x26F5, prEmojiPresentation}, // E0.6 [1] (⛵) sailboat
+ {0x26FA, 0x26FA, prEmojiPresentation}, // E0.6 [1] (⛺) tent
+ {0x26FD, 0x26FD, prEmojiPresentation}, // E0.6 [1] (⛽) fuel pump
+ {0x2705, 0x2705, prEmojiPresentation}, // E0.6 [1] (✅) check mark button
+ {0x270A, 0x270B, prEmojiPresentation}, // E0.6 [2] (✊..✋) raised fist..raised hand
+ {0x2728, 0x2728, prEmojiPresentation}, // E0.6 [1] (✨) sparkles
+ {0x274C, 0x274C, prEmojiPresentation}, // E0.6 [1] (❌) cross mark
+ {0x274E, 0x274E, prEmojiPresentation}, // E0.6 [1] (❎) cross mark button
+ {0x2753, 0x2755, prEmojiPresentation}, // E0.6 [3] (❓..❕) red question mark..white exclamation mark
+ {0x2757, 0x2757, prEmojiPresentation}, // E0.6 [1] (❗) red exclamation mark
+ {0x2795, 0x2797, prEmojiPresentation}, // E0.6 [3] (➕..➗) plus..divide
+ {0x27B0, 0x27B0, prEmojiPresentation}, // E0.6 [1] (➰) curly loop
+ {0x27BF, 0x27BF, prEmojiPresentation}, // E1.0 [1] (➿) double curly loop
+ {0x2B1B, 0x2B1C, prEmojiPresentation}, // E0.6 [2] (⬛..⬜) black large square..white large square
+ {0x2B50, 0x2B50, prEmojiPresentation}, // E0.6 [1] (⭐) star
+ {0x2B55, 0x2B55, prEmojiPresentation}, // E0.6 [1] (⭕) hollow red circle
+ {0x1F004, 0x1F004, prEmojiPresentation}, // E0.6 [1] (🀄) mahjong red dragon
+ {0x1F0CF, 0x1F0CF, prEmojiPresentation}, // E0.6 [1] (🃏) joker
+ {0x1F18E, 0x1F18E, prEmojiPresentation}, // E0.6 [1] (🆎) AB button (blood type)
+ {0x1F191, 0x1F19A, prEmojiPresentation}, // E0.6 [10] (🆑..🆚) CL button..VS button
+ {0x1F1E6, 0x1F1FF, prEmojiPresentation}, // E0.0 [26] (🇦..🇿) regional indicator symbol letter a..regional indicator symbol letter z
+ {0x1F201, 0x1F201, prEmojiPresentation}, // E0.6 [1] (🈁) Japanese “here” button
+ {0x1F21A, 0x1F21A, prEmojiPresentation}, // E0.6 [1] (🈚) Japanese “free of charge” button
+ {0x1F22F, 0x1F22F, prEmojiPresentation}, // E0.6 [1] (🈯) Japanese “reserved” button
+ {0x1F232, 0x1F236, prEmojiPresentation}, // E0.6 [5] (🈲..🈶) Japanese “prohibited” button..Japanese “not free of charge” button
+ {0x1F238, 0x1F23A, prEmojiPresentation}, // E0.6 [3] (🈸..🈺) Japanese “application” button..Japanese “open for business” button
+ {0x1F250, 0x1F251, prEmojiPresentation}, // E0.6 [2] (🉐..🉑) Japanese “bargain” button..Japanese “acceptable” button
+ {0x1F300, 0x1F30C, prEmojiPresentation}, // E0.6 [13] (🌀..🌌) cyclone..milky way
+ {0x1F30D, 0x1F30E, prEmojiPresentation}, // E0.7 [2] (🌍..🌎) globe showing Europe-Africa..globe showing Americas
+ {0x1F30F, 0x1F30F, prEmojiPresentation}, // E0.6 [1] (🌏) globe showing Asia-Australia
+ {0x1F310, 0x1F310, prEmojiPresentation}, // E1.0 [1] (🌐) globe with meridians
+ {0x1F311, 0x1F311, prEmojiPresentation}, // E0.6 [1] (🌑) new moon
+ {0x1F312, 0x1F312, prEmojiPresentation}, // E1.0 [1] (🌒) waxing crescent moon
+ {0x1F313, 0x1F315, prEmojiPresentation}, // E0.6 [3] (🌓..🌕) first quarter moon..full moon
+ {0x1F316, 0x1F318, prEmojiPresentation}, // E1.0 [3] (🌖..🌘) waning gibbous moon..waning crescent moon
+ {0x1F319, 0x1F319, prEmojiPresentation}, // E0.6 [1] (🌙) crescent moon
+ {0x1F31A, 0x1F31A, prEmojiPresentation}, // E1.0 [1] (🌚) new moon face
+ {0x1F31B, 0x1F31B, prEmojiPresentation}, // E0.6 [1] (🌛) first quarter moon face
+ {0x1F31C, 0x1F31C, prEmojiPresentation}, // E0.7 [1] (🌜) last quarter moon face
+ {0x1F31D, 0x1F31E, prEmojiPresentation}, // E1.0 [2] (🌝..🌞) full moon face..sun with face
+ {0x1F31F, 0x1F320, prEmojiPresentation}, // E0.6 [2] (🌟..🌠) glowing star..shooting star
+ {0x1F32D, 0x1F32F, prEmojiPresentation}, // E1.0 [3] (🌭..🌯) hot dog..burrito
+ {0x1F330, 0x1F331, prEmojiPresentation}, // E0.6 [2] (🌰..🌱) chestnut..seedling
+ {0x1F332, 0x1F333, prEmojiPresentation}, // E1.0 [2] (🌲..🌳) evergreen tree..deciduous tree
+ {0x1F334, 0x1F335, prEmojiPresentation}, // E0.6 [2] (🌴..🌵) palm tree..cactus
+ {0x1F337, 0x1F34A, prEmojiPresentation}, // E0.6 [20] (🌷..🍊) tulip..tangerine
+ {0x1F34B, 0x1F34B, prEmojiPresentation}, // E1.0 [1] (🍋) lemon
+ {0x1F34C, 0x1F34F, prEmojiPresentation}, // E0.6 [4] (🍌..🍏) banana..green apple
+ {0x1F350, 0x1F350, prEmojiPresentation}, // E1.0 [1] (🍐) pear
+ {0x1F351, 0x1F37B, prEmojiPresentation}, // E0.6 [43] (🍑..🍻) peach..clinking beer mugs
+ {0x1F37C, 0x1F37C, prEmojiPresentation}, // E1.0 [1] (🍼) baby bottle
+ {0x1F37E, 0x1F37F, prEmojiPresentation}, // E1.0 [2] (🍾..🍿) bottle with popping cork..popcorn
+ {0x1F380, 0x1F393, prEmojiPresentation}, // E0.6 [20] (🎀..🎓) ribbon..graduation cap
+ {0x1F3A0, 0x1F3C4, prEmojiPresentation}, // E0.6 [37] (🎠..🏄) carousel horse..person surfing
+ {0x1F3C5, 0x1F3C5, prEmojiPresentation}, // E1.0 [1] (🏅) sports medal
+ {0x1F3C6, 0x1F3C6, prEmojiPresentation}, // E0.6 [1] (🏆) trophy
+ {0x1F3C7, 0x1F3C7, prEmojiPresentation}, // E1.0 [1] (🏇) horse racing
+ {0x1F3C8, 0x1F3C8, prEmojiPresentation}, // E0.6 [1] (🏈) american football
+ {0x1F3C9, 0x1F3C9, prEmojiPresentation}, // E1.0 [1] (🏉) rugby football
+ {0x1F3CA, 0x1F3CA, prEmojiPresentation}, // E0.6 [1] (🏊) person swimming
+ {0x1F3CF, 0x1F3D3, prEmojiPresentation}, // E1.0 [5] (🏏..🏓) cricket game..ping pong
+ {0x1F3E0, 0x1F3E3, prEmojiPresentation}, // E0.6 [4] (🏠..🏣) house..Japanese post office
+ {0x1F3E4, 0x1F3E4, prEmojiPresentation}, // E1.0 [1] (🏤) post office
+ {0x1F3E5, 0x1F3F0, prEmojiPresentation}, // E0.6 [12] (🏥..🏰) hospital..castle
+ {0x1F3F4, 0x1F3F4, prEmojiPresentation}, // E1.0 [1] (🏴) black flag
+ {0x1F3F8, 0x1F407, prEmojiPresentation}, // E1.0 [16] (🏸..🐇) badminton..rabbit
+ {0x1F408, 0x1F408, prEmojiPresentation}, // E0.7 [1] (🐈) cat
+ {0x1F409, 0x1F40B, prEmojiPresentation}, // E1.0 [3] (🐉..🐋) dragon..whale
+ {0x1F40C, 0x1F40E, prEmojiPresentation}, // E0.6 [3] (🐌..🐎) snail..horse
+ {0x1F40F, 0x1F410, prEmojiPresentation}, // E1.0 [2] (🐏..🐐) ram..goat
+ {0x1F411, 0x1F412, prEmojiPresentation}, // E0.6 [2] (🐑..🐒) ewe..monkey
+ {0x1F413, 0x1F413, prEmojiPresentation}, // E1.0 [1] (🐓) rooster
+ {0x1F414, 0x1F414, prEmojiPresentation}, // E0.6 [1] (🐔) chicken
+ {0x1F415, 0x1F415, prEmojiPresentation}, // E0.7 [1] (🐕) dog
+ {0x1F416, 0x1F416, prEmojiPresentation}, // E1.0 [1] (🐖) pig
+ {0x1F417, 0x1F429, prEmojiPresentation}, // E0.6 [19] (🐗..🐩) boar..poodle
+ {0x1F42A, 0x1F42A, prEmojiPresentation}, // E1.0 [1] (🐪) camel
+ {0x1F42B, 0x1F43E, prEmojiPresentation}, // E0.6 [20] (🐫..🐾) two-hump camel..paw prints
+ {0x1F440, 0x1F440, prEmojiPresentation}, // E0.6 [1] (👀) eyes
+ {0x1F442, 0x1F464, prEmojiPresentation}, // E0.6 [35] (👂..👤) ear..bust in silhouette
+ {0x1F465, 0x1F465, prEmojiPresentation}, // E1.0 [1] (👥) busts in silhouette
+ {0x1F466, 0x1F46B, prEmojiPresentation}, // E0.6 [6] (👦..👫) boy..woman and man holding hands
+ {0x1F46C, 0x1F46D, prEmojiPresentation}, // E1.0 [2] (👬..👭) men holding hands..women holding hands
+ {0x1F46E, 0x1F4AC, prEmojiPresentation}, // E0.6 [63] (👮..💬) police officer..speech balloon
+ {0x1F4AD, 0x1F4AD, prEmojiPresentation}, // E1.0 [1] (💭) thought balloon
+ {0x1F4AE, 0x1F4B5, prEmojiPresentation}, // E0.6 [8] (💮..💵) white flower..dollar banknote
+ {0x1F4B6, 0x1F4B7, prEmojiPresentation}, // E1.0 [2] (💶..💷) euro banknote..pound banknote
+ {0x1F4B8, 0x1F4EB, prEmojiPresentation}, // E0.6 [52] (💸..📫) money with wings..closed mailbox with raised flag
+ {0x1F4EC, 0x1F4ED, prEmojiPresentation}, // E0.7 [2] (📬..📭) open mailbox with raised flag..open mailbox with lowered flag
+ {0x1F4EE, 0x1F4EE, prEmojiPresentation}, // E0.6 [1] (📮) postbox
+ {0x1F4EF, 0x1F4EF, prEmojiPresentation}, // E1.0 [1] (📯) postal horn
+ {0x1F4F0, 0x1F4F4, prEmojiPresentation}, // E0.6 [5] (📰..📴) newspaper..mobile phone off
+ {0x1F4F5, 0x1F4F5, prEmojiPresentation}, // E1.0 [1] (📵) no mobile phones
+ {0x1F4F6, 0x1F4F7, prEmojiPresentation}, // E0.6 [2] (📶..📷) antenna bars..camera
+ {0x1F4F8, 0x1F4F8, prEmojiPresentation}, // E1.0 [1] (📸) camera with flash
+ {0x1F4F9, 0x1F4FC, prEmojiPresentation}, // E0.6 [4] (📹..📼) video camera..videocassette
+ {0x1F4FF, 0x1F502, prEmojiPresentation}, // E1.0 [4] (📿..🔂) prayer beads..repeat single button
+ {0x1F503, 0x1F503, prEmojiPresentation}, // E0.6 [1] (🔃) clockwise vertical arrows
+ {0x1F504, 0x1F507, prEmojiPresentation}, // E1.0 [4] (🔄..🔇) counterclockwise arrows button..muted speaker
+ {0x1F508, 0x1F508, prEmojiPresentation}, // E0.7 [1] (🔈) speaker low volume
+ {0x1F509, 0x1F509, prEmojiPresentation}, // E1.0 [1] (🔉) speaker medium volume
+ {0x1F50A, 0x1F514, prEmojiPresentation}, // E0.6 [11] (🔊..🔔) speaker high volume..bell
+ {0x1F515, 0x1F515, prEmojiPresentation}, // E1.0 [1] (🔕) bell with slash
+ {0x1F516, 0x1F52B, prEmojiPresentation}, // E0.6 [22] (🔖..🔫) bookmark..water pistol
+ {0x1F52C, 0x1F52D, prEmojiPresentation}, // E1.0 [2] (🔬..🔭) microscope..telescope
+ {0x1F52E, 0x1F53D, prEmojiPresentation}, // E0.6 [16] (🔮..🔽) crystal ball..downwards button
+ {0x1F54B, 0x1F54E, prEmojiPresentation}, // E1.0 [4] (🕋..🕎) kaaba..menorah
+ {0x1F550, 0x1F55B, prEmojiPresentation}, // E0.6 [12] (🕐..🕛) one o’clock..twelve o’clock
+ {0x1F55C, 0x1F567, prEmojiPresentation}, // E0.7 [12] (🕜..🕧) one-thirty..twelve-thirty
+ {0x1F57A, 0x1F57A, prEmojiPresentation}, // E3.0 [1] (🕺) man dancing
+ {0x1F595, 0x1F596, prEmojiPresentation}, // E1.0 [2] (🖕..🖖) middle finger..vulcan salute
+ {0x1F5A4, 0x1F5A4, prEmojiPresentation}, // E3.0 [1] (🖤) black heart
+ {0x1F5FB, 0x1F5FF, prEmojiPresentation}, // E0.6 [5] (🗻..🗿) mount fuji..moai
+ {0x1F600, 0x1F600, prEmojiPresentation}, // E1.0 [1] (😀) grinning face
+ {0x1F601, 0x1F606, prEmojiPresentation}, // E0.6 [6] (😁..😆) beaming face with smiling eyes..grinning squinting face
+ {0x1F607, 0x1F608, prEmojiPresentation}, // E1.0 [2] (😇..😈) smiling face with halo..smiling face with horns
+ {0x1F609, 0x1F60D, prEmojiPresentation}, // E0.6 [5] (😉..😍) winking face..smiling face with heart-eyes
+ {0x1F60E, 0x1F60E, prEmojiPresentation}, // E1.0 [1] (😎) smiling face with sunglasses
+ {0x1F60F, 0x1F60F, prEmojiPresentation}, // E0.6 [1] (😏) smirking face
+ {0x1F610, 0x1F610, prEmojiPresentation}, // E0.7 [1] (😐) neutral face
+ {0x1F611, 0x1F611, prEmojiPresentation}, // E1.0 [1] (😑) expressionless face
+ {0x1F612, 0x1F614, prEmojiPresentation}, // E0.6 [3] (😒..😔) unamused face..pensive face
+ {0x1F615, 0x1F615, prEmojiPresentation}, // E1.0 [1] (😕) confused face
+ {0x1F616, 0x1F616, prEmojiPresentation}, // E0.6 [1] (😖) confounded face
+ {0x1F617, 0x1F617, prEmojiPresentation}, // E1.0 [1] (😗) kissing face
+ {0x1F618, 0x1F618, prEmojiPresentation}, // E0.6 [1] (😘) face blowing a kiss
+ {0x1F619, 0x1F619, prEmojiPresentation}, // E1.0 [1] (😙) kissing face with smiling eyes
+ {0x1F61A, 0x1F61A, prEmojiPresentation}, // E0.6 [1] (😚) kissing face with closed eyes
+ {0x1F61B, 0x1F61B, prEmojiPresentation}, // E1.0 [1] (😛) face with tongue
+ {0x1F61C, 0x1F61E, prEmojiPresentation}, // E0.6 [3] (😜..😞) winking face with tongue..disappointed face
+ {0x1F61F, 0x1F61F, prEmojiPresentation}, // E1.0 [1] (😟) worried face
+ {0x1F620, 0x1F625, prEmojiPresentation}, // E0.6 [6] (😠..😥) angry face..sad but relieved face
+ {0x1F626, 0x1F627, prEmojiPresentation}, // E1.0 [2] (😦..😧) frowning face with open mouth..anguished face
+ {0x1F628, 0x1F62B, prEmojiPresentation}, // E0.6 [4] (😨..😫) fearful face..tired face
+ {0x1F62C, 0x1F62C, prEmojiPresentation}, // E1.0 [1] (😬) grimacing face
+ {0x1F62D, 0x1F62D, prEmojiPresentation}, // E0.6 [1] (😭) loudly crying face
+ {0x1F62E, 0x1F62F, prEmojiPresentation}, // E1.0 [2] (😮..😯) face with open mouth..hushed face
+ {0x1F630, 0x1F633, prEmojiPresentation}, // E0.6 [4] (😰..😳) anxious face with sweat..flushed face
+ {0x1F634, 0x1F634, prEmojiPresentation}, // E1.0 [1] (😴) sleeping face
+ {0x1F635, 0x1F635, prEmojiPresentation}, // E0.6 [1] (😵) face with crossed-out eyes
+ {0x1F636, 0x1F636, prEmojiPresentation}, // E1.0 [1] (😶) face without mouth
+ {0x1F637, 0x1F640, prEmojiPresentation}, // E0.6 [10] (😷..🙀) face with medical mask..weary cat
+ {0x1F641, 0x1F644, prEmojiPresentation}, // E1.0 [4] (🙁..🙄) slightly frowning face..face with rolling eyes
+ {0x1F645, 0x1F64F, prEmojiPresentation}, // E0.6 [11] (🙅..🙏) person gesturing NO..folded hands
+ {0x1F680, 0x1F680, prEmojiPresentation}, // E0.6 [1] (🚀) rocket
+ {0x1F681, 0x1F682, prEmojiPresentation}, // E1.0 [2] (🚁..🚂) helicopter..locomotive
+ {0x1F683, 0x1F685, prEmojiPresentation}, // E0.6 [3] (🚃..🚅) railway car..bullet train
+ {0x1F686, 0x1F686, prEmojiPresentation}, // E1.0 [1] (🚆) train
+ {0x1F687, 0x1F687, prEmojiPresentation}, // E0.6 [1] (🚇) metro
+ {0x1F688, 0x1F688, prEmojiPresentation}, // E1.0 [1] (🚈) light rail
+ {0x1F689, 0x1F689, prEmojiPresentation}, // E0.6 [1] (🚉) station
+ {0x1F68A, 0x1F68B, prEmojiPresentation}, // E1.0 [2] (🚊..🚋) tram..tram car
+ {0x1F68C, 0x1F68C, prEmojiPresentation}, // E0.6 [1] (🚌) bus
+ {0x1F68D, 0x1F68D, prEmojiPresentation}, // E0.7 [1] (🚍) oncoming bus
+ {0x1F68E, 0x1F68E, prEmojiPresentation}, // E1.0 [1] (🚎) trolleybus
+ {0x1F68F, 0x1F68F, prEmojiPresentation}, // E0.6 [1] (🚏) bus stop
+ {0x1F690, 0x1F690, prEmojiPresentation}, // E1.0 [1] (🚐) minibus
+ {0x1F691, 0x1F693, prEmojiPresentation}, // E0.6 [3] (🚑..🚓) ambulance..police car
+ {0x1F694, 0x1F694, prEmojiPresentation}, // E0.7 [1] (🚔) oncoming police car
+ {0x1F695, 0x1F695, prEmojiPresentation}, // E0.6 [1] (🚕) taxi
+ {0x1F696, 0x1F696, prEmojiPresentation}, // E1.0 [1] (🚖) oncoming taxi
+ {0x1F697, 0x1F697, prEmojiPresentation}, // E0.6 [1] (🚗) automobile
+ {0x1F698, 0x1F698, prEmojiPresentation}, // E0.7 [1] (🚘) oncoming automobile
+ {0x1F699, 0x1F69A, prEmojiPresentation}, // E0.6 [2] (🚙..🚚) sport utility vehicle..delivery truck
+ {0x1F69B, 0x1F6A1, prEmojiPresentation}, // E1.0 [7] (🚛..🚡) articulated lorry..aerial tramway
+ {0x1F6A2, 0x1F6A2, prEmojiPresentation}, // E0.6 [1] (🚢) ship
+ {0x1F6A3, 0x1F6A3, prEmojiPresentation}, // E1.0 [1] (🚣) person rowing boat
+ {0x1F6A4, 0x1F6A5, prEmojiPresentation}, // E0.6 [2] (🚤..🚥) speedboat..horizontal traffic light
+ {0x1F6A6, 0x1F6A6, prEmojiPresentation}, // E1.0 [1] (🚦) vertical traffic light
+ {0x1F6A7, 0x1F6AD, prEmojiPresentation}, // E0.6 [7] (🚧..🚭) construction..no smoking
+ {0x1F6AE, 0x1F6B1, prEmojiPresentation}, // E1.0 [4] (🚮..🚱) litter in bin sign..non-potable water
+ {0x1F6B2, 0x1F6B2, prEmojiPresentation}, // E0.6 [1] (🚲) bicycle
+ {0x1F6B3, 0x1F6B5, prEmojiPresentation}, // E1.0 [3] (🚳..🚵) no bicycles..person mountain biking
+ {0x1F6B6, 0x1F6B6, prEmojiPresentation}, // E0.6 [1] (🚶) person walking
+ {0x1F6B7, 0x1F6B8, prEmojiPresentation}, // E1.0 [2] (🚷..🚸) no pedestrians..children crossing
+ {0x1F6B9, 0x1F6BE, prEmojiPresentation}, // E0.6 [6] (🚹..🚾) men’s room..water closet
+ {0x1F6BF, 0x1F6BF, prEmojiPresentation}, // E1.0 [1] (🚿) shower
+ {0x1F6C0, 0x1F6C0, prEmojiPresentation}, // E0.6 [1] (🛀) person taking bath
+ {0x1F6C1, 0x1F6C5, prEmojiPresentation}, // E1.0 [5] (🛁..🛅) bathtub..left luggage
+ {0x1F6CC, 0x1F6CC, prEmojiPresentation}, // E1.0 [1] (🛌) person in bed
+ {0x1F6D0, 0x1F6D0, prEmojiPresentation}, // E1.0 [1] (🛐) place of worship
+ {0x1F6D1, 0x1F6D2, prEmojiPresentation}, // E3.0 [2] (🛑..🛒) stop sign..shopping cart
+ {0x1F6D5, 0x1F6D5, prEmojiPresentation}, // E12.0 [1] (🛕) hindu temple
+ {0x1F6D6, 0x1F6D7, prEmojiPresentation}, // E13.0 [2] (🛖..🛗) hut..elevator
+ {0x1F6DC, 0x1F6DC, prEmojiPresentation}, // E15.0 [1] (🛜) wireless
+ {0x1F6DD, 0x1F6DF, prEmojiPresentation}, // E14.0 [3] (🛝..🛟) playground slide..ring buoy
+ {0x1F6EB, 0x1F6EC, prEmojiPresentation}, // E1.0 [2] (🛫..🛬) airplane departure..airplane arrival
+ {0x1F6F4, 0x1F6F6, prEmojiPresentation}, // E3.0 [3] (🛴..🛶) kick scooter..canoe
+ {0x1F6F7, 0x1F6F8, prEmojiPresentation}, // E5.0 [2] (🛷..🛸) sled..flying saucer
+ {0x1F6F9, 0x1F6F9, prEmojiPresentation}, // E11.0 [1] (🛹) skateboard
+ {0x1F6FA, 0x1F6FA, prEmojiPresentation}, // E12.0 [1] (🛺) auto rickshaw
+ {0x1F6FB, 0x1F6FC, prEmojiPresentation}, // E13.0 [2] (🛻..🛼) pickup truck..roller skate
+ {0x1F7E0, 0x1F7EB, prEmojiPresentation}, // E12.0 [12] (🟠..🟫) orange circle..brown square
+ {0x1F7F0, 0x1F7F0, prEmojiPresentation}, // E14.0 [1] (🟰) heavy equals sign
+ {0x1F90C, 0x1F90C, prEmojiPresentation}, // E13.0 [1] (🤌) pinched fingers
+ {0x1F90D, 0x1F90F, prEmojiPresentation}, // E12.0 [3] (🤍..🤏) white heart..pinching hand
+ {0x1F910, 0x1F918, prEmojiPresentation}, // E1.0 [9] (🤐..🤘) zipper-mouth face..sign of the horns
+ {0x1F919, 0x1F91E, prEmojiPresentation}, // E3.0 [6] (🤙..🤞) call me hand..crossed fingers
+ {0x1F91F, 0x1F91F, prEmojiPresentation}, // E5.0 [1] (🤟) love-you gesture
+ {0x1F920, 0x1F927, prEmojiPresentation}, // E3.0 [8] (🤠..🤧) cowboy hat face..sneezing face
+ {0x1F928, 0x1F92F, prEmojiPresentation}, // E5.0 [8] (🤨..🤯) face with raised eyebrow..exploding head
+ {0x1F930, 0x1F930, prEmojiPresentation}, // E3.0 [1] (🤰) pregnant woman
+ {0x1F931, 0x1F932, prEmojiPresentation}, // E5.0 [2] (🤱..🤲) breast-feeding..palms up together
+ {0x1F933, 0x1F93A, prEmojiPresentation}, // E3.0 [8] (🤳..🤺) selfie..person fencing
+ {0x1F93C, 0x1F93E, prEmojiPresentation}, // E3.0 [3] (🤼..🤾) people wrestling..person playing handball
+ {0x1F93F, 0x1F93F, prEmojiPresentation}, // E12.0 [1] (🤿) diving mask
+ {0x1F940, 0x1F945, prEmojiPresentation}, // E3.0 [6] (🥀..🥅) wilted flower..goal net
+ {0x1F947, 0x1F94B, prEmojiPresentation}, // E3.0 [5] (🥇..🥋) 1st place medal..martial arts uniform
+ {0x1F94C, 0x1F94C, prEmojiPresentation}, // E5.0 [1] (🥌) curling stone
+ {0x1F94D, 0x1F94F, prEmojiPresentation}, // E11.0 [3] (🥍..🥏) lacrosse..flying disc
+ {0x1F950, 0x1F95E, prEmojiPresentation}, // E3.0 [15] (🥐..🥞) croissant..pancakes
+ {0x1F95F, 0x1F96B, prEmojiPresentation}, // E5.0 [13] (🥟..🥫) dumpling..canned food
+ {0x1F96C, 0x1F970, prEmojiPresentation}, // E11.0 [5] (🥬..🥰) leafy green..smiling face with hearts
+ {0x1F971, 0x1F971, prEmojiPresentation}, // E12.0 [1] (🥱) yawning face
+ {0x1F972, 0x1F972, prEmojiPresentation}, // E13.0 [1] (🥲) smiling face with tear
+ {0x1F973, 0x1F976, prEmojiPresentation}, // E11.0 [4] (🥳..🥶) partying face..cold face
+ {0x1F977, 0x1F978, prEmojiPresentation}, // E13.0 [2] (🥷..🥸) ninja..disguised face
+ {0x1F979, 0x1F979, prEmojiPresentation}, // E14.0 [1] (🥹) face holding back tears
+ {0x1F97A, 0x1F97A, prEmojiPresentation}, // E11.0 [1] (🥺) pleading face
+ {0x1F97B, 0x1F97B, prEmojiPresentation}, // E12.0 [1] (🥻) sari
+ {0x1F97C, 0x1F97F, prEmojiPresentation}, // E11.0 [4] (🥼..🥿) lab coat..flat shoe
+ {0x1F980, 0x1F984, prEmojiPresentation}, // E1.0 [5] (🦀..🦄) crab..unicorn
+ {0x1F985, 0x1F991, prEmojiPresentation}, // E3.0 [13] (🦅..🦑) eagle..squid
+ {0x1F992, 0x1F997, prEmojiPresentation}, // E5.0 [6] (🦒..🦗) giraffe..cricket
+ {0x1F998, 0x1F9A2, prEmojiPresentation}, // E11.0 [11] (🦘..🦢) kangaroo..swan
+ {0x1F9A3, 0x1F9A4, prEmojiPresentation}, // E13.0 [2] (🦣..🦤) mammoth..dodo
+ {0x1F9A5, 0x1F9AA, prEmojiPresentation}, // E12.0 [6] (🦥..🦪) sloth..oyster
+ {0x1F9AB, 0x1F9AD, prEmojiPresentation}, // E13.0 [3] (🦫..🦭) beaver..seal
+ {0x1F9AE, 0x1F9AF, prEmojiPresentation}, // E12.0 [2] (🦮..🦯) guide dog..white cane
+ {0x1F9B0, 0x1F9B9, prEmojiPresentation}, // E11.0 [10] (🦰..🦹) red hair..supervillain
+ {0x1F9BA, 0x1F9BF, prEmojiPresentation}, // E12.0 [6] (🦺..🦿) safety vest..mechanical leg
+ {0x1F9C0, 0x1F9C0, prEmojiPresentation}, // E1.0 [1] (🧀) cheese wedge
+ {0x1F9C1, 0x1F9C2, prEmojiPresentation}, // E11.0 [2] (🧁..🧂) cupcake..salt
+ {0x1F9C3, 0x1F9CA, prEmojiPresentation}, // E12.0 [8] (🧃..🧊) beverage box..ice
+ {0x1F9CB, 0x1F9CB, prEmojiPresentation}, // E13.0 [1] (🧋) bubble tea
+ {0x1F9CC, 0x1F9CC, prEmojiPresentation}, // E14.0 [1] (🧌) troll
+ {0x1F9CD, 0x1F9CF, prEmojiPresentation}, // E12.0 [3] (🧍..🧏) person standing..deaf person
+ {0x1F9D0, 0x1F9E6, prEmojiPresentation}, // E5.0 [23] (🧐..🧦) face with monocle..socks
+ {0x1F9E7, 0x1F9FF, prEmojiPresentation}, // E11.0 [25] (🧧..🧿) red envelope..nazar amulet
+ {0x1FA70, 0x1FA73, prEmojiPresentation}, // E12.0 [4] (🩰..🩳) ballet shoes..shorts
+ {0x1FA74, 0x1FA74, prEmojiPresentation}, // E13.0 [1] (🩴) thong sandal
+ {0x1FA75, 0x1FA77, prEmojiPresentation}, // E15.0 [3] (🩵..🩷) light blue heart..pink heart
+ {0x1FA78, 0x1FA7A, prEmojiPresentation}, // E12.0 [3] (🩸..🩺) drop of blood..stethoscope
+ {0x1FA7B, 0x1FA7C, prEmojiPresentation}, // E14.0 [2] (🩻..🩼) x-ray..crutch
+ {0x1FA80, 0x1FA82, prEmojiPresentation}, // E12.0 [3] (🪀..🪂) yo-yo..parachute
+ {0x1FA83, 0x1FA86, prEmojiPresentation}, // E13.0 [4] (🪃..🪆) boomerang..nesting dolls
+ {0x1FA87, 0x1FA88, prEmojiPresentation}, // E15.0 [2] (🪇..🪈) maracas..flute
+ {0x1FA90, 0x1FA95, prEmojiPresentation}, // E12.0 [6] (🪐..🪕) ringed planet..banjo
+ {0x1FA96, 0x1FAA8, prEmojiPresentation}, // E13.0 [19] (🪖..🪨) military helmet..rock
+ {0x1FAA9, 0x1FAAC, prEmojiPresentation}, // E14.0 [4] (🪩..🪬) mirror ball..hamsa
+ {0x1FAAD, 0x1FAAF, prEmojiPresentation}, // E15.0 [3] (🪭..🪯) folding hand fan..khanda
+ {0x1FAB0, 0x1FAB6, prEmojiPresentation}, // E13.0 [7] (🪰..🪶) fly..feather
+ {0x1FAB7, 0x1FABA, prEmojiPresentation}, // E14.0 [4] (🪷..🪺) lotus..nest with eggs
+ {0x1FABB, 0x1FABD, prEmojiPresentation}, // E15.0 [3] (🪻..🪽) hyacinth..wing
+ {0x1FABF, 0x1FABF, prEmojiPresentation}, // E15.0 [1] (🪿) goose
+ {0x1FAC0, 0x1FAC2, prEmojiPresentation}, // E13.0 [3] (🫀..🫂) anatomical heart..people hugging
+ {0x1FAC3, 0x1FAC5, prEmojiPresentation}, // E14.0 [3] (🫃..🫅) pregnant man..person with crown
+ {0x1FACE, 0x1FACF, prEmojiPresentation}, // E15.0 [2] (🫎..🫏) moose..donkey
+ {0x1FAD0, 0x1FAD6, prEmojiPresentation}, // E13.0 [7] (🫐..🫖) blueberries..teapot
+ {0x1FAD7, 0x1FAD9, prEmojiPresentation}, // E14.0 [3] (🫗..🫙) pouring liquid..jar
+ {0x1FADA, 0x1FADB, prEmojiPresentation}, // E15.0 [2] (🫚..🫛) ginger root..pea pod
+ {0x1FAE0, 0x1FAE7, prEmojiPresentation}, // E14.0 [8] (🫠..🫧) melting face..bubbles
+ {0x1FAE8, 0x1FAE8, prEmojiPresentation}, // E15.0 [1] (🫨) shaking face
+ {0x1FAF0, 0x1FAF6, prEmojiPresentation}, // E14.0 [7] (🫰..🫶) hand with index finger and thumb crossed..heart hands
+ {0x1FAF7, 0x1FAF8, prEmojiPresentation}, // E15.0 [2] (🫷..🫸) leftwards pushing hand..rightwards pushing hand
+}
diff --git a/vendor/github.com/rivo/uniseg/gen_breaktest.go b/vendor/github.com/rivo/uniseg/gen_breaktest.go
new file mode 100644
index 0000000000..6bfbeb5e7f
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/gen_breaktest.go
@@ -0,0 +1,215 @@
+//go:build generate
+
+// This program generates a Go file containing a slice of test cases based on the
+// Unicode Character Database auxiliary data files. The command line arguments
+// are as follows:
+//
+// 1. The name of the Unicode data file (just the filename, without extension).
+// 2. The name of the locally generated Go file.
+// 3. The name of the slice containing the test cases.
+// 4. The name of the generator, for logging purposes.
+//
+//go:generate go run gen_breaktest.go GraphemeBreakTest graphemebreak_test.go graphemeBreakTestCases graphemes
+//go:generate go run gen_breaktest.go WordBreakTest wordbreak_test.go wordBreakTestCases words
+//go:generate go run gen_breaktest.go SentenceBreakTest sentencebreak_test.go sentenceBreakTestCases sentences
+//go:generate go run gen_breaktest.go LineBreakTest linebreak_test.go lineBreakTestCases lines
+
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "fmt"
+ "go/format"
+ "io/ioutil"
+ "log"
+ "net/http"
+ "os"
+ "time"
+)
+
+// We want to test against a specific version rather than the latest. When the
+// package is upgraded to a new version, change these to generate new tests.
+const (
+ testCaseURL = `https://www.unicode.org/Public/15.0.0/ucd/auxiliary/%s.txt`
+)
+
+func main() {
+ if len(os.Args) < 5 {
+ fmt.Println("Not enough arguments, see code for details")
+ os.Exit(1)
+ }
+
+ log.SetPrefix("gen_breaktest (" + os.Args[4] + "): ")
+ log.SetFlags(0)
+
+ // Read text of testcases and parse into Go source code.
+ src, err := parse(fmt.Sprintf(testCaseURL, os.Args[1]))
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Format the Go code.
+ formatted, err := format.Source(src)
+ if err != nil {
+ log.Fatalln("gofmt:", err)
+ }
+
+ // Write it out.
+ log.Print("Writing to ", os.Args[2])
+ if err := ioutil.WriteFile(os.Args[2], formatted, 0644); err != nil {
+ log.Fatal(err)
+ }
+}
+
+// parse reads a break test file from the given URL and parses its contents
+// into Go source code representing the test cases.
+func parse(url string) ([]byte, error) {
+ log.Printf("Parsing %s", url)
+ res, err := http.Get(url)
+ if err != nil {
+ return nil, err
+ }
+ body := res.Body
+ defer body.Close()
+
+ buf := new(bytes.Buffer)
+ buf.Grow(120 << 10)
+ buf.WriteString(`// Code generated via go generate from gen_breaktest.go. DO NOT EDIT.
+
+package uniseg
+
+// ` + os.Args[3] + ` are test cases taken from
+// ` + url + `
+// on ` + time.Now().Format("January 2, 2006") + `. See
+// https://www.unicode.org/license.html for the Unicode license agreement.
+var ` + os.Args[3] + ` = []testCase {
+`)
+
+ sc := bufio.NewScanner(body)
+ num := 1
+ var line []byte
+ original := make([]byte, 0, 64)
+ expected := make([]byte, 0, 64)
+ for sc.Scan() {
+ num++
+ line = sc.Bytes()
+ if len(line) == 0 || line[0] == '#' {
+ continue
+ }
+ var comment []byte
+ if i := bytes.IndexByte(line, '#'); i >= 0 {
+ comment = bytes.TrimSpace(line[i+1:])
+ line = bytes.TrimSpace(line[:i])
+ }
+ original, expected, err := parseRuneSequence(line, original[:0], expected[:0])
+ if err != nil {
+ return nil, fmt.Errorf(`line %d: %v: %q`, num, err, line)
+ }
+ fmt.Fprintf(buf, "\t{original: \"%s\", expected: %s}, // %s\n", original, expected, comment)
+ }
+ if err := sc.Err(); err != nil {
+ return nil, err
+ }
+
+ // Check for the final "# EOF" line, a useful sanity check when streaming via HTTP.
+ if !bytes.Equal(line, []byte("# EOF")) {
+ return nil, fmt.Errorf(`line %d: expected "# EOF" as final line, got %q`, num, line)
+ }
+ buf.WriteString("}\n")
+ return buf.Bytes(), nil
+}
+
+// Used by parseRuneSequence to match input via bytes.HasPrefix.
+var (
+ prefixBreak = []byte("÷ ")
+ prefixDontBreak = []byte("× ")
+ breakOk = []byte("÷")
+ breakNo = []byte("×")
+)
+
+// parseRuneSequence parses a rune + breaking opportunity sequence from b
+// and appends the Go code for testcase.original to orig
+// and appends the Go code for testcase.expected to exp.
+// It returns the new orig and exp slices.
+//
+// E.g. for the input b="÷ 0020 × 0308 ÷ 1F1E6 ÷"
+// it will append
+//
+// "\u0020\u0308\U0001F1E6"
+//
+// and "[][]rune{{0x0020,0x0308},{0x1F1E6},}"
+// to orig and exp respectively.
+//
+// The formatting of exp is expected to be cleaned up by gofmt or format.Source.
+// Note we explicitly require the sequence to start with ÷ and we implicitly
+// require it to end with ÷.
+func parseRuneSequence(b, orig, exp []byte) ([]byte, []byte, error) {
+ // Check for and remove first ÷ or ×.
+ if !bytes.HasPrefix(b, prefixBreak) && !bytes.HasPrefix(b, prefixDontBreak) {
+ return nil, nil, errors.New("expected ÷ or × as first character")
+ }
+ if bytes.HasPrefix(b, prefixBreak) {
+ b = b[len(prefixBreak):]
+ } else {
+ b = b[len(prefixDontBreak):]
+ }
+
+ boundary := true
+ exp = append(exp, "[][]rune{"...)
+ for len(b) > 0 {
+ if boundary {
+ exp = append(exp, '{')
+ }
+ exp = append(exp, "0x"...)
+ // Find end of hex digits.
+ var i int
+ for i = 0; i < len(b) && b[i] != ' '; i++ {
+ if d := b[i]; ('0' <= d && d <= '9') ||
+ ('A' <= d && d <= 'F') ||
+ ('a' <= d && d <= 'f') {
+ continue
+ }
+ return nil, nil, errors.New("bad hex digit")
+ }
+ switch i {
+ case 4:
+ orig = append(orig, "\\u"...)
+ case 5:
+ orig = append(orig, "\\U000"...)
+ default:
+ return nil, nil, errors.New("unsupported code point hex length")
+ }
+ orig = append(orig, b[:i]...)
+ exp = append(exp, b[:i]...)
+ b = b[i:]
+
+ // Check for space between hex and ÷ or ×.
+ if len(b) < 1 || b[0] != ' ' {
+ return nil, nil, errors.New("bad input")
+ }
+ b = b[1:]
+
+ // Check for next boundary.
+ switch {
+ case bytes.HasPrefix(b, breakOk):
+ boundary = true
+ b = b[len(breakOk):]
+ case bytes.HasPrefix(b, breakNo):
+ boundary = false
+ b = b[len(breakNo):]
+ default:
+ return nil, nil, errors.New("missing ÷ or ×")
+ }
+ if boundary {
+ exp = append(exp, '}')
+ }
+ exp = append(exp, ',')
+ if len(b) > 0 && b[0] == ' ' {
+ b = b[1:]
+ }
+ }
+ exp = append(exp, '}')
+ return orig, exp, nil
+}
diff --git a/vendor/github.com/rivo/uniseg/gen_properties.go b/vendor/github.com/rivo/uniseg/gen_properties.go
new file mode 100644
index 0000000000..8992d2c5f8
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/gen_properties.go
@@ -0,0 +1,261 @@
+//go:build generate
+
+// This program generates a property file in Go format from Unicode Character
+// Database auxiliary data files. The command line arguments are as follows:
+//
+// 1. The name of the Unicode data file (just the filename, without extension).
+// Can be "-" (to skip) if the emoji flag is included.
+// 2. The name of the locally generated Go file.
+// 3. The name of the slice mapping code points to properties.
+// 4. The name of the generator, for logging purposes.
+// 5. (Optional) Flags, comma-separated. The following flags are available:
+// - "emojis=": include the specified emoji properties (e.g.
+// "Extended_Pictographic").
+// - "gencat": include general category properties.
+//
+//go:generate go run gen_properties.go auxiliary/GraphemeBreakProperty graphemeproperties.go graphemeCodePoints graphemes emojis=Extended_Pictographic
+//go:generate go run gen_properties.go auxiliary/WordBreakProperty wordproperties.go workBreakCodePoints words emojis=Extended_Pictographic
+//go:generate go run gen_properties.go auxiliary/SentenceBreakProperty sentenceproperties.go sentenceBreakCodePoints sentences
+//go:generate go run gen_properties.go LineBreak lineproperties.go lineBreakCodePoints lines gencat
+//go:generate go run gen_properties.go EastAsianWidth eastasianwidth.go eastAsianWidth eastasianwidth
+//go:generate go run gen_properties.go - emojipresentation.go emojiPresentation emojipresentation emojis=Emoji_Presentation
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "errors"
+ "fmt"
+ "go/format"
+ "io/ioutil"
+ "log"
+ "net/http"
+ "os"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+)
+
+// We want to generate properties for a specific Unicode version rather than the
+// latest. When the package is upgraded to a new version, change these URLs to
+// regenerate the property files.
+const (
+ propertyURL = `https://www.unicode.org/Public/15.0.0/ucd/%s.txt`
+ emojiURL = `https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt`
+)
+
+// The regular expression for a line containing a code point range property.
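+// As a purely illustrative (hypothetical) example, a data file line of the shape
+//
+// 1F3F8..1F407  ; Emoji_Presentation  # E1.0 [16] (🏸..🐇) badminton..rabbit
+//
+// would be captured as from="1F3F8", to="1F407", property="Emoji_Presentation",
+// with the trailing text as the comment.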
+var propertyPattern = regexp.MustCompile(`^([0-9A-F]{4,6})(\.\.([0-9A-F]{4,6}))?\s*;\s*([A-Za-z0-9_]+)\s*#\s(.+)$`)
+
+func main() {
+ if len(os.Args) < 5 {
+ fmt.Println("Not enough arguments, see code for details")
+ os.Exit(1)
+ }
+
+ log.SetPrefix("gen_properties (" + os.Args[4] + "): ")
+ log.SetFlags(0)
+
+ // Parse flags.
+ flags := make(map[string]string)
+ if len(os.Args) >= 6 {
+ for _, flag := range strings.Split(os.Args[5], ",") {
+ flagFields := strings.Split(flag, "=")
+ if len(flagFields) == 1 {
+ flags[flagFields[0]] = "yes"
+ } else {
+ flags[flagFields[0]] = flagFields[1]
+ }
+ }
+ }
+
+ // Parse the text file and generate Go source code from it.
+ _, includeGeneralCategory := flags["gencat"]
+ var mainURL string
+ if os.Args[1] != "-" {
+ mainURL = fmt.Sprintf(propertyURL, os.Args[1])
+ }
+ src, err := parse(mainURL, flags["emojis"], includeGeneralCategory)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Format the Go code.
+ formatted, err := format.Source([]byte(src))
+ if err != nil {
+ log.Fatal("gofmt:", err)
+ }
+
+ // Save it to the (local) target file.
+ log.Print("Writing to ", os.Args[2])
+ if err := ioutil.WriteFile(os.Args[2], formatted, 0644); err != nil {
+ log.Fatal(err)
+ }
+}
+
+// parse parses the Unicode Properties text files located at the given URLs and
+// returns their equivalent Go source code to be used in the uniseg package. If
+// "emojiProperty" is not an empty string, emoji code points for that emoji
+// property (e.g. "Extended_Pictographic") will be included. In those cases, you
+// may pass an empty "propertyURL" to skip parsing the main properties file. If
+// "includeGeneralCategory" is true, the Unicode General Category property will
+// be extracted from the comments and included in the output.
+func parse(propertyURL, emojiProperty string, includeGeneralCategory bool) (string, error) {
+ if propertyURL == "" && emojiProperty == "" {
+ return "", errors.New("no properties to parse")
+ }
+
+ // Temporary buffer to hold properties.
+ var properties [][4]string
+
+ // Open the first URL.
+ if propertyURL != "" {
+ log.Printf("Parsing %s", propertyURL)
+ res, err := http.Get(propertyURL)
+ if err != nil {
+ return "", err
+ }
+ in1 := res.Body
+ defer in1.Close()
+
+ // Parse it.
+ scanner := bufio.NewScanner(in1)
+ num := 0
+ for scanner.Scan() {
+ num++
+ line := strings.TrimSpace(scanner.Text())
+
+ // Skip comments and empty lines.
+ if strings.HasPrefix(line, "#") || line == "" {
+ continue
+ }
+
+ // Everything else must be a code point range, a property and a comment.
+ from, to, property, comment, err := parseProperty(line)
+ if err != nil {
+ return "", fmt.Errorf("%s line %d: %v", os.Args[4], num, err)
+ }
+ properties = append(properties, [4]string{from, to, property, comment})
+ }
+ if err := scanner.Err(); err != nil {
+ return "", err
+ }
+ }
+
+ // Open the second URL.
+ if emojiProperty != "" {
+ log.Printf("Parsing %s", emojiURL)
+ res, err := http.Get(emojiURL)
+ if err != nil {
+ return "", err
+ }
+ in2 := res.Body
+ defer in2.Close()
+
+ // Parse it.
+ scanner := bufio.NewScanner(in2)
+ num := 0
+ for scanner.Scan() {
+ num++
+ line := scanner.Text()
+
+ // Skip comments, empty lines, and everything not containing
+ // "Extended_Pictographic".
+ if strings.HasPrefix(line, "#") || line == "" || !strings.Contains(line, emojiProperty) {
+ continue
+ }
+
+ // Everything else must be a code point range, a property and a comment.
+ from, to, property, comment, err := parseProperty(line)
+ if err != nil {
+ return "", fmt.Errorf("emojis line %d: %v", num, err)
+ }
+ properties = append(properties, [4]string{from, to, property, comment})
+ }
+ if err := scanner.Err(); err != nil {
+ return "", err
+ }
+ }
+
+ // Avoid overflow during binary search.
+ if len(properties) >= 1<<31 {
+ return "", errors.New("too many properties")
+ }
+
+ // Sort properties.
+ sort.Slice(properties, func(i, j int) bool {
+ left, _ := strconv.ParseUint(properties[i][0], 16, 64)
+ right, _ := strconv.ParseUint(properties[j][0], 16, 64)
+ return left < right
+ })
+
+ // Header.
+ var (
+ buf bytes.Buffer
+ emojiComment string
+ )
+ columns := 3
+ if includeGeneralCategory {
+ columns = 4
+ }
+ if emojiURL != "" {
+ emojiComment = `
+// and
+// ` + emojiURL + `
+// ("Extended_Pictographic" only)`
+ }
+ buf.WriteString(`// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+
+package uniseg
+
+// ` + os.Args[3] + ` are taken from
+// ` + propertyURL + emojiComment + `
+// on ` + time.Now().Format("January 2, 2006") + `. See https://www.unicode.org/license.html for the Unicode
+// license agreement.
+var ` + os.Args[3] + ` = [][` + strconv.Itoa(columns) + `]int{
+ `)
+
+ // Properties.
+ for _, prop := range properties {
+ if includeGeneralCategory {
+ generalCategory := "gc" + prop[3][:2]
+ if generalCategory == "gcL&" {
+ generalCategory = "gcLC"
+ }
+ prop[3] = prop[3][3:]
+ fmt.Fprintf(&buf, "{0x%s,0x%s,%s,%s}, // %s\n", prop[0], prop[1], translateProperty("pr", prop[2]), generalCategory, prop[3])
+ } else {
+ fmt.Fprintf(&buf, "{0x%s,0x%s,%s}, // %s\n", prop[0], prop[1], translateProperty("pr", prop[2]), prop[3])
+ }
+ }
+
+ // Tail.
+ buf.WriteString("}")
+
+ return buf.String(), nil
+}
+
+// parseProperty parses a line of the Unicode properties text file containing a
+// property for a code point range and returns it along with its comment.
+func parseProperty(line string) (from, to, property, comment string, err error) {
+ fields := propertyPattern.FindStringSubmatch(line)
+ if fields == nil {
+ err = errors.New("no property found")
+ return
+ }
+ from = fields[1]
+ to = fields[3]
+ if to == "" {
+ to = from
+ }
+ property = fields[4]
+ comment = fields[5]
+ return
+}
+
+// translateProperty translates a property name as used in the Unicode data file
+// to a variable used in the Go code.
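+// For example, translateProperty("pr", "Extended_Pictographic") returns
+// "prExtendedPictographic".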
+func translateProperty(prefix, property string) string {
+ return prefix + strings.ReplaceAll(property, "_", "")
+}
diff --git a/vendor/github.com/rivo/uniseg/grapheme.go b/vendor/github.com/rivo/uniseg/grapheme.go
new file mode 100644
index 0000000000..b12403d43c
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/grapheme.go
@@ -0,0 +1,331 @@
+package uniseg
+
+import "unicode/utf8"
+
+// Graphemes implements an iterator over Unicode grapheme clusters, or
+// user-perceived characters. While iterating, it also provides information
+// about word boundaries, sentence boundaries, line breaks, and monospace
+// character widths.
+//
+// After constructing the class via [NewGraphemes] for a given string "str",
+// [Graphemes.Next] is called for every grapheme cluster in a loop until it
+// returns false. Inside the loop, information about the grapheme cluster as
+// well as boundary information and character width is available via the various
+// methods (see examples below).
+//
+// This class basically wraps the [StepString] parser and provides a convenient
+// interface to it. If you are only interested in some parts of this package's
+// functionality, using the specialized functions starting with "First" is
+// almost always faster.
+type Graphemes struct {
+ // The original string.
+ original string
+
+ // The remaining string to be parsed.
+ remaining string
+
+ // The current grapheme cluster.
+ cluster string
+
+ // The byte offset of the current grapheme cluster relative to the original
+ // string.
+ offset int
+
+ // The current boundary information of the [Step] parser.
+ boundaries int
+
+ // The current state of the [Step] parser.
+ state int
+}
+
+// NewGraphemes returns a new grapheme cluster iterator.
+func NewGraphemes(str string) *Graphemes {
+ return &Graphemes{
+ original: str,
+ remaining: str,
+ state: -1,
+ }
+}
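+
+// A minimal usage sketch (illustrative only, not part of the upstream
+// documentation):
+//
+// g := NewGraphemes("Hello, 世界!")
+// for g.Next() {
+// fmt.Println(g.Str(), g.Width())
+// }
+//
+// Each iteration yields one user-perceived character together with its boundary
+// and monospace width information.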
+
+// Next advances the iterator by one grapheme cluster and returns false if no
+// clusters are left. This function must be called before the first cluster is
+// accessed.
+func (g *Graphemes) Next() bool {
+ if len(g.remaining) == 0 {
+ // We're already past the end.
+ g.state = -2
+ g.cluster = ""
+ return false
+ }
+ g.offset += len(g.cluster)
+ g.cluster, g.remaining, g.boundaries, g.state = StepString(g.remaining, g.state)
+ return true
+}
+
+// Runes returns a slice of runes (code points) which corresponds to the current
+// grapheme cluster. If the iterator is already past the end or [Graphemes.Next]
+// has not yet been called, nil is returned.
+func (g *Graphemes) Runes() []rune {
+ if g.state < 0 {
+ return nil
+ }
+ return []rune(g.cluster)
+}
+
+// Str returns a substring of the original string which corresponds to the
+// current grapheme cluster. If the iterator is already past the end or
+// [Graphemes.Next] has not yet been called, an empty string is returned.
+func (g *Graphemes) Str() string {
+ return g.cluster
+}
+
+// Bytes returns a byte slice which corresponds to the current grapheme cluster.
+// If the iterator is already past the end or [Graphemes.Next] has not yet been
+// called, nil is returned.
+func (g *Graphemes) Bytes() []byte {
+ if g.state < 0 {
+ return nil
+ }
+ return []byte(g.cluster)
+}
+
+// Positions returns the interval of the current grapheme cluster as byte
+// positions into the original string. The first returned value "from" indexes
+// the first byte and the second returned value "to" indexes the first byte that
+// is not included anymore, i.e. str[from:to] is the current grapheme cluster of
+// the original string "str". If [Graphemes.Next] has not yet been called, both
+// values are 0. If the iterator is already past the end, both values are 1.
+func (g *Graphemes) Positions() (int, int) {
+ if g.state == -1 {
+ return 0, 0
+ } else if g.state == -2 {
+ return 1, 1
+ }
+ return g.offset, g.offset + len(g.cluster)
+}
+
+// IsWordBoundary returns true if a word ends after the current grapheme
+// cluster.
+func (g *Graphemes) IsWordBoundary() bool {
+ if g.state < 0 {
+ return true
+ }
+ return g.boundaries&MaskWord != 0
+}
+
+// IsSentenceBoundary returns true if a sentence ends after the current
+// grapheme cluster.
+func (g *Graphemes) IsSentenceBoundary() bool {
+ if g.state < 0 {
+ return true
+ }
+ return g.boundaries&MaskSentence != 0
+}
+
+// LineBreak returns whether the line can be broken after the current grapheme
+// cluster. A value of [LineDontBreak] means the line may not be broken, a value
+// of [LineMustBreak] means the line must be broken, and a value of
+// [LineCanBreak] means the line may or may not be broken.
+func (g *Graphemes) LineBreak() int {
+ if g.state == -1 {
+ return LineDontBreak
+ }
+ if g.state == -2 {
+ return LineMustBreak
+ }
+ return g.boundaries & MaskLine
+}
+
+// Width returns the monospace width of the current grapheme cluster.
+func (g *Graphemes) Width() int {
+ if g.state < 0 {
+ return 0
+ }
+ return g.boundaries >> ShiftWidth
+}
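+
+// For example, a cluster containing "世" typically reports a width of 2 while
+// "a" reports 1 (illustrative values for common monospace terminals).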
+
+// Reset puts the iterator into its initial state such that the next call to
+// [Graphemes.Next] sets it to the first grapheme cluster again.
+func (g *Graphemes) Reset() {
+ g.state = -1
+ g.offset = 0
+ g.cluster = ""
+ g.remaining = g.original
+}
+
+// GraphemeClusterCount returns the number of user-perceived characters
+// (grapheme clusters) for the given string.
+func GraphemeClusterCount(s string) (n int) {
+ state := -1
+ for len(s) > 0 {
+ _, s, _, state = FirstGraphemeClusterInString(s, state)
+ n++
+ }
+ return
+}
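+
+// For instance, GraphemeClusterCount("🇩🇪🏳️‍🌈") should return 2: the two flag
+// sequences form two grapheme clusters even though the string consists of six
+// code points (illustrative example, not taken from the upstream documentation).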
+
+// ReverseString reverses the given string while observing grapheme cluster
+// boundaries.
+func ReverseString(s string) string {
+ str := []byte(s)
+ reversed := make([]byte, len(str))
+ state := -1
+ index := len(str)
+ for len(str) > 0 {
+ var cluster []byte
+ cluster, str, _, state = FirstGraphemeCluster(str, state)
+ index -= len(cluster)
+ copy(reversed[index:], cluster)
+ if index <= len(str)/2 {
+ break
+ }
+ }
+ return string(reversed)
+}
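+
+// For example, ReverseString("🇩🇪🇺🇸") should yield "🇺🇸🇩🇪", keeping each
+// regional indicator pair intact rather than reversing individual code points
+// (illustrative example).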
+
+// The number of bits the grapheme property must be shifted to make place for
+// grapheme states.
+const shiftGraphemePropState = 4
+
+// FirstGraphemeCluster returns the first grapheme cluster found in the given
+// byte slice according to the rules of [Unicode Standard Annex #29, Grapheme
+// Cluster Boundaries]. This function can be called continuously to extract all
+// grapheme clusters from a byte slice, as illustrated in the example below.
+//
+// If you don't know the current state, for example when calling the function
+// for the first time, you must pass -1. For consecutive calls, pass the state
+// and rest slice returned by the previous call.
+//
+// The "rest" slice is the sub-slice of the original byte slice "b" starting
+// after the last byte of the identified grapheme cluster. If the length of the
+// "rest" slice is 0, the entire byte slice "b" has been processed. The
+// "cluster" byte slice is the sub-slice of the input slice containing the
+// identified grapheme cluster.
+//
+// The returned width is the width of the grapheme cluster for most monospace
+// fonts where a value of 1 represents one character cell.
+//
+// Given an empty byte slice "b", the function returns nil values.
+//
+// While slightly less convenient than using the Graphemes class, this function
+// has much better performance and makes no allocations. It lends itself well to
+// large byte slices.
+//
+// [Unicode Standard Annex #29, Grapheme Cluster Boundaries]: http://unicode.org/reports/tr29/#Grapheme_Cluster_Boundaries
+func FirstGraphemeCluster(b []byte, state int) (cluster, rest []byte, width, newState int) {
+ // An empty byte slice returns nothing.
+ if len(b) == 0 {
+ return
+ }
+
+ // Extract the first rune.
+ r, length := utf8.DecodeRune(b)
+ if len(b) <= length { // If we're already past the end, there is nothing else to parse.
+ var prop int
+ if state < 0 {
+ prop = propertyGraphemes(r)
+ } else {
+ prop = state >> shiftGraphemePropState
+ }
+ return b, nil, runeWidth(r, prop), grAny | (prop << shiftGraphemePropState)
+ }
+
+ // If we don't know the state, determine it now.
+ var firstProp int
+ if state < 0 {
+ state, firstProp, _ = transitionGraphemeState(state, r)
+ } else {
+ firstProp = state >> shiftGraphemePropState
+ }
+ width += runeWidth(r, firstProp)
+
+ // Transition until we find a boundary.
+ for {
+ var (
+ prop int
+ boundary bool
+ )
+
+ r, l := utf8.DecodeRune(b[length:])
+ state, prop, boundary = transitionGraphemeState(state&maskGraphemeState, r)
+
+ if boundary {
+ return b[:length], b[length:], width, state | (prop << shiftGraphemePropState)
+ }
+
+ if firstProp == prExtendedPictographic {
+ if r == vs15 {
+ width = 1
+ } else if r == vs16 {
+ width = 2
+ }
+ } else if firstProp != prRegionalIndicator && firstProp != prL {
+ width += runeWidth(r, prop)
+ }
+
+ length += l
+ if len(b) <= length {
+ return b, nil, width, grAny | (prop << shiftGraphemePropState)
+ }
+ }
+}
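+
+// A minimal sketch of the intended calling pattern (illustrative only):
+//
+// state := -1
+// for len(b) > 0 {
+// var cluster []byte
+// cluster, b, _, state = FirstGraphemeCluster(b, state)
+// // Process cluster here.
+// }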
+
+// FirstGraphemeClusterInString is like [FirstGraphemeCluster] but its input and
+// outputs are strings.
+func FirstGraphemeClusterInString(str string, state int) (cluster, rest string, width, newState int) {
+ // An empty string returns nothing.
+ if len(str) == 0 {
+ return
+ }
+
+ // Extract the first rune.
+ r, length := utf8.DecodeRuneInString(str)
+ if len(str) <= length { // If we're already past the end, there is nothing else to parse.
+ var prop int
+ if state < 0 {
+ prop = propertyGraphemes(r)
+ } else {
+ prop = state >> shiftGraphemePropState
+ }
+ return str, "", runeWidth(r, prop), grAny | (prop << shiftGraphemePropState)
+ }
+
+ // If we don't know the state, determine it now.
+ var firstProp int
+ if state < 0 {
+ state, firstProp, _ = transitionGraphemeState(state, r)
+ } else {
+ firstProp = state >> shiftGraphemePropState
+ }
+ width += runeWidth(r, firstProp)
+
+ // Transition until we find a boundary.
+ for {
+ var (
+ prop int
+ boundary bool
+ )
+
+ r, l := utf8.DecodeRuneInString(str[length:])
+ state, prop, boundary = transitionGraphemeState(state&maskGraphemeState, r)
+
+ if boundary {
+ return str[:length], str[length:], width, state | (prop << shiftGraphemePropState)
+ }
+
+ if firstProp == prExtendedPictographic {
+ if r == vs15 {
+ width = 1
+ } else if r == vs16 {
+ width = 2
+ }
+ } else if firstProp != prRegionalIndicator && firstProp != prL {
+ width += runeWidth(r, prop)
+ }
+
+ length += l
+ if len(str) <= length {
+ return str, "", width, grAny | (prop << shiftGraphemePropState)
+ }
+ }
+}
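
Editor's note: the string variant supports the same loop. The sketch below uses it to total the monospace cell width of a string; it again assumes the upstream import path github.com/rivo/uniseg, and the helper name stringWidth is made up for illustration (upstream uniseg also exposes a StringWidth convenience function covering this case).

package main

import (
	"fmt"

	"github.com/rivo/uniseg"
)

// stringWidth totals the monospace cell width of all grapheme clusters in s.
// Illustrative helper only; not part of the vendored code above.
func stringWidth(s string) int {
	width := 0
	state := -1
	for len(s) > 0 {
		var w int
		_, s, w, state = uniseg.FirstGraphemeClusterInString(s, state)
		width += w
	}
	return width
}

func main() {
	fmt.Println(stringWidth("Hello, 世界")) // wide CJK runes count as two cells each
}
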
diff --git a/vendor/github.com/rivo/uniseg/graphemeproperties.go b/vendor/github.com/rivo/uniseg/graphemeproperties.go
new file mode 100644
index 0000000000..0aff4a619a
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/graphemeproperties.go
@@ -0,0 +1,1915 @@
+// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+
+package uniseg
+
+// graphemeCodePoints are taken from
+// https://www.unicode.org/Public/15.0.0/ucd/auxiliary/GraphemeBreakProperty.txt
+// and
+// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
+// ("Extended_Pictographic" only)
+// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode
+// license agreement.
+var graphemeCodePoints = [][3]int{
+ {0x0000, 0x0009, prControl}, // Cc [10] <control-0000>..<control-0009>
+ {0x000A, 0x000A, prLF}, // Cc <control-000A>
+ {0x000B, 0x000C, prControl}, // Cc [2] <control-000B>..<control-000C>
+ {0x000D, 0x000D, prCR}, // Cc <control-000D>
+ {0x000E, 0x001F, prControl}, // Cc [18] <control-000E>..<control-001F>
+ {0x007F, 0x009F, prControl}, // Cc [33] <control-007F>..<control-009F>
+ {0x00A9, 0x00A9, prExtendedPictographic}, // E0.6 [1] (©️) copyright
+ {0x00AD, 0x00AD, prControl}, // Cf SOFT HYPHEN
+ {0x00AE, 0x00AE, prExtendedPictographic}, // E0.6 [1] (®️) registered
+ {0x0300, 0x036F, prExtend}, // Mn [112] COMBINING GRAVE ACCENT..COMBINING LATIN SMALL LETTER X
+ {0x0483, 0x0487, prExtend}, // Mn [5] COMBINING CYRILLIC TITLO..COMBINING CYRILLIC POKRYTIE
+ {0x0488, 0x0489, prExtend}, // Me [2] COMBINING CYRILLIC HUNDRED THOUSANDS SIGN..COMBINING CYRILLIC MILLIONS SIGN
+ {0x0591, 0x05BD, prExtend}, // Mn [45] HEBREW ACCENT ETNAHTA..HEBREW POINT METEG
+ {0x05BF, 0x05BF, prExtend}, // Mn HEBREW POINT RAFE
+ {0x05C1, 0x05C2, prExtend}, // Mn [2] HEBREW POINT SHIN DOT..HEBREW POINT SIN DOT
+ {0x05C4, 0x05C5, prExtend}, // Mn [2] HEBREW MARK UPPER DOT..HEBREW MARK LOWER DOT
+ {0x05C7, 0x05C7, prExtend}, // Mn HEBREW POINT QAMATS QATAN
+ {0x0600, 0x0605, prPrepend}, // Cf [6] ARABIC NUMBER SIGN..ARABIC NUMBER MARK ABOVE
+ {0x0610, 0x061A, prExtend}, // Mn [11] ARABIC SIGN SALLALLAHOU ALAYHE WASSALLAM..ARABIC SMALL KASRA
+ {0x061C, 0x061C, prControl}, // Cf ARABIC LETTER MARK
+ {0x064B, 0x065F, prExtend}, // Mn [21] ARABIC FATHATAN..ARABIC WAVY HAMZA BELOW
+ {0x0670, 0x0670, prExtend}, // Mn ARABIC LETTER SUPERSCRIPT ALEF
+ {0x06D6, 0x06DC, prExtend}, // Mn [7] ARABIC SMALL HIGH LIGATURE SAD WITH LAM WITH ALEF MAKSURA..ARABIC SMALL HIGH SEEN
+ {0x06DD, 0x06DD, prPrepend}, // Cf ARABIC END OF AYAH
+ {0x06DF, 0x06E4, prExtend}, // Mn [6] ARABIC SMALL HIGH ROUNDED ZERO..ARABIC SMALL HIGH MADDA
+ {0x06E7, 0x06E8, prExtend}, // Mn [2] ARABIC SMALL HIGH YEH..ARABIC SMALL HIGH NOON
+ {0x06EA, 0x06ED, prExtend}, // Mn [4] ARABIC EMPTY CENTRE LOW STOP..ARABIC SMALL LOW MEEM
+ {0x070F, 0x070F, prPrepend}, // Cf SYRIAC ABBREVIATION MARK
+ {0x0711, 0x0711, prExtend}, // Mn SYRIAC LETTER SUPERSCRIPT ALAPH
+ {0x0730, 0x074A, prExtend}, // Mn [27] SYRIAC PTHAHA ABOVE..SYRIAC BARREKH
+ {0x07A6, 0x07B0, prExtend}, // Mn [11] THAANA ABAFILI..THAANA SUKUN
+ {0x07EB, 0x07F3, prExtend}, // Mn [9] NKO COMBINING SHORT HIGH TONE..NKO COMBINING DOUBLE DOT ABOVE
+ {0x07FD, 0x07FD, prExtend}, // Mn NKO DANTAYALAN
+ {0x0816, 0x0819, prExtend}, // Mn [4] SAMARITAN MARK IN..SAMARITAN MARK DAGESH
+ {0x081B, 0x0823, prExtend}, // Mn [9] SAMARITAN MARK EPENTHETIC YUT..SAMARITAN VOWEL SIGN A
+ {0x0825, 0x0827, prExtend}, // Mn [3] SAMARITAN VOWEL SIGN SHORT A..SAMARITAN VOWEL SIGN U
+ {0x0829, 0x082D, prExtend}, // Mn [5] SAMARITAN VOWEL SIGN LONG I..SAMARITAN MARK NEQUDAA
+ {0x0859, 0x085B, prExtend}, // Mn [3] MANDAIC AFFRICATION MARK..MANDAIC GEMINATION MARK
+ {0x0890, 0x0891, prPrepend}, // Cf [2] ARABIC POUND MARK ABOVE..ARABIC PIASTRE MARK ABOVE
+ {0x0898, 0x089F, prExtend}, // Mn [8] ARABIC SMALL HIGH WORD AL-JUZ..ARABIC HALF MADDA OVER MADDA
+ {0x08CA, 0x08E1, prExtend}, // Mn [24] ARABIC SMALL HIGH FARSI YEH..ARABIC SMALL HIGH SIGN SAFHA
+ {0x08E2, 0x08E2, prPrepend}, // Cf ARABIC DISPUTED END OF AYAH
+ {0x08E3, 0x0902, prExtend}, // Mn [32] ARABIC TURNED DAMMA BELOW..DEVANAGARI SIGN ANUSVARA
+ {0x0903, 0x0903, prSpacingMark}, // Mc DEVANAGARI SIGN VISARGA
+ {0x093A, 0x093A, prExtend}, // Mn DEVANAGARI VOWEL SIGN OE
+ {0x093B, 0x093B, prSpacingMark}, // Mc DEVANAGARI VOWEL SIGN OOE
+ {0x093C, 0x093C, prExtend}, // Mn DEVANAGARI SIGN NUKTA
+ {0x093E, 0x0940, prSpacingMark}, // Mc [3] DEVANAGARI VOWEL SIGN AA..DEVANAGARI VOWEL SIGN II
+ {0x0941, 0x0948, prExtend}, // Mn [8] DEVANAGARI VOWEL SIGN U..DEVANAGARI VOWEL SIGN AI
+ {0x0949, 0x094C, prSpacingMark}, // Mc [4] DEVANAGARI VOWEL SIGN CANDRA O..DEVANAGARI VOWEL SIGN AU
+ {0x094D, 0x094D, prExtend}, // Mn DEVANAGARI SIGN VIRAMA
+ {0x094E, 0x094F, prSpacingMark}, // Mc [2] DEVANAGARI VOWEL SIGN PRISHTHAMATRA E..DEVANAGARI VOWEL SIGN AW
+ {0x0951, 0x0957, prExtend}, // Mn [7] DEVANAGARI STRESS SIGN UDATTA..DEVANAGARI VOWEL SIGN UUE
+ {0x0962, 0x0963, prExtend}, // Mn [2] DEVANAGARI VOWEL SIGN VOCALIC L..DEVANAGARI VOWEL SIGN VOCALIC LL
+ {0x0981, 0x0981, prExtend}, // Mn BENGALI SIGN CANDRABINDU
+ {0x0982, 0x0983, prSpacingMark}, // Mc [2] BENGALI SIGN ANUSVARA..BENGALI SIGN VISARGA
+ {0x09BC, 0x09BC, prExtend}, // Mn BENGALI SIGN NUKTA
+ {0x09BE, 0x09BE, prExtend}, // Mc BENGALI VOWEL SIGN AA
+ {0x09BF, 0x09C0, prSpacingMark}, // Mc [2] BENGALI VOWEL SIGN I..BENGALI VOWEL SIGN II
+ {0x09C1, 0x09C4, prExtend}, // Mn [4] BENGALI VOWEL SIGN U..BENGALI VOWEL SIGN VOCALIC RR
+ {0x09C7, 0x09C8, prSpacingMark}, // Mc [2] BENGALI VOWEL SIGN E..BENGALI VOWEL SIGN AI
+ {0x09CB, 0x09CC, prSpacingMark}, // Mc [2] BENGALI VOWEL SIGN O..BENGALI VOWEL SIGN AU
+ {0x09CD, 0x09CD, prExtend}, // Mn BENGALI SIGN VIRAMA
+ {0x09D7, 0x09D7, prExtend}, // Mc BENGALI AU LENGTH MARK
+ {0x09E2, 0x09E3, prExtend}, // Mn [2] BENGALI VOWEL SIGN VOCALIC L..BENGALI VOWEL SIGN VOCALIC LL
+ {0x09FE, 0x09FE, prExtend}, // Mn BENGALI SANDHI MARK
+ {0x0A01, 0x0A02, prExtend}, // Mn [2] GURMUKHI SIGN ADAK BINDI..GURMUKHI SIGN BINDI
+ {0x0A03, 0x0A03, prSpacingMark}, // Mc GURMUKHI SIGN VISARGA
+ {0x0A3C, 0x0A3C, prExtend}, // Mn GURMUKHI SIGN NUKTA
+ {0x0A3E, 0x0A40, prSpacingMark}, // Mc [3] GURMUKHI VOWEL SIGN AA..GURMUKHI VOWEL SIGN II
+ {0x0A41, 0x0A42, prExtend}, // Mn [2] GURMUKHI VOWEL SIGN U..GURMUKHI VOWEL SIGN UU
+ {0x0A47, 0x0A48, prExtend}, // Mn [2] GURMUKHI VOWEL SIGN EE..GURMUKHI VOWEL SIGN AI
+ {0x0A4B, 0x0A4D, prExtend}, // Mn [3] GURMUKHI VOWEL SIGN OO..GURMUKHI SIGN VIRAMA
+ {0x0A51, 0x0A51, prExtend}, // Mn GURMUKHI SIGN UDAAT
+ {0x0A70, 0x0A71, prExtend}, // Mn [2] GURMUKHI TIPPI..GURMUKHI ADDAK
+ {0x0A75, 0x0A75, prExtend}, // Mn GURMUKHI SIGN YAKASH
+ {0x0A81, 0x0A82, prExtend}, // Mn [2] GUJARATI SIGN CANDRABINDU..GUJARATI SIGN ANUSVARA
+ {0x0A83, 0x0A83, prSpacingMark}, // Mc GUJARATI SIGN VISARGA
+ {0x0ABC, 0x0ABC, prExtend}, // Mn GUJARATI SIGN NUKTA
+ {0x0ABE, 0x0AC0, prSpacingMark}, // Mc [3] GUJARATI VOWEL SIGN AA..GUJARATI VOWEL SIGN II
+ {0x0AC1, 0x0AC5, prExtend}, // Mn [5] GUJARATI VOWEL SIGN U..GUJARATI VOWEL SIGN CANDRA E
+ {0x0AC7, 0x0AC8, prExtend}, // Mn [2] GUJARATI VOWEL SIGN E..GUJARATI VOWEL SIGN AI
+ {0x0AC9, 0x0AC9, prSpacingMark}, // Mc GUJARATI VOWEL SIGN CANDRA O
+ {0x0ACB, 0x0ACC, prSpacingMark}, // Mc [2] GUJARATI VOWEL SIGN O..GUJARATI VOWEL SIGN AU
+ {0x0ACD, 0x0ACD, prExtend}, // Mn GUJARATI SIGN VIRAMA
+ {0x0AE2, 0x0AE3, prExtend}, // Mn [2] GUJARATI VOWEL SIGN VOCALIC L..GUJARATI VOWEL SIGN VOCALIC LL
+ {0x0AFA, 0x0AFF, prExtend}, // Mn [6] GUJARATI SIGN SUKUN..GUJARATI SIGN TWO-CIRCLE NUKTA ABOVE
+ {0x0B01, 0x0B01, prExtend}, // Mn ORIYA SIGN CANDRABINDU
+ {0x0B02, 0x0B03, prSpacingMark}, // Mc [2] ORIYA SIGN ANUSVARA..ORIYA SIGN VISARGA
+ {0x0B3C, 0x0B3C, prExtend}, // Mn ORIYA SIGN NUKTA
+ {0x0B3E, 0x0B3E, prExtend}, // Mc ORIYA VOWEL SIGN AA
+ {0x0B3F, 0x0B3F, prExtend}, // Mn ORIYA VOWEL SIGN I
+ {0x0B40, 0x0B40, prSpacingMark}, // Mc ORIYA VOWEL SIGN II
+ {0x0B41, 0x0B44, prExtend}, // Mn [4] ORIYA VOWEL SIGN U..ORIYA VOWEL SIGN VOCALIC RR
+ {0x0B47, 0x0B48, prSpacingMark}, // Mc [2] ORIYA VOWEL SIGN E..ORIYA VOWEL SIGN AI
+ {0x0B4B, 0x0B4C, prSpacingMark}, // Mc [2] ORIYA VOWEL SIGN O..ORIYA VOWEL SIGN AU
+ {0x0B4D, 0x0B4D, prExtend}, // Mn ORIYA SIGN VIRAMA
+ {0x0B55, 0x0B56, prExtend}, // Mn [2] ORIYA SIGN OVERLINE..ORIYA AI LENGTH MARK
+ {0x0B57, 0x0B57, prExtend}, // Mc ORIYA AU LENGTH MARK
+ {0x0B62, 0x0B63, prExtend}, // Mn [2] ORIYA VOWEL SIGN VOCALIC L..ORIYA VOWEL SIGN VOCALIC LL
+ {0x0B82, 0x0B82, prExtend}, // Mn TAMIL SIGN ANUSVARA
+ {0x0BBE, 0x0BBE, prExtend}, // Mc TAMIL VOWEL SIGN AA
+ {0x0BBF, 0x0BBF, prSpacingMark}, // Mc TAMIL VOWEL SIGN I
+ {0x0BC0, 0x0BC0, prExtend}, // Mn TAMIL VOWEL SIGN II
+ {0x0BC1, 0x0BC2, prSpacingMark}, // Mc [2] TAMIL VOWEL SIGN U..TAMIL VOWEL SIGN UU
+ {0x0BC6, 0x0BC8, prSpacingMark}, // Mc [3] TAMIL VOWEL SIGN E..TAMIL VOWEL SIGN AI
+ {0x0BCA, 0x0BCC, prSpacingMark}, // Mc [3] TAMIL VOWEL SIGN O..TAMIL VOWEL SIGN AU
+ {0x0BCD, 0x0BCD, prExtend}, // Mn TAMIL SIGN VIRAMA
+ {0x0BD7, 0x0BD7, prExtend}, // Mc TAMIL AU LENGTH MARK
+ {0x0C00, 0x0C00, prExtend}, // Mn TELUGU SIGN COMBINING CANDRABINDU ABOVE
+ {0x0C01, 0x0C03, prSpacingMark}, // Mc [3] TELUGU SIGN CANDRABINDU..TELUGU SIGN VISARGA
+ {0x0C04, 0x0C04, prExtend}, // Mn TELUGU SIGN COMBINING ANUSVARA ABOVE
+ {0x0C3C, 0x0C3C, prExtend}, // Mn TELUGU SIGN NUKTA
+ {0x0C3E, 0x0C40, prExtend}, // Mn [3] TELUGU VOWEL SIGN AA..TELUGU VOWEL SIGN II
+ {0x0C41, 0x0C44, prSpacingMark}, // Mc [4] TELUGU VOWEL SIGN U..TELUGU VOWEL SIGN VOCALIC RR
+ {0x0C46, 0x0C48, prExtend}, // Mn [3] TELUGU VOWEL SIGN E..TELUGU VOWEL SIGN AI
+ {0x0C4A, 0x0C4D, prExtend}, // Mn [4] TELUGU VOWEL SIGN O..TELUGU SIGN VIRAMA
+ {0x0C55, 0x0C56, prExtend}, // Mn [2] TELUGU LENGTH MARK..TELUGU AI LENGTH MARK
+ {0x0C62, 0x0C63, prExtend}, // Mn [2] TELUGU VOWEL SIGN VOCALIC L..TELUGU VOWEL SIGN VOCALIC LL
+ {0x0C81, 0x0C81, prExtend}, // Mn KANNADA SIGN CANDRABINDU
+ {0x0C82, 0x0C83, prSpacingMark}, // Mc [2] KANNADA SIGN ANUSVARA..KANNADA SIGN VISARGA
+ {0x0CBC, 0x0CBC, prExtend}, // Mn KANNADA SIGN NUKTA
+ {0x0CBE, 0x0CBE, prSpacingMark}, // Mc KANNADA VOWEL SIGN AA
+ {0x0CBF, 0x0CBF, prExtend}, // Mn KANNADA VOWEL SIGN I
+ {0x0CC0, 0x0CC1, prSpacingMark}, // Mc [2] KANNADA VOWEL SIGN II..KANNADA VOWEL SIGN U
+ {0x0CC2, 0x0CC2, prExtend}, // Mc KANNADA VOWEL SIGN UU
+ {0x0CC3, 0x0CC4, prSpacingMark}, // Mc [2] KANNADA VOWEL SIGN VOCALIC R..KANNADA VOWEL SIGN VOCALIC RR
+ {0x0CC6, 0x0CC6, prExtend}, // Mn KANNADA VOWEL SIGN E
+ {0x0CC7, 0x0CC8, prSpacingMark}, // Mc [2] KANNADA VOWEL SIGN EE..KANNADA VOWEL SIGN AI
+ {0x0CCA, 0x0CCB, prSpacingMark}, // Mc [2] KANNADA VOWEL SIGN O..KANNADA VOWEL SIGN OO
+ {0x0CCC, 0x0CCD, prExtend}, // Mn [2] KANNADA VOWEL SIGN AU..KANNADA SIGN VIRAMA
+ {0x0CD5, 0x0CD6, prExtend}, // Mc [2] KANNADA LENGTH MARK..KANNADA AI LENGTH MARK
+ {0x0CE2, 0x0CE3, prExtend}, // Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL
+ {0x0CF3, 0x0CF3, prSpacingMark}, // Mc KANNADA SIGN COMBINING ANUSVARA ABOVE RIGHT
+ {0x0D00, 0x0D01, prExtend}, // Mn [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU
+ {0x0D02, 0x0D03, prSpacingMark}, // Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA
+ {0x0D3B, 0x0D3C, prExtend}, // Mn [2] MALAYALAM SIGN VERTICAL BAR VIRAMA..MALAYALAM SIGN CIRCULAR VIRAMA
+ {0x0D3E, 0x0D3E, prExtend}, // Mc MALAYALAM VOWEL SIGN AA
+ {0x0D3F, 0x0D40, prSpacingMark}, // Mc [2] MALAYALAM VOWEL SIGN I..MALAYALAM VOWEL SIGN II
+ {0x0D41, 0x0D44, prExtend}, // Mn [4] MALAYALAM VOWEL SIGN U..MALAYALAM VOWEL SIGN VOCALIC RR
+ {0x0D46, 0x0D48, prSpacingMark}, // Mc [3] MALAYALAM VOWEL SIGN E..MALAYALAM VOWEL SIGN AI
+ {0x0D4A, 0x0D4C, prSpacingMark}, // Mc [3] MALAYALAM VOWEL SIGN O..MALAYALAM VOWEL SIGN AU
+ {0x0D4D, 0x0D4D, prExtend}, // Mn MALAYALAM SIGN VIRAMA
+ {0x0D4E, 0x0D4E, prPrepend}, // Lo MALAYALAM LETTER DOT REPH
+ {0x0D57, 0x0D57, prExtend}, // Mc MALAYALAM AU LENGTH MARK
+ {0x0D62, 0x0D63, prExtend}, // Mn [2] MALAYALAM VOWEL SIGN VOCALIC L..MALAYALAM VOWEL SIGN VOCALIC LL
+ {0x0D81, 0x0D81, prExtend}, // Mn SINHALA SIGN CANDRABINDU
+ {0x0D82, 0x0D83, prSpacingMark}, // Mc [2] SINHALA SIGN ANUSVARAYA..SINHALA SIGN VISARGAYA
+ {0x0DCA, 0x0DCA, prExtend}, // Mn SINHALA SIGN AL-LAKUNA
+ {0x0DCF, 0x0DCF, prExtend}, // Mc SINHALA VOWEL SIGN AELA-PILLA
+ {0x0DD0, 0x0DD1, prSpacingMark}, // Mc [2] SINHALA VOWEL SIGN KETTI AEDA-PILLA..SINHALA VOWEL SIGN DIGA AEDA-PILLA
+ {0x0DD2, 0x0DD4, prExtend}, // Mn [3] SINHALA VOWEL SIGN KETTI IS-PILLA..SINHALA VOWEL SIGN KETTI PAA-PILLA
+ {0x0DD6, 0x0DD6, prExtend}, // Mn SINHALA VOWEL SIGN DIGA PAA-PILLA
+ {0x0DD8, 0x0DDE, prSpacingMark}, // Mc [7] SINHALA VOWEL SIGN GAETTA-PILLA..SINHALA VOWEL SIGN KOMBUVA HAA GAYANUKITTA
+ {0x0DDF, 0x0DDF, prExtend}, // Mc SINHALA VOWEL SIGN GAYANUKITTA
+ {0x0DF2, 0x0DF3, prSpacingMark}, // Mc [2] SINHALA VOWEL SIGN DIGA GAETTA-PILLA..SINHALA VOWEL SIGN DIGA GAYANUKITTA
+ {0x0E31, 0x0E31, prExtend}, // Mn THAI CHARACTER MAI HAN-AKAT
+ {0x0E33, 0x0E33, prSpacingMark}, // Lo THAI CHARACTER SARA AM
+ {0x0E34, 0x0E3A, prExtend}, // Mn [7] THAI CHARACTER SARA I..THAI CHARACTER PHINTHU
+ {0x0E47, 0x0E4E, prExtend}, // Mn [8] THAI CHARACTER MAITAIKHU..THAI CHARACTER YAMAKKAN
+ {0x0EB1, 0x0EB1, prExtend}, // Mn LAO VOWEL SIGN MAI KAN
+ {0x0EB3, 0x0EB3, prSpacingMark}, // Lo LAO VOWEL SIGN AM
+ {0x0EB4, 0x0EBC, prExtend}, // Mn [9] LAO VOWEL SIGN I..LAO SEMIVOWEL SIGN LO
+ {0x0EC8, 0x0ECE, prExtend}, // Mn [7] LAO TONE MAI EK..LAO YAMAKKAN
+ {0x0F18, 0x0F19, prExtend}, // Mn [2] TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS
+ {0x0F35, 0x0F35, prExtend}, // Mn TIBETAN MARK NGAS BZUNG NYI ZLA
+ {0x0F37, 0x0F37, prExtend}, // Mn TIBETAN MARK NGAS BZUNG SGOR RTAGS
+ {0x0F39, 0x0F39, prExtend}, // Mn TIBETAN MARK TSA -PHRU
+ {0x0F3E, 0x0F3F, prSpacingMark}, // Mc [2] TIBETAN SIGN YAR TSHES..TIBETAN SIGN MAR TSHES
+ {0x0F71, 0x0F7E, prExtend}, // Mn [14] TIBETAN VOWEL SIGN AA..TIBETAN SIGN RJES SU NGA RO
+ {0x0F7F, 0x0F7F, prSpacingMark}, // Mc TIBETAN SIGN RNAM BCAD
+ {0x0F80, 0x0F84, prExtend}, // Mn [5] TIBETAN VOWEL SIGN REVERSED I..TIBETAN MARK HALANTA
+ {0x0F86, 0x0F87, prExtend}, // Mn [2] TIBETAN SIGN LCI RTAGS..TIBETAN SIGN YANG RTAGS
+ {0x0F8D, 0x0F97, prExtend}, // Mn [11] TIBETAN SUBJOINED SIGN LCE TSA CAN..TIBETAN SUBJOINED LETTER JA
+ {0x0F99, 0x0FBC, prExtend}, // Mn [36] TIBETAN SUBJOINED LETTER NYA..TIBETAN SUBJOINED LETTER FIXED-FORM RA
+ {0x0FC6, 0x0FC6, prExtend}, // Mn TIBETAN SYMBOL PADMA GDAN
+ {0x102D, 0x1030, prExtend}, // Mn [4] MYANMAR VOWEL SIGN I..MYANMAR VOWEL SIGN UU
+ {0x1031, 0x1031, prSpacingMark}, // Mc MYANMAR VOWEL SIGN E
+ {0x1032, 0x1037, prExtend}, // Mn [6] MYANMAR VOWEL SIGN AI..MYANMAR SIGN DOT BELOW
+ {0x1039, 0x103A, prExtend}, // Mn [2] MYANMAR SIGN VIRAMA..MYANMAR SIGN ASAT
+ {0x103B, 0x103C, prSpacingMark}, // Mc [2] MYANMAR CONSONANT SIGN MEDIAL YA..MYANMAR CONSONANT SIGN MEDIAL RA
+ {0x103D, 0x103E, prExtend}, // Mn [2] MYANMAR CONSONANT SIGN MEDIAL WA..MYANMAR CONSONANT SIGN MEDIAL HA
+ {0x1056, 0x1057, prSpacingMark}, // Mc [2] MYANMAR VOWEL SIGN VOCALIC R..MYANMAR VOWEL SIGN VOCALIC RR
+ {0x1058, 0x1059, prExtend}, // Mn [2] MYANMAR VOWEL SIGN VOCALIC L..MYANMAR VOWEL SIGN VOCALIC LL
+ {0x105E, 0x1060, prExtend}, // Mn [3] MYANMAR CONSONANT SIGN MON MEDIAL NA..MYANMAR CONSONANT SIGN MON MEDIAL LA
+ {0x1071, 0x1074, prExtend}, // Mn [4] MYANMAR VOWEL SIGN GEBA KAREN I..MYANMAR VOWEL SIGN KAYAH EE
+ {0x1082, 0x1082, prExtend}, // Mn MYANMAR CONSONANT SIGN SHAN MEDIAL WA
+ {0x1084, 0x1084, prSpacingMark}, // Mc MYANMAR VOWEL SIGN SHAN E
+ {0x1085, 0x1086, prExtend}, // Mn [2] MYANMAR VOWEL SIGN SHAN E ABOVE..MYANMAR VOWEL SIGN SHAN FINAL Y
+ {0x108D, 0x108D, prExtend}, // Mn MYANMAR SIGN SHAN COUNCIL EMPHATIC TONE
+ {0x109D, 0x109D, prExtend}, // Mn MYANMAR VOWEL SIGN AITON AI
+ {0x1100, 0x115F, prL}, // Lo [96] HANGUL CHOSEONG KIYEOK..HANGUL CHOSEONG FILLER
+ {0x1160, 0x11A7, prV}, // Lo [72] HANGUL JUNGSEONG FILLER..HANGUL JUNGSEONG O-YAE
+ {0x11A8, 0x11FF, prT}, // Lo [88] HANGUL JONGSEONG KIYEOK..HANGUL JONGSEONG SSANGNIEUN
+ {0x135D, 0x135F, prExtend}, // Mn [3] ETHIOPIC COMBINING GEMINATION AND VOWEL LENGTH MARK..ETHIOPIC COMBINING GEMINATION MARK
+ {0x1712, 0x1714, prExtend}, // Mn [3] TAGALOG VOWEL SIGN I..TAGALOG SIGN VIRAMA
+ {0x1715, 0x1715, prSpacingMark}, // Mc TAGALOG SIGN PAMUDPOD
+ {0x1732, 0x1733, prExtend}, // Mn [2] HANUNOO VOWEL SIGN I..HANUNOO VOWEL SIGN U
+ {0x1734, 0x1734, prSpacingMark}, // Mc HANUNOO SIGN PAMUDPOD
+ {0x1752, 0x1753, prExtend}, // Mn [2] BUHID VOWEL SIGN I..BUHID VOWEL SIGN U
+ {0x1772, 0x1773, prExtend}, // Mn [2] TAGBANWA VOWEL SIGN I..TAGBANWA VOWEL SIGN U
+ {0x17B4, 0x17B5, prExtend}, // Mn [2] KHMER VOWEL INHERENT AQ..KHMER VOWEL INHERENT AA
+ {0x17B6, 0x17B6, prSpacingMark}, // Mc KHMER VOWEL SIGN AA
+ {0x17B7, 0x17BD, prExtend}, // Mn [7] KHMER VOWEL SIGN I..KHMER VOWEL SIGN UA
+ {0x17BE, 0x17C5, prSpacingMark}, // Mc [8] KHMER VOWEL SIGN OE..KHMER VOWEL SIGN AU
+ {0x17C6, 0x17C6, prExtend}, // Mn KHMER SIGN NIKAHIT
+ {0x17C7, 0x17C8, prSpacingMark}, // Mc [2] KHMER SIGN REAHMUK..KHMER SIGN YUUKALEAPINTU
+ {0x17C9, 0x17D3, prExtend}, // Mn [11] KHMER SIGN MUUSIKATOAN..KHMER SIGN BATHAMASAT
+ {0x17DD, 0x17DD, prExtend}, // Mn KHMER SIGN ATTHACAN
+ {0x180B, 0x180D, prExtend}, // Mn [3] MONGOLIAN FREE VARIATION SELECTOR ONE..MONGOLIAN FREE VARIATION SELECTOR THREE
+ {0x180E, 0x180E, prControl}, // Cf MONGOLIAN VOWEL SEPARATOR
+ {0x180F, 0x180F, prExtend}, // Mn MONGOLIAN FREE VARIATION SELECTOR FOUR
+ {0x1885, 0x1886, prExtend}, // Mn [2] MONGOLIAN LETTER ALI GALI BALUDA..MONGOLIAN LETTER ALI GALI THREE BALUDA
+ {0x18A9, 0x18A9, prExtend}, // Mn MONGOLIAN LETTER ALI GALI DAGALGA
+ {0x1920, 0x1922, prExtend}, // Mn [3] LIMBU VOWEL SIGN A..LIMBU VOWEL SIGN U
+ {0x1923, 0x1926, prSpacingMark}, // Mc [4] LIMBU VOWEL SIGN EE..LIMBU VOWEL SIGN AU
+ {0x1927, 0x1928, prExtend}, // Mn [2] LIMBU VOWEL SIGN E..LIMBU VOWEL SIGN O
+ {0x1929, 0x192B, prSpacingMark}, // Mc [3] LIMBU SUBJOINED LETTER YA..LIMBU SUBJOINED LETTER WA
+ {0x1930, 0x1931, prSpacingMark}, // Mc [2] LIMBU SMALL LETTER KA..LIMBU SMALL LETTER NGA
+ {0x1932, 0x1932, prExtend}, // Mn LIMBU SMALL LETTER ANUSVARA
+ {0x1933, 0x1938, prSpacingMark}, // Mc [6] LIMBU SMALL LETTER TA..LIMBU SMALL LETTER LA
+ {0x1939, 0x193B, prExtend}, // Mn [3] LIMBU SIGN MUKPHRENG..LIMBU SIGN SA-I
+ {0x1A17, 0x1A18, prExtend}, // Mn [2] BUGINESE VOWEL SIGN I..BUGINESE VOWEL SIGN U
+ {0x1A19, 0x1A1A, prSpacingMark}, // Mc [2] BUGINESE VOWEL SIGN E..BUGINESE VOWEL SIGN O
+ {0x1A1B, 0x1A1B, prExtend}, // Mn BUGINESE VOWEL SIGN AE
+ {0x1A55, 0x1A55, prSpacingMark}, // Mc TAI THAM CONSONANT SIGN MEDIAL RA
+ {0x1A56, 0x1A56, prExtend}, // Mn TAI THAM CONSONANT SIGN MEDIAL LA
+ {0x1A57, 0x1A57, prSpacingMark}, // Mc TAI THAM CONSONANT SIGN LA TANG LAI
+ {0x1A58, 0x1A5E, prExtend}, // Mn [7] TAI THAM SIGN MAI KANG LAI..TAI THAM CONSONANT SIGN SA
+ {0x1A60, 0x1A60, prExtend}, // Mn TAI THAM SIGN SAKOT
+ {0x1A62, 0x1A62, prExtend}, // Mn TAI THAM VOWEL SIGN MAI SAT
+ {0x1A65, 0x1A6C, prExtend}, // Mn [8] TAI THAM VOWEL SIGN I..TAI THAM VOWEL SIGN OA BELOW
+ {0x1A6D, 0x1A72, prSpacingMark}, // Mc [6] TAI THAM VOWEL SIGN OY..TAI THAM VOWEL SIGN THAM AI
+ {0x1A73, 0x1A7C, prExtend}, // Mn [10] TAI THAM VOWEL SIGN OA ABOVE..TAI THAM SIGN KHUEN-LUE KARAN
+ {0x1A7F, 0x1A7F, prExtend}, // Mn TAI THAM COMBINING CRYPTOGRAMMIC DOT
+ {0x1AB0, 0x1ABD, prExtend}, // Mn [14] COMBINING DOUBLED CIRCUMFLEX ACCENT..COMBINING PARENTHESES BELOW
+ {0x1ABE, 0x1ABE, prExtend}, // Me COMBINING PARENTHESES OVERLAY
+ {0x1ABF, 0x1ACE, prExtend}, // Mn [16] COMBINING LATIN SMALL LETTER W BELOW..COMBINING LATIN SMALL LETTER INSULAR T
+ {0x1B00, 0x1B03, prExtend}, // Mn [4] BALINESE SIGN ULU RICEM..BALINESE SIGN SURANG
+ {0x1B04, 0x1B04, prSpacingMark}, // Mc BALINESE SIGN BISAH
+ {0x1B34, 0x1B34, prExtend}, // Mn BALINESE SIGN REREKAN
+ {0x1B35, 0x1B35, prExtend}, // Mc BALINESE VOWEL SIGN TEDUNG
+ {0x1B36, 0x1B3A, prExtend}, // Mn [5] BALINESE VOWEL SIGN ULU..BALINESE VOWEL SIGN RA REPA
+ {0x1B3B, 0x1B3B, prSpacingMark}, // Mc BALINESE VOWEL SIGN RA REPA TEDUNG
+ {0x1B3C, 0x1B3C, prExtend}, // Mn BALINESE VOWEL SIGN LA LENGA
+ {0x1B3D, 0x1B41, prSpacingMark}, // Mc [5] BALINESE VOWEL SIGN LA LENGA TEDUNG..BALINESE VOWEL SIGN TALING REPA TEDUNG
+ {0x1B42, 0x1B42, prExtend}, // Mn BALINESE VOWEL SIGN PEPET
+ {0x1B43, 0x1B44, prSpacingMark}, // Mc [2] BALINESE VOWEL SIGN PEPET TEDUNG..BALINESE ADEG ADEG
+ {0x1B6B, 0x1B73, prExtend}, // Mn [9] BALINESE MUSICAL SYMBOL COMBINING TEGEH..BALINESE MUSICAL SYMBOL COMBINING GONG
+ {0x1B80, 0x1B81, prExtend}, // Mn [2] SUNDANESE SIGN PANYECEK..SUNDANESE SIGN PANGLAYAR
+ {0x1B82, 0x1B82, prSpacingMark}, // Mc SUNDANESE SIGN PANGWISAD
+ {0x1BA1, 0x1BA1, prSpacingMark}, // Mc SUNDANESE CONSONANT SIGN PAMINGKAL
+ {0x1BA2, 0x1BA5, prExtend}, // Mn [4] SUNDANESE CONSONANT SIGN PANYAKRA..SUNDANESE VOWEL SIGN PANYUKU
+ {0x1BA6, 0x1BA7, prSpacingMark}, // Mc [2] SUNDANESE VOWEL SIGN PANAELAENG..SUNDANESE VOWEL SIGN PANOLONG
+ {0x1BA8, 0x1BA9, prExtend}, // Mn [2] SUNDANESE VOWEL SIGN PAMEPET..SUNDANESE VOWEL SIGN PANEULEUNG
+ {0x1BAA, 0x1BAA, prSpacingMark}, // Mc SUNDANESE SIGN PAMAAEH
+ {0x1BAB, 0x1BAD, prExtend}, // Mn [3] SUNDANESE SIGN VIRAMA..SUNDANESE CONSONANT SIGN PASANGAN WA
+ {0x1BE6, 0x1BE6, prExtend}, // Mn BATAK SIGN TOMPI
+ {0x1BE7, 0x1BE7, prSpacingMark}, // Mc BATAK VOWEL SIGN E
+ {0x1BE8, 0x1BE9, prExtend}, // Mn [2] BATAK VOWEL SIGN PAKPAK E..BATAK VOWEL SIGN EE
+ {0x1BEA, 0x1BEC, prSpacingMark}, // Mc [3] BATAK VOWEL SIGN I..BATAK VOWEL SIGN O
+ {0x1BED, 0x1BED, prExtend}, // Mn BATAK VOWEL SIGN KARO O
+ {0x1BEE, 0x1BEE, prSpacingMark}, // Mc BATAK VOWEL SIGN U
+ {0x1BEF, 0x1BF1, prExtend}, // Mn [3] BATAK VOWEL SIGN U FOR SIMALUNGUN SA..BATAK CONSONANT SIGN H
+ {0x1BF2, 0x1BF3, prSpacingMark}, // Mc [2] BATAK PANGOLAT..BATAK PANONGONAN
+ {0x1C24, 0x1C2B, prSpacingMark}, // Mc [8] LEPCHA SUBJOINED LETTER YA..LEPCHA VOWEL SIGN UU
+ {0x1C2C, 0x1C33, prExtend}, // Mn [8] LEPCHA VOWEL SIGN E..LEPCHA CONSONANT SIGN T
+ {0x1C34, 0x1C35, prSpacingMark}, // Mc [2] LEPCHA CONSONANT SIGN NYIN-DO..LEPCHA CONSONANT SIGN KANG
+ {0x1C36, 0x1C37, prExtend}, // Mn [2] LEPCHA SIGN RAN..LEPCHA SIGN NUKTA
+ {0x1CD0, 0x1CD2, prExtend}, // Mn [3] VEDIC TONE KARSHANA..VEDIC TONE PRENKHA
+ {0x1CD4, 0x1CE0, prExtend}, // Mn [13] VEDIC SIGN YAJURVEDIC MIDLINE SVARITA..VEDIC TONE RIGVEDIC KASHMIRI INDEPENDENT SVARITA
+ {0x1CE1, 0x1CE1, prSpacingMark}, // Mc VEDIC TONE ATHARVAVEDIC INDEPENDENT SVARITA
+ {0x1CE2, 0x1CE8, prExtend}, // Mn [7] VEDIC SIGN VISARGA SVARITA..VEDIC SIGN VISARGA ANUDATTA WITH TAIL
+ {0x1CED, 0x1CED, prExtend}, // Mn VEDIC SIGN TIRYAK
+ {0x1CF4, 0x1CF4, prExtend}, // Mn VEDIC TONE CANDRA ABOVE
+ {0x1CF7, 0x1CF7, prSpacingMark}, // Mc VEDIC SIGN ATIKRAMA
+ {0x1CF8, 0x1CF9, prExtend}, // Mn [2] VEDIC TONE RING ABOVE..VEDIC TONE DOUBLE RING ABOVE
+ {0x1DC0, 0x1DFF, prExtend}, // Mn [64] COMBINING DOTTED GRAVE ACCENT..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW
+ {0x200B, 0x200B, prControl}, // Cf ZERO WIDTH SPACE
+ {0x200C, 0x200C, prExtend}, // Cf ZERO WIDTH NON-JOINER
+ {0x200D, 0x200D, prZWJ}, // Cf ZERO WIDTH JOINER
+ {0x200E, 0x200F, prControl}, // Cf [2] LEFT-TO-RIGHT MARK..RIGHT-TO-LEFT MARK
+ {0x2028, 0x2028, prControl}, // Zl LINE SEPARATOR
+ {0x2029, 0x2029, prControl}, // Zp PARAGRAPH SEPARATOR
+ {0x202A, 0x202E, prControl}, // Cf [5] LEFT-TO-RIGHT EMBEDDING..RIGHT-TO-LEFT OVERRIDE
+ {0x203C, 0x203C, prExtendedPictographic}, // E0.6 [1] (‼️) double exclamation mark
+ {0x2049, 0x2049, prExtendedPictographic}, // E0.6 [1] (⁉️) exclamation question mark
+ {0x2060, 0x2064, prControl}, // Cf [5] WORD JOINER..INVISIBLE PLUS
+ {0x2065, 0x2065, prControl}, // Cn <reserved-2065>
+ {0x2066, 0x206F, prControl}, // Cf [10] LEFT-TO-RIGHT ISOLATE..NOMINAL DIGIT SHAPES
+ {0x20D0, 0x20DC, prExtend}, // Mn [13] COMBINING LEFT HARPOON ABOVE..COMBINING FOUR DOTS ABOVE
+ {0x20DD, 0x20E0, prExtend}, // Me [4] COMBINING ENCLOSING CIRCLE..COMBINING ENCLOSING CIRCLE BACKSLASH
+ {0x20E1, 0x20E1, prExtend}, // Mn COMBINING LEFT RIGHT ARROW ABOVE
+ {0x20E2, 0x20E4, prExtend}, // Me [3] COMBINING ENCLOSING SCREEN..COMBINING ENCLOSING UPWARD POINTING TRIANGLE
+ {0x20E5, 0x20F0, prExtend}, // Mn [12] COMBINING REVERSE SOLIDUS OVERLAY..COMBINING ASTERISK ABOVE
+ {0x2122, 0x2122, prExtendedPictographic}, // E0.6 [1] (™️) trade mark
+ {0x2139, 0x2139, prExtendedPictographic}, // E0.6 [1] (ℹ️) information
+ {0x2194, 0x2199, prExtendedPictographic}, // E0.6 [6] (↔️..↙️) left-right arrow..down-left arrow
+ {0x21A9, 0x21AA, prExtendedPictographic}, // E0.6 [2] (↩️..↪️) right arrow curving left..left arrow curving right
+ {0x231A, 0x231B, prExtendedPictographic}, // E0.6 [2] (⌚..⌛) watch..hourglass done
+ {0x2328, 0x2328, prExtendedPictographic}, // E1.0 [1] (⌨️) keyboard
+ {0x2388, 0x2388, prExtendedPictographic}, // E0.0 [1] (⎈) HELM SYMBOL
+ {0x23CF, 0x23CF, prExtendedPictographic}, // E1.0 [1] (⏏️) eject button
+ {0x23E9, 0x23EC, prExtendedPictographic}, // E0.6 [4] (⏩..⏬) fast-forward button..fast down button
+ {0x23ED, 0x23EE, prExtendedPictographic}, // E0.7 [2] (⏭️..⏮️) next track button..last track button
+ {0x23EF, 0x23EF, prExtendedPictographic}, // E1.0 [1] (⏯️) play or pause button
+ {0x23F0, 0x23F0, prExtendedPictographic}, // E0.6 [1] (⏰) alarm clock
+ {0x23F1, 0x23F2, prExtendedPictographic}, // E1.0 [2] (⏱️..⏲️) stopwatch..timer clock
+ {0x23F3, 0x23F3, prExtendedPictographic}, // E0.6 [1] (⏳) hourglass not done
+ {0x23F8, 0x23FA, prExtendedPictographic}, // E0.7 [3] (⏸️..⏺️) pause button..record button
+ {0x24C2, 0x24C2, prExtendedPictographic}, // E0.6 [1] (Ⓜ️) circled M
+ {0x25AA, 0x25AB, prExtendedPictographic}, // E0.6 [2] (▪️..▫️) black small square..white small square
+ {0x25B6, 0x25B6, prExtendedPictographic}, // E0.6 [1] (▶️) play button
+ {0x25C0, 0x25C0, prExtendedPictographic}, // E0.6 [1] (◀️) reverse button
+ {0x25FB, 0x25FE, prExtendedPictographic}, // E0.6 [4] (◻️..◾) white medium square..black medium-small square
+ {0x2600, 0x2601, prExtendedPictographic}, // E0.6 [2] (☀️..☁️) sun..cloud
+ {0x2602, 0x2603, prExtendedPictographic}, // E0.7 [2] (☂️..☃️) umbrella..snowman
+ {0x2604, 0x2604, prExtendedPictographic}, // E1.0 [1] (☄️) comet
+ {0x2605, 0x2605, prExtendedPictographic}, // E0.0 [1] (★) BLACK STAR
+ {0x2607, 0x260D, prExtendedPictographic}, // E0.0 [7] (☇..☍) LIGHTNING..OPPOSITION
+ {0x260E, 0x260E, prExtendedPictographic}, // E0.6 [1] (☎️) telephone
+ {0x260F, 0x2610, prExtendedPictographic}, // E0.0 [2] (☏..☐) WHITE TELEPHONE..BALLOT BOX
+ {0x2611, 0x2611, prExtendedPictographic}, // E0.6 [1] (☑️) check box with check
+ {0x2612, 0x2612, prExtendedPictographic}, // E0.0 [1] (☒) BALLOT BOX WITH X
+ {0x2614, 0x2615, prExtendedPictographic}, // E0.6 [2] (☔..☕) umbrella with rain drops..hot beverage
+ {0x2616, 0x2617, prExtendedPictographic}, // E0.0 [2] (☖..☗) WHITE SHOGI PIECE..BLACK SHOGI PIECE
+ {0x2618, 0x2618, prExtendedPictographic}, // E1.0 [1] (☘️) shamrock
+ {0x2619, 0x261C, prExtendedPictographic}, // E0.0 [4] (☙..☜) REVERSED ROTATED FLORAL HEART BULLET..WHITE LEFT POINTING INDEX
+ {0x261D, 0x261D, prExtendedPictographic}, // E0.6 [1] (☝️) index pointing up
+ {0x261E, 0x261F, prExtendedPictographic}, // E0.0 [2] (☞..☟) WHITE RIGHT POINTING INDEX..WHITE DOWN POINTING INDEX
+ {0x2620, 0x2620, prExtendedPictographic}, // E1.0 [1] (☠️) skull and crossbones
+ {0x2621, 0x2621, prExtendedPictographic}, // E0.0 [1] (☡) CAUTION SIGN
+ {0x2622, 0x2623, prExtendedPictographic}, // E1.0 [2] (☢️..☣️) radioactive..biohazard
+ {0x2624, 0x2625, prExtendedPictographic}, // E0.0 [2] (☤..☥) CADUCEUS..ANKH
+ {0x2626, 0x2626, prExtendedPictographic}, // E1.0 [1] (☦️) orthodox cross
+ {0x2627, 0x2629, prExtendedPictographic}, // E0.0 [3] (☧..☩) CHI RHO..CROSS OF JERUSALEM
+ {0x262A, 0x262A, prExtendedPictographic}, // E0.7 [1] (☪️) star and crescent
+ {0x262B, 0x262D, prExtendedPictographic}, // E0.0 [3] (☫..☭) FARSI SYMBOL..HAMMER AND SICKLE
+ {0x262E, 0x262E, prExtendedPictographic}, // E1.0 [1] (☮️) peace symbol
+ {0x262F, 0x262F, prExtendedPictographic}, // E0.7 [1] (☯️) yin yang
+ {0x2630, 0x2637, prExtendedPictographic}, // E0.0 [8] (☰..☷) TRIGRAM FOR HEAVEN..TRIGRAM FOR EARTH
+ {0x2638, 0x2639, prExtendedPictographic}, // E0.7 [2] (☸️..☹️) wheel of dharma..frowning face
+ {0x263A, 0x263A, prExtendedPictographic}, // E0.6 [1] (☺️) smiling face
+ {0x263B, 0x263F, prExtendedPictographic}, // E0.0 [5] (☻..☿) BLACK SMILING FACE..MERCURY
+ {0x2640, 0x2640, prExtendedPictographic}, // E4.0 [1] (♀️) female sign
+ {0x2641, 0x2641, prExtendedPictographic}, // E0.0 [1] (♁) EARTH
+ {0x2642, 0x2642, prExtendedPictographic}, // E4.0 [1] (♂️) male sign
+ {0x2643, 0x2647, prExtendedPictographic}, // E0.0 [5] (♃..♇) JUPITER..PLUTO
+ {0x2648, 0x2653, prExtendedPictographic}, // E0.6 [12] (♈..♓) Aries..Pisces
+ {0x2654, 0x265E, prExtendedPictographic}, // E0.0 [11] (♔..♞) WHITE CHESS KING..BLACK CHESS KNIGHT
+ {0x265F, 0x265F, prExtendedPictographic}, // E11.0 [1] (♟️) chess pawn
+ {0x2660, 0x2660, prExtendedPictographic}, // E0.6 [1] (♠️) spade suit
+ {0x2661, 0x2662, prExtendedPictographic}, // E0.0 [2] (♡..♢) WHITE HEART SUIT..WHITE DIAMOND SUIT
+ {0x2663, 0x2663, prExtendedPictographic}, // E0.6 [1] (♣️) club suit
+ {0x2664, 0x2664, prExtendedPictographic}, // E0.0 [1] (♤) WHITE SPADE SUIT
+ {0x2665, 0x2666, prExtendedPictographic}, // E0.6 [2] (♥️..♦️) heart suit..diamond suit
+ {0x2667, 0x2667, prExtendedPictographic}, // E0.0 [1] (♧) WHITE CLUB SUIT
+ {0x2668, 0x2668, prExtendedPictographic}, // E0.6 [1] (♨️) hot springs
+ {0x2669, 0x267A, prExtendedPictographic}, // E0.0 [18] (♩..♺) QUARTER NOTE..RECYCLING SYMBOL FOR GENERIC MATERIALS
+ {0x267B, 0x267B, prExtendedPictographic}, // E0.6 [1] (♻️) recycling symbol
+ {0x267C, 0x267D, prExtendedPictographic}, // E0.0 [2] (♼..♽) RECYCLED PAPER SYMBOL..PARTIALLY-RECYCLED PAPER SYMBOL
+ {0x267E, 0x267E, prExtendedPictographic}, // E11.0 [1] (♾️) infinity
+ {0x267F, 0x267F, prExtendedPictographic}, // E0.6 [1] (♿) wheelchair symbol
+ {0x2680, 0x2685, prExtendedPictographic}, // E0.0 [6] (⚀..⚅) DIE FACE-1..DIE FACE-6
+ {0x2690, 0x2691, prExtendedPictographic}, // E0.0 [2] (⚐..⚑) WHITE FLAG..BLACK FLAG
+ {0x2692, 0x2692, prExtendedPictographic}, // E1.0 [1] (⚒️) hammer and pick
+ {0x2693, 0x2693, prExtendedPictographic}, // E0.6 [1] (⚓) anchor
+ {0x2694, 0x2694, prExtendedPictographic}, // E1.0 [1] (⚔️) crossed swords
+ {0x2695, 0x2695, prExtendedPictographic}, // E4.0 [1] (⚕️) medical symbol
+ {0x2696, 0x2697, prExtendedPictographic}, // E1.0 [2] (⚖️..⚗️) balance scale..alembic
+ {0x2698, 0x2698, prExtendedPictographic}, // E0.0 [1] (⚘) FLOWER
+ {0x2699, 0x2699, prExtendedPictographic}, // E1.0 [1] (⚙️) gear
+ {0x269A, 0x269A, prExtendedPictographic}, // E0.0 [1] (⚚) STAFF OF HERMES
+ {0x269B, 0x269C, prExtendedPictographic}, // E1.0 [2] (⚛️..⚜️) atom symbol..fleur-de-lis
+ {0x269D, 0x269F, prExtendedPictographic}, // E0.0 [3] (⚝..⚟) OUTLINED WHITE STAR..THREE LINES CONVERGING LEFT
+ {0x26A0, 0x26A1, prExtendedPictographic}, // E0.6 [2] (⚠️..⚡) warning..high voltage
+ {0x26A2, 0x26A6, prExtendedPictographic}, // E0.0 [5] (⚢..⚦) DOUBLED FEMALE SIGN..MALE WITH STROKE SIGN
+ {0x26A7, 0x26A7, prExtendedPictographic}, // E13.0 [1] (⚧️) transgender symbol
+ {0x26A8, 0x26A9, prExtendedPictographic}, // E0.0 [2] (⚨..⚩) VERTICAL MALE WITH STROKE SIGN..HORIZONTAL MALE WITH STROKE SIGN
+ {0x26AA, 0x26AB, prExtendedPictographic}, // E0.6 [2] (⚪..⚫) white circle..black circle
+ {0x26AC, 0x26AF, prExtendedPictographic}, // E0.0 [4] (⚬..⚯) MEDIUM SMALL WHITE CIRCLE..UNMARRIED PARTNERSHIP SYMBOL
+ {0x26B0, 0x26B1, prExtendedPictographic}, // E1.0 [2] (⚰️..⚱️) coffin..funeral urn
+ {0x26B2, 0x26BC, prExtendedPictographic}, // E0.0 [11] (⚲..⚼) NEUTER..SESQUIQUADRATE
+ {0x26BD, 0x26BE, prExtendedPictographic}, // E0.6 [2] (⚽..⚾) soccer ball..baseball
+ {0x26BF, 0x26C3, prExtendedPictographic}, // E0.0 [5] (⚿..⛃) SQUARED KEY..BLACK DRAUGHTS KING
+ {0x26C4, 0x26C5, prExtendedPictographic}, // E0.6 [2] (⛄..⛅) snowman without snow..sun behind cloud
+ {0x26C6, 0x26C7, prExtendedPictographic}, // E0.0 [2] (⛆..⛇) RAIN..BLACK SNOWMAN
+ {0x26C8, 0x26C8, prExtendedPictographic}, // E0.7 [1] (⛈️) cloud with lightning and rain
+ {0x26C9, 0x26CD, prExtendedPictographic}, // E0.0 [5] (⛉..⛍) TURNED WHITE SHOGI PIECE..DISABLED CAR
+ {0x26CE, 0x26CE, prExtendedPictographic}, // E0.6 [1] (⛎) Ophiuchus
+ {0x26CF, 0x26CF, prExtendedPictographic}, // E0.7 [1] (⛏️) pick
+ {0x26D0, 0x26D0, prExtendedPictographic}, // E0.0 [1] (⛐) CAR SLIDING
+ {0x26D1, 0x26D1, prExtendedPictographic}, // E0.7 [1] (⛑️) rescue worker’s helmet
+ {0x26D2, 0x26D2, prExtendedPictographic}, // E0.0 [1] (⛒) CIRCLED CROSSING LANES
+ {0x26D3, 0x26D3, prExtendedPictographic}, // E0.7 [1] (⛓️) chains
+ {0x26D4, 0x26D4, prExtendedPictographic}, // E0.6 [1] (⛔) no entry
+ {0x26D5, 0x26E8, prExtendedPictographic}, // E0.0 [20] (⛕..⛨) ALTERNATE ONE-WAY LEFT WAY TRAFFIC..BLACK CROSS ON SHIELD
+ {0x26E9, 0x26E9, prExtendedPictographic}, // E0.7 [1] (⛩️) shinto shrine
+ {0x26EA, 0x26EA, prExtendedPictographic}, // E0.6 [1] (⛪) church
+ {0x26EB, 0x26EF, prExtendedPictographic}, // E0.0 [5] (⛫..⛯) CASTLE..MAP SYMBOL FOR LIGHTHOUSE
+ {0x26F0, 0x26F1, prExtendedPictographic}, // E0.7 [2] (⛰️..⛱️) mountain..umbrella on ground
+ {0x26F2, 0x26F3, prExtendedPictographic}, // E0.6 [2] (⛲..⛳) fountain..flag in hole
+ {0x26F4, 0x26F4, prExtendedPictographic}, // E0.7 [1] (⛴️) ferry
+ {0x26F5, 0x26F5, prExtendedPictographic}, // E0.6 [1] (⛵) sailboat
+ {0x26F6, 0x26F6, prExtendedPictographic}, // E0.0 [1] (⛶) SQUARE FOUR CORNERS
+ {0x26F7, 0x26F9, prExtendedPictographic}, // E0.7 [3] (⛷️..⛹️) skier..person bouncing ball
+ {0x26FA, 0x26FA, prExtendedPictographic}, // E0.6 [1] (⛺) tent
+ {0x26FB, 0x26FC, prExtendedPictographic}, // E0.0 [2] (⛻..⛼) JAPANESE BANK SYMBOL..HEADSTONE GRAVEYARD SYMBOL
+ {0x26FD, 0x26FD, prExtendedPictographic}, // E0.6 [1] (⛽) fuel pump
+ {0x26FE, 0x2701, prExtendedPictographic}, // E0.0 [4] (⛾..✁) CUP ON BLACK SQUARE..UPPER BLADE SCISSORS
+ {0x2702, 0x2702, prExtendedPictographic}, // E0.6 [1] (✂️) scissors
+ {0x2703, 0x2704, prExtendedPictographic}, // E0.0 [2] (✃..✄) LOWER BLADE SCISSORS..WHITE SCISSORS
+ {0x2705, 0x2705, prExtendedPictographic}, // E0.6 [1] (✅) check mark button
+ {0x2708, 0x270C, prExtendedPictographic}, // E0.6 [5] (✈️..✌️) airplane..victory hand
+ {0x270D, 0x270D, prExtendedPictographic}, // E0.7 [1] (✍️) writing hand
+ {0x270E, 0x270E, prExtendedPictographic}, // E0.0 [1] (✎) LOWER RIGHT PENCIL
+ {0x270F, 0x270F, prExtendedPictographic}, // E0.6 [1] (✏️) pencil
+ {0x2710, 0x2711, prExtendedPictographic}, // E0.0 [2] (✐..✑) UPPER RIGHT PENCIL..WHITE NIB
+ {0x2712, 0x2712, prExtendedPictographic}, // E0.6 [1] (✒️) black nib
+ {0x2714, 0x2714, prExtendedPictographic}, // E0.6 [1] (✔️) check mark
+ {0x2716, 0x2716, prExtendedPictographic}, // E0.6 [1] (✖️) multiply
+ {0x271D, 0x271D, prExtendedPictographic}, // E0.7 [1] (✝️) latin cross
+ {0x2721, 0x2721, prExtendedPictographic}, // E0.7 [1] (✡️) star of David
+ {0x2728, 0x2728, prExtendedPictographic}, // E0.6 [1] (✨) sparkles
+ {0x2733, 0x2734, prExtendedPictographic}, // E0.6 [2] (✳️..✴️) eight-spoked asterisk..eight-pointed star
+ {0x2744, 0x2744, prExtendedPictographic}, // E0.6 [1] (❄️) snowflake
+ {0x2747, 0x2747, prExtendedPictographic}, // E0.6 [1] (❇️) sparkle
+ {0x274C, 0x274C, prExtendedPictographic}, // E0.6 [1] (❌) cross mark
+ {0x274E, 0x274E, prExtendedPictographic}, // E0.6 [1] (❎) cross mark button
+ {0x2753, 0x2755, prExtendedPictographic}, // E0.6 [3] (❓..❕) red question mark..white exclamation mark
+ {0x2757, 0x2757, prExtendedPictographic}, // E0.6 [1] (❗) red exclamation mark
+ {0x2763, 0x2763, prExtendedPictographic}, // E1.0 [1] (❣️) heart exclamation
+ {0x2764, 0x2764, prExtendedPictographic}, // E0.6 [1] (❤️) red heart
+ {0x2765, 0x2767, prExtendedPictographic}, // E0.0 [3] (❥..❧) ROTATED HEAVY BLACK HEART BULLET..ROTATED FLORAL HEART BULLET
+ {0x2795, 0x2797, prExtendedPictographic}, // E0.6 [3] (➕..➗) plus..divide
+ {0x27A1, 0x27A1, prExtendedPictographic}, // E0.6 [1] (➡️) right arrow
+ {0x27B0, 0x27B0, prExtendedPictographic}, // E0.6 [1] (➰) curly loop
+ {0x27BF, 0x27BF, prExtendedPictographic}, // E1.0 [1] (➿) double curly loop
+ {0x2934, 0x2935, prExtendedPictographic}, // E0.6 [2] (⤴️..⤵️) right arrow curving up..right arrow curving down
+ {0x2B05, 0x2B07, prExtendedPictographic}, // E0.6 [3] (⬅️..⬇️) left arrow..down arrow
+ {0x2B1B, 0x2B1C, prExtendedPictographic}, // E0.6 [2] (⬛..⬜) black large square..white large square
+ {0x2B50, 0x2B50, prExtendedPictographic}, // E0.6 [1] (⭐) star
+ {0x2B55, 0x2B55, prExtendedPictographic}, // E0.6 [1] (⭕) hollow red circle
+ {0x2CEF, 0x2CF1, prExtend}, // Mn [3] COPTIC COMBINING NI ABOVE..COPTIC COMBINING SPIRITUS LENIS
+ {0x2D7F, 0x2D7F, prExtend}, // Mn TIFINAGH CONSONANT JOINER
+ {0x2DE0, 0x2DFF, prExtend}, // Mn [32] COMBINING CYRILLIC LETTER BE..COMBINING CYRILLIC LETTER IOTIFIED BIG YUS
+ {0x302A, 0x302D, prExtend}, // Mn [4] IDEOGRAPHIC LEVEL TONE MARK..IDEOGRAPHIC ENTERING TONE MARK
+ {0x302E, 0x302F, prExtend}, // Mc [2] HANGUL SINGLE DOT TONE MARK..HANGUL DOUBLE DOT TONE MARK
+ {0x3030, 0x3030, prExtendedPictographic}, // E0.6 [1] (〰️) wavy dash
+ {0x303D, 0x303D, prExtendedPictographic}, // E0.6 [1] (〽️) part alternation mark
+ {0x3099, 0x309A, prExtend}, // Mn [2] COMBINING KATAKANA-HIRAGANA VOICED SOUND MARK..COMBINING KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK
+ {0x3297, 0x3297, prExtendedPictographic}, // E0.6 [1] (㊗️) Japanese “congratulations” button
+ {0x3299, 0x3299, prExtendedPictographic}, // E0.6 [1] (㊙️) Japanese “secret” button
+ {0xA66F, 0xA66F, prExtend}, // Mn COMBINING CYRILLIC VZMET
+ {0xA670, 0xA672, prExtend}, // Me [3] COMBINING CYRILLIC TEN MILLIONS SIGN..COMBINING CYRILLIC THOUSAND MILLIONS SIGN
+ {0xA674, 0xA67D, prExtend}, // Mn [10] COMBINING CYRILLIC LETTER UKRAINIAN IE..COMBINING CYRILLIC PAYEROK
+ {0xA69E, 0xA69F, prExtend}, // Mn [2] COMBINING CYRILLIC LETTER EF..COMBINING CYRILLIC LETTER IOTIFIED E
+ {0xA6F0, 0xA6F1, prExtend}, // Mn [2] BAMUM COMBINING MARK KOQNDON..BAMUM COMBINING MARK TUKWENTIS
+ {0xA802, 0xA802, prExtend}, // Mn SYLOTI NAGRI SIGN DVISVARA
+ {0xA806, 0xA806, prExtend}, // Mn SYLOTI NAGRI SIGN HASANTA
+ {0xA80B, 0xA80B, prExtend}, // Mn SYLOTI NAGRI SIGN ANUSVARA
+ {0xA823, 0xA824, prSpacingMark}, // Mc [2] SYLOTI NAGRI VOWEL SIGN A..SYLOTI NAGRI VOWEL SIGN I
+ {0xA825, 0xA826, prExtend}, // Mn [2] SYLOTI NAGRI VOWEL SIGN U..SYLOTI NAGRI VOWEL SIGN E
+ {0xA827, 0xA827, prSpacingMark}, // Mc SYLOTI NAGRI VOWEL SIGN OO
+ {0xA82C, 0xA82C, prExtend}, // Mn SYLOTI NAGRI SIGN ALTERNATE HASANTA
+ {0xA880, 0xA881, prSpacingMark}, // Mc [2] SAURASHTRA SIGN ANUSVARA..SAURASHTRA SIGN VISARGA
+ {0xA8B4, 0xA8C3, prSpacingMark}, // Mc [16] SAURASHTRA CONSONANT SIGN HAARU..SAURASHTRA VOWEL SIGN AU
+ {0xA8C4, 0xA8C5, prExtend}, // Mn [2] SAURASHTRA SIGN VIRAMA..SAURASHTRA SIGN CANDRABINDU
+ {0xA8E0, 0xA8F1, prExtend}, // Mn [18] COMBINING DEVANAGARI DIGIT ZERO..COMBINING DEVANAGARI SIGN AVAGRAHA
+ {0xA8FF, 0xA8FF, prExtend}, // Mn DEVANAGARI VOWEL SIGN AY
+ {0xA926, 0xA92D, prExtend}, // Mn [8] KAYAH LI VOWEL UE..KAYAH LI TONE CALYA PLOPHU
+ {0xA947, 0xA951, prExtend}, // Mn [11] REJANG VOWEL SIGN I..REJANG CONSONANT SIGN R
+ {0xA952, 0xA953, prSpacingMark}, // Mc [2] REJANG CONSONANT SIGN H..REJANG VIRAMA
+ {0xA960, 0xA97C, prL}, // Lo [29] HANGUL CHOSEONG TIKEUT-MIEUM..HANGUL CHOSEONG SSANGYEORINHIEUH
+ {0xA980, 0xA982, prExtend}, // Mn [3] JAVANESE SIGN PANYANGGA..JAVANESE SIGN LAYAR
+ {0xA983, 0xA983, prSpacingMark}, // Mc JAVANESE SIGN WIGNYAN
+ {0xA9B3, 0xA9B3, prExtend}, // Mn JAVANESE SIGN CECAK TELU
+ {0xA9B4, 0xA9B5, prSpacingMark}, // Mc [2] JAVANESE VOWEL SIGN TARUNG..JAVANESE VOWEL SIGN TOLONG
+ {0xA9B6, 0xA9B9, prExtend}, // Mn [4] JAVANESE VOWEL SIGN WULU..JAVANESE VOWEL SIGN SUKU MENDUT
+ {0xA9BA, 0xA9BB, prSpacingMark}, // Mc [2] JAVANESE VOWEL SIGN TALING..JAVANESE VOWEL SIGN DIRGA MURE
+ {0xA9BC, 0xA9BD, prExtend}, // Mn [2] JAVANESE VOWEL SIGN PEPET..JAVANESE CONSONANT SIGN KERET
+ {0xA9BE, 0xA9C0, prSpacingMark}, // Mc [3] JAVANESE CONSONANT SIGN PENGKAL..JAVANESE PANGKON
+ {0xA9E5, 0xA9E5, prExtend}, // Mn MYANMAR SIGN SHAN SAW
+ {0xAA29, 0xAA2E, prExtend}, // Mn [6] CHAM VOWEL SIGN AA..CHAM VOWEL SIGN OE
+ {0xAA2F, 0xAA30, prSpacingMark}, // Mc [2] CHAM VOWEL SIGN O..CHAM VOWEL SIGN AI
+ {0xAA31, 0xAA32, prExtend}, // Mn [2] CHAM VOWEL SIGN AU..CHAM VOWEL SIGN UE
+ {0xAA33, 0xAA34, prSpacingMark}, // Mc [2] CHAM CONSONANT SIGN YA..CHAM CONSONANT SIGN RA
+ {0xAA35, 0xAA36, prExtend}, // Mn [2] CHAM CONSONANT SIGN LA..CHAM CONSONANT SIGN WA
+ {0xAA43, 0xAA43, prExtend}, // Mn CHAM CONSONANT SIGN FINAL NG
+ {0xAA4C, 0xAA4C, prExtend}, // Mn CHAM CONSONANT SIGN FINAL M
+ {0xAA4D, 0xAA4D, prSpacingMark}, // Mc CHAM CONSONANT SIGN FINAL H
+ {0xAA7C, 0xAA7C, prExtend}, // Mn MYANMAR SIGN TAI LAING TONE-2
+ {0xAAB0, 0xAAB0, prExtend}, // Mn TAI VIET MAI KANG
+ {0xAAB2, 0xAAB4, prExtend}, // Mn [3] TAI VIET VOWEL I..TAI VIET VOWEL U
+ {0xAAB7, 0xAAB8, prExtend}, // Mn [2] TAI VIET MAI KHIT..TAI VIET VOWEL IA
+ {0xAABE, 0xAABF, prExtend}, // Mn [2] TAI VIET VOWEL AM..TAI VIET TONE MAI EK
+ {0xAAC1, 0xAAC1, prExtend}, // Mn TAI VIET TONE MAI THO
+ {0xAAEB, 0xAAEB, prSpacingMark}, // Mc MEETEI MAYEK VOWEL SIGN II
+ {0xAAEC, 0xAAED, prExtend}, // Mn [2] MEETEI MAYEK VOWEL SIGN UU..MEETEI MAYEK VOWEL SIGN AAI
+ {0xAAEE, 0xAAEF, prSpacingMark}, // Mc [2] MEETEI MAYEK VOWEL SIGN AU..MEETEI MAYEK VOWEL SIGN AAU
+ {0xAAF5, 0xAAF5, prSpacingMark}, // Mc MEETEI MAYEK VOWEL SIGN VISARGA
+ {0xAAF6, 0xAAF6, prExtend}, // Mn MEETEI MAYEK VIRAMA
+ {0xABE3, 0xABE4, prSpacingMark}, // Mc [2] MEETEI MAYEK VOWEL SIGN ONAP..MEETEI MAYEK VOWEL SIGN INAP
+ {0xABE5, 0xABE5, prExtend}, // Mn MEETEI MAYEK VOWEL SIGN ANAP
+ {0xABE6, 0xABE7, prSpacingMark}, // Mc [2] MEETEI MAYEK VOWEL SIGN YENAP..MEETEI MAYEK VOWEL SIGN SOUNAP
+ {0xABE8, 0xABE8, prExtend}, // Mn MEETEI MAYEK VOWEL SIGN UNAP
+ {0xABE9, 0xABEA, prSpacingMark}, // Mc [2] MEETEI MAYEK VOWEL SIGN CHEINAP..MEETEI MAYEK VOWEL SIGN NUNG
+ {0xABEC, 0xABEC, prSpacingMark}, // Mc MEETEI MAYEK LUM IYEK
+ {0xABED, 0xABED, prExtend}, // Mn MEETEI MAYEK APUN IYEK
+ {0xAC00, 0xAC00, prLV}, // Lo HANGUL SYLLABLE GA
+ {0xAC01, 0xAC1B, prLVT}, // Lo [27] HANGUL SYLLABLE GAG..HANGUL SYLLABLE GAH
+ {0xAC1C, 0xAC1C, prLV}, // Lo HANGUL SYLLABLE GAE
+ {0xAC1D, 0xAC37, prLVT}, // Lo [27] HANGUL SYLLABLE GAEG..HANGUL SYLLABLE GAEH
+ {0xAC38, 0xAC38, prLV}, // Lo HANGUL SYLLABLE GYA
+ {0xAC39, 0xAC53, prLVT}, // Lo [27] HANGUL SYLLABLE GYAG..HANGUL SYLLABLE GYAH
+ {0xAC54, 0xAC54, prLV}, // Lo HANGUL SYLLABLE GYAE
+ {0xAC55, 0xAC6F, prLVT}, // Lo [27] HANGUL SYLLABLE GYAEG..HANGUL SYLLABLE GYAEH
+ {0xAC70, 0xAC70, prLV}, // Lo HANGUL SYLLABLE GEO
+ {0xAC71, 0xAC8B, prLVT}, // Lo [27] HANGUL SYLLABLE GEOG..HANGUL SYLLABLE GEOH
+ {0xAC8C, 0xAC8C, prLV}, // Lo HANGUL SYLLABLE GE
+ {0xAC8D, 0xACA7, prLVT}, // Lo [27] HANGUL SYLLABLE GEG..HANGUL SYLLABLE GEH
+ {0xACA8, 0xACA8, prLV}, // Lo HANGUL SYLLABLE GYEO
+ {0xACA9, 0xACC3, prLVT}, // Lo [27] HANGUL SYLLABLE GYEOG..HANGUL SYLLABLE GYEOH
+ {0xACC4, 0xACC4, prLV}, // Lo HANGUL SYLLABLE GYE
+ {0xACC5, 0xACDF, prLVT}, // Lo [27] HANGUL SYLLABLE GYEG..HANGUL SYLLABLE GYEH
+ {0xACE0, 0xACE0, prLV}, // Lo HANGUL SYLLABLE GO
+ {0xACE1, 0xACFB, prLVT}, // Lo [27] HANGUL SYLLABLE GOG..HANGUL SYLLABLE GOH
+ {0xACFC, 0xACFC, prLV}, // Lo HANGUL SYLLABLE GWA
+ {0xACFD, 0xAD17, prLVT}, // Lo [27] HANGUL SYLLABLE GWAG..HANGUL SYLLABLE GWAH
+ {0xAD18, 0xAD18, prLV}, // Lo HANGUL SYLLABLE GWAE
+ {0xAD19, 0xAD33, prLVT}, // Lo [27] HANGUL SYLLABLE GWAEG..HANGUL SYLLABLE GWAEH
+ {0xAD34, 0xAD34, prLV}, // Lo HANGUL SYLLABLE GOE
+ {0xAD35, 0xAD4F, prLVT}, // Lo [27] HANGUL SYLLABLE GOEG..HANGUL SYLLABLE GOEH
+ {0xAD50, 0xAD50, prLV}, // Lo HANGUL SYLLABLE GYO
+ {0xAD51, 0xAD6B, prLVT}, // Lo [27] HANGUL SYLLABLE GYOG..HANGUL SYLLABLE GYOH
+ {0xAD6C, 0xAD6C, prLV}, // Lo HANGUL SYLLABLE GU
+ {0xAD6D, 0xAD87, prLVT}, // Lo [27] HANGUL SYLLABLE GUG..HANGUL SYLLABLE GUH
+ {0xAD88, 0xAD88, prLV}, // Lo HANGUL SYLLABLE GWEO
+ {0xAD89, 0xADA3, prLVT}, // Lo [27] HANGUL SYLLABLE GWEOG..HANGUL SYLLABLE GWEOH
+ {0xADA4, 0xADA4, prLV}, // Lo HANGUL SYLLABLE GWE
+ {0xADA5, 0xADBF, prLVT}, // Lo [27] HANGUL SYLLABLE GWEG..HANGUL SYLLABLE GWEH
+ {0xADC0, 0xADC0, prLV}, // Lo HANGUL SYLLABLE GWI
+ {0xADC1, 0xADDB, prLVT}, // Lo [27] HANGUL SYLLABLE GWIG..HANGUL SYLLABLE GWIH
+ {0xADDC, 0xADDC, prLV}, // Lo HANGUL SYLLABLE GYU
+ {0xADDD, 0xADF7, prLVT}, // Lo [27] HANGUL SYLLABLE GYUG..HANGUL SYLLABLE GYUH
+ {0xADF8, 0xADF8, prLV}, // Lo HANGUL SYLLABLE GEU
+ {0xADF9, 0xAE13, prLVT}, // Lo [27] HANGUL SYLLABLE GEUG..HANGUL SYLLABLE GEUH
+ {0xAE14, 0xAE14, prLV}, // Lo HANGUL SYLLABLE GYI
+ {0xAE15, 0xAE2F, prLVT}, // Lo [27] HANGUL SYLLABLE GYIG..HANGUL SYLLABLE GYIH
+ {0xAE30, 0xAE30, prLV}, // Lo HANGUL SYLLABLE GI
+ {0xAE31, 0xAE4B, prLVT}, // Lo [27] HANGUL SYLLABLE GIG..HANGUL SYLLABLE GIH
+ {0xAE4C, 0xAE4C, prLV}, // Lo HANGUL SYLLABLE GGA
+ {0xAE4D, 0xAE67, prLVT}, // Lo [27] HANGUL SYLLABLE GGAG..HANGUL SYLLABLE GGAH
+ {0xAE68, 0xAE68, prLV}, // Lo HANGUL SYLLABLE GGAE
+ {0xAE69, 0xAE83, prLVT}, // Lo [27] HANGUL SYLLABLE GGAEG..HANGUL SYLLABLE GGAEH
+ {0xAE84, 0xAE84, prLV}, // Lo HANGUL SYLLABLE GGYA
+ {0xAE85, 0xAE9F, prLVT}, // Lo [27] HANGUL SYLLABLE GGYAG..HANGUL SYLLABLE GGYAH
+ {0xAEA0, 0xAEA0, prLV}, // Lo HANGUL SYLLABLE GGYAE
+ {0xAEA1, 0xAEBB, prLVT}, // Lo [27] HANGUL SYLLABLE GGYAEG..HANGUL SYLLABLE GGYAEH
+ {0xAEBC, 0xAEBC, prLV}, // Lo HANGUL SYLLABLE GGEO
+ {0xAEBD, 0xAED7, prLVT}, // Lo [27] HANGUL SYLLABLE GGEOG..HANGUL SYLLABLE GGEOH
+ {0xAED8, 0xAED8, prLV}, // Lo HANGUL SYLLABLE GGE
+ {0xAED9, 0xAEF3, prLVT}, // Lo [27] HANGUL SYLLABLE GGEG..HANGUL SYLLABLE GGEH
+ {0xAEF4, 0xAEF4, prLV}, // Lo HANGUL SYLLABLE GGYEO
+ {0xAEF5, 0xAF0F, prLVT}, // Lo [27] HANGUL SYLLABLE GGYEOG..HANGUL SYLLABLE GGYEOH
+ {0xAF10, 0xAF10, prLV}, // Lo HANGUL SYLLABLE GGYE
+ {0xAF11, 0xAF2B, prLVT}, // Lo [27] HANGUL SYLLABLE GGYEG..HANGUL SYLLABLE GGYEH
+ {0xAF2C, 0xAF2C, prLV}, // Lo HANGUL SYLLABLE GGO
+ {0xAF2D, 0xAF47, prLVT}, // Lo [27] HANGUL SYLLABLE GGOG..HANGUL SYLLABLE GGOH
+ {0xAF48, 0xAF48, prLV}, // Lo HANGUL SYLLABLE GGWA
+ {0xAF49, 0xAF63, prLVT}, // Lo [27] HANGUL SYLLABLE GGWAG..HANGUL SYLLABLE GGWAH
+ {0xAF64, 0xAF64, prLV}, // Lo HANGUL SYLLABLE GGWAE
+ {0xAF65, 0xAF7F, prLVT}, // Lo [27] HANGUL SYLLABLE GGWAEG..HANGUL SYLLABLE GGWAEH
+ {0xAF80, 0xAF80, prLV}, // Lo HANGUL SYLLABLE GGOE
+ {0xAF81, 0xAF9B, prLVT}, // Lo [27] HANGUL SYLLABLE GGOEG..HANGUL SYLLABLE GGOEH
+ {0xAF9C, 0xAF9C, prLV}, // Lo HANGUL SYLLABLE GGYO
+ {0xAF9D, 0xAFB7, prLVT}, // Lo [27] HANGUL SYLLABLE GGYOG..HANGUL SYLLABLE GGYOH
+ {0xAFB8, 0xAFB8, prLV}, // Lo HANGUL SYLLABLE GGU
+ {0xAFB9, 0xAFD3, prLVT}, // Lo [27] HANGUL SYLLABLE GGUG..HANGUL SYLLABLE GGUH
+ {0xAFD4, 0xAFD4, prLV}, // Lo HANGUL SYLLABLE GGWEO
+ {0xAFD5, 0xAFEF, prLVT}, // Lo [27] HANGUL SYLLABLE GGWEOG..HANGUL SYLLABLE GGWEOH
+ {0xAFF0, 0xAFF0, prLV}, // Lo HANGUL SYLLABLE GGWE
+ {0xAFF1, 0xB00B, prLVT}, // Lo [27] HANGUL SYLLABLE GGWEG..HANGUL SYLLABLE GGWEH
+ {0xB00C, 0xB00C, prLV}, // Lo HANGUL SYLLABLE GGWI
+ {0xB00D, 0xB027, prLVT}, // Lo [27] HANGUL SYLLABLE GGWIG..HANGUL SYLLABLE GGWIH
+ {0xB028, 0xB028, prLV}, // Lo HANGUL SYLLABLE GGYU
+ {0xB029, 0xB043, prLVT}, // Lo [27] HANGUL SYLLABLE GGYUG..HANGUL SYLLABLE GGYUH
+ {0xB044, 0xB044, prLV}, // Lo HANGUL SYLLABLE GGEU
+ {0xB045, 0xB05F, prLVT}, // Lo [27] HANGUL SYLLABLE GGEUG..HANGUL SYLLABLE GGEUH
+ {0xB060, 0xB060, prLV}, // Lo HANGUL SYLLABLE GGYI
+ {0xB061, 0xB07B, prLVT}, // Lo [27] HANGUL SYLLABLE GGYIG..HANGUL SYLLABLE GGYIH
+ {0xB07C, 0xB07C, prLV}, // Lo HANGUL SYLLABLE GGI
+ {0xB07D, 0xB097, prLVT}, // Lo [27] HANGUL SYLLABLE GGIG..HANGUL SYLLABLE GGIH
+ {0xB098, 0xB098, prLV}, // Lo HANGUL SYLLABLE NA
+ {0xB099, 0xB0B3, prLVT}, // Lo [27] HANGUL SYLLABLE NAG..HANGUL SYLLABLE NAH
+ {0xB0B4, 0xB0B4, prLV}, // Lo HANGUL SYLLABLE NAE
+ {0xB0B5, 0xB0CF, prLVT}, // Lo [27] HANGUL SYLLABLE NAEG..HANGUL SYLLABLE NAEH
+ {0xB0D0, 0xB0D0, prLV}, // Lo HANGUL SYLLABLE NYA
+ {0xB0D1, 0xB0EB, prLVT}, // Lo [27] HANGUL SYLLABLE NYAG..HANGUL SYLLABLE NYAH
+ {0xB0EC, 0xB0EC, prLV}, // Lo HANGUL SYLLABLE NYAE
+ {0xB0ED, 0xB107, prLVT}, // Lo [27] HANGUL SYLLABLE NYAEG..HANGUL SYLLABLE NYAEH
+ {0xB108, 0xB108, prLV}, // Lo HANGUL SYLLABLE NEO
+ {0xB109, 0xB123, prLVT}, // Lo [27] HANGUL SYLLABLE NEOG..HANGUL SYLLABLE NEOH
+ {0xB124, 0xB124, prLV}, // Lo HANGUL SYLLABLE NE
+ {0xB125, 0xB13F, prLVT}, // Lo [27] HANGUL SYLLABLE NEG..HANGUL SYLLABLE NEH
+ {0xB140, 0xB140, prLV}, // Lo HANGUL SYLLABLE NYEO
+ {0xB141, 0xB15B, prLVT}, // Lo [27] HANGUL SYLLABLE NYEOG..HANGUL SYLLABLE NYEOH
+ {0xB15C, 0xB15C, prLV}, // Lo HANGUL SYLLABLE NYE
+ {0xB15D, 0xB177, prLVT}, // Lo [27] HANGUL SYLLABLE NYEG..HANGUL SYLLABLE NYEH
+ {0xB178, 0xB178, prLV}, // Lo HANGUL SYLLABLE NO
+ {0xB179, 0xB193, prLVT}, // Lo [27] HANGUL SYLLABLE NOG..HANGUL SYLLABLE NOH
+ {0xB194, 0xB194, prLV}, // Lo HANGUL SYLLABLE NWA
+ {0xB195, 0xB1AF, prLVT}, // Lo [27] HANGUL SYLLABLE NWAG..HANGUL SYLLABLE NWAH
+ {0xB1B0, 0xB1B0, prLV}, // Lo HANGUL SYLLABLE NWAE
+ {0xB1B1, 0xB1CB, prLVT}, // Lo [27] HANGUL SYLLABLE NWAEG..HANGUL SYLLABLE NWAEH
+ {0xB1CC, 0xB1CC, prLV}, // Lo HANGUL SYLLABLE NOE
+ {0xB1CD, 0xB1E7, prLVT}, // Lo [27] HANGUL SYLLABLE NOEG..HANGUL SYLLABLE NOEH
+ {0xB1E8, 0xB1E8, prLV}, // Lo HANGUL SYLLABLE NYO
+ {0xB1E9, 0xB203, prLVT}, // Lo [27] HANGUL SYLLABLE NYOG..HANGUL SYLLABLE NYOH
+ {0xB204, 0xB204, prLV}, // Lo HANGUL SYLLABLE NU
+ {0xB205, 0xB21F, prLVT}, // Lo [27] HANGUL SYLLABLE NUG..HANGUL SYLLABLE NUH
+ {0xB220, 0xB220, prLV}, // Lo HANGUL SYLLABLE NWEO
+ {0xB221, 0xB23B, prLVT}, // Lo [27] HANGUL SYLLABLE NWEOG..HANGUL SYLLABLE NWEOH
+ {0xB23C, 0xB23C, prLV}, // Lo HANGUL SYLLABLE NWE
+ {0xB23D, 0xB257, prLVT}, // Lo [27] HANGUL SYLLABLE NWEG..HANGUL SYLLABLE NWEH
+ {0xB258, 0xB258, prLV}, // Lo HANGUL SYLLABLE NWI
+ {0xB259, 0xB273, prLVT}, // Lo [27] HANGUL SYLLABLE NWIG..HANGUL SYLLABLE NWIH
+ {0xB274, 0xB274, prLV}, // Lo HANGUL SYLLABLE NYU
+ {0xB275, 0xB28F, prLVT}, // Lo [27] HANGUL SYLLABLE NYUG..HANGUL SYLLABLE NYUH
+ {0xB290, 0xB290, prLV}, // Lo HANGUL SYLLABLE NEU
+ {0xB291, 0xB2AB, prLVT}, // Lo [27] HANGUL SYLLABLE NEUG..HANGUL SYLLABLE NEUH
+ {0xB2AC, 0xB2AC, prLV}, // Lo HANGUL SYLLABLE NYI
+ {0xB2AD, 0xB2C7, prLVT}, // Lo [27] HANGUL SYLLABLE NYIG..HANGUL SYLLABLE NYIH
+ {0xB2C8, 0xB2C8, prLV}, // Lo HANGUL SYLLABLE NI
+ {0xB2C9, 0xB2E3, prLVT}, // Lo [27] HANGUL SYLLABLE NIG..HANGUL SYLLABLE NIH
+ {0xB2E4, 0xB2E4, prLV}, // Lo HANGUL SYLLABLE DA
+ {0xB2E5, 0xB2FF, prLVT}, // Lo [27] HANGUL SYLLABLE DAG..HANGUL SYLLABLE DAH
+ {0xB300, 0xB300, prLV}, // Lo HANGUL SYLLABLE DAE
+ {0xB301, 0xB31B, prLVT}, // Lo [27] HANGUL SYLLABLE DAEG..HANGUL SYLLABLE DAEH
+ {0xB31C, 0xB31C, prLV}, // Lo HANGUL SYLLABLE DYA
+ {0xB31D, 0xB337, prLVT}, // Lo [27] HANGUL SYLLABLE DYAG..HANGUL SYLLABLE DYAH
+ {0xB338, 0xB338, prLV}, // Lo HANGUL SYLLABLE DYAE
+ {0xB339, 0xB353, prLVT}, // Lo [27] HANGUL SYLLABLE DYAEG..HANGUL SYLLABLE DYAEH
+ {0xB354, 0xB354, prLV}, // Lo HANGUL SYLLABLE DEO
+ {0xB355, 0xB36F, prLVT}, // Lo [27] HANGUL SYLLABLE DEOG..HANGUL SYLLABLE DEOH
+ {0xB370, 0xB370, prLV}, // Lo HANGUL SYLLABLE DE
+ {0xB371, 0xB38B, prLVT}, // Lo [27] HANGUL SYLLABLE DEG..HANGUL SYLLABLE DEH
+ {0xB38C, 0xB38C, prLV}, // Lo HANGUL SYLLABLE DYEO
+ {0xB38D, 0xB3A7, prLVT}, // Lo [27] HANGUL SYLLABLE DYEOG..HANGUL SYLLABLE DYEOH
+ {0xB3A8, 0xB3A8, prLV}, // Lo HANGUL SYLLABLE DYE
+ {0xB3A9, 0xB3C3, prLVT}, // Lo [27] HANGUL SYLLABLE DYEG..HANGUL SYLLABLE DYEH
+ {0xB3C4, 0xB3C4, prLV}, // Lo HANGUL SYLLABLE DO
+ {0xB3C5, 0xB3DF, prLVT}, // Lo [27] HANGUL SYLLABLE DOG..HANGUL SYLLABLE DOH
+ {0xB3E0, 0xB3E0, prLV}, // Lo HANGUL SYLLABLE DWA
+ {0xB3E1, 0xB3FB, prLVT}, // Lo [27] HANGUL SYLLABLE DWAG..HANGUL SYLLABLE DWAH
+ {0xB3FC, 0xB3FC, prLV}, // Lo HANGUL SYLLABLE DWAE
+ {0xB3FD, 0xB417, prLVT}, // Lo [27] HANGUL SYLLABLE DWAEG..HANGUL SYLLABLE DWAEH
+ {0xB418, 0xB418, prLV}, // Lo HANGUL SYLLABLE DOE
+ {0xB419, 0xB433, prLVT}, // Lo [27] HANGUL SYLLABLE DOEG..HANGUL SYLLABLE DOEH
+ {0xB434, 0xB434, prLV}, // Lo HANGUL SYLLABLE DYO
+ {0xB435, 0xB44F, prLVT}, // Lo [27] HANGUL SYLLABLE DYOG..HANGUL SYLLABLE DYOH
+ {0xB450, 0xB450, prLV}, // Lo HANGUL SYLLABLE DU
+ {0xB451, 0xB46B, prLVT}, // Lo [27] HANGUL SYLLABLE DUG..HANGUL SYLLABLE DUH
+ {0xB46C, 0xB46C, prLV}, // Lo HANGUL SYLLABLE DWEO
+ {0xB46D, 0xB487, prLVT}, // Lo [27] HANGUL SYLLABLE DWEOG..HANGUL SYLLABLE DWEOH
+ {0xB488, 0xB488, prLV}, // Lo HANGUL SYLLABLE DWE
+ {0xB489, 0xB4A3, prLVT}, // Lo [27] HANGUL SYLLABLE DWEG..HANGUL SYLLABLE DWEH
+ {0xB4A4, 0xB4A4, prLV}, // Lo HANGUL SYLLABLE DWI
+ {0xB4A5, 0xB4BF, prLVT}, // Lo [27] HANGUL SYLLABLE DWIG..HANGUL SYLLABLE DWIH
+ {0xB4C0, 0xB4C0, prLV}, // Lo HANGUL SYLLABLE DYU
+ {0xB4C1, 0xB4DB, prLVT}, // Lo [27] HANGUL SYLLABLE DYUG..HANGUL SYLLABLE DYUH
+ {0xB4DC, 0xB4DC, prLV}, // Lo HANGUL SYLLABLE DEU
+ {0xB4DD, 0xB4F7, prLVT}, // Lo [27] HANGUL SYLLABLE DEUG..HANGUL SYLLABLE DEUH
+ {0xB4F8, 0xB4F8, prLV}, // Lo HANGUL SYLLABLE DYI
+ {0xB4F9, 0xB513, prLVT}, // Lo [27] HANGUL SYLLABLE DYIG..HANGUL SYLLABLE DYIH
+ {0xB514, 0xB514, prLV}, // Lo HANGUL SYLLABLE DI
+ {0xB515, 0xB52F, prLVT}, // Lo [27] HANGUL SYLLABLE DIG..HANGUL SYLLABLE DIH
+ {0xB530, 0xB530, prLV}, // Lo HANGUL SYLLABLE DDA
+ {0xB531, 0xB54B, prLVT}, // Lo [27] HANGUL SYLLABLE DDAG..HANGUL SYLLABLE DDAH
+ {0xB54C, 0xB54C, prLV}, // Lo HANGUL SYLLABLE DDAE
+ {0xB54D, 0xB567, prLVT}, // Lo [27] HANGUL SYLLABLE DDAEG..HANGUL SYLLABLE DDAEH
+ {0xB568, 0xB568, prLV}, // Lo HANGUL SYLLABLE DDYA
+ {0xB569, 0xB583, prLVT}, // Lo [27] HANGUL SYLLABLE DDYAG..HANGUL SYLLABLE DDYAH
+ {0xB584, 0xB584, prLV}, // Lo HANGUL SYLLABLE DDYAE
+ {0xB585, 0xB59F, prLVT}, // Lo [27] HANGUL SYLLABLE DDYAEG..HANGUL SYLLABLE DDYAEH
+ {0xB5A0, 0xB5A0, prLV}, // Lo HANGUL SYLLABLE DDEO
+ {0xB5A1, 0xB5BB, prLVT}, // Lo [27] HANGUL SYLLABLE DDEOG..HANGUL SYLLABLE DDEOH
+ {0xB5BC, 0xB5BC, prLV}, // Lo HANGUL SYLLABLE DDE
+ {0xB5BD, 0xB5D7, prLVT}, // Lo [27] HANGUL SYLLABLE DDEG..HANGUL SYLLABLE DDEH
+ {0xB5D8, 0xB5D8, prLV}, // Lo HANGUL SYLLABLE DDYEO
+ {0xB5D9, 0xB5F3, prLVT}, // Lo [27] HANGUL SYLLABLE DDYEOG..HANGUL SYLLABLE DDYEOH
+ {0xB5F4, 0xB5F4, prLV}, // Lo HANGUL SYLLABLE DDYE
+ {0xB5F5, 0xB60F, prLVT}, // Lo [27] HANGUL SYLLABLE DDYEG..HANGUL SYLLABLE DDYEH
+ {0xB610, 0xB610, prLV}, // Lo HANGUL SYLLABLE DDO
+ {0xB611, 0xB62B, prLVT}, // Lo [27] HANGUL SYLLABLE DDOG..HANGUL SYLLABLE DDOH
+ {0xB62C, 0xB62C, prLV}, // Lo HANGUL SYLLABLE DDWA
+ {0xB62D, 0xB647, prLVT}, // Lo [27] HANGUL SYLLABLE DDWAG..HANGUL SYLLABLE DDWAH
+ {0xB648, 0xB648, prLV}, // Lo HANGUL SYLLABLE DDWAE
+ {0xB649, 0xB663, prLVT}, // Lo [27] HANGUL SYLLABLE DDWAEG..HANGUL SYLLABLE DDWAEH
+ {0xB664, 0xB664, prLV}, // Lo HANGUL SYLLABLE DDOE
+ {0xB665, 0xB67F, prLVT}, // Lo [27] HANGUL SYLLABLE DDOEG..HANGUL SYLLABLE DDOEH
+ {0xB680, 0xB680, prLV}, // Lo HANGUL SYLLABLE DDYO
+ {0xB681, 0xB69B, prLVT}, // Lo [27] HANGUL SYLLABLE DDYOG..HANGUL SYLLABLE DDYOH
+ {0xB69C, 0xB69C, prLV}, // Lo HANGUL SYLLABLE DDU
+ {0xB69D, 0xB6B7, prLVT}, // Lo [27] HANGUL SYLLABLE DDUG..HANGUL SYLLABLE DDUH
+ {0xB6B8, 0xB6B8, prLV}, // Lo HANGUL SYLLABLE DDWEO
+ {0xB6B9, 0xB6D3, prLVT}, // Lo [27] HANGUL SYLLABLE DDWEOG..HANGUL SYLLABLE DDWEOH
+ {0xB6D4, 0xB6D4, prLV}, // Lo HANGUL SYLLABLE DDWE
+ {0xB6D5, 0xB6EF, prLVT}, // Lo [27] HANGUL SYLLABLE DDWEG..HANGUL SYLLABLE DDWEH
+ {0xB6F0, 0xB6F0, prLV}, // Lo HANGUL SYLLABLE DDWI
+ {0xB6F1, 0xB70B, prLVT}, // Lo [27] HANGUL SYLLABLE DDWIG..HANGUL SYLLABLE DDWIH
+ {0xB70C, 0xB70C, prLV}, // Lo HANGUL SYLLABLE DDYU
+ {0xB70D, 0xB727, prLVT}, // Lo [27] HANGUL SYLLABLE DDYUG..HANGUL SYLLABLE DDYUH
+ {0xB728, 0xB728, prLV}, // Lo HANGUL SYLLABLE DDEU
+ {0xB729, 0xB743, prLVT}, // Lo [27] HANGUL SYLLABLE DDEUG..HANGUL SYLLABLE DDEUH
+ {0xB744, 0xB744, prLV}, // Lo HANGUL SYLLABLE DDYI
+ {0xB745, 0xB75F, prLVT}, // Lo [27] HANGUL SYLLABLE DDYIG..HANGUL SYLLABLE DDYIH
+ {0xB760, 0xB760, prLV}, // Lo HANGUL SYLLABLE DDI
+ {0xB761, 0xB77B, prLVT}, // Lo [27] HANGUL SYLLABLE DDIG..HANGUL SYLLABLE DDIH
+ {0xB77C, 0xB77C, prLV}, // Lo HANGUL SYLLABLE RA
+ {0xB77D, 0xB797, prLVT}, // Lo [27] HANGUL SYLLABLE RAG..HANGUL SYLLABLE RAH
+ {0xB798, 0xB798, prLV}, // Lo HANGUL SYLLABLE RAE
+ {0xB799, 0xB7B3, prLVT}, // Lo [27] HANGUL SYLLABLE RAEG..HANGUL SYLLABLE RAEH
+ {0xB7B4, 0xB7B4, prLV}, // Lo HANGUL SYLLABLE RYA
+ {0xB7B5, 0xB7CF, prLVT}, // Lo [27] HANGUL SYLLABLE RYAG..HANGUL SYLLABLE RYAH
+ {0xB7D0, 0xB7D0, prLV}, // Lo HANGUL SYLLABLE RYAE
+ {0xB7D1, 0xB7EB, prLVT}, // Lo [27] HANGUL SYLLABLE RYAEG..HANGUL SYLLABLE RYAEH
+ {0xB7EC, 0xB7EC, prLV}, // Lo HANGUL SYLLABLE REO
+ {0xB7ED, 0xB807, prLVT}, // Lo [27] HANGUL SYLLABLE REOG..HANGUL SYLLABLE REOH
+ {0xB808, 0xB808, prLV}, // Lo HANGUL SYLLABLE RE
+ {0xB809, 0xB823, prLVT}, // Lo [27] HANGUL SYLLABLE REG..HANGUL SYLLABLE REH
+ {0xB824, 0xB824, prLV}, // Lo HANGUL SYLLABLE RYEO
+ {0xB825, 0xB83F, prLVT}, // Lo [27] HANGUL SYLLABLE RYEOG..HANGUL SYLLABLE RYEOH
+ {0xB840, 0xB840, prLV}, // Lo HANGUL SYLLABLE RYE
+ {0xB841, 0xB85B, prLVT}, // Lo [27] HANGUL SYLLABLE RYEG..HANGUL SYLLABLE RYEH
+ {0xB85C, 0xB85C, prLV}, // Lo HANGUL SYLLABLE RO
+ {0xB85D, 0xB877, prLVT}, // Lo [27] HANGUL SYLLABLE ROG..HANGUL SYLLABLE ROH
+ {0xB878, 0xB878, prLV}, // Lo HANGUL SYLLABLE RWA
+ {0xB879, 0xB893, prLVT}, // Lo [27] HANGUL SYLLABLE RWAG..HANGUL SYLLABLE RWAH
+ {0xB894, 0xB894, prLV}, // Lo HANGUL SYLLABLE RWAE
+ {0xB895, 0xB8AF, prLVT}, // Lo [27] HANGUL SYLLABLE RWAEG..HANGUL SYLLABLE RWAEH
+ {0xB8B0, 0xB8B0, prLV}, // Lo HANGUL SYLLABLE ROE
+ {0xB8B1, 0xB8CB, prLVT}, // Lo [27] HANGUL SYLLABLE ROEG..HANGUL SYLLABLE ROEH
+ {0xB8CC, 0xB8CC, prLV}, // Lo HANGUL SYLLABLE RYO
+ {0xB8CD, 0xB8E7, prLVT}, // Lo [27] HANGUL SYLLABLE RYOG..HANGUL SYLLABLE RYOH
+ {0xB8E8, 0xB8E8, prLV}, // Lo HANGUL SYLLABLE RU
+ {0xB8E9, 0xB903, prLVT}, // Lo [27] HANGUL SYLLABLE RUG..HANGUL SYLLABLE RUH
+ {0xB904, 0xB904, prLV}, // Lo HANGUL SYLLABLE RWEO
+ {0xB905, 0xB91F, prLVT}, // Lo [27] HANGUL SYLLABLE RWEOG..HANGUL SYLLABLE RWEOH
+ {0xB920, 0xB920, prLV}, // Lo HANGUL SYLLABLE RWE
+ {0xB921, 0xB93B, prLVT}, // Lo [27] HANGUL SYLLABLE RWEG..HANGUL SYLLABLE RWEH
+ {0xB93C, 0xB93C, prLV}, // Lo HANGUL SYLLABLE RWI
+ {0xB93D, 0xB957, prLVT}, // Lo [27] HANGUL SYLLABLE RWIG..HANGUL SYLLABLE RWIH
+ {0xB958, 0xB958, prLV}, // Lo HANGUL SYLLABLE RYU
+ {0xB959, 0xB973, prLVT}, // Lo [27] HANGUL SYLLABLE RYUG..HANGUL SYLLABLE RYUH
+ {0xB974, 0xB974, prLV}, // Lo HANGUL SYLLABLE REU
+ {0xB975, 0xB98F, prLVT}, // Lo [27] HANGUL SYLLABLE REUG..HANGUL SYLLABLE REUH
+ {0xB990, 0xB990, prLV}, // Lo HANGUL SYLLABLE RYI
+ {0xB991, 0xB9AB, prLVT}, // Lo [27] HANGUL SYLLABLE RYIG..HANGUL SYLLABLE RYIH
+ {0xB9AC, 0xB9AC, prLV}, // Lo HANGUL SYLLABLE RI
+ {0xB9AD, 0xB9C7, prLVT}, // Lo [27] HANGUL SYLLABLE RIG..HANGUL SYLLABLE RIH
+ {0xB9C8, 0xB9C8, prLV}, // Lo HANGUL SYLLABLE MA
+ {0xB9C9, 0xB9E3, prLVT}, // Lo [27] HANGUL SYLLABLE MAG..HANGUL SYLLABLE MAH
+ {0xB9E4, 0xB9E4, prLV}, // Lo HANGUL SYLLABLE MAE
+ {0xB9E5, 0xB9FF, prLVT}, // Lo [27] HANGUL SYLLABLE MAEG..HANGUL SYLLABLE MAEH
+ {0xBA00, 0xBA00, prLV}, // Lo HANGUL SYLLABLE MYA
+ {0xBA01, 0xBA1B, prLVT}, // Lo [27] HANGUL SYLLABLE MYAG..HANGUL SYLLABLE MYAH
+ {0xBA1C, 0xBA1C, prLV}, // Lo HANGUL SYLLABLE MYAE
+ {0xBA1D, 0xBA37, prLVT}, // Lo [27] HANGUL SYLLABLE MYAEG..HANGUL SYLLABLE MYAEH
+ {0xBA38, 0xBA38, prLV}, // Lo HANGUL SYLLABLE MEO
+ {0xBA39, 0xBA53, prLVT}, // Lo [27] HANGUL SYLLABLE MEOG..HANGUL SYLLABLE MEOH
+ {0xBA54, 0xBA54, prLV}, // Lo HANGUL SYLLABLE ME
+ {0xBA55, 0xBA6F, prLVT}, // Lo [27] HANGUL SYLLABLE MEG..HANGUL SYLLABLE MEH
+ {0xBA70, 0xBA70, prLV}, // Lo HANGUL SYLLABLE MYEO
+ {0xBA71, 0xBA8B, prLVT}, // Lo [27] HANGUL SYLLABLE MYEOG..HANGUL SYLLABLE MYEOH
+ {0xBA8C, 0xBA8C, prLV}, // Lo HANGUL SYLLABLE MYE
+ {0xBA8D, 0xBAA7, prLVT}, // Lo [27] HANGUL SYLLABLE MYEG..HANGUL SYLLABLE MYEH
+ {0xBAA8, 0xBAA8, prLV}, // Lo HANGUL SYLLABLE MO
+ {0xBAA9, 0xBAC3, prLVT}, // Lo [27] HANGUL SYLLABLE MOG..HANGUL SYLLABLE MOH
+ {0xBAC4, 0xBAC4, prLV}, // Lo HANGUL SYLLABLE MWA
+ {0xBAC5, 0xBADF, prLVT}, // Lo [27] HANGUL SYLLABLE MWAG..HANGUL SYLLABLE MWAH
+ {0xBAE0, 0xBAE0, prLV}, // Lo HANGUL SYLLABLE MWAE
+ {0xBAE1, 0xBAFB, prLVT}, // Lo [27] HANGUL SYLLABLE MWAEG..HANGUL SYLLABLE MWAEH
+ {0xBAFC, 0xBAFC, prLV}, // Lo HANGUL SYLLABLE MOE
+ {0xBAFD, 0xBB17, prLVT}, // Lo [27] HANGUL SYLLABLE MOEG..HANGUL SYLLABLE MOEH
+ {0xBB18, 0xBB18, prLV}, // Lo HANGUL SYLLABLE MYO
+ {0xBB19, 0xBB33, prLVT}, // Lo [27] HANGUL SYLLABLE MYOG..HANGUL SYLLABLE MYOH
+ {0xBB34, 0xBB34, prLV}, // Lo HANGUL SYLLABLE MU
+ {0xBB35, 0xBB4F, prLVT}, // Lo [27] HANGUL SYLLABLE MUG..HANGUL SYLLABLE MUH
+ {0xBB50, 0xBB50, prLV}, // Lo HANGUL SYLLABLE MWEO
+ {0xBB51, 0xBB6B, prLVT}, // Lo [27] HANGUL SYLLABLE MWEOG..HANGUL SYLLABLE MWEOH
+ {0xBB6C, 0xBB6C, prLV}, // Lo HANGUL SYLLABLE MWE
+ {0xBB6D, 0xBB87, prLVT}, // Lo [27] HANGUL SYLLABLE MWEG..HANGUL SYLLABLE MWEH
+ {0xBB88, 0xBB88, prLV}, // Lo HANGUL SYLLABLE MWI
+ {0xBB89, 0xBBA3, prLVT}, // Lo [27] HANGUL SYLLABLE MWIG..HANGUL SYLLABLE MWIH
+ {0xBBA4, 0xBBA4, prLV}, // Lo HANGUL SYLLABLE MYU
+ {0xBBA5, 0xBBBF, prLVT}, // Lo [27] HANGUL SYLLABLE MYUG..HANGUL SYLLABLE MYUH
+ {0xBBC0, 0xBBC0, prLV}, // Lo HANGUL SYLLABLE MEU
+ {0xBBC1, 0xBBDB, prLVT}, // Lo [27] HANGUL SYLLABLE MEUG..HANGUL SYLLABLE MEUH
+ {0xBBDC, 0xBBDC, prLV}, // Lo HANGUL SYLLABLE MYI
+ {0xBBDD, 0xBBF7, prLVT}, // Lo [27] HANGUL SYLLABLE MYIG..HANGUL SYLLABLE MYIH
+ {0xBBF8, 0xBBF8, prLV}, // Lo HANGUL SYLLABLE MI
+ {0xBBF9, 0xBC13, prLVT}, // Lo [27] HANGUL SYLLABLE MIG..HANGUL SYLLABLE MIH
+ {0xBC14, 0xBC14, prLV}, // Lo HANGUL SYLLABLE BA
+ {0xBC15, 0xBC2F, prLVT}, // Lo [27] HANGUL SYLLABLE BAG..HANGUL SYLLABLE BAH
+ {0xBC30, 0xBC30, prLV}, // Lo HANGUL SYLLABLE BAE
+ {0xBC31, 0xBC4B, prLVT}, // Lo [27] HANGUL SYLLABLE BAEG..HANGUL SYLLABLE BAEH
+ {0xBC4C, 0xBC4C, prLV}, // Lo HANGUL SYLLABLE BYA
+ {0xBC4D, 0xBC67, prLVT}, // Lo [27] HANGUL SYLLABLE BYAG..HANGUL SYLLABLE BYAH
+ {0xBC68, 0xBC68, prLV}, // Lo HANGUL SYLLABLE BYAE
+ {0xBC69, 0xBC83, prLVT}, // Lo [27] HANGUL SYLLABLE BYAEG..HANGUL SYLLABLE BYAEH
+ {0xBC84, 0xBC84, prLV}, // Lo HANGUL SYLLABLE BEO
+ {0xBC85, 0xBC9F, prLVT}, // Lo [27] HANGUL SYLLABLE BEOG..HANGUL SYLLABLE BEOH
+ {0xBCA0, 0xBCA0, prLV}, // Lo HANGUL SYLLABLE BE
+ {0xBCA1, 0xBCBB, prLVT}, // Lo [27] HANGUL SYLLABLE BEG..HANGUL SYLLABLE BEH
+ {0xBCBC, 0xBCBC, prLV}, // Lo HANGUL SYLLABLE BYEO
+ {0xBCBD, 0xBCD7, prLVT}, // Lo [27] HANGUL SYLLABLE BYEOG..HANGUL SYLLABLE BYEOH
+ {0xBCD8, 0xBCD8, prLV}, // Lo HANGUL SYLLABLE BYE
+ {0xBCD9, 0xBCF3, prLVT}, // Lo [27] HANGUL SYLLABLE BYEG..HANGUL SYLLABLE BYEH
+ {0xBCF4, 0xBCF4, prLV}, // Lo HANGUL SYLLABLE BO
+ {0xBCF5, 0xBD0F, prLVT}, // Lo [27] HANGUL SYLLABLE BOG..HANGUL SYLLABLE BOH
+ {0xBD10, 0xBD10, prLV}, // Lo HANGUL SYLLABLE BWA
+ {0xBD11, 0xBD2B, prLVT}, // Lo [27] HANGUL SYLLABLE BWAG..HANGUL SYLLABLE BWAH
+ {0xBD2C, 0xBD2C, prLV}, // Lo HANGUL SYLLABLE BWAE
+ {0xBD2D, 0xBD47, prLVT}, // Lo [27] HANGUL SYLLABLE BWAEG..HANGUL SYLLABLE BWAEH
+ {0xBD48, 0xBD48, prLV}, // Lo HANGUL SYLLABLE BOE
+ {0xBD49, 0xBD63, prLVT}, // Lo [27] HANGUL SYLLABLE BOEG..HANGUL SYLLABLE BOEH
+ {0xBD64, 0xBD64, prLV}, // Lo HANGUL SYLLABLE BYO
+ {0xBD65, 0xBD7F, prLVT}, // Lo [27] HANGUL SYLLABLE BYOG..HANGUL SYLLABLE BYOH
+ {0xBD80, 0xBD80, prLV}, // Lo HANGUL SYLLABLE BU
+ {0xBD81, 0xBD9B, prLVT}, // Lo [27] HANGUL SYLLABLE BUG..HANGUL SYLLABLE BUH
+ {0xBD9C, 0xBD9C, prLV}, // Lo HANGUL SYLLABLE BWEO
+ {0xBD9D, 0xBDB7, prLVT}, // Lo [27] HANGUL SYLLABLE BWEOG..HANGUL SYLLABLE BWEOH
+ {0xBDB8, 0xBDB8, prLV}, // Lo HANGUL SYLLABLE BWE
+ {0xBDB9, 0xBDD3, prLVT}, // Lo [27] HANGUL SYLLABLE BWEG..HANGUL SYLLABLE BWEH
+ {0xBDD4, 0xBDD4, prLV}, // Lo HANGUL SYLLABLE BWI
+ {0xBDD5, 0xBDEF, prLVT}, // Lo [27] HANGUL SYLLABLE BWIG..HANGUL SYLLABLE BWIH
+ {0xBDF0, 0xBDF0, prLV}, // Lo HANGUL SYLLABLE BYU
+ {0xBDF1, 0xBE0B, prLVT}, // Lo [27] HANGUL SYLLABLE BYUG..HANGUL SYLLABLE BYUH
+ {0xBE0C, 0xBE0C, prLV}, // Lo HANGUL SYLLABLE BEU
+ {0xBE0D, 0xBE27, prLVT}, // Lo [27] HANGUL SYLLABLE BEUG..HANGUL SYLLABLE BEUH
+ {0xBE28, 0xBE28, prLV}, // Lo HANGUL SYLLABLE BYI
+ {0xBE29, 0xBE43, prLVT}, // Lo [27] HANGUL SYLLABLE BYIG..HANGUL SYLLABLE BYIH
+ {0xBE44, 0xBE44, prLV}, // Lo HANGUL SYLLABLE BI
+ {0xBE45, 0xBE5F, prLVT}, // Lo [27] HANGUL SYLLABLE BIG..HANGUL SYLLABLE BIH
+ {0xBE60, 0xBE60, prLV}, // Lo HANGUL SYLLABLE BBA
+ {0xBE61, 0xBE7B, prLVT}, // Lo [27] HANGUL SYLLABLE BBAG..HANGUL SYLLABLE BBAH
+ {0xBE7C, 0xBE7C, prLV}, // Lo HANGUL SYLLABLE BBAE
+ {0xBE7D, 0xBE97, prLVT}, // Lo [27] HANGUL SYLLABLE BBAEG..HANGUL SYLLABLE BBAEH
+ {0xBE98, 0xBE98, prLV}, // Lo HANGUL SYLLABLE BBYA
+ {0xBE99, 0xBEB3, prLVT}, // Lo [27] HANGUL SYLLABLE BBYAG..HANGUL SYLLABLE BBYAH
+ {0xBEB4, 0xBEB4, prLV}, // Lo HANGUL SYLLABLE BBYAE
+ {0xBEB5, 0xBECF, prLVT}, // Lo [27] HANGUL SYLLABLE BBYAEG..HANGUL SYLLABLE BBYAEH
+ {0xBED0, 0xBED0, prLV}, // Lo HANGUL SYLLABLE BBEO
+ {0xBED1, 0xBEEB, prLVT}, // Lo [27] HANGUL SYLLABLE BBEOG..HANGUL SYLLABLE BBEOH
+ {0xBEEC, 0xBEEC, prLV}, // Lo HANGUL SYLLABLE BBE
+ {0xBEED, 0xBF07, prLVT}, // Lo [27] HANGUL SYLLABLE BBEG..HANGUL SYLLABLE BBEH
+ {0xBF08, 0xBF08, prLV}, // Lo HANGUL SYLLABLE BBYEO
+ {0xBF09, 0xBF23, prLVT}, // Lo [27] HANGUL SYLLABLE BBYEOG..HANGUL SYLLABLE BBYEOH
+ {0xBF24, 0xBF24, prLV}, // Lo HANGUL SYLLABLE BBYE
+ {0xBF25, 0xBF3F, prLVT}, // Lo [27] HANGUL SYLLABLE BBYEG..HANGUL SYLLABLE BBYEH
+ {0xBF40, 0xBF40, prLV}, // Lo HANGUL SYLLABLE BBO
+ {0xBF41, 0xBF5B, prLVT}, // Lo [27] HANGUL SYLLABLE BBOG..HANGUL SYLLABLE BBOH
+ {0xBF5C, 0xBF5C, prLV}, // Lo HANGUL SYLLABLE BBWA
+ {0xBF5D, 0xBF77, prLVT}, // Lo [27] HANGUL SYLLABLE BBWAG..HANGUL SYLLABLE BBWAH
+ {0xBF78, 0xBF78, prLV}, // Lo HANGUL SYLLABLE BBWAE
+ {0xBF79, 0xBF93, prLVT}, // Lo [27] HANGUL SYLLABLE BBWAEG..HANGUL SYLLABLE BBWAEH
+ {0xBF94, 0xBF94, prLV}, // Lo HANGUL SYLLABLE BBOE
+ {0xBF95, 0xBFAF, prLVT}, // Lo [27] HANGUL SYLLABLE BBOEG..HANGUL SYLLABLE BBOEH
+ {0xBFB0, 0xBFB0, prLV}, // Lo HANGUL SYLLABLE BBYO
+ {0xBFB1, 0xBFCB, prLVT}, // Lo [27] HANGUL SYLLABLE BBYOG..HANGUL SYLLABLE BBYOH
+ {0xBFCC, 0xBFCC, prLV}, // Lo HANGUL SYLLABLE BBU
+ {0xBFCD, 0xBFE7, prLVT}, // Lo [27] HANGUL SYLLABLE BBUG..HANGUL SYLLABLE BBUH
+ {0xBFE8, 0xBFE8, prLV}, // Lo HANGUL SYLLABLE BBWEO
+ {0xBFE9, 0xC003, prLVT}, // Lo [27] HANGUL SYLLABLE BBWEOG..HANGUL SYLLABLE BBWEOH
+ {0xC004, 0xC004, prLV}, // Lo HANGUL SYLLABLE BBWE
+ {0xC005, 0xC01F, prLVT}, // Lo [27] HANGUL SYLLABLE BBWEG..HANGUL SYLLABLE BBWEH
+ {0xC020, 0xC020, prLV}, // Lo HANGUL SYLLABLE BBWI
+ {0xC021, 0xC03B, prLVT}, // Lo [27] HANGUL SYLLABLE BBWIG..HANGUL SYLLABLE BBWIH
+ {0xC03C, 0xC03C, prLV}, // Lo HANGUL SYLLABLE BBYU
+ {0xC03D, 0xC057, prLVT}, // Lo [27] HANGUL SYLLABLE BBYUG..HANGUL SYLLABLE BBYUH
+ {0xC058, 0xC058, prLV}, // Lo HANGUL SYLLABLE BBEU
+ {0xC059, 0xC073, prLVT}, // Lo [27] HANGUL SYLLABLE BBEUG..HANGUL SYLLABLE BBEUH
+ {0xC074, 0xC074, prLV}, // Lo HANGUL SYLLABLE BBYI
+ {0xC075, 0xC08F, prLVT}, // Lo [27] HANGUL SYLLABLE BBYIG..HANGUL SYLLABLE BBYIH
+ {0xC090, 0xC090, prLV}, // Lo HANGUL SYLLABLE BBI
+ {0xC091, 0xC0AB, prLVT}, // Lo [27] HANGUL SYLLABLE BBIG..HANGUL SYLLABLE BBIH
+ {0xC0AC, 0xC0AC, prLV}, // Lo HANGUL SYLLABLE SA
+ {0xC0AD, 0xC0C7, prLVT}, // Lo [27] HANGUL SYLLABLE SAG..HANGUL SYLLABLE SAH
+ {0xC0C8, 0xC0C8, prLV}, // Lo HANGUL SYLLABLE SAE
+ {0xC0C9, 0xC0E3, prLVT}, // Lo [27] HANGUL SYLLABLE SAEG..HANGUL SYLLABLE SAEH
+ {0xC0E4, 0xC0E4, prLV}, // Lo HANGUL SYLLABLE SYA
+ {0xC0E5, 0xC0FF, prLVT}, // Lo [27] HANGUL SYLLABLE SYAG..HANGUL SYLLABLE SYAH
+ {0xC100, 0xC100, prLV}, // Lo HANGUL SYLLABLE SYAE
+ {0xC101, 0xC11B, prLVT}, // Lo [27] HANGUL SYLLABLE SYAEG..HANGUL SYLLABLE SYAEH
+ {0xC11C, 0xC11C, prLV}, // Lo HANGUL SYLLABLE SEO
+ {0xC11D, 0xC137, prLVT}, // Lo [27] HANGUL SYLLABLE SEOG..HANGUL SYLLABLE SEOH
+ {0xC138, 0xC138, prLV}, // Lo HANGUL SYLLABLE SE
+ {0xC139, 0xC153, prLVT}, // Lo [27] HANGUL SYLLABLE SEG..HANGUL SYLLABLE SEH
+ {0xC154, 0xC154, prLV}, // Lo HANGUL SYLLABLE SYEO
+ {0xC155, 0xC16F, prLVT}, // Lo [27] HANGUL SYLLABLE SYEOG..HANGUL SYLLABLE SYEOH
+ {0xC170, 0xC170, prLV}, // Lo HANGUL SYLLABLE SYE
+ {0xC171, 0xC18B, prLVT}, // Lo [27] HANGUL SYLLABLE SYEG..HANGUL SYLLABLE SYEH
+ {0xC18C, 0xC18C, prLV}, // Lo HANGUL SYLLABLE SO
+ {0xC18D, 0xC1A7, prLVT}, // Lo [27] HANGUL SYLLABLE SOG..HANGUL SYLLABLE SOH
+ {0xC1A8, 0xC1A8, prLV}, // Lo HANGUL SYLLABLE SWA
+ {0xC1A9, 0xC1C3, prLVT}, // Lo [27] HANGUL SYLLABLE SWAG..HANGUL SYLLABLE SWAH
+ {0xC1C4, 0xC1C4, prLV}, // Lo HANGUL SYLLABLE SWAE
+ {0xC1C5, 0xC1DF, prLVT}, // Lo [27] HANGUL SYLLABLE SWAEG..HANGUL SYLLABLE SWAEH
+ {0xC1E0, 0xC1E0, prLV}, // Lo HANGUL SYLLABLE SOE
+ {0xC1E1, 0xC1FB, prLVT}, // Lo [27] HANGUL SYLLABLE SOEG..HANGUL SYLLABLE SOEH
+ {0xC1FC, 0xC1FC, prLV}, // Lo HANGUL SYLLABLE SYO
+ {0xC1FD, 0xC217, prLVT}, // Lo [27] HANGUL SYLLABLE SYOG..HANGUL SYLLABLE SYOH
+ {0xC218, 0xC218, prLV}, // Lo HANGUL SYLLABLE SU
+ {0xC219, 0xC233, prLVT}, // Lo [27] HANGUL SYLLABLE SUG..HANGUL SYLLABLE SUH
+ {0xC234, 0xC234, prLV}, // Lo HANGUL SYLLABLE SWEO
+ {0xC235, 0xC24F, prLVT}, // Lo [27] HANGUL SYLLABLE SWEOG..HANGUL SYLLABLE SWEOH
+ {0xC250, 0xC250, prLV}, // Lo HANGUL SYLLABLE SWE
+ {0xC251, 0xC26B, prLVT}, // Lo [27] HANGUL SYLLABLE SWEG..HANGUL SYLLABLE SWEH
+ {0xC26C, 0xC26C, prLV}, // Lo HANGUL SYLLABLE SWI
+ {0xC26D, 0xC287, prLVT}, // Lo [27] HANGUL SYLLABLE SWIG..HANGUL SYLLABLE SWIH
+ {0xC288, 0xC288, prLV}, // Lo HANGUL SYLLABLE SYU
+ {0xC289, 0xC2A3, prLVT}, // Lo [27] HANGUL SYLLABLE SYUG..HANGUL SYLLABLE SYUH
+ {0xC2A4, 0xC2A4, prLV}, // Lo HANGUL SYLLABLE SEU
+ {0xC2A5, 0xC2BF, prLVT}, // Lo [27] HANGUL SYLLABLE SEUG..HANGUL SYLLABLE SEUH
+ {0xC2C0, 0xC2C0, prLV}, // Lo HANGUL SYLLABLE SYI
+ {0xC2C1, 0xC2DB, prLVT}, // Lo [27] HANGUL SYLLABLE SYIG..HANGUL SYLLABLE SYIH
+ {0xC2DC, 0xC2DC, prLV}, // Lo HANGUL SYLLABLE SI
+ {0xC2DD, 0xC2F7, prLVT}, // Lo [27] HANGUL SYLLABLE SIG..HANGUL SYLLABLE SIH
+ {0xC2F8, 0xC2F8, prLV}, // Lo HANGUL SYLLABLE SSA
+ {0xC2F9, 0xC313, prLVT}, // Lo [27] HANGUL SYLLABLE SSAG..HANGUL SYLLABLE SSAH
+ {0xC314, 0xC314, prLV}, // Lo HANGUL SYLLABLE SSAE
+ {0xC315, 0xC32F, prLVT}, // Lo [27] HANGUL SYLLABLE SSAEG..HANGUL SYLLABLE SSAEH
+ {0xC330, 0xC330, prLV}, // Lo HANGUL SYLLABLE SSYA
+ {0xC331, 0xC34B, prLVT}, // Lo [27] HANGUL SYLLABLE SSYAG..HANGUL SYLLABLE SSYAH
+ {0xC34C, 0xC34C, prLV}, // Lo HANGUL SYLLABLE SSYAE
+ {0xC34D, 0xC367, prLVT}, // Lo [27] HANGUL SYLLABLE SSYAEG..HANGUL SYLLABLE SSYAEH
+ {0xC368, 0xC368, prLV}, // Lo HANGUL SYLLABLE SSEO
+ {0xC369, 0xC383, prLVT}, // Lo [27] HANGUL SYLLABLE SSEOG..HANGUL SYLLABLE SSEOH
+ {0xC384, 0xC384, prLV}, // Lo HANGUL SYLLABLE SSE
+ {0xC385, 0xC39F, prLVT}, // Lo [27] HANGUL SYLLABLE SSEG..HANGUL SYLLABLE SSEH
+ {0xC3A0, 0xC3A0, prLV}, // Lo HANGUL SYLLABLE SSYEO
+ {0xC3A1, 0xC3BB, prLVT}, // Lo [27] HANGUL SYLLABLE SSYEOG..HANGUL SYLLABLE SSYEOH
+ {0xC3BC, 0xC3BC, prLV}, // Lo HANGUL SYLLABLE SSYE
+ {0xC3BD, 0xC3D7, prLVT}, // Lo [27] HANGUL SYLLABLE SSYEG..HANGUL SYLLABLE SSYEH
+ {0xC3D8, 0xC3D8, prLV}, // Lo HANGUL SYLLABLE SSO
+ {0xC3D9, 0xC3F3, prLVT}, // Lo [27] HANGUL SYLLABLE SSOG..HANGUL SYLLABLE SSOH
+ {0xC3F4, 0xC3F4, prLV}, // Lo HANGUL SYLLABLE SSWA
+ {0xC3F5, 0xC40F, prLVT}, // Lo [27] HANGUL SYLLABLE SSWAG..HANGUL SYLLABLE SSWAH
+ {0xC410, 0xC410, prLV}, // Lo HANGUL SYLLABLE SSWAE
+ {0xC411, 0xC42B, prLVT}, // Lo [27] HANGUL SYLLABLE SSWAEG..HANGUL SYLLABLE SSWAEH
+ {0xC42C, 0xC42C, prLV}, // Lo HANGUL SYLLABLE SSOE
+ {0xC42D, 0xC447, prLVT}, // Lo [27] HANGUL SYLLABLE SSOEG..HANGUL SYLLABLE SSOEH
+ {0xC448, 0xC448, prLV}, // Lo HANGUL SYLLABLE SSYO
+ {0xC449, 0xC463, prLVT}, // Lo [27] HANGUL SYLLABLE SSYOG..HANGUL SYLLABLE SSYOH
+ {0xC464, 0xC464, prLV}, // Lo HANGUL SYLLABLE SSU
+ {0xC465, 0xC47F, prLVT}, // Lo [27] HANGUL SYLLABLE SSUG..HANGUL SYLLABLE SSUH
+ {0xC480, 0xC480, prLV}, // Lo HANGUL SYLLABLE SSWEO
+ {0xC481, 0xC49B, prLVT}, // Lo [27] HANGUL SYLLABLE SSWEOG..HANGUL SYLLABLE SSWEOH
+ {0xC49C, 0xC49C, prLV}, // Lo HANGUL SYLLABLE SSWE
+ {0xC49D, 0xC4B7, prLVT}, // Lo [27] HANGUL SYLLABLE SSWEG..HANGUL SYLLABLE SSWEH
+ {0xC4B8, 0xC4B8, prLV}, // Lo HANGUL SYLLABLE SSWI
+ {0xC4B9, 0xC4D3, prLVT}, // Lo [27] HANGUL SYLLABLE SSWIG..HANGUL SYLLABLE SSWIH
+ {0xC4D4, 0xC4D4, prLV}, // Lo HANGUL SYLLABLE SSYU
+ {0xC4D5, 0xC4EF, prLVT}, // Lo [27] HANGUL SYLLABLE SSYUG..HANGUL SYLLABLE SSYUH
+ {0xC4F0, 0xC4F0, prLV}, // Lo HANGUL SYLLABLE SSEU
+ {0xC4F1, 0xC50B, prLVT}, // Lo [27] HANGUL SYLLABLE SSEUG..HANGUL SYLLABLE SSEUH
+ {0xC50C, 0xC50C, prLV}, // Lo HANGUL SYLLABLE SSYI
+ {0xC50D, 0xC527, prLVT}, // Lo [27] HANGUL SYLLABLE SSYIG..HANGUL SYLLABLE SSYIH
+ {0xC528, 0xC528, prLV}, // Lo HANGUL SYLLABLE SSI
+ {0xC529, 0xC543, prLVT}, // Lo [27] HANGUL SYLLABLE SSIG..HANGUL SYLLABLE SSIH
+ {0xC544, 0xC544, prLV}, // Lo HANGUL SYLLABLE A
+ {0xC545, 0xC55F, prLVT}, // Lo [27] HANGUL SYLLABLE AG..HANGUL SYLLABLE AH
+ {0xC560, 0xC560, prLV}, // Lo HANGUL SYLLABLE AE
+ {0xC561, 0xC57B, prLVT}, // Lo [27] HANGUL SYLLABLE AEG..HANGUL SYLLABLE AEH
+ {0xC57C, 0xC57C, prLV}, // Lo HANGUL SYLLABLE YA
+ {0xC57D, 0xC597, prLVT}, // Lo [27] HANGUL SYLLABLE YAG..HANGUL SYLLABLE YAH
+ {0xC598, 0xC598, prLV}, // Lo HANGUL SYLLABLE YAE
+ {0xC599, 0xC5B3, prLVT}, // Lo [27] HANGUL SYLLABLE YAEG..HANGUL SYLLABLE YAEH
+ {0xC5B4, 0xC5B4, prLV}, // Lo HANGUL SYLLABLE EO
+ {0xC5B5, 0xC5CF, prLVT}, // Lo [27] HANGUL SYLLABLE EOG..HANGUL SYLLABLE EOH
+ {0xC5D0, 0xC5D0, prLV}, // Lo HANGUL SYLLABLE E
+ {0xC5D1, 0xC5EB, prLVT}, // Lo [27] HANGUL SYLLABLE EG..HANGUL SYLLABLE EH
+ {0xC5EC, 0xC5EC, prLV}, // Lo HANGUL SYLLABLE YEO
+ {0xC5ED, 0xC607, prLVT}, // Lo [27] HANGUL SYLLABLE YEOG..HANGUL SYLLABLE YEOH
+ {0xC608, 0xC608, prLV}, // Lo HANGUL SYLLABLE YE
+ {0xC609, 0xC623, prLVT}, // Lo [27] HANGUL SYLLABLE YEG..HANGUL SYLLABLE YEH
+ {0xC624, 0xC624, prLV}, // Lo HANGUL SYLLABLE O
+ {0xC625, 0xC63F, prLVT}, // Lo [27] HANGUL SYLLABLE OG..HANGUL SYLLABLE OH
+ {0xC640, 0xC640, prLV}, // Lo HANGUL SYLLABLE WA
+ {0xC641, 0xC65B, prLVT}, // Lo [27] HANGUL SYLLABLE WAG..HANGUL SYLLABLE WAH
+ {0xC65C, 0xC65C, prLV}, // Lo HANGUL SYLLABLE WAE
+ {0xC65D, 0xC677, prLVT}, // Lo [27] HANGUL SYLLABLE WAEG..HANGUL SYLLABLE WAEH
+ {0xC678, 0xC678, prLV}, // Lo HANGUL SYLLABLE OE
+ {0xC679, 0xC693, prLVT}, // Lo [27] HANGUL SYLLABLE OEG..HANGUL SYLLABLE OEH
+ {0xC694, 0xC694, prLV}, // Lo HANGUL SYLLABLE YO
+ {0xC695, 0xC6AF, prLVT}, // Lo [27] HANGUL SYLLABLE YOG..HANGUL SYLLABLE YOH
+ {0xC6B0, 0xC6B0, prLV}, // Lo HANGUL SYLLABLE U
+ {0xC6B1, 0xC6CB, prLVT}, // Lo [27] HANGUL SYLLABLE UG..HANGUL SYLLABLE UH
+ {0xC6CC, 0xC6CC, prLV}, // Lo HANGUL SYLLABLE WEO
+ {0xC6CD, 0xC6E7, prLVT}, // Lo [27] HANGUL SYLLABLE WEOG..HANGUL SYLLABLE WEOH
+ {0xC6E8, 0xC6E8, prLV}, // Lo HANGUL SYLLABLE WE
+ {0xC6E9, 0xC703, prLVT}, // Lo [27] HANGUL SYLLABLE WEG..HANGUL SYLLABLE WEH
+ {0xC704, 0xC704, prLV}, // Lo HANGUL SYLLABLE WI
+ {0xC705, 0xC71F, prLVT}, // Lo [27] HANGUL SYLLABLE WIG..HANGUL SYLLABLE WIH
+ {0xC720, 0xC720, prLV}, // Lo HANGUL SYLLABLE YU
+ {0xC721, 0xC73B, prLVT}, // Lo [27] HANGUL SYLLABLE YUG..HANGUL SYLLABLE YUH
+ {0xC73C, 0xC73C, prLV}, // Lo HANGUL SYLLABLE EU
+ {0xC73D, 0xC757, prLVT}, // Lo [27] HANGUL SYLLABLE EUG..HANGUL SYLLABLE EUH
+ {0xC758, 0xC758, prLV}, // Lo HANGUL SYLLABLE YI
+ {0xC759, 0xC773, prLVT}, // Lo [27] HANGUL SYLLABLE YIG..HANGUL SYLLABLE YIH
+ {0xC774, 0xC774, prLV}, // Lo HANGUL SYLLABLE I
+ {0xC775, 0xC78F, prLVT}, // Lo [27] HANGUL SYLLABLE IG..HANGUL SYLLABLE IH
+ {0xC790, 0xC790, prLV}, // Lo HANGUL SYLLABLE JA
+ {0xC791, 0xC7AB, prLVT}, // Lo [27] HANGUL SYLLABLE JAG..HANGUL SYLLABLE JAH
+ {0xC7AC, 0xC7AC, prLV}, // Lo HANGUL SYLLABLE JAE
+ {0xC7AD, 0xC7C7, prLVT}, // Lo [27] HANGUL SYLLABLE JAEG..HANGUL SYLLABLE JAEH
+ {0xC7C8, 0xC7C8, prLV}, // Lo HANGUL SYLLABLE JYA
+ {0xC7C9, 0xC7E3, prLVT}, // Lo [27] HANGUL SYLLABLE JYAG..HANGUL SYLLABLE JYAH
+ {0xC7E4, 0xC7E4, prLV}, // Lo HANGUL SYLLABLE JYAE
+ {0xC7E5, 0xC7FF, prLVT}, // Lo [27] HANGUL SYLLABLE JYAEG..HANGUL SYLLABLE JYAEH
+ {0xC800, 0xC800, prLV}, // Lo HANGUL SYLLABLE JEO
+ {0xC801, 0xC81B, prLVT}, // Lo [27] HANGUL SYLLABLE JEOG..HANGUL SYLLABLE JEOH
+ {0xC81C, 0xC81C, prLV}, // Lo HANGUL SYLLABLE JE
+ {0xC81D, 0xC837, prLVT}, // Lo [27] HANGUL SYLLABLE JEG..HANGUL SYLLABLE JEH
+ {0xC838, 0xC838, prLV}, // Lo HANGUL SYLLABLE JYEO
+ {0xC839, 0xC853, prLVT}, // Lo [27] HANGUL SYLLABLE JYEOG..HANGUL SYLLABLE JYEOH
+ {0xC854, 0xC854, prLV}, // Lo HANGUL SYLLABLE JYE
+ {0xC855, 0xC86F, prLVT}, // Lo [27] HANGUL SYLLABLE JYEG..HANGUL SYLLABLE JYEH
+ {0xC870, 0xC870, prLV}, // Lo HANGUL SYLLABLE JO
+ {0xC871, 0xC88B, prLVT}, // Lo [27] HANGUL SYLLABLE JOG..HANGUL SYLLABLE JOH
+ {0xC88C, 0xC88C, prLV}, // Lo HANGUL SYLLABLE JWA
+ {0xC88D, 0xC8A7, prLVT}, // Lo [27] HANGUL SYLLABLE JWAG..HANGUL SYLLABLE JWAH
+ {0xC8A8, 0xC8A8, prLV}, // Lo HANGUL SYLLABLE JWAE
+ {0xC8A9, 0xC8C3, prLVT}, // Lo [27] HANGUL SYLLABLE JWAEG..HANGUL SYLLABLE JWAEH
+ {0xC8C4, 0xC8C4, prLV}, // Lo HANGUL SYLLABLE JOE
+ {0xC8C5, 0xC8DF, prLVT}, // Lo [27] HANGUL SYLLABLE JOEG..HANGUL SYLLABLE JOEH
+ {0xC8E0, 0xC8E0, prLV}, // Lo HANGUL SYLLABLE JYO
+ {0xC8E1, 0xC8FB, prLVT}, // Lo [27] HANGUL SYLLABLE JYOG..HANGUL SYLLABLE JYOH
+ {0xC8FC, 0xC8FC, prLV}, // Lo HANGUL SYLLABLE JU
+ {0xC8FD, 0xC917, prLVT}, // Lo [27] HANGUL SYLLABLE JUG..HANGUL SYLLABLE JUH
+ {0xC918, 0xC918, prLV}, // Lo HANGUL SYLLABLE JWEO
+ {0xC919, 0xC933, prLVT}, // Lo [27] HANGUL SYLLABLE JWEOG..HANGUL SYLLABLE JWEOH
+ {0xC934, 0xC934, prLV}, // Lo HANGUL SYLLABLE JWE
+ {0xC935, 0xC94F, prLVT}, // Lo [27] HANGUL SYLLABLE JWEG..HANGUL SYLLABLE JWEH
+ {0xC950, 0xC950, prLV}, // Lo HANGUL SYLLABLE JWI
+ {0xC951, 0xC96B, prLVT}, // Lo [27] HANGUL SYLLABLE JWIG..HANGUL SYLLABLE JWIH
+ {0xC96C, 0xC96C, prLV}, // Lo HANGUL SYLLABLE JYU
+ {0xC96D, 0xC987, prLVT}, // Lo [27] HANGUL SYLLABLE JYUG..HANGUL SYLLABLE JYUH
+ {0xC988, 0xC988, prLV}, // Lo HANGUL SYLLABLE JEU
+ {0xC989, 0xC9A3, prLVT}, // Lo [27] HANGUL SYLLABLE JEUG..HANGUL SYLLABLE JEUH
+ {0xC9A4, 0xC9A4, prLV}, // Lo HANGUL SYLLABLE JYI
+ {0xC9A5, 0xC9BF, prLVT}, // Lo [27] HANGUL SYLLABLE JYIG..HANGUL SYLLABLE JYIH
+ {0xC9C0, 0xC9C0, prLV}, // Lo HANGUL SYLLABLE JI
+ {0xC9C1, 0xC9DB, prLVT}, // Lo [27] HANGUL SYLLABLE JIG..HANGUL SYLLABLE JIH
+ {0xC9DC, 0xC9DC, prLV}, // Lo HANGUL SYLLABLE JJA
+ {0xC9DD, 0xC9F7, prLVT}, // Lo [27] HANGUL SYLLABLE JJAG..HANGUL SYLLABLE JJAH
+ {0xC9F8, 0xC9F8, prLV}, // Lo HANGUL SYLLABLE JJAE
+ {0xC9F9, 0xCA13, prLVT}, // Lo [27] HANGUL SYLLABLE JJAEG..HANGUL SYLLABLE JJAEH
+ {0xCA14, 0xCA14, prLV}, // Lo HANGUL SYLLABLE JJYA
+ {0xCA15, 0xCA2F, prLVT}, // Lo [27] HANGUL SYLLABLE JJYAG..HANGUL SYLLABLE JJYAH
+ {0xCA30, 0xCA30, prLV}, // Lo HANGUL SYLLABLE JJYAE
+ {0xCA31, 0xCA4B, prLVT}, // Lo [27] HANGUL SYLLABLE JJYAEG..HANGUL SYLLABLE JJYAEH
+ {0xCA4C, 0xCA4C, prLV}, // Lo HANGUL SYLLABLE JJEO
+ {0xCA4D, 0xCA67, prLVT}, // Lo [27] HANGUL SYLLABLE JJEOG..HANGUL SYLLABLE JJEOH
+ {0xCA68, 0xCA68, prLV}, // Lo HANGUL SYLLABLE JJE
+ {0xCA69, 0xCA83, prLVT}, // Lo [27] HANGUL SYLLABLE JJEG..HANGUL SYLLABLE JJEH
+ {0xCA84, 0xCA84, prLV}, // Lo HANGUL SYLLABLE JJYEO
+ {0xCA85, 0xCA9F, prLVT}, // Lo [27] HANGUL SYLLABLE JJYEOG..HANGUL SYLLABLE JJYEOH
+ {0xCAA0, 0xCAA0, prLV}, // Lo HANGUL SYLLABLE JJYE
+ {0xCAA1, 0xCABB, prLVT}, // Lo [27] HANGUL SYLLABLE JJYEG..HANGUL SYLLABLE JJYEH
+ {0xCABC, 0xCABC, prLV}, // Lo HANGUL SYLLABLE JJO
+ {0xCABD, 0xCAD7, prLVT}, // Lo [27] HANGUL SYLLABLE JJOG..HANGUL SYLLABLE JJOH
+ {0xCAD8, 0xCAD8, prLV}, // Lo HANGUL SYLLABLE JJWA
+ {0xCAD9, 0xCAF3, prLVT}, // Lo [27] HANGUL SYLLABLE JJWAG..HANGUL SYLLABLE JJWAH
+ {0xCAF4, 0xCAF4, prLV}, // Lo HANGUL SYLLABLE JJWAE
+ {0xCAF5, 0xCB0F, prLVT}, // Lo [27] HANGUL SYLLABLE JJWAEG..HANGUL SYLLABLE JJWAEH
+ {0xCB10, 0xCB10, prLV}, // Lo HANGUL SYLLABLE JJOE
+ {0xCB11, 0xCB2B, prLVT}, // Lo [27] HANGUL SYLLABLE JJOEG..HANGUL SYLLABLE JJOEH
+ {0xCB2C, 0xCB2C, prLV}, // Lo HANGUL SYLLABLE JJYO
+ {0xCB2D, 0xCB47, prLVT}, // Lo [27] HANGUL SYLLABLE JJYOG..HANGUL SYLLABLE JJYOH
+ {0xCB48, 0xCB48, prLV}, // Lo HANGUL SYLLABLE JJU
+ {0xCB49, 0xCB63, prLVT}, // Lo [27] HANGUL SYLLABLE JJUG..HANGUL SYLLABLE JJUH
+ {0xCB64, 0xCB64, prLV}, // Lo HANGUL SYLLABLE JJWEO
+ {0xCB65, 0xCB7F, prLVT}, // Lo [27] HANGUL SYLLABLE JJWEOG..HANGUL SYLLABLE JJWEOH
+ {0xCB80, 0xCB80, prLV}, // Lo HANGUL SYLLABLE JJWE
+ {0xCB81, 0xCB9B, prLVT}, // Lo [27] HANGUL SYLLABLE JJWEG..HANGUL SYLLABLE JJWEH
+ {0xCB9C, 0xCB9C, prLV}, // Lo HANGUL SYLLABLE JJWI
+ {0xCB9D, 0xCBB7, prLVT}, // Lo [27] HANGUL SYLLABLE JJWIG..HANGUL SYLLABLE JJWIH
+ {0xCBB8, 0xCBB8, prLV}, // Lo HANGUL SYLLABLE JJYU
+ {0xCBB9, 0xCBD3, prLVT}, // Lo [27] HANGUL SYLLABLE JJYUG..HANGUL SYLLABLE JJYUH
+ {0xCBD4, 0xCBD4, prLV}, // Lo HANGUL SYLLABLE JJEU
+ {0xCBD5, 0xCBEF, prLVT}, // Lo [27] HANGUL SYLLABLE JJEUG..HANGUL SYLLABLE JJEUH
+ {0xCBF0, 0xCBF0, prLV}, // Lo HANGUL SYLLABLE JJYI
+ {0xCBF1, 0xCC0B, prLVT}, // Lo [27] HANGUL SYLLABLE JJYIG..HANGUL SYLLABLE JJYIH
+ {0xCC0C, 0xCC0C, prLV}, // Lo HANGUL SYLLABLE JJI
+ {0xCC0D, 0xCC27, prLVT}, // Lo [27] HANGUL SYLLABLE JJIG..HANGUL SYLLABLE JJIH
+ {0xCC28, 0xCC28, prLV}, // Lo HANGUL SYLLABLE CA
+ {0xCC29, 0xCC43, prLVT}, // Lo [27] HANGUL SYLLABLE CAG..HANGUL SYLLABLE CAH
+ {0xCC44, 0xCC44, prLV}, // Lo HANGUL SYLLABLE CAE
+ {0xCC45, 0xCC5F, prLVT}, // Lo [27] HANGUL SYLLABLE CAEG..HANGUL SYLLABLE CAEH
+ {0xCC60, 0xCC60, prLV}, // Lo HANGUL SYLLABLE CYA
+ {0xCC61, 0xCC7B, prLVT}, // Lo [27] HANGUL SYLLABLE CYAG..HANGUL SYLLABLE CYAH
+ {0xCC7C, 0xCC7C, prLV}, // Lo HANGUL SYLLABLE CYAE
+ {0xCC7D, 0xCC97, prLVT}, // Lo [27] HANGUL SYLLABLE CYAEG..HANGUL SYLLABLE CYAEH
+ {0xCC98, 0xCC98, prLV}, // Lo HANGUL SYLLABLE CEO
+ {0xCC99, 0xCCB3, prLVT}, // Lo [27] HANGUL SYLLABLE CEOG..HANGUL SYLLABLE CEOH
+ {0xCCB4, 0xCCB4, prLV}, // Lo HANGUL SYLLABLE CE
+ {0xCCB5, 0xCCCF, prLVT}, // Lo [27] HANGUL SYLLABLE CEG..HANGUL SYLLABLE CEH
+ {0xCCD0, 0xCCD0, prLV}, // Lo HANGUL SYLLABLE CYEO
+ {0xCCD1, 0xCCEB, prLVT}, // Lo [27] HANGUL SYLLABLE CYEOG..HANGUL SYLLABLE CYEOH
+ {0xCCEC, 0xCCEC, prLV}, // Lo HANGUL SYLLABLE CYE
+ {0xCCED, 0xCD07, prLVT}, // Lo [27] HANGUL SYLLABLE CYEG..HANGUL SYLLABLE CYEH
+ {0xCD08, 0xCD08, prLV}, // Lo HANGUL SYLLABLE CO
+ {0xCD09, 0xCD23, prLVT}, // Lo [27] HANGUL SYLLABLE COG..HANGUL SYLLABLE COH
+ {0xCD24, 0xCD24, prLV}, // Lo HANGUL SYLLABLE CWA
+ {0xCD25, 0xCD3F, prLVT}, // Lo [27] HANGUL SYLLABLE CWAG..HANGUL SYLLABLE CWAH
+ {0xCD40, 0xCD40, prLV}, // Lo HANGUL SYLLABLE CWAE
+ {0xCD41, 0xCD5B, prLVT}, // Lo [27] HANGUL SYLLABLE CWAEG..HANGUL SYLLABLE CWAEH
+ {0xCD5C, 0xCD5C, prLV}, // Lo HANGUL SYLLABLE COE
+ {0xCD5D, 0xCD77, prLVT}, // Lo [27] HANGUL SYLLABLE COEG..HANGUL SYLLABLE COEH
+ {0xCD78, 0xCD78, prLV}, // Lo HANGUL SYLLABLE CYO
+ {0xCD79, 0xCD93, prLVT}, // Lo [27] HANGUL SYLLABLE CYOG..HANGUL SYLLABLE CYOH
+ {0xCD94, 0xCD94, prLV}, // Lo HANGUL SYLLABLE CU
+ {0xCD95, 0xCDAF, prLVT}, // Lo [27] HANGUL SYLLABLE CUG..HANGUL SYLLABLE CUH
+ {0xCDB0, 0xCDB0, prLV}, // Lo HANGUL SYLLABLE CWEO
+ {0xCDB1, 0xCDCB, prLVT}, // Lo [27] HANGUL SYLLABLE CWEOG..HANGUL SYLLABLE CWEOH
+ {0xCDCC, 0xCDCC, prLV}, // Lo HANGUL SYLLABLE CWE
+ {0xCDCD, 0xCDE7, prLVT}, // Lo [27] HANGUL SYLLABLE CWEG..HANGUL SYLLABLE CWEH
+ {0xCDE8, 0xCDE8, prLV}, // Lo HANGUL SYLLABLE CWI
+ {0xCDE9, 0xCE03, prLVT}, // Lo [27] HANGUL SYLLABLE CWIG..HANGUL SYLLABLE CWIH
+ {0xCE04, 0xCE04, prLV}, // Lo HANGUL SYLLABLE CYU
+ {0xCE05, 0xCE1F, prLVT}, // Lo [27] HANGUL SYLLABLE CYUG..HANGUL SYLLABLE CYUH
+ {0xCE20, 0xCE20, prLV}, // Lo HANGUL SYLLABLE CEU
+ {0xCE21, 0xCE3B, prLVT}, // Lo [27] HANGUL SYLLABLE CEUG..HANGUL SYLLABLE CEUH
+ {0xCE3C, 0xCE3C, prLV}, // Lo HANGUL SYLLABLE CYI
+ {0xCE3D, 0xCE57, prLVT}, // Lo [27] HANGUL SYLLABLE CYIG..HANGUL SYLLABLE CYIH
+ {0xCE58, 0xCE58, prLV}, // Lo HANGUL SYLLABLE CI
+ {0xCE59, 0xCE73, prLVT}, // Lo [27] HANGUL SYLLABLE CIG..HANGUL SYLLABLE CIH
+ {0xCE74, 0xCE74, prLV}, // Lo HANGUL SYLLABLE KA
+ {0xCE75, 0xCE8F, prLVT}, // Lo [27] HANGUL SYLLABLE KAG..HANGUL SYLLABLE KAH
+ {0xCE90, 0xCE90, prLV}, // Lo HANGUL SYLLABLE KAE
+ {0xCE91, 0xCEAB, prLVT}, // Lo [27] HANGUL SYLLABLE KAEG..HANGUL SYLLABLE KAEH
+ {0xCEAC, 0xCEAC, prLV}, // Lo HANGUL SYLLABLE KYA
+ {0xCEAD, 0xCEC7, prLVT}, // Lo [27] HANGUL SYLLABLE KYAG..HANGUL SYLLABLE KYAH
+ {0xCEC8, 0xCEC8, prLV}, // Lo HANGUL SYLLABLE KYAE
+ {0xCEC9, 0xCEE3, prLVT}, // Lo [27] HANGUL SYLLABLE KYAEG..HANGUL SYLLABLE KYAEH
+ {0xCEE4, 0xCEE4, prLV}, // Lo HANGUL SYLLABLE KEO
+ {0xCEE5, 0xCEFF, prLVT}, // Lo [27] HANGUL SYLLABLE KEOG..HANGUL SYLLABLE KEOH
+ {0xCF00, 0xCF00, prLV}, // Lo HANGUL SYLLABLE KE
+ {0xCF01, 0xCF1B, prLVT}, // Lo [27] HANGUL SYLLABLE KEG..HANGUL SYLLABLE KEH
+ {0xCF1C, 0xCF1C, prLV}, // Lo HANGUL SYLLABLE KYEO
+ {0xCF1D, 0xCF37, prLVT}, // Lo [27] HANGUL SYLLABLE KYEOG..HANGUL SYLLABLE KYEOH
+ {0xCF38, 0xCF38, prLV}, // Lo HANGUL SYLLABLE KYE
+ {0xCF39, 0xCF53, prLVT}, // Lo [27] HANGUL SYLLABLE KYEG..HANGUL SYLLABLE KYEH
+ {0xCF54, 0xCF54, prLV}, // Lo HANGUL SYLLABLE KO
+ {0xCF55, 0xCF6F, prLVT}, // Lo [27] HANGUL SYLLABLE KOG..HANGUL SYLLABLE KOH
+ {0xCF70, 0xCF70, prLV}, // Lo HANGUL SYLLABLE KWA
+ {0xCF71, 0xCF8B, prLVT}, // Lo [27] HANGUL SYLLABLE KWAG..HANGUL SYLLABLE KWAH
+ {0xCF8C, 0xCF8C, prLV}, // Lo HANGUL SYLLABLE KWAE
+ {0xCF8D, 0xCFA7, prLVT}, // Lo [27] HANGUL SYLLABLE KWAEG..HANGUL SYLLABLE KWAEH
+ {0xCFA8, 0xCFA8, prLV}, // Lo HANGUL SYLLABLE KOE
+ {0xCFA9, 0xCFC3, prLVT}, // Lo [27] HANGUL SYLLABLE KOEG..HANGUL SYLLABLE KOEH
+ {0xCFC4, 0xCFC4, prLV}, // Lo HANGUL SYLLABLE KYO
+ {0xCFC5, 0xCFDF, prLVT}, // Lo [27] HANGUL SYLLABLE KYOG..HANGUL SYLLABLE KYOH
+ {0xCFE0, 0xCFE0, prLV}, // Lo HANGUL SYLLABLE KU
+ {0xCFE1, 0xCFFB, prLVT}, // Lo [27] HANGUL SYLLABLE KUG..HANGUL SYLLABLE KUH
+ {0xCFFC, 0xCFFC, prLV}, // Lo HANGUL SYLLABLE KWEO
+ {0xCFFD, 0xD017, prLVT}, // Lo [27] HANGUL SYLLABLE KWEOG..HANGUL SYLLABLE KWEOH
+ {0xD018, 0xD018, prLV}, // Lo HANGUL SYLLABLE KWE
+ {0xD019, 0xD033, prLVT}, // Lo [27] HANGUL SYLLABLE KWEG..HANGUL SYLLABLE KWEH
+ {0xD034, 0xD034, prLV}, // Lo HANGUL SYLLABLE KWI
+ {0xD035, 0xD04F, prLVT}, // Lo [27] HANGUL SYLLABLE KWIG..HANGUL SYLLABLE KWIH
+ {0xD050, 0xD050, prLV}, // Lo HANGUL SYLLABLE KYU
+ {0xD051, 0xD06B, prLVT}, // Lo [27] HANGUL SYLLABLE KYUG..HANGUL SYLLABLE KYUH
+ {0xD06C, 0xD06C, prLV}, // Lo HANGUL SYLLABLE KEU
+ {0xD06D, 0xD087, prLVT}, // Lo [27] HANGUL SYLLABLE KEUG..HANGUL SYLLABLE KEUH
+ {0xD088, 0xD088, prLV}, // Lo HANGUL SYLLABLE KYI
+ {0xD089, 0xD0A3, prLVT}, // Lo [27] HANGUL SYLLABLE KYIG..HANGUL SYLLABLE KYIH
+ {0xD0A4, 0xD0A4, prLV}, // Lo HANGUL SYLLABLE KI
+ {0xD0A5, 0xD0BF, prLVT}, // Lo [27] HANGUL SYLLABLE KIG..HANGUL SYLLABLE KIH
+ {0xD0C0, 0xD0C0, prLV}, // Lo HANGUL SYLLABLE TA
+ {0xD0C1, 0xD0DB, prLVT}, // Lo [27] HANGUL SYLLABLE TAG..HANGUL SYLLABLE TAH
+ {0xD0DC, 0xD0DC, prLV}, // Lo HANGUL SYLLABLE TAE
+ {0xD0DD, 0xD0F7, prLVT}, // Lo [27] HANGUL SYLLABLE TAEG..HANGUL SYLLABLE TAEH
+ {0xD0F8, 0xD0F8, prLV}, // Lo HANGUL SYLLABLE TYA
+ {0xD0F9, 0xD113, prLVT}, // Lo [27] HANGUL SYLLABLE TYAG..HANGUL SYLLABLE TYAH
+ {0xD114, 0xD114, prLV}, // Lo HANGUL SYLLABLE TYAE
+ {0xD115, 0xD12F, prLVT}, // Lo [27] HANGUL SYLLABLE TYAEG..HANGUL SYLLABLE TYAEH
+ {0xD130, 0xD130, prLV}, // Lo HANGUL SYLLABLE TEO
+ {0xD131, 0xD14B, prLVT}, // Lo [27] HANGUL SYLLABLE TEOG..HANGUL SYLLABLE TEOH
+ {0xD14C, 0xD14C, prLV}, // Lo HANGUL SYLLABLE TE
+ {0xD14D, 0xD167, prLVT}, // Lo [27] HANGUL SYLLABLE TEG..HANGUL SYLLABLE TEH
+ {0xD168, 0xD168, prLV}, // Lo HANGUL SYLLABLE TYEO
+ {0xD169, 0xD183, prLVT}, // Lo [27] HANGUL SYLLABLE TYEOG..HANGUL SYLLABLE TYEOH
+ {0xD184, 0xD184, prLV}, // Lo HANGUL SYLLABLE TYE
+ {0xD185, 0xD19F, prLVT}, // Lo [27] HANGUL SYLLABLE TYEG..HANGUL SYLLABLE TYEH
+ {0xD1A0, 0xD1A0, prLV}, // Lo HANGUL SYLLABLE TO
+ {0xD1A1, 0xD1BB, prLVT}, // Lo [27] HANGUL SYLLABLE TOG..HANGUL SYLLABLE TOH
+ {0xD1BC, 0xD1BC, prLV}, // Lo HANGUL SYLLABLE TWA
+ {0xD1BD, 0xD1D7, prLVT}, // Lo [27] HANGUL SYLLABLE TWAG..HANGUL SYLLABLE TWAH
+ {0xD1D8, 0xD1D8, prLV}, // Lo HANGUL SYLLABLE TWAE
+ {0xD1D9, 0xD1F3, prLVT}, // Lo [27] HANGUL SYLLABLE TWAEG..HANGUL SYLLABLE TWAEH
+ {0xD1F4, 0xD1F4, prLV}, // Lo HANGUL SYLLABLE TOE
+ {0xD1F5, 0xD20F, prLVT}, // Lo [27] HANGUL SYLLABLE TOEG..HANGUL SYLLABLE TOEH
+ {0xD210, 0xD210, prLV}, // Lo HANGUL SYLLABLE TYO
+ {0xD211, 0xD22B, prLVT}, // Lo [27] HANGUL SYLLABLE TYOG..HANGUL SYLLABLE TYOH
+ {0xD22C, 0xD22C, prLV}, // Lo HANGUL SYLLABLE TU
+ {0xD22D, 0xD247, prLVT}, // Lo [27] HANGUL SYLLABLE TUG..HANGUL SYLLABLE TUH
+ {0xD248, 0xD248, prLV}, // Lo HANGUL SYLLABLE TWEO
+ {0xD249, 0xD263, prLVT}, // Lo [27] HANGUL SYLLABLE TWEOG..HANGUL SYLLABLE TWEOH
+ {0xD264, 0xD264, prLV}, // Lo HANGUL SYLLABLE TWE
+ {0xD265, 0xD27F, prLVT}, // Lo [27] HANGUL SYLLABLE TWEG..HANGUL SYLLABLE TWEH
+ {0xD280, 0xD280, prLV}, // Lo HANGUL SYLLABLE TWI
+ {0xD281, 0xD29B, prLVT}, // Lo [27] HANGUL SYLLABLE TWIG..HANGUL SYLLABLE TWIH
+ {0xD29C, 0xD29C, prLV}, // Lo HANGUL SYLLABLE TYU
+ {0xD29D, 0xD2B7, prLVT}, // Lo [27] HANGUL SYLLABLE TYUG..HANGUL SYLLABLE TYUH
+ {0xD2B8, 0xD2B8, prLV}, // Lo HANGUL SYLLABLE TEU
+ {0xD2B9, 0xD2D3, prLVT}, // Lo [27] HANGUL SYLLABLE TEUG..HANGUL SYLLABLE TEUH
+ {0xD2D4, 0xD2D4, prLV}, // Lo HANGUL SYLLABLE TYI
+ {0xD2D5, 0xD2EF, prLVT}, // Lo [27] HANGUL SYLLABLE TYIG..HANGUL SYLLABLE TYIH
+ {0xD2F0, 0xD2F0, prLV}, // Lo HANGUL SYLLABLE TI
+ {0xD2F1, 0xD30B, prLVT}, // Lo [27] HANGUL SYLLABLE TIG..HANGUL SYLLABLE TIH
+ {0xD30C, 0xD30C, prLV}, // Lo HANGUL SYLLABLE PA
+ {0xD30D, 0xD327, prLVT}, // Lo [27] HANGUL SYLLABLE PAG..HANGUL SYLLABLE PAH
+ {0xD328, 0xD328, prLV}, // Lo HANGUL SYLLABLE PAE
+ {0xD329, 0xD343, prLVT}, // Lo [27] HANGUL SYLLABLE PAEG..HANGUL SYLLABLE PAEH
+ {0xD344, 0xD344, prLV}, // Lo HANGUL SYLLABLE PYA
+ {0xD345, 0xD35F, prLVT}, // Lo [27] HANGUL SYLLABLE PYAG..HANGUL SYLLABLE PYAH
+ {0xD360, 0xD360, prLV}, // Lo HANGUL SYLLABLE PYAE
+ {0xD361, 0xD37B, prLVT}, // Lo [27] HANGUL SYLLABLE PYAEG..HANGUL SYLLABLE PYAEH
+ {0xD37C, 0xD37C, prLV}, // Lo HANGUL SYLLABLE PEO
+ {0xD37D, 0xD397, prLVT}, // Lo [27] HANGUL SYLLABLE PEOG..HANGUL SYLLABLE PEOH
+ {0xD398, 0xD398, prLV}, // Lo HANGUL SYLLABLE PE
+ {0xD399, 0xD3B3, prLVT}, // Lo [27] HANGUL SYLLABLE PEG..HANGUL SYLLABLE PEH
+ {0xD3B4, 0xD3B4, prLV}, // Lo HANGUL SYLLABLE PYEO
+ {0xD3B5, 0xD3CF, prLVT}, // Lo [27] HANGUL SYLLABLE PYEOG..HANGUL SYLLABLE PYEOH
+ {0xD3D0, 0xD3D0, prLV}, // Lo HANGUL SYLLABLE PYE
+ {0xD3D1, 0xD3EB, prLVT}, // Lo [27] HANGUL SYLLABLE PYEG..HANGUL SYLLABLE PYEH
+ {0xD3EC, 0xD3EC, prLV}, // Lo HANGUL SYLLABLE PO
+ {0xD3ED, 0xD407, prLVT}, // Lo [27] HANGUL SYLLABLE POG..HANGUL SYLLABLE POH
+ {0xD408, 0xD408, prLV}, // Lo HANGUL SYLLABLE PWA
+ {0xD409, 0xD423, prLVT}, // Lo [27] HANGUL SYLLABLE PWAG..HANGUL SYLLABLE PWAH
+ {0xD424, 0xD424, prLV}, // Lo HANGUL SYLLABLE PWAE
+ {0xD425, 0xD43F, prLVT}, // Lo [27] HANGUL SYLLABLE PWAEG..HANGUL SYLLABLE PWAEH
+ {0xD440, 0xD440, prLV}, // Lo HANGUL SYLLABLE POE
+ {0xD441, 0xD45B, prLVT}, // Lo [27] HANGUL SYLLABLE POEG..HANGUL SYLLABLE POEH
+ {0xD45C, 0xD45C, prLV}, // Lo HANGUL SYLLABLE PYO
+ {0xD45D, 0xD477, prLVT}, // Lo [27] HANGUL SYLLABLE PYOG..HANGUL SYLLABLE PYOH
+ {0xD478, 0xD478, prLV}, // Lo HANGUL SYLLABLE PU
+ {0xD479, 0xD493, prLVT}, // Lo [27] HANGUL SYLLABLE PUG..HANGUL SYLLABLE PUH
+ {0xD494, 0xD494, prLV}, // Lo HANGUL SYLLABLE PWEO
+ {0xD495, 0xD4AF, prLVT}, // Lo [27] HANGUL SYLLABLE PWEOG..HANGUL SYLLABLE PWEOH
+ {0xD4B0, 0xD4B0, prLV}, // Lo HANGUL SYLLABLE PWE
+ {0xD4B1, 0xD4CB, prLVT}, // Lo [27] HANGUL SYLLABLE PWEG..HANGUL SYLLABLE PWEH
+ {0xD4CC, 0xD4CC, prLV}, // Lo HANGUL SYLLABLE PWI
+ {0xD4CD, 0xD4E7, prLVT}, // Lo [27] HANGUL SYLLABLE PWIG..HANGUL SYLLABLE PWIH
+ {0xD4E8, 0xD4E8, prLV}, // Lo HANGUL SYLLABLE PYU
+ {0xD4E9, 0xD503, prLVT}, // Lo [27] HANGUL SYLLABLE PYUG..HANGUL SYLLABLE PYUH
+ {0xD504, 0xD504, prLV}, // Lo HANGUL SYLLABLE PEU
+ {0xD505, 0xD51F, prLVT}, // Lo [27] HANGUL SYLLABLE PEUG..HANGUL SYLLABLE PEUH
+ {0xD520, 0xD520, prLV}, // Lo HANGUL SYLLABLE PYI
+ {0xD521, 0xD53B, prLVT}, // Lo [27] HANGUL SYLLABLE PYIG..HANGUL SYLLABLE PYIH
+ {0xD53C, 0xD53C, prLV}, // Lo HANGUL SYLLABLE PI
+ {0xD53D, 0xD557, prLVT}, // Lo [27] HANGUL SYLLABLE PIG..HANGUL SYLLABLE PIH
+ {0xD558, 0xD558, prLV}, // Lo HANGUL SYLLABLE HA
+ {0xD559, 0xD573, prLVT}, // Lo [27] HANGUL SYLLABLE HAG..HANGUL SYLLABLE HAH
+ {0xD574, 0xD574, prLV}, // Lo HANGUL SYLLABLE HAE
+ {0xD575, 0xD58F, prLVT}, // Lo [27] HANGUL SYLLABLE HAEG..HANGUL SYLLABLE HAEH
+ {0xD590, 0xD590, prLV}, // Lo HANGUL SYLLABLE HYA
+ {0xD591, 0xD5AB, prLVT}, // Lo [27] HANGUL SYLLABLE HYAG..HANGUL SYLLABLE HYAH
+ {0xD5AC, 0xD5AC, prLV}, // Lo HANGUL SYLLABLE HYAE
+ {0xD5AD, 0xD5C7, prLVT}, // Lo [27] HANGUL SYLLABLE HYAEG..HANGUL SYLLABLE HYAEH
+ {0xD5C8, 0xD5C8, prLV}, // Lo HANGUL SYLLABLE HEO
+ {0xD5C9, 0xD5E3, prLVT}, // Lo [27] HANGUL SYLLABLE HEOG..HANGUL SYLLABLE HEOH
+ {0xD5E4, 0xD5E4, prLV}, // Lo HANGUL SYLLABLE HE
+ {0xD5E5, 0xD5FF, prLVT}, // Lo [27] HANGUL SYLLABLE HEG..HANGUL SYLLABLE HEH
+ {0xD600, 0xD600, prLV}, // Lo HANGUL SYLLABLE HYEO
+ {0xD601, 0xD61B, prLVT}, // Lo [27] HANGUL SYLLABLE HYEOG..HANGUL SYLLABLE HYEOH
+ {0xD61C, 0xD61C, prLV}, // Lo HANGUL SYLLABLE HYE
+ {0xD61D, 0xD637, prLVT}, // Lo [27] HANGUL SYLLABLE HYEG..HANGUL SYLLABLE HYEH
+ {0xD638, 0xD638, prLV}, // Lo HANGUL SYLLABLE HO
+ {0xD639, 0xD653, prLVT}, // Lo [27] HANGUL SYLLABLE HOG..HANGUL SYLLABLE HOH
+ {0xD654, 0xD654, prLV}, // Lo HANGUL SYLLABLE HWA
+ {0xD655, 0xD66F, prLVT}, // Lo [27] HANGUL SYLLABLE HWAG..HANGUL SYLLABLE HWAH
+ {0xD670, 0xD670, prLV}, // Lo HANGUL SYLLABLE HWAE
+ {0xD671, 0xD68B, prLVT}, // Lo [27] HANGUL SYLLABLE HWAEG..HANGUL SYLLABLE HWAEH
+ {0xD68C, 0xD68C, prLV}, // Lo HANGUL SYLLABLE HOE
+ {0xD68D, 0xD6A7, prLVT}, // Lo [27] HANGUL SYLLABLE HOEG..HANGUL SYLLABLE HOEH
+ {0xD6A8, 0xD6A8, prLV}, // Lo HANGUL SYLLABLE HYO
+ {0xD6A9, 0xD6C3, prLVT}, // Lo [27] HANGUL SYLLABLE HYOG..HANGUL SYLLABLE HYOH
+ {0xD6C4, 0xD6C4, prLV}, // Lo HANGUL SYLLABLE HU
+ {0xD6C5, 0xD6DF, prLVT}, // Lo [27] HANGUL SYLLABLE HUG..HANGUL SYLLABLE HUH
+ {0xD6E0, 0xD6E0, prLV}, // Lo HANGUL SYLLABLE HWEO
+ {0xD6E1, 0xD6FB, prLVT}, // Lo [27] HANGUL SYLLABLE HWEOG..HANGUL SYLLABLE HWEOH
+ {0xD6FC, 0xD6FC, prLV}, // Lo HANGUL SYLLABLE HWE
+ {0xD6FD, 0xD717, prLVT}, // Lo [27] HANGUL SYLLABLE HWEG..HANGUL SYLLABLE HWEH
+ {0xD718, 0xD718, prLV}, // Lo HANGUL SYLLABLE HWI
+ {0xD719, 0xD733, prLVT}, // Lo [27] HANGUL SYLLABLE HWIG..HANGUL SYLLABLE HWIH
+ {0xD734, 0xD734, prLV}, // Lo HANGUL SYLLABLE HYU
+ {0xD735, 0xD74F, prLVT}, // Lo [27] HANGUL SYLLABLE HYUG..HANGUL SYLLABLE HYUH
+ {0xD750, 0xD750, prLV}, // Lo HANGUL SYLLABLE HEU
+ {0xD751, 0xD76B, prLVT}, // Lo [27] HANGUL SYLLABLE HEUG..HANGUL SYLLABLE HEUH
+ {0xD76C, 0xD76C, prLV}, // Lo HANGUL SYLLABLE HYI
+ {0xD76D, 0xD787, prLVT}, // Lo [27] HANGUL SYLLABLE HYIG..HANGUL SYLLABLE HYIH
+ {0xD788, 0xD788, prLV}, // Lo HANGUL SYLLABLE HI
+ {0xD789, 0xD7A3, prLVT}, // Lo [27] HANGUL SYLLABLE HIG..HANGUL SYLLABLE HIH
+ {0xD7B0, 0xD7C6, prV}, // Lo [23] HANGUL JUNGSEONG O-YEO..HANGUL JUNGSEONG ARAEA-E
+ {0xD7CB, 0xD7FB, prT}, // Lo [49] HANGUL JONGSEONG NIEUN-RIEUL..HANGUL JONGSEONG PHIEUPH-THIEUTH
+ {0xFB1E, 0xFB1E, prExtend}, // Mn HEBREW POINT JUDEO-SPANISH VARIKA
+ {0xFE00, 0xFE0F, prExtend}, // Mn [16] VARIATION SELECTOR-1..VARIATION SELECTOR-16
+ {0xFE20, 0xFE2F, prExtend}, // Mn [16] COMBINING LIGATURE LEFT HALF..COMBINING CYRILLIC TITLO RIGHT HALF
+ {0xFEFF, 0xFEFF, prControl}, // Cf ZERO WIDTH NO-BREAK SPACE
+ {0xFF9E, 0xFF9F, prExtend}, // Lm [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK
+ {0xFFF0, 0xFFF8, prControl}, // Cn [9] <reserved-FFF0>..<reserved-FFF8>
+ {0xFFF9, 0xFFFB, prControl}, // Cf [3] INTERLINEAR ANNOTATION ANCHOR..INTERLINEAR ANNOTATION TERMINATOR
+ {0x101FD, 0x101FD, prExtend}, // Mn PHAISTOS DISC SIGN COMBINING OBLIQUE STROKE
+ {0x102E0, 0x102E0, prExtend}, // Mn COPTIC EPACT THOUSANDS MARK
+ {0x10376, 0x1037A, prExtend}, // Mn [5] COMBINING OLD PERMIC LETTER AN..COMBINING OLD PERMIC LETTER SII
+ {0x10A01, 0x10A03, prExtend}, // Mn [3] KHAROSHTHI VOWEL SIGN I..KHAROSHTHI VOWEL SIGN VOCALIC R
+ {0x10A05, 0x10A06, prExtend}, // Mn [2] KHAROSHTHI VOWEL SIGN E..KHAROSHTHI VOWEL SIGN O
+ {0x10A0C, 0x10A0F, prExtend}, // Mn [4] KHAROSHTHI VOWEL LENGTH MARK..KHAROSHTHI SIGN VISARGA
+ {0x10A38, 0x10A3A, prExtend}, // Mn [3] KHAROSHTHI SIGN BAR ABOVE..KHAROSHTHI SIGN DOT BELOW
+ {0x10A3F, 0x10A3F, prExtend}, // Mn KHAROSHTHI VIRAMA
+ {0x10AE5, 0x10AE6, prExtend}, // Mn [2] MANICHAEAN ABBREVIATION MARK ABOVE..MANICHAEAN ABBREVIATION MARK BELOW
+ {0x10D24, 0x10D27, prExtend}, // Mn [4] HANIFI ROHINGYA SIGN HARBAHAY..HANIFI ROHINGYA SIGN TASSI
+ {0x10EAB, 0x10EAC, prExtend}, // Mn [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK
+ {0x10EFD, 0x10EFF, prExtend}, // Mn [3] ARABIC SMALL LOW WORD SAKTA..ARABIC SMALL LOW WORD MADDA
+ {0x10F46, 0x10F50, prExtend}, // Mn [11] SOGDIAN COMBINING DOT BELOW..SOGDIAN COMBINING STROKE BELOW
+ {0x10F82, 0x10F85, prExtend}, // Mn [4] OLD UYGHUR COMBINING DOT ABOVE..OLD UYGHUR COMBINING TWO DOTS BELOW
+ {0x11000, 0x11000, prSpacingMark}, // Mc BRAHMI SIGN CANDRABINDU
+ {0x11001, 0x11001, prExtend}, // Mn BRAHMI SIGN ANUSVARA
+ {0x11002, 0x11002, prSpacingMark}, // Mc BRAHMI SIGN VISARGA
+ {0x11038, 0x11046, prExtend}, // Mn [15] BRAHMI VOWEL SIGN AA..BRAHMI VIRAMA
+ {0x11070, 0x11070, prExtend}, // Mn BRAHMI SIGN OLD TAMIL VIRAMA
+ {0x11073, 0x11074, prExtend}, // Mn [2] BRAHMI VOWEL SIGN OLD TAMIL SHORT E..BRAHMI VOWEL SIGN OLD TAMIL SHORT O
+ {0x1107F, 0x11081, prExtend}, // Mn [3] BRAHMI NUMBER JOINER..KAITHI SIGN ANUSVARA
+ {0x11082, 0x11082, prSpacingMark}, // Mc KAITHI SIGN VISARGA
+ {0x110B0, 0x110B2, prSpacingMark}, // Mc [3] KAITHI VOWEL SIGN AA..KAITHI VOWEL SIGN II
+ {0x110B3, 0x110B6, prExtend}, // Mn [4] KAITHI VOWEL SIGN U..KAITHI VOWEL SIGN AI
+ {0x110B7, 0x110B8, prSpacingMark}, // Mc [2] KAITHI VOWEL SIGN O..KAITHI VOWEL SIGN AU
+ {0x110B9, 0x110BA, prExtend}, // Mn [2] KAITHI SIGN VIRAMA..KAITHI SIGN NUKTA
+ {0x110BD, 0x110BD, prPrepend}, // Cf KAITHI NUMBER SIGN
+ {0x110C2, 0x110C2, prExtend}, // Mn KAITHI VOWEL SIGN VOCALIC R
+ {0x110CD, 0x110CD, prPrepend}, // Cf KAITHI NUMBER SIGN ABOVE
+ {0x11100, 0x11102, prExtend}, // Mn [3] CHAKMA SIGN CANDRABINDU..CHAKMA SIGN VISARGA
+ {0x11127, 0x1112B, prExtend}, // Mn [5] CHAKMA VOWEL SIGN A..CHAKMA VOWEL SIGN UU
+ {0x1112C, 0x1112C, prSpacingMark}, // Mc CHAKMA VOWEL SIGN E
+ {0x1112D, 0x11134, prExtend}, // Mn [8] CHAKMA VOWEL SIGN AI..CHAKMA MAAYYAA
+ {0x11145, 0x11146, prSpacingMark}, // Mc [2] CHAKMA VOWEL SIGN AA..CHAKMA VOWEL SIGN EI
+ {0x11173, 0x11173, prExtend}, // Mn MAHAJANI SIGN NUKTA
+ {0x11180, 0x11181, prExtend}, // Mn [2] SHARADA SIGN CANDRABINDU..SHARADA SIGN ANUSVARA
+ {0x11182, 0x11182, prSpacingMark}, // Mc SHARADA SIGN VISARGA
+ {0x111B3, 0x111B5, prSpacingMark}, // Mc [3] SHARADA VOWEL SIGN AA..SHARADA VOWEL SIGN II
+ {0x111B6, 0x111BE, prExtend}, // Mn [9] SHARADA VOWEL SIGN U..SHARADA VOWEL SIGN O
+ {0x111BF, 0x111C0, prSpacingMark}, // Mc [2] SHARADA VOWEL SIGN AU..SHARADA SIGN VIRAMA
+ {0x111C2, 0x111C3, prPrepend}, // Lo [2] SHARADA SIGN JIHVAMULIYA..SHARADA SIGN UPADHMANIYA
+ {0x111C9, 0x111CC, prExtend}, // Mn [4] SHARADA SANDHI MARK..SHARADA EXTRA SHORT VOWEL MARK
+ {0x111CE, 0x111CE, prSpacingMark}, // Mc SHARADA VOWEL SIGN PRISHTHAMATRA E
+ {0x111CF, 0x111CF, prExtend}, // Mn SHARADA SIGN INVERTED CANDRABINDU
+ {0x1122C, 0x1122E, prSpacingMark}, // Mc [3] KHOJKI VOWEL SIGN AA..KHOJKI VOWEL SIGN II
+ {0x1122F, 0x11231, prExtend}, // Mn [3] KHOJKI VOWEL SIGN U..KHOJKI VOWEL SIGN AI
+ {0x11232, 0x11233, prSpacingMark}, // Mc [2] KHOJKI VOWEL SIGN O..KHOJKI VOWEL SIGN AU
+ {0x11234, 0x11234, prExtend}, // Mn KHOJKI SIGN ANUSVARA
+ {0x11235, 0x11235, prSpacingMark}, // Mc KHOJKI SIGN VIRAMA
+ {0x11236, 0x11237, prExtend}, // Mn [2] KHOJKI SIGN NUKTA..KHOJKI SIGN SHADDA
+ {0x1123E, 0x1123E, prExtend}, // Mn KHOJKI SIGN SUKUN
+ {0x11241, 0x11241, prExtend}, // Mn KHOJKI VOWEL SIGN VOCALIC R
+ {0x112DF, 0x112DF, prExtend}, // Mn KHUDAWADI SIGN ANUSVARA
+ {0x112E0, 0x112E2, prSpacingMark}, // Mc [3] KHUDAWADI VOWEL SIGN AA..KHUDAWADI VOWEL SIGN II
+ {0x112E3, 0x112EA, prExtend}, // Mn [8] KHUDAWADI VOWEL SIGN U..KHUDAWADI SIGN VIRAMA
+ {0x11300, 0x11301, prExtend}, // Mn [2] GRANTHA SIGN COMBINING ANUSVARA ABOVE..GRANTHA SIGN CANDRABINDU
+ {0x11302, 0x11303, prSpacingMark}, // Mc [2] GRANTHA SIGN ANUSVARA..GRANTHA SIGN VISARGA
+ {0x1133B, 0x1133C, prExtend}, // Mn [2] COMBINING BINDU BELOW..GRANTHA SIGN NUKTA
+ {0x1133E, 0x1133E, prExtend}, // Mc GRANTHA VOWEL SIGN AA
+ {0x1133F, 0x1133F, prSpacingMark}, // Mc GRANTHA VOWEL SIGN I
+ {0x11340, 0x11340, prExtend}, // Mn GRANTHA VOWEL SIGN II
+ {0x11341, 0x11344, prSpacingMark}, // Mc [4] GRANTHA VOWEL SIGN U..GRANTHA VOWEL SIGN VOCALIC RR
+ {0x11347, 0x11348, prSpacingMark}, // Mc [2] GRANTHA VOWEL SIGN EE..GRANTHA VOWEL SIGN AI
+ {0x1134B, 0x1134D, prSpacingMark}, // Mc [3] GRANTHA VOWEL SIGN OO..GRANTHA SIGN VIRAMA
+ {0x11357, 0x11357, prExtend}, // Mc GRANTHA AU LENGTH MARK
+ {0x11362, 0x11363, prSpacingMark}, // Mc [2] GRANTHA VOWEL SIGN VOCALIC L..GRANTHA VOWEL SIGN VOCALIC LL
+ {0x11366, 0x1136C, prExtend}, // Mn [7] COMBINING GRANTHA DIGIT ZERO..COMBINING GRANTHA DIGIT SIX
+ {0x11370, 0x11374, prExtend}, // Mn [5] COMBINING GRANTHA LETTER A..COMBINING GRANTHA LETTER PA
+ {0x11435, 0x11437, prSpacingMark}, // Mc [3] NEWA VOWEL SIGN AA..NEWA VOWEL SIGN II
+ {0x11438, 0x1143F, prExtend}, // Mn [8] NEWA VOWEL SIGN U..NEWA VOWEL SIGN AI
+ {0x11440, 0x11441, prSpacingMark}, // Mc [2] NEWA VOWEL SIGN O..NEWA VOWEL SIGN AU
+ {0x11442, 0x11444, prExtend}, // Mn [3] NEWA SIGN VIRAMA..NEWA SIGN ANUSVARA
+ {0x11445, 0x11445, prSpacingMark}, // Mc NEWA SIGN VISARGA
+ {0x11446, 0x11446, prExtend}, // Mn NEWA SIGN NUKTA
+ {0x1145E, 0x1145E, prExtend}, // Mn NEWA SANDHI MARK
+ {0x114B0, 0x114B0, prExtend}, // Mc TIRHUTA VOWEL SIGN AA
+ {0x114B1, 0x114B2, prSpacingMark}, // Mc [2] TIRHUTA VOWEL SIGN I..TIRHUTA VOWEL SIGN II
+ {0x114B3, 0x114B8, prExtend}, // Mn [6] TIRHUTA VOWEL SIGN U..TIRHUTA VOWEL SIGN VOCALIC LL
+ {0x114B9, 0x114B9, prSpacingMark}, // Mc TIRHUTA VOWEL SIGN E
+ {0x114BA, 0x114BA, prExtend}, // Mn TIRHUTA VOWEL SIGN SHORT E
+ {0x114BB, 0x114BC, prSpacingMark}, // Mc [2] TIRHUTA VOWEL SIGN AI..TIRHUTA VOWEL SIGN O
+ {0x114BD, 0x114BD, prExtend}, // Mc TIRHUTA VOWEL SIGN SHORT O
+ {0x114BE, 0x114BE, prSpacingMark}, // Mc TIRHUTA VOWEL SIGN AU
+ {0x114BF, 0x114C0, prExtend}, // Mn [2] TIRHUTA SIGN CANDRABINDU..TIRHUTA SIGN ANUSVARA
+ {0x114C1, 0x114C1, prSpacingMark}, // Mc TIRHUTA SIGN VISARGA
+ {0x114C2, 0x114C3, prExtend}, // Mn [2] TIRHUTA SIGN VIRAMA..TIRHUTA SIGN NUKTA
+ {0x115AF, 0x115AF, prExtend}, // Mc SIDDHAM VOWEL SIGN AA
+ {0x115B0, 0x115B1, prSpacingMark}, // Mc [2] SIDDHAM VOWEL SIGN I..SIDDHAM VOWEL SIGN II
+ {0x115B2, 0x115B5, prExtend}, // Mn [4] SIDDHAM VOWEL SIGN U..SIDDHAM VOWEL SIGN VOCALIC RR
+ {0x115B8, 0x115BB, prSpacingMark}, // Mc [4] SIDDHAM VOWEL SIGN E..SIDDHAM VOWEL SIGN AU
+ {0x115BC, 0x115BD, prExtend}, // Mn [2] SIDDHAM SIGN CANDRABINDU..SIDDHAM SIGN ANUSVARA
+ {0x115BE, 0x115BE, prSpacingMark}, // Mc SIDDHAM SIGN VISARGA
+ {0x115BF, 0x115C0, prExtend}, // Mn [2] SIDDHAM SIGN VIRAMA..SIDDHAM SIGN NUKTA
+ {0x115DC, 0x115DD, prExtend}, // Mn [2] SIDDHAM VOWEL SIGN ALTERNATE U..SIDDHAM VOWEL SIGN ALTERNATE UU
+ {0x11630, 0x11632, prSpacingMark}, // Mc [3] MODI VOWEL SIGN AA..MODI VOWEL SIGN II
+ {0x11633, 0x1163A, prExtend}, // Mn [8] MODI VOWEL SIGN U..MODI VOWEL SIGN AI
+ {0x1163B, 0x1163C, prSpacingMark}, // Mc [2] MODI VOWEL SIGN O..MODI VOWEL SIGN AU
+ {0x1163D, 0x1163D, prExtend}, // Mn MODI SIGN ANUSVARA
+ {0x1163E, 0x1163E, prSpacingMark}, // Mc MODI SIGN VISARGA
+ {0x1163F, 0x11640, prExtend}, // Mn [2] MODI SIGN VIRAMA..MODI SIGN ARDHACANDRA
+ {0x116AB, 0x116AB, prExtend}, // Mn TAKRI SIGN ANUSVARA
+ {0x116AC, 0x116AC, prSpacingMark}, // Mc TAKRI SIGN VISARGA
+ {0x116AD, 0x116AD, prExtend}, // Mn TAKRI VOWEL SIGN AA
+ {0x116AE, 0x116AF, prSpacingMark}, // Mc [2] TAKRI VOWEL SIGN I..TAKRI VOWEL SIGN II
+ {0x116B0, 0x116B5, prExtend}, // Mn [6] TAKRI VOWEL SIGN U..TAKRI VOWEL SIGN AU
+ {0x116B6, 0x116B6, prSpacingMark}, // Mc TAKRI SIGN VIRAMA
+ {0x116B7, 0x116B7, prExtend}, // Mn TAKRI SIGN NUKTA
+ {0x1171D, 0x1171F, prExtend}, // Mn [3] AHOM CONSONANT SIGN MEDIAL LA..AHOM CONSONANT SIGN MEDIAL LIGATING RA
+ {0x11722, 0x11725, prExtend}, // Mn [4] AHOM VOWEL SIGN I..AHOM VOWEL SIGN UU
+ {0x11726, 0x11726, prSpacingMark}, // Mc AHOM VOWEL SIGN E
+ {0x11727, 0x1172B, prExtend}, // Mn [5] AHOM VOWEL SIGN AW..AHOM SIGN KILLER
+ {0x1182C, 0x1182E, prSpacingMark}, // Mc [3] DOGRA VOWEL SIGN AA..DOGRA VOWEL SIGN II
+ {0x1182F, 0x11837, prExtend}, // Mn [9] DOGRA VOWEL SIGN U..DOGRA SIGN ANUSVARA
+ {0x11838, 0x11838, prSpacingMark}, // Mc DOGRA SIGN VISARGA
+ {0x11839, 0x1183A, prExtend}, // Mn [2] DOGRA SIGN VIRAMA..DOGRA SIGN NUKTA
+ {0x11930, 0x11930, prExtend}, // Mc DIVES AKURU VOWEL SIGN AA
+ {0x11931, 0x11935, prSpacingMark}, // Mc [5] DIVES AKURU VOWEL SIGN I..DIVES AKURU VOWEL SIGN E
+ {0x11937, 0x11938, prSpacingMark}, // Mc [2] DIVES AKURU VOWEL SIGN AI..DIVES AKURU VOWEL SIGN O
+ {0x1193B, 0x1193C, prExtend}, // Mn [2] DIVES AKURU SIGN ANUSVARA..DIVES AKURU SIGN CANDRABINDU
+ {0x1193D, 0x1193D, prSpacingMark}, // Mc DIVES AKURU SIGN HALANTA
+ {0x1193E, 0x1193E, prExtend}, // Mn DIVES AKURU VIRAMA
+ {0x1193F, 0x1193F, prPrepend}, // Lo DIVES AKURU PREFIXED NASAL SIGN
+ {0x11940, 0x11940, prSpacingMark}, // Mc DIVES AKURU MEDIAL YA
+ {0x11941, 0x11941, prPrepend}, // Lo DIVES AKURU INITIAL RA
+ {0x11942, 0x11942, prSpacingMark}, // Mc DIVES AKURU MEDIAL RA
+ {0x11943, 0x11943, prExtend}, // Mn DIVES AKURU SIGN NUKTA
+ {0x119D1, 0x119D3, prSpacingMark}, // Mc [3] NANDINAGARI VOWEL SIGN AA..NANDINAGARI VOWEL SIGN II
+ {0x119D4, 0x119D7, prExtend}, // Mn [4] NANDINAGARI VOWEL SIGN U..NANDINAGARI VOWEL SIGN VOCALIC RR
+ {0x119DA, 0x119DB, prExtend}, // Mn [2] NANDINAGARI VOWEL SIGN E..NANDINAGARI VOWEL SIGN AI
+ {0x119DC, 0x119DF, prSpacingMark}, // Mc [4] NANDINAGARI VOWEL SIGN O..NANDINAGARI SIGN VISARGA
+ {0x119E0, 0x119E0, prExtend}, // Mn NANDINAGARI SIGN VIRAMA
+ {0x119E4, 0x119E4, prSpacingMark}, // Mc NANDINAGARI VOWEL SIGN PRISHTHAMATRA E
+ {0x11A01, 0x11A0A, prExtend}, // Mn [10] ZANABAZAR SQUARE VOWEL SIGN I..ZANABAZAR SQUARE VOWEL LENGTH MARK
+ {0x11A33, 0x11A38, prExtend}, // Mn [6] ZANABAZAR SQUARE FINAL CONSONANT MARK..ZANABAZAR SQUARE SIGN ANUSVARA
+ {0x11A39, 0x11A39, prSpacingMark}, // Mc ZANABAZAR SQUARE SIGN VISARGA
+ {0x11A3A, 0x11A3A, prPrepend}, // Lo ZANABAZAR SQUARE CLUSTER-INITIAL LETTER RA
+ {0x11A3B, 0x11A3E, prExtend}, // Mn [4] ZANABAZAR SQUARE CLUSTER-FINAL LETTER YA..ZANABAZAR SQUARE CLUSTER-FINAL LETTER VA
+ {0x11A47, 0x11A47, prExtend}, // Mn ZANABAZAR SQUARE SUBJOINER
+ {0x11A51, 0x11A56, prExtend}, // Mn [6] SOYOMBO VOWEL SIGN I..SOYOMBO VOWEL SIGN OE
+ {0x11A57, 0x11A58, prSpacingMark}, // Mc [2] SOYOMBO VOWEL SIGN AI..SOYOMBO VOWEL SIGN AU
+ {0x11A59, 0x11A5B, prExtend}, // Mn [3] SOYOMBO VOWEL SIGN VOCALIC R..SOYOMBO VOWEL LENGTH MARK
+ {0x11A84, 0x11A89, prPrepend}, // Lo [6] SOYOMBO SIGN JIHVAMULIYA..SOYOMBO CLUSTER-INITIAL LETTER SA
+ {0x11A8A, 0x11A96, prExtend}, // Mn [13] SOYOMBO FINAL CONSONANT SIGN G..SOYOMBO SIGN ANUSVARA
+ {0x11A97, 0x11A97, prSpacingMark}, // Mc SOYOMBO SIGN VISARGA
+ {0x11A98, 0x11A99, prExtend}, // Mn [2] SOYOMBO GEMINATION MARK..SOYOMBO SUBJOINER
+ {0x11C2F, 0x11C2F, prSpacingMark}, // Mc BHAIKSUKI VOWEL SIGN AA
+ {0x11C30, 0x11C36, prExtend}, // Mn [7] BHAIKSUKI VOWEL SIGN I..BHAIKSUKI VOWEL SIGN VOCALIC L
+ {0x11C38, 0x11C3D, prExtend}, // Mn [6] BHAIKSUKI VOWEL SIGN E..BHAIKSUKI SIGN ANUSVARA
+ {0x11C3E, 0x11C3E, prSpacingMark}, // Mc BHAIKSUKI SIGN VISARGA
+ {0x11C3F, 0x11C3F, prExtend}, // Mn BHAIKSUKI SIGN VIRAMA
+ {0x11C92, 0x11CA7, prExtend}, // Mn [22] MARCHEN SUBJOINED LETTER KA..MARCHEN SUBJOINED LETTER ZA
+ {0x11CA9, 0x11CA9, prSpacingMark}, // Mc MARCHEN SUBJOINED LETTER YA
+ {0x11CAA, 0x11CB0, prExtend}, // Mn [7] MARCHEN SUBJOINED LETTER RA..MARCHEN VOWEL SIGN AA
+ {0x11CB1, 0x11CB1, prSpacingMark}, // Mc MARCHEN VOWEL SIGN I
+ {0x11CB2, 0x11CB3, prExtend}, // Mn [2] MARCHEN VOWEL SIGN U..MARCHEN VOWEL SIGN E
+ {0x11CB4, 0x11CB4, prSpacingMark}, // Mc MARCHEN VOWEL SIGN O
+ {0x11CB5, 0x11CB6, prExtend}, // Mn [2] MARCHEN SIGN ANUSVARA..MARCHEN SIGN CANDRABINDU
+ {0x11D31, 0x11D36, prExtend}, // Mn [6] MASARAM GONDI VOWEL SIGN AA..MASARAM GONDI VOWEL SIGN VOCALIC R
+ {0x11D3A, 0x11D3A, prExtend}, // Mn MASARAM GONDI VOWEL SIGN E
+ {0x11D3C, 0x11D3D, prExtend}, // Mn [2] MASARAM GONDI VOWEL SIGN AI..MASARAM GONDI VOWEL SIGN O
+ {0x11D3F, 0x11D45, prExtend}, // Mn [7] MASARAM GONDI VOWEL SIGN AU..MASARAM GONDI VIRAMA
+ {0x11D46, 0x11D46, prPrepend}, // Lo MASARAM GONDI REPHA
+ {0x11D47, 0x11D47, prExtend}, // Mn MASARAM GONDI RA-KARA
+ {0x11D8A, 0x11D8E, prSpacingMark}, // Mc [5] GUNJALA GONDI VOWEL SIGN AA..GUNJALA GONDI VOWEL SIGN UU
+ {0x11D90, 0x11D91, prExtend}, // Mn [2] GUNJALA GONDI VOWEL SIGN EE..GUNJALA GONDI VOWEL SIGN AI
+ {0x11D93, 0x11D94, prSpacingMark}, // Mc [2] GUNJALA GONDI VOWEL SIGN OO..GUNJALA GONDI VOWEL SIGN AU
+ {0x11D95, 0x11D95, prExtend}, // Mn GUNJALA GONDI SIGN ANUSVARA
+ {0x11D96, 0x11D96, prSpacingMark}, // Mc GUNJALA GONDI SIGN VISARGA
+ {0x11D97, 0x11D97, prExtend}, // Mn GUNJALA GONDI VIRAMA
+ {0x11EF3, 0x11EF4, prExtend}, // Mn [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U
+ {0x11EF5, 0x11EF6, prSpacingMark}, // Mc [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O
+ {0x11F00, 0x11F01, prExtend}, // Mn [2] KAWI SIGN CANDRABINDU..KAWI SIGN ANUSVARA
+ {0x11F02, 0x11F02, prPrepend}, // Lo KAWI SIGN REPHA
+ {0x11F03, 0x11F03, prSpacingMark}, // Mc KAWI SIGN VISARGA
+ {0x11F34, 0x11F35, prSpacingMark}, // Mc [2] KAWI VOWEL SIGN AA..KAWI VOWEL SIGN ALTERNATE AA
+ {0x11F36, 0x11F3A, prExtend}, // Mn [5] KAWI VOWEL SIGN I..KAWI VOWEL SIGN VOCALIC R
+ {0x11F3E, 0x11F3F, prSpacingMark}, // Mc [2] KAWI VOWEL SIGN E..KAWI VOWEL SIGN AI
+ {0x11F40, 0x11F40, prExtend}, // Mn KAWI VOWEL SIGN EU
+ {0x11F41, 0x11F41, prSpacingMark}, // Mc KAWI SIGN KILLER
+ {0x11F42, 0x11F42, prExtend}, // Mn KAWI CONJOINER
+ {0x13430, 0x1343F, prControl}, // Cf [16] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END WALLED ENCLOSURE
+ {0x13440, 0x13440, prExtend}, // Mn EGYPTIAN HIEROGLYPH MIRROR HORIZONTALLY
+ {0x13447, 0x13455, prExtend}, // Mn [15] EGYPTIAN HIEROGLYPH MODIFIER DAMAGED AT TOP START..EGYPTIAN HIEROGLYPH MODIFIER DAMAGED
+ {0x16AF0, 0x16AF4, prExtend}, // Mn [5] BASSA VAH COMBINING HIGH TONE..BASSA VAH COMBINING HIGH-LOW TONE
+ {0x16B30, 0x16B36, prExtend}, // Mn [7] PAHAWH HMONG MARK CIM TUB..PAHAWH HMONG MARK CIM TAUM
+ {0x16F4F, 0x16F4F, prExtend}, // Mn MIAO SIGN CONSONANT MODIFIER BAR
+ {0x16F51, 0x16F87, prSpacingMark}, // Mc [55] MIAO SIGN ASPIRATION..MIAO VOWEL SIGN UI
+ {0x16F8F, 0x16F92, prExtend}, // Mn [4] MIAO TONE RIGHT..MIAO TONE BELOW
+ {0x16FE4, 0x16FE4, prExtend}, // Mn KHITAN SMALL SCRIPT FILLER
+ {0x16FF0, 0x16FF1, prSpacingMark}, // Mc [2] VIETNAMESE ALTERNATE READING MARK CA..VIETNAMESE ALTERNATE READING MARK NHAY
+ {0x1BC9D, 0x1BC9E, prExtend}, // Mn [2] DUPLOYAN THICK LETTER SELECTOR..DUPLOYAN DOUBLE MARK
+ {0x1BCA0, 0x1BCA3, prControl}, // Cf [4] SHORTHAND FORMAT LETTER OVERLAP..SHORTHAND FORMAT UP STEP
+ {0x1CF00, 0x1CF2D, prExtend}, // Mn [46] ZNAMENNY COMBINING MARK GORAZDO NIZKO S KRYZHEM ON LEFT..ZNAMENNY COMBINING MARK KRYZH ON LEFT
+ {0x1CF30, 0x1CF46, prExtend}, // Mn [23] ZNAMENNY COMBINING TONAL RANGE MARK MRACHNO..ZNAMENNY PRIZNAK MODIFIER ROG
+ {0x1D165, 0x1D165, prExtend}, // Mc MUSICAL SYMBOL COMBINING STEM
+ {0x1D166, 0x1D166, prSpacingMark}, // Mc MUSICAL SYMBOL COMBINING SPRECHGESANG STEM
+ {0x1D167, 0x1D169, prExtend}, // Mn [3] MUSICAL SYMBOL COMBINING TREMOLO-1..MUSICAL SYMBOL COMBINING TREMOLO-3
+ {0x1D16D, 0x1D16D, prSpacingMark}, // Mc MUSICAL SYMBOL COMBINING AUGMENTATION DOT
+ {0x1D16E, 0x1D172, prExtend}, // Mc [5] MUSICAL SYMBOL COMBINING FLAG-1..MUSICAL SYMBOL COMBINING FLAG-5
+ {0x1D173, 0x1D17A, prControl}, // Cf [8] MUSICAL SYMBOL BEGIN BEAM..MUSICAL SYMBOL END PHRASE
+ {0x1D17B, 0x1D182, prExtend}, // Mn [8] MUSICAL SYMBOL COMBINING ACCENT..MUSICAL SYMBOL COMBINING LOURE
+ {0x1D185, 0x1D18B, prExtend}, // Mn [7] MUSICAL SYMBOL COMBINING DOIT..MUSICAL SYMBOL COMBINING TRIPLE TONGUE
+ {0x1D1AA, 0x1D1AD, prExtend}, // Mn [4] MUSICAL SYMBOL COMBINING DOWN BOW..MUSICAL SYMBOL COMBINING SNAP PIZZICATO
+ {0x1D242, 0x1D244, prExtend}, // Mn [3] COMBINING GREEK MUSICAL TRISEME..COMBINING GREEK MUSICAL PENTASEME
+ {0x1DA00, 0x1DA36, prExtend}, // Mn [55] SIGNWRITING HEAD RIM..SIGNWRITING AIR SUCKING IN
+ {0x1DA3B, 0x1DA6C, prExtend}, // Mn [50] SIGNWRITING MOUTH CLOSED NEUTRAL..SIGNWRITING EXCITEMENT
+ {0x1DA75, 0x1DA75, prExtend}, // Mn SIGNWRITING UPPER BODY TILTING FROM HIP JOINTS
+ {0x1DA84, 0x1DA84, prExtend}, // Mn SIGNWRITING LOCATION HEAD NECK
+ {0x1DA9B, 0x1DA9F, prExtend}, // Mn [5] SIGNWRITING FILL MODIFIER-2..SIGNWRITING FILL MODIFIER-6
+ {0x1DAA1, 0x1DAAF, prExtend}, // Mn [15] SIGNWRITING ROTATION MODIFIER-2..SIGNWRITING ROTATION MODIFIER-16
+ {0x1E000, 0x1E006, prExtend}, // Mn [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE
+ {0x1E008, 0x1E018, prExtend}, // Mn [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU
+ {0x1E01B, 0x1E021, prExtend}, // Mn [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI
+ {0x1E023, 0x1E024, prExtend}, // Mn [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS
+ {0x1E026, 0x1E02A, prExtend}, // Mn [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA
+ {0x1E08F, 0x1E08F, prExtend}, // Mn COMBINING CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
+ {0x1E130, 0x1E136, prExtend}, // Mn [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D
+ {0x1E2AE, 0x1E2AE, prExtend}, // Mn TOTO SIGN RISING TONE
+ {0x1E2EC, 0x1E2EF, prExtend}, // Mn [4] WANCHO TONE TUP..WANCHO TONE KOINI
+ {0x1E4EC, 0x1E4EF, prExtend}, // Mn [4] NAG MUNDARI SIGN MUHOR..NAG MUNDARI SIGN SUTUH
+ {0x1E8D0, 0x1E8D6, prExtend}, // Mn [7] MENDE KIKAKUI COMBINING NUMBER TEENS..MENDE KIKAKUI COMBINING NUMBER MILLIONS
+ {0x1E944, 0x1E94A, prExtend}, // Mn [7] ADLAM ALIF LENGTHENER..ADLAM NUKTA
+ {0x1F000, 0x1F003, prExtendedPictographic}, // E0.0 [4] (🀀..🀃) MAHJONG TILE EAST WIND..MAHJONG TILE NORTH WIND
+ {0x1F004, 0x1F004, prExtendedPictographic}, // E0.6 [1] (🀄) mahjong red dragon
+ {0x1F005, 0x1F0CE, prExtendedPictographic}, // E0.0 [202] (🀅..🃎) MAHJONG TILE GREEN DRAGON..PLAYING CARD KING OF DIAMONDS
+ {0x1F0CF, 0x1F0CF, prExtendedPictographic}, // E0.6 [1] (🃏) joker
+ {0x1F0D0, 0x1F0FF, prExtendedPictographic}, // E0.0 [48] (..) <reserved-1F0D0>..<reserved-1F0FF>
+ {0x1F10D, 0x1F10F, prExtendedPictographic}, // E0.0 [3] (🄍..🄏) CIRCLED ZERO WITH SLASH..CIRCLED DOLLAR SIGN WITH OVERLAID BACKSLASH
+ {0x1F12F, 0x1F12F, prExtendedPictographic}, // E0.0 [1] (🄯) COPYLEFT SYMBOL
+ {0x1F16C, 0x1F16F, prExtendedPictographic}, // E0.0 [4] (🅬..🅯) RAISED MR SIGN..CIRCLED HUMAN FIGURE
+ {0x1F170, 0x1F171, prExtendedPictographic}, // E0.6 [2] (🅰️..🅱️) A button (blood type)..B button (blood type)
+ {0x1F17E, 0x1F17F, prExtendedPictographic}, // E0.6 [2] (🅾️..🅿️) O button (blood type)..P button
+ {0x1F18E, 0x1F18E, prExtendedPictographic}, // E0.6 [1] (🆎) AB button (blood type)
+ {0x1F191, 0x1F19A, prExtendedPictographic}, // E0.6 [10] (🆑..🆚) CL button..VS button
+ {0x1F1AD, 0x1F1E5, prExtendedPictographic}, // E0.0 [57] (🆭..) MASK WORK SYMBOL..<reserved-1F1E5>
+ {0x1F1E6, 0x1F1FF, prRegionalIndicator}, // So [26] REGIONAL INDICATOR SYMBOL LETTER A..REGIONAL INDICATOR SYMBOL LETTER Z
+ {0x1F201, 0x1F202, prExtendedPictographic}, // E0.6 [2] (🈁..🈂️) Japanese “here” button..Japanese “service charge” button
+ {0x1F203, 0x1F20F, prExtendedPictographic}, // E0.0 [13] (..) <reserved-1F203>..<reserved-1F20F>
+ {0x1F21A, 0x1F21A, prExtendedPictographic}, // E0.6 [1] (🈚) Japanese “free of charge” button
+ {0x1F22F, 0x1F22F, prExtendedPictographic}, // E0.6 [1] (🈯) Japanese “reserved” button
+ {0x1F232, 0x1F23A, prExtendedPictographic}, // E0.6 [9] (🈲..🈺) Japanese “prohibited” button..Japanese “open for business” button
+ {0x1F23C, 0x1F23F, prExtendedPictographic}, // E0.0 [4] (..) <reserved-1F23C>..<reserved-1F23F>
+ {0x1F249, 0x1F24F, prExtendedPictographic}, // E0.0 [7] (..) <reserved-1F249>..<reserved-1F24F>
+ {0x1F250, 0x1F251, prExtendedPictographic}, // E0.6 [2] (🉐..🉑) Japanese “bargain” button..Japanese “acceptable” button
+ {0x1F252, 0x1F2FF, prExtendedPictographic}, // E0.0 [174] (..) <reserved-1F252>..<reserved-1F2FF>
+ {0x1F300, 0x1F30C, prExtendedPictographic}, // E0.6 [13] (🌀..🌌) cyclone..milky way
+ {0x1F30D, 0x1F30E, prExtendedPictographic}, // E0.7 [2] (🌍..🌎) globe showing Europe-Africa..globe showing Americas
+ {0x1F30F, 0x1F30F, prExtendedPictographic}, // E0.6 [1] (🌏) globe showing Asia-Australia
+ {0x1F310, 0x1F310, prExtendedPictographic}, // E1.0 [1] (🌐) globe with meridians
+ {0x1F311, 0x1F311, prExtendedPictographic}, // E0.6 [1] (🌑) new moon
+ {0x1F312, 0x1F312, prExtendedPictographic}, // E1.0 [1] (🌒) waxing crescent moon
+ {0x1F313, 0x1F315, prExtendedPictographic}, // E0.6 [3] (🌓..🌕) first quarter moon..full moon
+ {0x1F316, 0x1F318, prExtendedPictographic}, // E1.0 [3] (🌖..🌘) waning gibbous moon..waning crescent moon
+ {0x1F319, 0x1F319, prExtendedPictographic}, // E0.6 [1] (🌙) crescent moon
+ {0x1F31A, 0x1F31A, prExtendedPictographic}, // E1.0 [1] (🌚) new moon face
+ {0x1F31B, 0x1F31B, prExtendedPictographic}, // E0.6 [1] (🌛) first quarter moon face
+ {0x1F31C, 0x1F31C, prExtendedPictographic}, // E0.7 [1] (🌜) last quarter moon face
+ {0x1F31D, 0x1F31E, prExtendedPictographic}, // E1.0 [2] (🌝..🌞) full moon face..sun with face
+ {0x1F31F, 0x1F320, prExtendedPictographic}, // E0.6 [2] (🌟..🌠) glowing star..shooting star
+ {0x1F321, 0x1F321, prExtendedPictographic}, // E0.7 [1] (🌡️) thermometer
+ {0x1F322, 0x1F323, prExtendedPictographic}, // E0.0 [2] (🌢..🌣) BLACK DROPLET..WHITE SUN
+ {0x1F324, 0x1F32C, prExtendedPictographic}, // E0.7 [9] (🌤️..🌬️) sun behind small cloud..wind face
+ {0x1F32D, 0x1F32F, prExtendedPictographic}, // E1.0 [3] (🌭..🌯) hot dog..burrito
+ {0x1F330, 0x1F331, prExtendedPictographic}, // E0.6 [2] (🌰..🌱) chestnut..seedling
+ {0x1F332, 0x1F333, prExtendedPictographic}, // E1.0 [2] (🌲..🌳) evergreen tree..deciduous tree
+ {0x1F334, 0x1F335, prExtendedPictographic}, // E0.6 [2] (🌴..🌵) palm tree..cactus
+ {0x1F336, 0x1F336, prExtendedPictographic}, // E0.7 [1] (🌶️) hot pepper
+ {0x1F337, 0x1F34A, prExtendedPictographic}, // E0.6 [20] (🌷..🍊) tulip..tangerine
+ {0x1F34B, 0x1F34B, prExtendedPictographic}, // E1.0 [1] (🍋) lemon
+ {0x1F34C, 0x1F34F, prExtendedPictographic}, // E0.6 [4] (🍌..🍏) banana..green apple
+ {0x1F350, 0x1F350, prExtendedPictographic}, // E1.0 [1] (🍐) pear
+ {0x1F351, 0x1F37B, prExtendedPictographic}, // E0.6 [43] (🍑..🍻) peach..clinking beer mugs
+ {0x1F37C, 0x1F37C, prExtendedPictographic}, // E1.0 [1] (🍼) baby bottle
+ {0x1F37D, 0x1F37D, prExtendedPictographic}, // E0.7 [1] (🍽️) fork and knife with plate
+ {0x1F37E, 0x1F37F, prExtendedPictographic}, // E1.0 [2] (🍾..🍿) bottle with popping cork..popcorn
+ {0x1F380, 0x1F393, prExtendedPictographic}, // E0.6 [20] (🎀..🎓) ribbon..graduation cap
+ {0x1F394, 0x1F395, prExtendedPictographic}, // E0.0 [2] (🎔..🎕) HEART WITH TIP ON THE LEFT..BOUQUET OF FLOWERS
+ {0x1F396, 0x1F397, prExtendedPictographic}, // E0.7 [2] (🎖️..🎗️) military medal..reminder ribbon
+ {0x1F398, 0x1F398, prExtendedPictographic}, // E0.0 [1] (🎘) MUSICAL KEYBOARD WITH JACKS
+ {0x1F399, 0x1F39B, prExtendedPictographic}, // E0.7 [3] (🎙️..🎛️) studio microphone..control knobs
+ {0x1F39C, 0x1F39D, prExtendedPictographic}, // E0.0 [2] (🎜..🎝) BEAMED ASCENDING MUSICAL NOTES..BEAMED DESCENDING MUSICAL NOTES
+ {0x1F39E, 0x1F39F, prExtendedPictographic}, // E0.7 [2] (🎞️..🎟️) film frames..admission tickets
+ {0x1F3A0, 0x1F3C4, prExtendedPictographic}, // E0.6 [37] (🎠..🏄) carousel horse..person surfing
+ {0x1F3C5, 0x1F3C5, prExtendedPictographic}, // E1.0 [1] (🏅) sports medal
+ {0x1F3C6, 0x1F3C6, prExtendedPictographic}, // E0.6 [1] (🏆) trophy
+ {0x1F3C7, 0x1F3C7, prExtendedPictographic}, // E1.0 [1] (🏇) horse racing
+ {0x1F3C8, 0x1F3C8, prExtendedPictographic}, // E0.6 [1] (🏈) american football
+ {0x1F3C9, 0x1F3C9, prExtendedPictographic}, // E1.0 [1] (🏉) rugby football
+ {0x1F3CA, 0x1F3CA, prExtendedPictographic}, // E0.6 [1] (🏊) person swimming
+ {0x1F3CB, 0x1F3CE, prExtendedPictographic}, // E0.7 [4] (🏋️..🏎️) person lifting weights..racing car
+ {0x1F3CF, 0x1F3D3, prExtendedPictographic}, // E1.0 [5] (🏏..🏓) cricket game..ping pong
+ {0x1F3D4, 0x1F3DF, prExtendedPictographic}, // E0.7 [12] (🏔️..🏟️) snow-capped mountain..stadium
+ {0x1F3E0, 0x1F3E3, prExtendedPictographic}, // E0.6 [4] (🏠..🏣) house..Japanese post office
+ {0x1F3E4, 0x1F3E4, prExtendedPictographic}, // E1.0 [1] (🏤) post office
+ {0x1F3E5, 0x1F3F0, prExtendedPictographic}, // E0.6 [12] (🏥..🏰) hospital..castle
+ {0x1F3F1, 0x1F3F2, prExtendedPictographic}, // E0.0 [2] (🏱..🏲) WHITE PENNANT..BLACK PENNANT
+ {0x1F3F3, 0x1F3F3, prExtendedPictographic}, // E0.7 [1] (🏳️) white flag
+ {0x1F3F4, 0x1F3F4, prExtendedPictographic}, // E1.0 [1] (🏴) black flag
+ {0x1F3F5, 0x1F3F5, prExtendedPictographic}, // E0.7 [1] (🏵️) rosette
+ {0x1F3F6, 0x1F3F6, prExtendedPictographic}, // E0.0 [1] (🏶) BLACK ROSETTE
+ {0x1F3F7, 0x1F3F7, prExtendedPictographic}, // E0.7 [1] (🏷️) label
+ {0x1F3F8, 0x1F3FA, prExtendedPictographic}, // E1.0 [3] (🏸..🏺) badminton..amphora
+ {0x1F3FB, 0x1F3FF, prExtend}, // Sk [5] EMOJI MODIFIER FITZPATRICK TYPE-1-2..EMOJI MODIFIER FITZPATRICK TYPE-6
+ {0x1F400, 0x1F407, prExtendedPictographic}, // E1.0 [8] (🐀..🐇) rat..rabbit
+ {0x1F408, 0x1F408, prExtendedPictographic}, // E0.7 [1] (🐈) cat
+ {0x1F409, 0x1F40B, prExtendedPictographic}, // E1.0 [3] (🐉..🐋) dragon..whale
+ {0x1F40C, 0x1F40E, prExtendedPictographic}, // E0.6 [3] (🐌..🐎) snail..horse
+ {0x1F40F, 0x1F410, prExtendedPictographic}, // E1.0 [2] (🐏..🐐) ram..goat
+ {0x1F411, 0x1F412, prExtendedPictographic}, // E0.6 [2] (🐑..🐒) ewe..monkey
+ {0x1F413, 0x1F413, prExtendedPictographic}, // E1.0 [1] (🐓) rooster
+ {0x1F414, 0x1F414, prExtendedPictographic}, // E0.6 [1] (🐔) chicken
+ {0x1F415, 0x1F415, prExtendedPictographic}, // E0.7 [1] (🐕) dog
+ {0x1F416, 0x1F416, prExtendedPictographic}, // E1.0 [1] (🐖) pig
+ {0x1F417, 0x1F429, prExtendedPictographic}, // E0.6 [19] (🐗..🐩) boar..poodle
+ {0x1F42A, 0x1F42A, prExtendedPictographic}, // E1.0 [1] (🐪) camel
+ {0x1F42B, 0x1F43E, prExtendedPictographic}, // E0.6 [20] (🐫..🐾) two-hump camel..paw prints
+ {0x1F43F, 0x1F43F, prExtendedPictographic}, // E0.7 [1] (🐿️) chipmunk
+ {0x1F440, 0x1F440, prExtendedPictographic}, // E0.6 [1] (👀) eyes
+ {0x1F441, 0x1F441, prExtendedPictographic}, // E0.7 [1] (👁️) eye
+ {0x1F442, 0x1F464, prExtendedPictographic}, // E0.6 [35] (👂..👤) ear..bust in silhouette
+ {0x1F465, 0x1F465, prExtendedPictographic}, // E1.0 [1] (👥) busts in silhouette
+ {0x1F466, 0x1F46B, prExtendedPictographic}, // E0.6 [6] (👦..👫) boy..woman and man holding hands
+ {0x1F46C, 0x1F46D, prExtendedPictographic}, // E1.0 [2] (👬..👭) men holding hands..women holding hands
+ {0x1F46E, 0x1F4AC, prExtendedPictographic}, // E0.6 [63] (👮..💬) police officer..speech balloon
+ {0x1F4AD, 0x1F4AD, prExtendedPictographic}, // E1.0 [1] (💭) thought balloon
+ {0x1F4AE, 0x1F4B5, prExtendedPictographic}, // E0.6 [8] (💮..💵) white flower..dollar banknote
+ {0x1F4B6, 0x1F4B7, prExtendedPictographic}, // E1.0 [2] (💶..💷) euro banknote..pound banknote
+ {0x1F4B8, 0x1F4EB, prExtendedPictographic}, // E0.6 [52] (💸..📫) money with wings..closed mailbox with raised flag
+ {0x1F4EC, 0x1F4ED, prExtendedPictographic}, // E0.7 [2] (📬..📭) open mailbox with raised flag..open mailbox with lowered flag
+ {0x1F4EE, 0x1F4EE, prExtendedPictographic}, // E0.6 [1] (📮) postbox
+ {0x1F4EF, 0x1F4EF, prExtendedPictographic}, // E1.0 [1] (📯) postal horn
+ {0x1F4F0, 0x1F4F4, prExtendedPictographic}, // E0.6 [5] (📰..📴) newspaper..mobile phone off
+ {0x1F4F5, 0x1F4F5, prExtendedPictographic}, // E1.0 [1] (📵) no mobile phones
+ {0x1F4F6, 0x1F4F7, prExtendedPictographic}, // E0.6 [2] (📶..📷) antenna bars..camera
+ {0x1F4F8, 0x1F4F8, prExtendedPictographic}, // E1.0 [1] (📸) camera with flash
+ {0x1F4F9, 0x1F4FC, prExtendedPictographic}, // E0.6 [4] (📹..📼) video camera..videocassette
+ {0x1F4FD, 0x1F4FD, prExtendedPictographic}, // E0.7 [1] (📽️) film projector
+ {0x1F4FE, 0x1F4FE, prExtendedPictographic}, // E0.0 [1] (📾) PORTABLE STEREO
+ {0x1F4FF, 0x1F502, prExtendedPictographic}, // E1.0 [4] (📿..🔂) prayer beads..repeat single button
+ {0x1F503, 0x1F503, prExtendedPictographic}, // E0.6 [1] (🔃) clockwise vertical arrows
+ {0x1F504, 0x1F507, prExtendedPictographic}, // E1.0 [4] (🔄..🔇) counterclockwise arrows button..muted speaker
+ {0x1F508, 0x1F508, prExtendedPictographic}, // E0.7 [1] (🔈) speaker low volume
+ {0x1F509, 0x1F509, prExtendedPictographic}, // E1.0 [1] (🔉) speaker medium volume
+ {0x1F50A, 0x1F514, prExtendedPictographic}, // E0.6 [11] (🔊..🔔) speaker high volume..bell
+ {0x1F515, 0x1F515, prExtendedPictographic}, // E1.0 [1] (🔕) bell with slash
+ {0x1F516, 0x1F52B, prExtendedPictographic}, // E0.6 [22] (🔖..🔫) bookmark..water pistol
+ {0x1F52C, 0x1F52D, prExtendedPictographic}, // E1.0 [2] (🔬..🔭) microscope..telescope
+ {0x1F52E, 0x1F53D, prExtendedPictographic}, // E0.6 [16] (🔮..🔽) crystal ball..downwards button
+ {0x1F546, 0x1F548, prExtendedPictographic}, // E0.0 [3] (🕆..🕈) WHITE LATIN CROSS..CELTIC CROSS
+ {0x1F549, 0x1F54A, prExtendedPictographic}, // E0.7 [2] (🕉️..🕊️) om..dove
+ {0x1F54B, 0x1F54E, prExtendedPictographic}, // E1.0 [4] (🕋..🕎) kaaba..menorah
+ {0x1F54F, 0x1F54F, prExtendedPictographic}, // E0.0 [1] (🕏) BOWL OF HYGIEIA
+ {0x1F550, 0x1F55B, prExtendedPictographic}, // E0.6 [12] (🕐..🕛) one o’clock..twelve o’clock
+ {0x1F55C, 0x1F567, prExtendedPictographic}, // E0.7 [12] (🕜..🕧) one-thirty..twelve-thirty
+ {0x1F568, 0x1F56E, prExtendedPictographic}, // E0.0 [7] (🕨..🕮) RIGHT SPEAKER..BOOK
+ {0x1F56F, 0x1F570, prExtendedPictographic}, // E0.7 [2] (🕯️..🕰️) candle..mantelpiece clock
+ {0x1F571, 0x1F572, prExtendedPictographic}, // E0.0 [2] (🕱..🕲) BLACK SKULL AND CROSSBONES..NO PIRACY
+ {0x1F573, 0x1F579, prExtendedPictographic}, // E0.7 [7] (🕳️..🕹️) hole..joystick
+ {0x1F57A, 0x1F57A, prExtendedPictographic}, // E3.0 [1] (🕺) man dancing
+ {0x1F57B, 0x1F586, prExtendedPictographic}, // E0.0 [12] (🕻..🖆) LEFT HAND TELEPHONE RECEIVER..PEN OVER STAMPED ENVELOPE
+ {0x1F587, 0x1F587, prExtendedPictographic}, // E0.7 [1] (🖇️) linked paperclips
+ {0x1F588, 0x1F589, prExtendedPictographic}, // E0.0 [2] (🖈..🖉) BLACK PUSHPIN..LOWER LEFT PENCIL
+ {0x1F58A, 0x1F58D, prExtendedPictographic}, // E0.7 [4] (🖊️..🖍️) pen..crayon
+ {0x1F58E, 0x1F58F, prExtendedPictographic}, // E0.0 [2] (🖎..🖏) LEFT WRITING HAND..TURNED OK HAND SIGN
+ {0x1F590, 0x1F590, prExtendedPictographic}, // E0.7 [1] (🖐️) hand with fingers splayed
+ {0x1F591, 0x1F594, prExtendedPictographic}, // E0.0 [4] (🖑..🖔) REVERSED RAISED HAND WITH FINGERS SPLAYED..REVERSED VICTORY HAND
+ {0x1F595, 0x1F596, prExtendedPictographic}, // E1.0 [2] (🖕..🖖) middle finger..vulcan salute
+ {0x1F597, 0x1F5A3, prExtendedPictographic}, // E0.0 [13] (🖗..🖣) WHITE DOWN POINTING LEFT HAND INDEX..BLACK DOWN POINTING BACKHAND INDEX
+ {0x1F5A4, 0x1F5A4, prExtendedPictographic}, // E3.0 [1] (🖤) black heart
+ {0x1F5A5, 0x1F5A5, prExtendedPictographic}, // E0.7 [1] (🖥️) desktop computer
+ {0x1F5A6, 0x1F5A7, prExtendedPictographic}, // E0.0 [2] (🖦..🖧) KEYBOARD AND MOUSE..THREE NETWORKED COMPUTERS
+ {0x1F5A8, 0x1F5A8, prExtendedPictographic}, // E0.7 [1] (🖨️) printer
+ {0x1F5A9, 0x1F5B0, prExtendedPictographic}, // E0.0 [8] (🖩..🖰) POCKET CALCULATOR..TWO BUTTON MOUSE
+ {0x1F5B1, 0x1F5B2, prExtendedPictographic}, // E0.7 [2] (🖱️..🖲️) computer mouse..trackball
+ {0x1F5B3, 0x1F5BB, prExtendedPictographic}, // E0.0 [9] (🖳..🖻) OLD PERSONAL COMPUTER..DOCUMENT WITH PICTURE
+ {0x1F5BC, 0x1F5BC, prExtendedPictographic}, // E0.7 [1] (🖼️) framed picture
+ {0x1F5BD, 0x1F5C1, prExtendedPictographic}, // E0.0 [5] (🖽..🗁) FRAME WITH TILES..OPEN FOLDER
+ {0x1F5C2, 0x1F5C4, prExtendedPictographic}, // E0.7 [3] (🗂️..🗄️) card index dividers..file cabinet
+ {0x1F5C5, 0x1F5D0, prExtendedPictographic}, // E0.0 [12] (🗅..🗐) EMPTY NOTE..PAGES
+ {0x1F5D1, 0x1F5D3, prExtendedPictographic}, // E0.7 [3] (🗑️..🗓️) wastebasket..spiral calendar
+ {0x1F5D4, 0x1F5DB, prExtendedPictographic}, // E0.0 [8] (🗔..🗛) DESKTOP WINDOW..DECREASE FONT SIZE SYMBOL
+ {0x1F5DC, 0x1F5DE, prExtendedPictographic}, // E0.7 [3] (🗜️..🗞️) clamp..rolled-up newspaper
+ {0x1F5DF, 0x1F5E0, prExtendedPictographic}, // E0.0 [2] (🗟..🗠) PAGE WITH CIRCLED TEXT..STOCK CHART
+ {0x1F5E1, 0x1F5E1, prExtendedPictographic}, // E0.7 [1] (🗡️) dagger
+ {0x1F5E2, 0x1F5E2, prExtendedPictographic}, // E0.0 [1] (🗢) LIPS
+ {0x1F5E3, 0x1F5E3, prExtendedPictographic}, // E0.7 [1] (🗣️) speaking head
+ {0x1F5E4, 0x1F5E7, prExtendedPictographic}, // E0.0 [4] (🗤..🗧) THREE RAYS ABOVE..THREE RAYS RIGHT
+ {0x1F5E8, 0x1F5E8, prExtendedPictographic}, // E2.0 [1] (🗨️) left speech bubble
+ {0x1F5E9, 0x1F5EE, prExtendedPictographic}, // E0.0 [6] (🗩..🗮) RIGHT SPEECH BUBBLE..LEFT ANGER BUBBLE
+ {0x1F5EF, 0x1F5EF, prExtendedPictographic}, // E0.7 [1] (🗯️) right anger bubble
+ {0x1F5F0, 0x1F5F2, prExtendedPictographic}, // E0.0 [3] (🗰..🗲) MOOD BUBBLE..LIGHTNING MOOD
+ {0x1F5F3, 0x1F5F3, prExtendedPictographic}, // E0.7 [1] (🗳️) ballot box with ballot
+ {0x1F5F4, 0x1F5F9, prExtendedPictographic}, // E0.0 [6] (🗴..🗹) BALLOT SCRIPT X..BALLOT BOX WITH BOLD CHECK
+ {0x1F5FA, 0x1F5FA, prExtendedPictographic}, // E0.7 [1] (🗺️) world map
+ {0x1F5FB, 0x1F5FF, prExtendedPictographic}, // E0.6 [5] (🗻..🗿) mount fuji..moai
+ {0x1F600, 0x1F600, prExtendedPictographic}, // E1.0 [1] (😀) grinning face
+ {0x1F601, 0x1F606, prExtendedPictographic}, // E0.6 [6] (😁..😆) beaming face with smiling eyes..grinning squinting face
+ {0x1F607, 0x1F608, prExtendedPictographic}, // E1.0 [2] (😇..😈) smiling face with halo..smiling face with horns
+ {0x1F609, 0x1F60D, prExtendedPictographic}, // E0.6 [5] (😉..😍) winking face..smiling face with heart-eyes
+ {0x1F60E, 0x1F60E, prExtendedPictographic}, // E1.0 [1] (😎) smiling face with sunglasses
+ {0x1F60F, 0x1F60F, prExtendedPictographic}, // E0.6 [1] (😏) smirking face
+ {0x1F610, 0x1F610, prExtendedPictographic}, // E0.7 [1] (😐) neutral face
+ {0x1F611, 0x1F611, prExtendedPictographic}, // E1.0 [1] (😑) expressionless face
+ {0x1F612, 0x1F614, prExtendedPictographic}, // E0.6 [3] (😒..😔) unamused face..pensive face
+ {0x1F615, 0x1F615, prExtendedPictographic}, // E1.0 [1] (😕) confused face
+ {0x1F616, 0x1F616, prExtendedPictographic}, // E0.6 [1] (😖) confounded face
+ {0x1F617, 0x1F617, prExtendedPictographic}, // E1.0 [1] (😗) kissing face
+ {0x1F618, 0x1F618, prExtendedPictographic}, // E0.6 [1] (😘) face blowing a kiss
+ {0x1F619, 0x1F619, prExtendedPictographic}, // E1.0 [1] (😙) kissing face with smiling eyes
+ {0x1F61A, 0x1F61A, prExtendedPictographic}, // E0.6 [1] (😚) kissing face with closed eyes
+ {0x1F61B, 0x1F61B, prExtendedPictographic}, // E1.0 [1] (😛) face with tongue
+ {0x1F61C, 0x1F61E, prExtendedPictographic}, // E0.6 [3] (😜..😞) winking face with tongue..disappointed face
+ {0x1F61F, 0x1F61F, prExtendedPictographic}, // E1.0 [1] (😟) worried face
+ {0x1F620, 0x1F625, prExtendedPictographic}, // E0.6 [6] (😠..😥) angry face..sad but relieved face
+ {0x1F626, 0x1F627, prExtendedPictographic}, // E1.0 [2] (😦..😧) frowning face with open mouth..anguished face
+ {0x1F628, 0x1F62B, prExtendedPictographic}, // E0.6 [4] (😨..😫) fearful face..tired face
+ {0x1F62C, 0x1F62C, prExtendedPictographic}, // E1.0 [1] (😬) grimacing face
+ {0x1F62D, 0x1F62D, prExtendedPictographic}, // E0.6 [1] (😭) loudly crying face
+ {0x1F62E, 0x1F62F, prExtendedPictographic}, // E1.0 [2] (😮..😯) face with open mouth..hushed face
+ {0x1F630, 0x1F633, prExtendedPictographic}, // E0.6 [4] (😰..😳) anxious face with sweat..flushed face
+ {0x1F634, 0x1F634, prExtendedPictographic}, // E1.0 [1] (😴) sleeping face
+ {0x1F635, 0x1F635, prExtendedPictographic}, // E0.6 [1] (😵) face with crossed-out eyes
+ {0x1F636, 0x1F636, prExtendedPictographic}, // E1.0 [1] (😶) face without mouth
+ {0x1F637, 0x1F640, prExtendedPictographic}, // E0.6 [10] (😷..🙀) face with medical mask..weary cat
+ {0x1F641, 0x1F644, prExtendedPictographic}, // E1.0 [4] (🙁..🙄) slightly frowning face..face with rolling eyes
+ {0x1F645, 0x1F64F, prExtendedPictographic}, // E0.6 [11] (🙅..🙏) person gesturing NO..folded hands
+ {0x1F680, 0x1F680, prExtendedPictographic}, // E0.6 [1] (🚀) rocket
+ {0x1F681, 0x1F682, prExtendedPictographic}, // E1.0 [2] (🚁..🚂) helicopter..locomotive
+ {0x1F683, 0x1F685, prExtendedPictographic}, // E0.6 [3] (🚃..🚅) railway car..bullet train
+ {0x1F686, 0x1F686, prExtendedPictographic}, // E1.0 [1] (🚆) train
+ {0x1F687, 0x1F687, prExtendedPictographic}, // E0.6 [1] (🚇) metro
+ {0x1F688, 0x1F688, prExtendedPictographic}, // E1.0 [1] (🚈) light rail
+ {0x1F689, 0x1F689, prExtendedPictographic}, // E0.6 [1] (🚉) station
+ {0x1F68A, 0x1F68B, prExtendedPictographic}, // E1.0 [2] (🚊..🚋) tram..tram car
+ {0x1F68C, 0x1F68C, prExtendedPictographic}, // E0.6 [1] (🚌) bus
+ {0x1F68D, 0x1F68D, prExtendedPictographic}, // E0.7 [1] (🚍) oncoming bus
+ {0x1F68E, 0x1F68E, prExtendedPictographic}, // E1.0 [1] (🚎) trolleybus
+ {0x1F68F, 0x1F68F, prExtendedPictographic}, // E0.6 [1] (🚏) bus stop
+ {0x1F690, 0x1F690, prExtendedPictographic}, // E1.0 [1] (🚐) minibus
+ {0x1F691, 0x1F693, prExtendedPictographic}, // E0.6 [3] (🚑..🚓) ambulance..police car
+ {0x1F694, 0x1F694, prExtendedPictographic}, // E0.7 [1] (🚔) oncoming police car
+ {0x1F695, 0x1F695, prExtendedPictographic}, // E0.6 [1] (🚕) taxi
+ {0x1F696, 0x1F696, prExtendedPictographic}, // E1.0 [1] (🚖) oncoming taxi
+ {0x1F697, 0x1F697, prExtendedPictographic}, // E0.6 [1] (🚗) automobile
+ {0x1F698, 0x1F698, prExtendedPictographic}, // E0.7 [1] (🚘) oncoming automobile
+ {0x1F699, 0x1F69A, prExtendedPictographic}, // E0.6 [2] (🚙..🚚) sport utility vehicle..delivery truck
+ {0x1F69B, 0x1F6A1, prExtendedPictographic}, // E1.0 [7] (🚛..🚡) articulated lorry..aerial tramway
+ {0x1F6A2, 0x1F6A2, prExtendedPictographic}, // E0.6 [1] (🚢) ship
+ {0x1F6A3, 0x1F6A3, prExtendedPictographic}, // E1.0 [1] (🚣) person rowing boat
+ {0x1F6A4, 0x1F6A5, prExtendedPictographic}, // E0.6 [2] (🚤..🚥) speedboat..horizontal traffic light
+ {0x1F6A6, 0x1F6A6, prExtendedPictographic}, // E1.0 [1] (🚦) vertical traffic light
+ {0x1F6A7, 0x1F6AD, prExtendedPictographic}, // E0.6 [7] (🚧..🚭) construction..no smoking
+ {0x1F6AE, 0x1F6B1, prExtendedPictographic}, // E1.0 [4] (🚮..🚱) litter in bin sign..non-potable water
+ {0x1F6B2, 0x1F6B2, prExtendedPictographic}, // E0.6 [1] (🚲) bicycle
+ {0x1F6B3, 0x1F6B5, prExtendedPictographic}, // E1.0 [3] (🚳..🚵) no bicycles..person mountain biking
+ {0x1F6B6, 0x1F6B6, prExtendedPictographic}, // E0.6 [1] (🚶) person walking
+ {0x1F6B7, 0x1F6B8, prExtendedPictographic}, // E1.0 [2] (🚷..🚸) no pedestrians..children crossing
+ {0x1F6B9, 0x1F6BE, prExtendedPictographic}, // E0.6 [6] (🚹..🚾) men’s room..water closet
+ {0x1F6BF, 0x1F6BF, prExtendedPictographic}, // E1.0 [1] (🚿) shower
+ {0x1F6C0, 0x1F6C0, prExtendedPictographic}, // E0.6 [1] (🛀) person taking bath
+ {0x1F6C1, 0x1F6C5, prExtendedPictographic}, // E1.0 [5] (🛁..🛅) bathtub..left luggage
+ {0x1F6C6, 0x1F6CA, prExtendedPictographic}, // E0.0 [5] (🛆..🛊) TRIANGLE WITH ROUNDED CORNERS..GIRLS SYMBOL
+ {0x1F6CB, 0x1F6CB, prExtendedPictographic}, // E0.7 [1] (🛋️) couch and lamp
+ {0x1F6CC, 0x1F6CC, prExtendedPictographic}, // E1.0 [1] (🛌) person in bed
+ {0x1F6CD, 0x1F6CF, prExtendedPictographic}, // E0.7 [3] (🛍️..🛏️) shopping bags..bed
+ {0x1F6D0, 0x1F6D0, prExtendedPictographic}, // E1.0 [1] (🛐) place of worship
+ {0x1F6D1, 0x1F6D2, prExtendedPictographic}, // E3.0 [2] (🛑..🛒) stop sign..shopping cart
+ {0x1F6D3, 0x1F6D4, prExtendedPictographic}, // E0.0 [2] (🛓..🛔) STUPA..PAGODA
+ {0x1F6D5, 0x1F6D5, prExtendedPictographic}, // E12.0 [1] (🛕) hindu temple
+ {0x1F6D6, 0x1F6D7, prExtendedPictographic}, // E13.0 [2] (🛖..🛗) hut..elevator
+ {0x1F6D8, 0x1F6DB, prExtendedPictographic}, // E0.0 [4] (..) <reserved-1F6D8>..<reserved-1F6DB>
+ {0x1F6DC, 0x1F6DC, prExtendedPictographic}, // E15.0 [1] (🛜) wireless
+ {0x1F6DD, 0x1F6DF, prExtendedPictographic}, // E14.0 [3] (🛝..🛟) playground slide..ring buoy
+ {0x1F6E0, 0x1F6E5, prExtendedPictographic}, // E0.7 [6] (🛠️..🛥️) hammer and wrench..motor boat
+ {0x1F6E6, 0x1F6E8, prExtendedPictographic}, // E0.0 [3] (🛦..🛨) UP-POINTING MILITARY AIRPLANE..UP-POINTING SMALL AIRPLANE
+ {0x1F6E9, 0x1F6E9, prExtendedPictographic}, // E0.7 [1] (🛩️) small airplane
+ {0x1F6EA, 0x1F6EA, prExtendedPictographic}, // E0.0 [1] (🛪) NORTHEAST-POINTING AIRPLANE
+ {0x1F6EB, 0x1F6EC, prExtendedPictographic}, // E1.0 [2] (🛫..🛬) airplane departure..airplane arrival
+ {0x1F6ED, 0x1F6EF, prExtendedPictographic}, // E0.0 [3] (..) <reserved-1F6ED>..<reserved-1F6EF>
+ {0x1F6F0, 0x1F6F0, prExtendedPictographic}, // E0.7 [1] (🛰️) satellite
+ {0x1F6F1, 0x1F6F2, prExtendedPictographic}, // E0.0 [2] (🛱..🛲) ONCOMING FIRE ENGINE..DIESEL LOCOMOTIVE
+ {0x1F6F3, 0x1F6F3, prExtendedPictographic}, // E0.7 [1] (🛳️) passenger ship
+ {0x1F6F4, 0x1F6F6, prExtendedPictographic}, // E3.0 [3] (🛴..🛶) kick scooter..canoe
+ {0x1F6F7, 0x1F6F8, prExtendedPictographic}, // E5.0 [2] (🛷..🛸) sled..flying saucer
+ {0x1F6F9, 0x1F6F9, prExtendedPictographic}, // E11.0 [1] (🛹) skateboard
+ {0x1F6FA, 0x1F6FA, prExtendedPictographic}, // E12.0 [1] (🛺) auto rickshaw
+ {0x1F6FB, 0x1F6FC, prExtendedPictographic}, // E13.0 [2] (🛻..🛼) pickup truck..roller skate
+ {0x1F6FD, 0x1F6FF, prExtendedPictographic}, // E0.0 [3] (..) <reserved-1F6FD>..<reserved-1F6FF>
+ {0x1F774, 0x1F77F, prExtendedPictographic}, // E0.0 [12] (🝴..🝿) LOT OF FORTUNE..ORCUS
+ {0x1F7D5, 0x1F7DF, prExtendedPictographic}, // E0.0 [11] (🟕..) CIRCLED TRIANGLE..<reserved-1F7DF>
+ {0x1F7E0, 0x1F7EB, prExtendedPictographic}, // E12.0 [12] (🟠..🟫) orange circle..brown square
+ {0x1F7EC, 0x1F7EF, prExtendedPictographic}, // E0.0 [4] (..) <reserved-1F7EC>..<reserved-1F7EF>
+ {0x1F7F0, 0x1F7F0, prExtendedPictographic}, // E14.0 [1] (🟰) heavy equals sign
+ {0x1F7F1, 0x1F7FF, prExtendedPictographic}, // E0.0 [15] (..) <reserved-1F7F1>..<reserved-1F7FF>
+ {0x1F80C, 0x1F80F, prExtendedPictographic}, // E0.0 [4] (..) <reserved-1F80C>..<reserved-1F80F>
+ {0x1F848, 0x1F84F, prExtendedPictographic}, // E0.0 [8] (..) <reserved-1F848>..<reserved-1F84F>
+ {0x1F85A, 0x1F85F, prExtendedPictographic}, // E0.0 [6] (..) <reserved-1F85A>..<reserved-1F85F>
+ {0x1F888, 0x1F88F, prExtendedPictographic}, // E0.0 [8] (..) <reserved-1F888>..<reserved-1F88F>
+ {0x1F8AE, 0x1F8FF, prExtendedPictographic}, // E0.0 [82] (..) <reserved-1F8AE>..<reserved-1F8FF>
+ {0x1F90C, 0x1F90C, prExtendedPictographic}, // E13.0 [1] (🤌) pinched fingers
+ {0x1F90D, 0x1F90F, prExtendedPictographic}, // E12.0 [3] (🤍..🤏) white heart..pinching hand
+ {0x1F910, 0x1F918, prExtendedPictographic}, // E1.0 [9] (🤐..🤘) zipper-mouth face..sign of the horns
+ {0x1F919, 0x1F91E, prExtendedPictographic}, // E3.0 [6] (🤙..🤞) call me hand..crossed fingers
+ {0x1F91F, 0x1F91F, prExtendedPictographic}, // E5.0 [1] (🤟) love-you gesture
+ {0x1F920, 0x1F927, prExtendedPictographic}, // E3.0 [8] (🤠..🤧) cowboy hat face..sneezing face
+ {0x1F928, 0x1F92F, prExtendedPictographic}, // E5.0 [8] (🤨..🤯) face with raised eyebrow..exploding head
+ {0x1F930, 0x1F930, prExtendedPictographic}, // E3.0 [1] (🤰) pregnant woman
+ {0x1F931, 0x1F932, prExtendedPictographic}, // E5.0 [2] (🤱..🤲) breast-feeding..palms up together
+ {0x1F933, 0x1F93A, prExtendedPictographic}, // E3.0 [8] (🤳..🤺) selfie..person fencing
+ {0x1F93C, 0x1F93E, prExtendedPictographic}, // E3.0 [3] (🤼..🤾) people wrestling..person playing handball
+ {0x1F93F, 0x1F93F, prExtendedPictographic}, // E12.0 [1] (🤿) diving mask
+ {0x1F940, 0x1F945, prExtendedPictographic}, // E3.0 [6] (🥀..🥅) wilted flower..goal net
+ {0x1F947, 0x1F94B, prExtendedPictographic}, // E3.0 [5] (🥇..🥋) 1st place medal..martial arts uniform
+ {0x1F94C, 0x1F94C, prExtendedPictographic}, // E5.0 [1] (🥌) curling stone
+ {0x1F94D, 0x1F94F, prExtendedPictographic}, // E11.0 [3] (🥍..🥏) lacrosse..flying disc
+ {0x1F950, 0x1F95E, prExtendedPictographic}, // E3.0 [15] (🥐..🥞) croissant..pancakes
+ {0x1F95F, 0x1F96B, prExtendedPictographic}, // E5.0 [13] (🥟..🥫) dumpling..canned food
+ {0x1F96C, 0x1F970, prExtendedPictographic}, // E11.0 [5] (🥬..🥰) leafy green..smiling face with hearts
+ {0x1F971, 0x1F971, prExtendedPictographic}, // E12.0 [1] (🥱) yawning face
+ {0x1F972, 0x1F972, prExtendedPictographic}, // E13.0 [1] (🥲) smiling face with tear
+ {0x1F973, 0x1F976, prExtendedPictographic}, // E11.0 [4] (🥳..🥶) partying face..cold face
+ {0x1F977, 0x1F978, prExtendedPictographic}, // E13.0 [2] (🥷..🥸) ninja..disguised face
+ {0x1F979, 0x1F979, prExtendedPictographic}, // E14.0 [1] (🥹) face holding back tears
+ {0x1F97A, 0x1F97A, prExtendedPictographic}, // E11.0 [1] (🥺) pleading face
+ {0x1F97B, 0x1F97B, prExtendedPictographic}, // E12.0 [1] (🥻) sari
+ {0x1F97C, 0x1F97F, prExtendedPictographic}, // E11.0 [4] (🥼..🥿) lab coat..flat shoe
+ {0x1F980, 0x1F984, prExtendedPictographic}, // E1.0 [5] (🦀..🦄) crab..unicorn
+ {0x1F985, 0x1F991, prExtendedPictographic}, // E3.0 [13] (🦅..🦑) eagle..squid
+ {0x1F992, 0x1F997, prExtendedPictographic}, // E5.0 [6] (🦒..🦗) giraffe..cricket
+ {0x1F998, 0x1F9A2, prExtendedPictographic}, // E11.0 [11] (🦘..🦢) kangaroo..swan
+ {0x1F9A3, 0x1F9A4, prExtendedPictographic}, // E13.0 [2] (🦣..🦤) mammoth..dodo
+ {0x1F9A5, 0x1F9AA, prExtendedPictographic}, // E12.0 [6] (🦥..🦪) sloth..oyster
+ {0x1F9AB, 0x1F9AD, prExtendedPictographic}, // E13.0 [3] (🦫..🦭) beaver..seal
+ {0x1F9AE, 0x1F9AF, prExtendedPictographic}, // E12.0 [2] (🦮..🦯) guide dog..white cane
+ {0x1F9B0, 0x1F9B9, prExtendedPictographic}, // E11.0 [10] (🦰..🦹) red hair..supervillain
+ {0x1F9BA, 0x1F9BF, prExtendedPictographic}, // E12.0 [6] (🦺..🦿) safety vest..mechanical leg
+ {0x1F9C0, 0x1F9C0, prExtendedPictographic}, // E1.0 [1] (🧀) cheese wedge
+ {0x1F9C1, 0x1F9C2, prExtendedPictographic}, // E11.0 [2] (🧁..🧂) cupcake..salt
+ {0x1F9C3, 0x1F9CA, prExtendedPictographic}, // E12.0 [8] (🧃..🧊) beverage box..ice
+ {0x1F9CB, 0x1F9CB, prExtendedPictographic}, // E13.0 [1] (🧋) bubble tea
+ {0x1F9CC, 0x1F9CC, prExtendedPictographic}, // E14.0 [1] (🧌) troll
+ {0x1F9CD, 0x1F9CF, prExtendedPictographic}, // E12.0 [3] (🧍..🧏) person standing..deaf person
+ {0x1F9D0, 0x1F9E6, prExtendedPictographic}, // E5.0 [23] (🧐..🧦) face with monocle..socks
+ {0x1F9E7, 0x1F9FF, prExtendedPictographic}, // E11.0 [25] (🧧..🧿) red envelope..nazar amulet
+ {0x1FA00, 0x1FA6F, prExtendedPictographic}, // E0.0 [112] (🨀..) NEUTRAL CHESS KING..<reserved-1FA6F>
+ {0x1FA70, 0x1FA73, prExtendedPictographic}, // E12.0 [4] (🩰..🩳) ballet shoes..shorts
+ {0x1FA74, 0x1FA74, prExtendedPictographic}, // E13.0 [1] (🩴) thong sandal
+ {0x1FA75, 0x1FA77, prExtendedPictographic}, // E15.0 [3] (🩵..🩷) light blue heart..pink heart
+ {0x1FA78, 0x1FA7A, prExtendedPictographic}, // E12.0 [3] (🩸..🩺) drop of blood..stethoscope
+ {0x1FA7B, 0x1FA7C, prExtendedPictographic}, // E14.0 [2] (🩻..🩼) x-ray..crutch
+ {0x1FA7D, 0x1FA7F, prExtendedPictographic}, // E0.0 [3] (..) <reserved-1FA7D>..<reserved-1FA7F>
+ {0x1FA80, 0x1FA82, prExtendedPictographic}, // E12.0 [3] (🪀..🪂) yo-yo..parachute
+ {0x1FA83, 0x1FA86, prExtendedPictographic}, // E13.0 [4] (🪃..🪆) boomerang..nesting dolls
+ {0x1FA87, 0x1FA88, prExtendedPictographic}, // E15.0 [2] (🪇..🪈) maracas..flute
+ {0x1FA89, 0x1FA8F, prExtendedPictographic}, // E0.0 [7] (..) <reserved-1FA89>..<reserved-1FA8F>
+ {0x1FA90, 0x1FA95, prExtendedPictographic}, // E12.0 [6] (🪐..🪕) ringed planet..banjo
+ {0x1FA96, 0x1FAA8, prExtendedPictographic}, // E13.0 [19] (🪖..🪨) military helmet..rock
+ {0x1FAA9, 0x1FAAC, prExtendedPictographic}, // E14.0 [4] (🪩..🪬) mirror ball..hamsa
+ {0x1FAAD, 0x1FAAF, prExtendedPictographic}, // E15.0 [3] (🪭..🪯) folding hand fan..khanda
+ {0x1FAB0, 0x1FAB6, prExtendedPictographic}, // E13.0 [7] (🪰..🪶) fly..feather
+ {0x1FAB7, 0x1FABA, prExtendedPictographic}, // E14.0 [4] (🪷..🪺) lotus..nest with eggs
+ {0x1FABB, 0x1FABD, prExtendedPictographic}, // E15.0 [3] (🪻..🪽) hyacinth..wing
+ {0x1FABE, 0x1FABE, prExtendedPictographic}, // E0.0 [1] () <reserved-1FABE>
+ {0x1FABF, 0x1FABF, prExtendedPictographic}, // E15.0 [1] (🪿) goose
+ {0x1FAC0, 0x1FAC2, prExtendedPictographic}, // E13.0 [3] (🫀..🫂) anatomical heart..people hugging
+ {0x1FAC3, 0x1FAC5, prExtendedPictographic}, // E14.0 [3] (🫃..🫅) pregnant man..person with crown
+ {0x1FAC6, 0x1FACD, prExtendedPictographic}, // E0.0 [8] (..) <reserved-1FAC6>..<reserved-1FACD>
+ {0x1FACE, 0x1FACF, prExtendedPictographic}, // E15.0 [2] (🫎..🫏) moose..donkey
+ {0x1FAD0, 0x1FAD6, prExtendedPictographic}, // E13.0 [7] (🫐..🫖) blueberries..teapot
+ {0x1FAD7, 0x1FAD9, prExtendedPictographic}, // E14.0 [3] (🫗..🫙) pouring liquid..jar
+ {0x1FADA, 0x1FADB, prExtendedPictographic}, // E15.0 [2] (🫚..🫛) ginger root..pea pod
+ {0x1FADC, 0x1FADF, prExtendedPictographic}, // E0.0 [4] (..) <reserved-1FADC>..<reserved-1FADF>
+ {0x1FAE0, 0x1FAE7, prExtendedPictographic}, // E14.0 [8] (🫠..🫧) melting face..bubbles
+ {0x1FAE8, 0x1FAE8, prExtendedPictographic}, // E15.0 [1] (🫨) shaking face
+ {0x1FAE9, 0x1FAEF, prExtendedPictographic}, // E0.0 [7] (..) <reserved-1FAE9>..<reserved-1FAEF>
+ {0x1FAF0, 0x1FAF6, prExtendedPictographic}, // E14.0 [7] (🫰..🫶) hand with index finger and thumb crossed..heart hands
+ {0x1FAF7, 0x1FAF8, prExtendedPictographic}, // E15.0 [2] (🫷..🫸) leftwards pushing hand..rightwards pushing hand
+ {0x1FAF9, 0x1FAFF, prExtendedPictographic}, // E0.0 [7] (..) <reserved-1FAF9>..<reserved-1FAFF>
+ {0x1FC00, 0x1FFFD, prExtendedPictographic}, // E0.0[1022] (..) <reserved-1FC00>..<reserved-1FFFD>
+ {0xE0000, 0xE0000, prControl}, // Cn <reserved-E0000>
+ {0xE0001, 0xE0001, prControl}, // Cf LANGUAGE TAG
+ {0xE0002, 0xE001F, prControl}, // Cn [30] <reserved-E0002>..<reserved-E001F>
+ {0xE0020, 0xE007F, prExtend}, // Cf [96] TAG SPACE..CANCEL TAG
+ {0xE0080, 0xE00FF, prControl}, // Cn [128] <reserved-E0080>..<reserved-E00FF>
+ {0xE0100, 0xE01EF, prExtend}, // Mn [240] VARIATION SELECTOR-17..VARIATION SELECTOR-256
+ {0xE01F0, 0xE0FFF, prControl}, // Cn [3600] <reserved-E01F0>..<reserved-E0FFF>
+}
diff --git a/vendor/github.com/rivo/uniseg/graphemerules.go b/vendor/github.com/rivo/uniseg/graphemerules.go
new file mode 100644
index 0000000000..5d399d29c8
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/graphemerules.go
@@ -0,0 +1,176 @@
+package uniseg
+
+// The states of the grapheme cluster parser.
+const (
+ grAny = iota
+ grCR
+ grControlLF
+ grL
+ grLVV
+ grLVTT
+ grPrepend
+ grExtendedPictographic
+ grExtendedPictographicZWJ
+ grRIOdd
+ grRIEven
+)
+
+// The grapheme cluster parser's breaking instructions.
+const (
+ grNoBoundary = iota
+ grBoundary
+)
+
+// grTransitions implements the grapheme cluster parser's state transitions.
+// Maps state and property to a new state, a breaking instruction, and rule
+// number. The breaking instruction always refers to the boundary between the
+// last and next code point. Returns negative values if no transition is found.
+//
+// This function is used as follows:
+//
+// 1. Find specific state + specific property. Stop if found.
+// 2. Find specific state + any property.
+// 3. Find any state + specific property.
+// 4. If only (2) or (3) (but not both) was found, stop.
+// 5. If both (2) and (3) were found, use state from (3) and breaking instruction
+// from the transition with the lower rule number, prefer (3) if rule numbers
+// are equal. Stop.
+// 6. Assume grAny and grBoundary.
+//
+// Unicode version 15.0.0.
+func grTransitions(state, prop int) (newState int, newProp int, boundary int) {
+ // It turns out that using a big switch statement is much faster than using
+ // a map.
+
+ switch uint64(state) | uint64(prop)<<32 {
+ // GB5
+ case grAny | prCR<<32:
+ return grCR, grBoundary, 50
+ case grAny | prLF<<32:
+ return grControlLF, grBoundary, 50
+ case grAny | prControl<<32:
+ return grControlLF, grBoundary, 50
+
+ // GB4
+ case grCR | prAny<<32:
+ return grAny, grBoundary, 40
+ case grControlLF | prAny<<32:
+ return grAny, grBoundary, 40
+
+ // GB3
+ case grCR | prLF<<32:
+ return grControlLF, grNoBoundary, 30
+
+ // GB6
+ case grAny | prL<<32:
+ return grL, grBoundary, 9990
+ case grL | prL<<32:
+ return grL, grNoBoundary, 60
+ case grL | prV<<32:
+ return grLVV, grNoBoundary, 60
+ case grL | prLV<<32:
+ return grLVV, grNoBoundary, 60
+ case grL | prLVT<<32:
+ return grLVTT, grNoBoundary, 60
+
+ // GB7
+ case grAny | prLV<<32:
+ return grLVV, grBoundary, 9990
+ case grAny | prV<<32:
+ return grLVV, grBoundary, 9990
+ case grLVV | prV<<32:
+ return grLVV, grNoBoundary, 70
+ case grLVV | prT<<32:
+ return grLVTT, grNoBoundary, 70
+
+ // GB8
+ case grAny | prLVT<<32:
+ return grLVTT, grBoundary, 9990
+ case grAny | prT<<32:
+ return grLVTT, grBoundary, 9990
+ case grLVTT | prT<<32:
+ return grLVTT, grNoBoundary, 80
+
+ // GB9
+ case grAny | prExtend<<32:
+ return grAny, grNoBoundary, 90
+ case grAny | prZWJ<<32:
+ return grAny, grNoBoundary, 90
+
+ // GB9a
+ case grAny | prSpacingMark<<32:
+ return grAny, grNoBoundary, 91
+
+ // GB9b
+ case grAny | prPrepend<<32:
+ return grPrepend, grBoundary, 9990
+ case grPrepend | prAny<<32:
+ return grAny, grNoBoundary, 92
+
+ // GB11
+ case grAny | prExtendedPictographic<<32:
+ return grExtendedPictographic, grBoundary, 9990
+ case grExtendedPictographic | prExtend<<32:
+ return grExtendedPictographic, grNoBoundary, 110
+ case grExtendedPictographic | prZWJ<<32:
+ return grExtendedPictographicZWJ, grNoBoundary, 110
+ case grExtendedPictographicZWJ | prExtendedPictographic<<32:
+ return grExtendedPictographic, grNoBoundary, 110
+
+ // GB12 / GB13
+ case grAny | prRegionalIndicator<<32:
+ return grRIOdd, grBoundary, 9990
+ case grRIOdd | prRegionalIndicator<<32:
+ return grRIEven, grNoBoundary, 120
+ case grRIEven | prRegionalIndicator<<32:
+ return grRIOdd, grBoundary, 120
+ default:
+ return -1, -1, -1
+ }
+}
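To make the lookup order above concrete, here is a hedged sketch of a step-1 hit. It is a hypothetical snippet written as if it lived in the same package (it is not part of the vendored file) and uses only identifiers defined in graphemerules.go: a CR state followed by an LF code point matches rule GB3 directly, so none of the fallback lookups are consulted.

```go
package uniseg

// gb3Example is illustrative only: a "specific state + specific property"
// lookup (step 1) for CR followed by LF. GB3 keeps the two code points in
// the same grapheme cluster, so the instruction is grNoBoundary.
func gb3Example() bool {
	newState, instruction, rule := grTransitions(grCR, prLF)
	return newState == grControlLF && instruction == grNoBoundary && rule == 30
}
```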
+
+// transitionGraphemeState determines the new state of the grapheme cluster
+// parser given the current state and the next code point. It also returns the
+// code point's grapheme property (the value mapped by the [graphemeCodePoints]
+// table) and whether a cluster boundary was detected.
+func transitionGraphemeState(state int, r rune) (newState, prop int, boundary bool) {
+ // Determine the property of the next character.
+ prop = propertyGraphemes(r)
+
+ // Find the applicable transition.
+ nextState, nextProp, _ := grTransitions(state, prop)
+ if nextState >= 0 {
+ // We have a specific transition. We'll use it.
+ return nextState, prop, nextProp == grBoundary
+ }
+
+ // No specific transition found. Try the less specific ones.
+ anyPropState, anyPropProp, anyPropRule := grTransitions(state, prAny)
+ anyStateState, anyStateProp, anyStateRule := grTransitions(grAny, prop)
+ if anyPropState >= 0 && anyStateState >= 0 {
+ // Both apply. We'll use a mix (see comments for grTransitions).
+ newState = anyStateState
+ boundary = anyStateProp == grBoundary
+ if anyPropRule < anyStateRule {
+ boundary = anyPropProp == grBoundary
+ }
+ return
+ }
+
+ if anyPropState >= 0 {
+ // We only have a specific state.
+ return anyPropState, prop, anyPropProp == grBoundary
+ // This branch will probably never be reached because the "any state"
+ // lookup above always succeeds given the current transition map. But we
+ // keep it here for future modifications to the transition map where this
+ // may not be true anymore.
+ }
+
+ if anyStateState >= 0 {
+ // We only have a specific property.
+ return anyStateState, prop, anyStateProp == grBoundary
+ }
+
+ // No known transition. GB999: Any ÷ Any.
+ return grAny, prop, true
+}
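The mixing step (5) matters when the two partial lookups disagree. The following hedged, package-internal sketch (hypothetical, not part of the vendored file; it relies on the unexported transitionGraphemeState above and on the generated property tables) feeds a Prepend state and a regional indicator rune into the parser: GB9b (rule 92) suppresses the boundary that the generic "enter regional-indicator state" transition (rule 9990) would otherwise report, while the new state still comes from that generic transition.

```go
package uniseg

// prependThenRIExample is illustrative only. For state grPrepend and the
// regional indicator U+1F1E6, no specific transition exists. The parser
// therefore combines (grPrepend, prAny), which is GB9b with rule 92, and
// (grAny, prRegionalIndicator), which has rule 9990. The lower rule number
// decides the breaking instruction (no boundary), while the new state is
// taken from the "any state" lookup (grRIOdd).
func prependThenRIExample() bool {
	newState, prop, boundary := transitionGraphemeState(grPrepend, 0x1F1E6)
	return newState == grRIOdd && prop == prRegionalIndicator && !boundary
}
```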
diff --git a/vendor/github.com/rivo/uniseg/line.go b/vendor/github.com/rivo/uniseg/line.go
new file mode 100644
index 0000000000..7a46318d93
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/line.go
@@ -0,0 +1,134 @@
+package uniseg
+
+import "unicode/utf8"
+
+// FirstLineSegment returns the prefix of the given byte slice after which a
+// decision to break the string over to the next line can or must be made,
+// according to the rules of [Unicode Standard Annex #14]. This is used to
+// implement line breaking.
+//
+// Line breaking, also known as word wrapping, is the process of breaking a
+// section of text into lines such that it will fit in the available width of a
+// page, window or other display area.
+//
+// The returned "segment" may not be broken into smaller parts, unless no other
+// breaking opportunities present themselves, in which case you may break by
+// grapheme clusters (using the [FirstGraphemeCluster] function to determine the
+// grapheme clusters).
+//
+// The "mustBreak" flag indicates whether you MUST break the line after the
+// given segment (true), for example after newline characters, or you MAY break
+// the line after the given segment (false).
+//
+// This function can be called continuously to extract all non-breaking sub-sets
+// from a byte slice, as illustrated in the example below.
+//
+// If you don't know the current state, for example when calling the function
+// for the first time, you must pass -1. For consecutive calls, pass the state
+// and rest slice returned by the previous call.
+//
+// The "rest" slice is the sub-slice of the original byte slice "b" starting
+// after the last byte of the identified line segment. If the length of the
+// "rest" slice is 0, the entire byte slice "b" has been processed. The
+// "segment" byte slice is the sub-slice of the input slice containing the
+// identified line segment.
+//
+// Given an empty byte slice "b", the function returns nil values.
+//
+// Note that in accordance with [UAX #14 LB3], the final segment will end with
+// "mustBreak" set to true. You can choose to ignore this by checking if the
+// length of the "rest" slice is 0 and calling [HasTrailingLineBreak] or
+// [HasTrailingLineBreakInString] on the last rune.
+//
+// Note also that this algorithm may break within grapheme clusters. This is
+// addressed in Section 8.2 Example 6 of UAX #14. To avoid this, you can use
+// the [Step] function instead.
+//
+// [Unicode Standard Annex #14]: https://www.unicode.org/reports/tr14/
+// [UAX #14 LB3]: https://www.unicode.org/reports/tr14/#Algorithm
+func FirstLineSegment(b []byte, state int) (segment, rest []byte, mustBreak bool, newState int) {
+ // An empty byte slice returns nothing.
+ if len(b) == 0 {
+ return
+ }
+
+ // Extract the first rune.
+ r, length := utf8.DecodeRune(b)
+ if len(b) <= length { // If we're already past the end, there is nothing else to parse.
+ return b, nil, true, lbAny // LB3.
+ }
+
+ // If we don't know the state, determine it now.
+ if state < 0 {
+ state, _ = transitionLineBreakState(state, r, b[length:], "")
+ }
+
+ // Transition until we find a boundary.
+ var boundary int
+ for {
+ r, l := utf8.DecodeRune(b[length:])
+ state, boundary = transitionLineBreakState(state, r, b[length+l:], "")
+
+ if boundary != LineDontBreak {
+ return b[:length], b[length:], boundary == LineMustBreak, state
+ }
+
+ length += l
+ if len(b) <= length {
+ return b, nil, true, lbAny // LB3
+ }
+ }
+}
+
+// FirstLineSegmentInString is like [FirstLineSegment] but its input and outputs
+// are strings.
+func FirstLineSegmentInString(str string, state int) (segment, rest string, mustBreak bool, newState int) {
+ // An empty string returns nothing.
+ if len(str) == 0 {
+ return
+ }
+
+ // Extract the first rune.
+ r, length := utf8.DecodeRuneInString(str)
+ if len(str) <= length { // If we're already past the end, there is nothing else to parse.
+ return str, "", true, lbAny // LB3.
+ }
+
+ // If we don't know the state, determine it now.
+ if state < 0 {
+ state, _ = transitionLineBreakState(state, r, nil, str[length:])
+ }
+
+ // Transition until we find a boundary.
+ var boundary int
+ for {
+ r, l := utf8.DecodeRuneInString(str[length:])
+ state, boundary = transitionLineBreakState(state, r, nil, str[length+l:])
+
+ if boundary != LineDontBreak {
+ return str[:length], str[length:], boundary == LineMustBreak, state
+ }
+
+ length += l
+ if len(str) <= length {
+ return str, "", true, lbAny // LB3.
+ }
+ }
+}
+
+// HasTrailingLineBreak returns true if the last rune in the given byte slice is
+// one of the hard line break code points defined in LB4 and LB5 of [UAX #14].
+//
+// [UAX #14]: https://www.unicode.org/reports/tr14/#Algorithm
+func HasTrailingLineBreak(b []byte) bool {
+ r, _ := utf8.DecodeLastRune(b)
+ property, _ := propertyLineBreak(r)
+ return property == prBK || property == prCR || property == prLF || property == prNL
+}
+
+// HasTrailingLineBreakInString is like [HasTrailingLineBreak] but for a string.
+func HasTrailingLineBreakInString(str string) bool {
+ r, _ := utf8.DecodeLastRuneInString(str)
+ property, _ := propertyLineBreak(r)
+ return property == prBK || property == prCR || property == prLF || property == prNL
+}
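The example that the FirstLineSegment documentation refers to is not included in this diff, so here is a hedged usage sketch of the string-based helpers added above. It assumes nothing beyond the signatures visible in this file: FirstLineSegmentInString is called in a loop with an initial state of -1, threading the returned state and rest string through subsequent calls, and HasTrailingLineBreakInString checks for a hard break at the end of the input.

```go
package main

import (
	"fmt"

	"github.com/rivo/uniseg"
)

func main() {
	const input = "The quick brown fox jumps over the lazy dog.\n"

	// Walk the string and print every segment after which a line break may
	// (or must) be inserted. The state is -1 on the first call and is then
	// threaded through, as described in the FirstLineSegment documentation.
	text := input
	state := -1
	for len(text) > 0 {
		var segment string
		var mustBreak bool
		segment, text, mustBreak, state = uniseg.FirstLineSegmentInString(text, state)
		fmt.Printf("%q (must break: %v)\n", segment, mustBreak)
	}

	// Reports true here because the input ends with "\n", one of the hard
	// line break code points covered by LB4/LB5.
	fmt.Println(uniseg.HasTrailingLineBreakInString(input))
}
```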
diff --git a/vendor/github.com/rivo/uniseg/lineproperties.go b/vendor/github.com/rivo/uniseg/lineproperties.go
new file mode 100644
index 0000000000..ac7fac4c05
--- /dev/null
+++ b/vendor/github.com/rivo/uniseg/lineproperties.go
@@ -0,0 +1,3554 @@
+// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+
+package uniseg
+
+// lineBreakCodePoints are taken from
+// https://www.unicode.org/Public/15.0.0/ucd/LineBreak.txt
+// and
+// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
+// ("Extended_Pictographic" only)
+// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode
+// license agreement.
+var lineBreakCodePoints = [][4]int{
+ {0x0000, 0x0008, prCM, gcCc}, // [9] <control-0000>..<control-0008>
+ {0x0009, 0x0009, prBA, gcCc}, // <control-0009>
+ {0x000A, 0x000A, prLF, gcCc}, // <control-000A>
+ {0x000B, 0x000C, prBK, gcCc}, // [2] <control-000B>..<control-000C>
+ {0x000D, 0x000D, prCR, gcCc}, // <control-000D>
+ {0x000E, 0x001F, prCM, gcCc}, // [18] <control-000E>..<control-001F>
+ {0x0020, 0x0020, prSP, gcZs}, // SPACE
+ {0x0021, 0x0021, prEX, gcPo}, // EXCLAMATION MARK
+ {0x0022, 0x0022, prQU, gcPo}, // QUOTATION MARK
+ {0x0023, 0x0023, prAL, gcPo}, // NUMBER SIGN
+ {0x0024, 0x0024, prPR, gcSc}, // DOLLAR SIGN
+ {0x0025, 0x0025, prPO, gcPo}, // PERCENT SIGN
+ {0x0026, 0x0026, prAL, gcPo}, // AMPERSAND
+ {0x0027, 0x0027, prQU, gcPo}, // APOSTROPHE
+ {0x0028, 0x0028, prOP, gcPs}, // LEFT PARENTHESIS
+ {0x0029, 0x0029, prCP, gcPe}, // RIGHT PARENTHESIS
+ {0x002A, 0x002A, prAL, gcPo}, // ASTERISK
+ {0x002B, 0x002B, prPR, gcSm}, // PLUS SIGN
+ {0x002C, 0x002C, prIS, gcPo}, // COMMA
+ {0x002D, 0x002D, prHY, gcPd}, // HYPHEN-MINUS
+ {0x002E, 0x002E, prIS, gcPo}, // FULL STOP
+ {0x002F, 0x002F, prSY, gcPo}, // SOLIDUS
+ {0x0030, 0x0039, prNU, gcNd}, // [10] DIGIT ZERO..DIGIT NINE
+ {0x003A, 0x003B, prIS, gcPo}, // [2] COLON..SEMICOLON
+ {0x003C, 0x003E, prAL, gcSm}, // [3] LESS-THAN SIGN..GREATER-THAN SIGN
+ {0x003F, 0x003F, prEX, gcPo}, // QUESTION MARK
+ {0x0040, 0x0040, prAL, gcPo}, // COMMERCIAL AT
+ {0x0041, 0x005A, prAL, gcLu}, // [26] LATIN CAPITAL LETTER A..LATIN CAPITAL LETTER Z
+ {0x005B, 0x005B, prOP, gcPs}, // LEFT SQUARE BRACKET
+ {0x005C, 0x005C, prPR, gcPo}, // REVERSE SOLIDUS
+ {0x005D, 0x005D, prCP, gcPe}, // RIGHT SQUARE BRACKET
+ {0x005E, 0x005E, prAL, gcSk}, // CIRCUMFLEX ACCENT
+ {0x005F, 0x005F, prAL, gcPc}, // LOW LINE
+ {0x0060, 0x0060, prAL, gcSk}, // GRAVE ACCENT
+ {0x0061, 0x007A, prAL, gcLl}, // [26] LATIN SMALL LETTER A..LATIN SMALL LETTER Z
+ {0x007B, 0x007B, prOP, gcPs}, // LEFT CURLY BRACKET
+ {0x007C, 0x007C, prBA, gcSm}, // VERTICAL LINE
+ {0x007D, 0x007D, prCL, gcPe}, // RIGHT CURLY BRACKET
+ {0x007E, 0x007E, prAL, gcSm}, // TILDE
+ {0x007F, 0x007F, prCM, gcCc}, // <control-007F>
+ {0x0080, 0x0084, prCM, gcCc}, // [5] <control-0080>..<control-0084>
+ {0x0085, 0x0085, prNL, gcCc}, // <control-0085>
+ {0x0086, 0x009F, prCM, gcCc}, // [26] <control-0086>..<control-009F>
+ {0x00A0, 0x00A0, prGL, gcZs}, // NO-BREAK SPACE
+ {0x00A1, 0x00A1, prOP, gcPo}, // INVERTED EXCLAMATION MARK
+ {0x00A2, 0x00A2, prPO, gcSc}, // CENT SIGN
+ {0x00A3, 0x00A5, prPR, gcSc}, // [3] POUND SIGN..YEN SIGN
+ {0x00A6, 0x00A6, prAL, gcSo}, // BROKEN BAR
+ {0x00A7, 0x00A7, prAI, gcPo}, // SECTION SIGN
+ {0x00A8, 0x00A8, prAI, gcSk}, // DIAERESIS
+ {0x00A9, 0x00A9, prAL, gcSo}, // COPYRIGHT SIGN
+ {0x00AA, 0x00AA, prAI, gcLo}, // FEMININE ORDINAL INDICATOR
+ {0x00AB, 0x00AB, prQU, gcPi}, // LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+ {0x00AC, 0x00AC, prAL, gcSm}, // NOT SIGN
+ {0x00AD, 0x00AD, prBA, gcCf}, // SOFT HYPHEN
+ {0x00AE, 0x00AE, prAL, gcSo}, // REGISTERED SIGN
+ {0x00AF, 0x00AF, prAL, gcSk}, // MACRON
+ {0x00B0, 0x00B0, prPO, gcSo}, // DEGREE SIGN
+ {0x00B1, 0x00B1, prPR, gcSm}, // PLUS-MINUS SIGN
+ {0x00B2, 0x00B3, prAI, gcNo}, // [2] SUPERSCRIPT TWO..SUPERSCRIPT THREE
+ {0x00B4, 0x00B4, prBB, gcSk}, // ACUTE ACCENT
+ {0x00B5, 0x00B5, prAL, gcLl}, // MICRO SIGN
+ {0x00B6, 0x00B7, prAI, gcPo}, // [2] PILCROW SIGN..MIDDLE DOT
+ {0x00B8, 0x00B8, prAI, gcSk}, // CEDILLA
+ {0x00B9, 0x00B9, prAI, gcNo}, // SUPERSCRIPT ONE
+ {0x00BA, 0x00BA, prAI, gcLo}, // MASCULINE ORDINAL INDICATOR
+ {0x00BB, 0x00BB, prQU, gcPf}, // RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+ {0x00BC, 0x00BE, prAI, gcNo}, // [3] VULGAR FRACTION ONE QUARTER..VULGAR FRACTION THREE QUARTERS
+ {0x00BF, 0x00BF, prOP, gcPo}, // INVERTED QUESTION MARK
+ {0x00C0, 0x00D6, prAL, gcLu}, // [23] LATIN CAPITAL LETTER A WITH GRAVE..LATIN CAPITAL LETTER O WITH DIAERESIS
+ {0x00D7, 0x00D7, prAI, gcSm}, // MULTIPLICATION SIGN
+ {0x00D8, 0x00F6, prAL, gcLC}, // [31] LATIN CAPITAL LETTER O WITH STROKE..LATIN SMALL LETTER O WITH DIAERESIS
+ {0x00F7, 0x00F7, prAI, gcSm}, // DIVISION SIGN
+ {0x00F8, 0x00FF, prAL, gcLl}, // [8] LATIN SMALL LETTER O WITH STROKE..LATIN SMALL LETTER Y WITH DIAERESIS
+ {0x0100, 0x017F, prAL, gcLC}, // [128] LATIN CAPITAL LETTER A WITH MACRON..LATIN SMALL LETTER LONG S
+ {0x0180, 0x01BA, prAL, gcLC}, // [59] LATIN SMALL LETTER B WITH STROKE..LATIN SMALL LETTER EZH WITH TAIL
+ {0x01BB, 0x01BB, prAL, gcLo}, // LATIN LETTER TWO WITH STROKE
+ {0x01BC, 0x01BF, prAL, gcLC}, // [4] LATIN CAPITAL LETTER TONE FIVE..LATIN LETTER WYNN
+ {0x01C0, 0x01C3, prAL, gcLo}, // [4] LATIN LETTER DENTAL CLICK..LATIN LETTER RETROFLEX CLICK
+ {0x01C4, 0x024F, prAL, gcLC}, // [140] LATIN CAPITAL LETTER DZ WITH CARON..LATIN SMALL LETTER Y WITH STROKE
+ {0x0250, 0x0293, prAL, gcLl}, // [68] LATIN SMALL LETTER TURNED A..LATIN SMALL LETTER EZH WITH CURL
+ {0x0294, 0x0294, prAL, gcLo}, // LATIN LETTER GLOTTAL STOP
+ {0x0295, 0x02AF, prAL, gcLl}, // [27] LATIN LETTER PHARYNGEAL VOICED FRICATIVE..LATIN SMALL LETTER TURNED H WITH FISHHOOK AND TAIL
+ {0x02B0, 0x02C1, prAL, gcLm}, // [18] MODIFIER LETTER SMALL H..MODIFIER LETTER REVERSED GLOTTAL STOP
+ {0x02C2, 0x02C5, prAL, gcSk}, // [4] MODIFIER LETTER LEFT ARROWHEAD..MODIFIER LETTER DOWN ARROWHEAD
+ {0x02C6, 0x02C6, prAL, gcLm}, // MODIFIER LETTER CIRCUMFLEX ACCENT
+ {0x02C7, 0x02C7, prAI, gcLm}, // CARON
+ {0x02C8, 0x02C8, prBB, gcLm}, // MODIFIER LETTER VERTICAL LINE
+ {0x02C9, 0x02CB, prAI, gcLm}, // [3] MODIFIER LETTER MACRON..MODIFIER LETTER GRAVE ACCENT
+ {0x02CC, 0x02CC, prBB, gcLm}, // MODIFIER LETTER LOW VERTICAL LINE
+ {0x02CD, 0x02CD, prAI, gcLm}, // MODIFIER LETTER LOW MACRON
+ {0x02CE, 0x02CF, prAL, gcLm}, // [2] MODIFIER LETTER LOW GRAVE ACCENT..MODIFIER LETTER LOW ACUTE ACCENT
+ {0x02D0, 0x02D0, prAI, gcLm}, // MODIFIER LETTER TRIANGULAR COLON
+ {0x02D1, 0x02D1, prAL, gcLm}, // MODIFIER LETTER HALF TRIANGULAR COLON
+ {0x02D2, 0x02D7, prAL, gcSk}, // [6] MODIFIER LETTER CENTRED RIGHT HALF RING..MODIFIER LETTER MINUS SIGN
+ {0x02D8, 0x02DB, prAI, gcSk}, // [4] BREVE..OGONEK
+ {0x02DC, 0x02DC, prAL, gcSk}, // SMALL TILDE
+ {0x02DD, 0x02DD, prAI, gcSk}, // DOUBLE ACUTE ACCENT
+ {0x02DE, 0x02DE, prAL, gcSk}, // MODIFIER LETTER RHOTIC HOOK
+ {0x02DF, 0x02DF, prBB, gcSk}, // MODIFIER LETTER CROSS ACCENT
+ {0x02E0, 0x02E4, prAL, gcLm}, // [5] MODIFIER LETTER SMALL GAMMA..MODIFIER LETTER SMALL REVERSED GLOTTAL STOP
+ {0x02E5, 0x02EB, prAL, gcSk}, // [7] MODIFIER LETTER EXTRA-HIGH TONE BAR..MODIFIER LETTER YANG DEPARTING TONE MARK
+ {0x02EC, 0x02EC, prAL, gcLm}, // MODIFIER LETTER VOICING
+ {0x02ED, 0x02ED, prAL, gcSk}, // MODIFIER LETTER UNASPIRATED
+ {0x02EE, 0x02EE, prAL, gcLm}, // MODIFIER LETTER DOUBLE APOSTROPHE
+ {0x02EF, 0x02FF, prAL, gcSk}, // [17] MODIFIER LETTER LOW DOWN ARROWHEAD..MODIFIER LETTER LOW LEFT ARROW
+ {0x0300, 0x034E, prCM, gcMn}, // [79] COMBINING GRAVE ACCENT..COMBINING UPWARDS ARROW BELOW
+ {0x034F, 0x034F, prGL, gcMn}, // COMBINING GRAPHEME JOINER
+ {0x0350, 0x035B, prCM, gcMn}, // [12] COMBINING RIGHT ARROWHEAD ABOVE..COMBINING ZIGZAG ABOVE
+ {0x035C, 0x0362, prGL, gcMn}, // [7] COMBINING DOUBLE BREVE BELOW..COMBINING DOUBLE RIGHTWARDS ARROW BELOW
+ {0x0363, 0x036F, prCM, gcMn}, // [13] COMBINING LATIN SMALL LETTER A..COMBINING LATIN SMALL LETTER X
+ {0x0370, 0x0373, prAL, gcLC}, // [4] GREEK CAPITAL LETTER HETA..GREEK SMALL LETTER ARCHAIC SAMPI
+ {0x0374, 0x0374, prAL, gcLm}, // GREEK NUMERAL SIGN
+ {0x0375, 0x0375, prAL, gcSk}, // GREEK LOWER NUMERAL SIGN
+ {0x0376, 0x0377, prAL, gcLC}, // [2] GREEK CAPITAL LETTER PAMPHYLIAN DIGAMMA..GREEK SMALL LETTER PAMPHYLIAN DIGAMMA
+ {0x037A, 0x037A, prAL, gcLm}, // GREEK YPOGEGRAMMENI
+ {0x037B, 0x037D, prAL, gcLl}, // [3] GREEK SMALL REVERSED LUNATE SIGMA SYMBOL..GREEK SMALL REVERSED DOTTED LUNATE SIGMA SYMBOL
+ {0x037E, 0x037E, prIS, gcPo}, // GREEK QUESTION MARK
+ {0x037F, 0x037F, prAL, gcLu}, // GREEK CAPITAL LETTER YOT
+ {0x0384, 0x0385, prAL, gcSk}, // [2] GREEK TONOS..GREEK DIALYTIKA TONOS
+ {0x0386, 0x0386, prAL, gcLu}, // GREEK CAPITAL LETTER ALPHA WITH TONOS
+ {0x0387, 0x0387, prAL, gcPo}, // GREEK ANO TELEIA
+ {0x0388, 0x038A, prAL, gcLu}, // [3] GREEK CAPITAL LETTER EPSILON WITH TONOS..GREEK CAPITAL LETTER IOTA WITH TONOS
+ {0x038C, 0x038C, prAL, gcLu}, // GREEK CAPITAL LETTER OMICRON WITH TONOS
+ {0x038E, 0x03A1, prAL, gcLC}, // [20] GREEK CAPITAL LETTER UPSILON WITH TONOS..GREEK CAPITAL LETTER RHO
+ {0x03A3, 0x03F5, prAL, gcLC}, // [83] GREEK CAPITAL LETTER SIGMA..GREEK LUNATE EPSILON SYMBOL
+ {0x03F6, 0x03F6, prAL, gcSm}, // GREEK REVERSED LUNATE EPSILON SYMBOL
+ {0x03F7, 0x03FF, prAL, gcLC}, // [9] GREEK CAPITAL LETTER SHO..GREEK CAPITAL REVERSED DOTTED LUNATE SIGMA SYMBOL
+ {0x0400, 0x0481, prAL, gcLC}, // [130] CYRILLIC CAPITAL LETTER IE WITH GRAVE..CYRILLIC SMALL LETTER KOPPA
+ {0x0482, 0x0482, prAL, gcSo}, // CYRILLIC THOUSANDS SIGN
+ {0x0483, 0x0487, prCM, gcMn}, // [5] COMBINING CYRILLIC TITLO..COMBINING CYRILLIC POKRYTIE
+ {0x0488, 0x0489, prCM, gcMe}, // [2] COMBINING CYRILLIC HUNDRED THOUSANDS SIGN..COMBINING CYRILLIC MILLIONS SIGN
+ {0x048A, 0x04FF, prAL, gcLC}, // [118] CYRILLIC CAPITAL LETTER SHORT I WITH TAIL..CYRILLIC SMALL LETTER HA WITH STROKE
+ {0x0500, 0x052F, prAL, gcLC}, // [48] CYRILLIC CAPITAL LETTER KOMI DE..CYRILLIC SMALL LETTER EL WITH DESCENDER
+ {0x0531, 0x0556, prAL, gcLu}, // [38] ARMENIAN CAPITAL LETTER AYB..ARMENIAN CAPITAL LETTER FEH
+ {0x0559, 0x0559, prAL, gcLm}, // ARMENIAN MODIFIER LETTER LEFT HALF RING
+ {0x055A, 0x055F, prAL, gcPo}, // [6] ARMENIAN APOSTROPHE..ARMENIAN ABBREVIATION MARK
+ {0x0560, 0x0588, prAL, gcLl}, // [41] ARMENIAN SMALL LETTER TURNED AYB..ARMENIAN SMALL LETTER YI WITH STROKE
+ {0x0589, 0x0589, prIS, gcPo}, // ARMENIAN FULL STOP
+ {0x058A, 0x058A, prBA, gcPd}, // ARMENIAN HYPHEN
+ {0x058D, 0x058E, prAL, gcSo}, // [2] RIGHT-FACING ARMENIAN ETERNITY SIGN..LEFT-FACING ARMENIAN ETERNITY SIGN
+ {0x058F, 0x058F, prPR, gcSc}, // ARMENIAN DRAM SIGN
+ {0x0591, 0x05BD, prCM, gcMn}, // [45] HEBREW ACCENT ETNAHTA..HEBREW POINT METEG
+ {0x05BE, 0x05BE, prBA, gcPd}, // HEBREW PUNCTUATION MAQAF
+ {0x05BF, 0x05BF, prCM, gcMn}, // HEBREW POINT RAFE
+ {0x05C0, 0x05C0, prAL, gcPo}, // HEBREW PUNCTUATION PASEQ
+ {0x05C1, 0x05C2, prCM, gcMn}, // [2] HEBREW POINT SHIN DOT..HEBREW POINT SIN DOT
+ {0x05C3, 0x05C3, prAL, gcPo}, // HEBREW PUNCTUATION SOF PASUQ
+ {0x05C4, 0x05C5, prCM, gcMn}, // [2] HEBREW MARK UPPER DOT..HEBREW MARK LOWER DOT
+ {0x05C6, 0x05C6, prEX, gcPo}, // HEBREW PUNCTUATION NUN HAFUKHA
+ {0x05C7, 0x05C7, prCM, gcMn}, // HEBREW POINT QAMATS QATAN
+ {0x05D0, 0x05EA, prHL, gcLo}, // [27] HEBREW LETTER ALEF..HEBREW LETTER TAV
+ {0x05EF, 0x05F2, prHL, gcLo}, // [4] HEBREW YOD TRIANGLE..HEBREW LIGATURE YIDDISH DOUBLE YOD
+ {0x05F3, 0x05F4, prAL, gcPo}, // [2] HEBREW PUNCTUATION GERESH..HEBREW PUNCTUATION GERSHAYIM
+ {0x0600, 0x0605, prAL, gcCf}, // [6] ARABIC NUMBER SIGN..ARABIC NUMBER MARK ABOVE
+ {0x0606, 0x0608, prAL, gcSm}, // [3] ARABIC-INDIC CUBE ROOT..ARABIC RAY
+ {0x0609, 0x060A, prPO, gcPo}, // [2] ARABIC-INDIC PER MILLE SIGN..ARABIC-INDIC PER TEN THOUSAND SIGN
+ {0x060B, 0x060B, prPO, gcSc}, // AFGHANI SIGN
+ {0x060C, 0x060D, prIS, gcPo}, // [2] ARABIC COMMA..ARABIC DATE SEPARATOR
+ {0x060E, 0x060F, prAL, gcSo}, // [2] ARABIC POETIC VERSE SIGN..ARABIC SIGN MISRA
+ {0x0610, 0x061A, prCM, gcMn}, // [11] ARABIC SIGN SALLALLAHOU ALAYHE WASSALLAM..ARABIC SMALL KASRA
+ {0x061B, 0x061B, prEX, gcPo}, // ARABIC SEMICOLON
+ {0x061C, 0x061C, prCM, gcCf}, // ARABIC LETTER MARK
+ {0x061D, 0x061F, prEX, gcPo}, // [3] ARABIC END OF TEXT MARK..ARABIC QUESTION MARK
+ {0x0620, 0x063F, prAL, gcLo}, // [32] ARABIC LETTER KASHMIRI YEH..ARABIC LETTER FARSI YEH WITH THREE DOTS ABOVE
+ {0x0640, 0x0640, prAL, gcLm}, // ARABIC TATWEEL
+ {0x0641, 0x064A, prAL, gcLo}, // [10] ARABIC LETTER FEH..ARABIC LETTER YEH
+ {0x064B, 0x065F, prCM, gcMn}, // [21] ARABIC FATHATAN..ARABIC WAVY HAMZA BELOW
+ {0x0660, 0x0669, prNU, gcNd}, // [10] ARABIC-INDIC DIGIT ZERO..ARABIC-INDIC DIGIT NINE
+ {0x066A, 0x066A, prPO, gcPo}, // ARABIC PERCENT SIGN
+ {0x066B, 0x066C, prNU, gcPo}, // [2] ARABIC DECIMAL SEPARATOR..ARABIC THOUSANDS SEPARATOR
+ {0x066D, 0x066D, prAL, gcPo}, // ARABIC FIVE POINTED STAR
+ {0x066E, 0x066F, prAL, gcLo}, // [2] ARABIC LETTER DOTLESS BEH..ARABIC LETTER DOTLESS QAF
+ {0x0670, 0x0670, prCM, gcMn}, // ARABIC LETTER SUPERSCRIPT ALEF
+ {0x0671, 0x06D3, prAL, gcLo}, // [99] ARABIC LETTER ALEF WASLA..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE
+ {0x06D4, 0x06D4, prEX, gcPo}, // ARABIC FULL STOP
+ {0x06D5, 0x06D5, prAL, gcLo}, // ARABIC LETTER AE
+ {0x06D6, 0x06DC, prCM, gcMn}, // [7] ARABIC SMALL HIGH LIGATURE SAD WITH LAM WITH ALEF MAKSURA..ARABIC SMALL HIGH SEEN
+ {0x06DD, 0x06DD, prAL, gcCf}, // ARABIC END OF AYAH
+ {0x06DE, 0x06DE, prAL, gcSo}, // ARABIC START OF RUB EL HIZB
+ {0x06DF, 0x06E4, prCM, gcMn}, // [6] ARABIC SMALL HIGH ROUNDED ZERO..ARABIC SMALL HIGH MADDA
+ {0x06E5, 0x06E6, prAL, gcLm}, // [2] ARABIC SMALL WAW..ARABIC SMALL YEH
+ {0x06E7, 0x06E8, prCM, gcMn}, // [2] ARABIC SMALL HIGH YEH..ARABIC SMALL HIGH NOON
+ {0x06E9, 0x06E9, prAL, gcSo}, // ARABIC PLACE OF SAJDAH
+ {0x06EA, 0x06ED, prCM, gcMn}, // [4] ARABIC EMPTY CENTRE LOW STOP..ARABIC SMALL LOW MEEM
+ {0x06EE, 0x06EF, prAL, gcLo}, // [2] ARABIC LETTER DAL WITH INVERTED V..ARABIC LETTER REH WITH INVERTED V
+ {0x06F0, 0x06F9, prNU, gcNd}, // [10] EXTENDED ARABIC-INDIC DIGIT ZERO..EXTENDED ARABIC-INDIC DIGIT NINE
+ {0x06FA, 0x06FC, prAL, gcLo}, // [3] ARABIC LETTER SHEEN WITH DOT BELOW..ARABIC LETTER GHAIN WITH DOT BELOW
+ {0x06FD, 0x06FE, prAL, gcSo}, // [2] ARABIC SIGN SINDHI AMPERSAND..ARABIC SIGN SINDHI POSTPOSITION MEN
+ {0x06FF, 0x06FF, prAL, gcLo}, // ARABIC LETTER HEH WITH INVERTED V
+ {0x0700, 0x070D, prAL, gcPo}, // [14] SYRIAC END OF PARAGRAPH..SYRIAC HARKLEAN ASTERISCUS
+ {0x070F, 0x070F, prAL, gcCf}, // SYRIAC ABBREVIATION MARK
+ {0x0710, 0x0710, prAL, gcLo}, // SYRIAC LETTER ALAPH
+ {0x0711, 0x0711, prCM, gcMn}, // SYRIAC LETTER SUPERSCRIPT ALAPH
+ {0x0712, 0x072F, prAL, gcLo}, // [30] SYRIAC LETTER BETH..SYRIAC LETTER PERSIAN DHALATH
+ {0x0730, 0x074A, prCM, gcMn}, // [27] SYRIAC PTHAHA ABOVE..SYRIAC BARREKH
+ {0x074D, 0x074F, prAL, gcLo}, // [3] SYRIAC LETTER SOGDIAN ZHAIN..SYRIAC LETTER SOGDIAN FE
+ {0x0750, 0x077F, prAL, gcLo}, // [48] ARABIC LETTER BEH WITH THREE DOTS HORIZONTALLY BELOW..ARABIC LETTER KAF WITH TWO DOTS ABOVE
+ {0x0780, 0x07A5, prAL, gcLo}, // [38] THAANA LETTER HAA..THAANA LETTER WAAVU
+ {0x07A6, 0x07B0, prCM, gcMn}, // [11] THAANA ABAFILI..THAANA SUKUN
+ {0x07B1, 0x07B1, prAL, gcLo}, // THAANA LETTER NAA
+ {0x07C0, 0x07C9, prNU, gcNd}, // [10] NKO DIGIT ZERO..NKO DIGIT NINE
+ {0x07CA, 0x07EA, prAL, gcLo}, // [33] NKO LETTER A..NKO LETTER JONA RA
+ {0x07EB, 0x07F3, prCM, gcMn}, // [9] NKO COMBINING SHORT HIGH TONE..NKO COMBINING DOUBLE DOT ABOVE
+ {0x07F4, 0x07F5, prAL, gcLm}, // [2] NKO HIGH TONE APOSTROPHE..NKO LOW TONE APOSTROPHE
+ {0x07F6, 0x07F6, prAL, gcSo}, // NKO SYMBOL OO DENNEN
+ {0x07F7, 0x07F7, prAL, gcPo}, // NKO SYMBOL GBAKURUNEN
+ {0x07F8, 0x07F8, prIS, gcPo}, // NKO COMMA
+ {0x07F9, 0x07F9, prEX, gcPo}, // NKO EXCLAMATION MARK
+ {0x07FA, 0x07FA, prAL, gcLm}, // NKO LAJANYALAN
+ {0x07FD, 0x07FD, prCM, gcMn}, // NKO DANTAYALAN
+ {0x07FE, 0x07FF, prPR, gcSc}, // [2] NKO DOROME SIGN..NKO TAMAN SIGN
+ {0x0800, 0x0815, prAL, gcLo}, // [22] SAMARITAN LETTER ALAF..SAMARITAN LETTER TAAF
+ {0x0816, 0x0819, prCM, gcMn}, // [4] SAMARITAN MARK IN..SAMARITAN MARK DAGESH
+ {0x081A, 0x081A, prAL, gcLm}, // SAMARITAN MODIFIER LETTER EPENTHETIC YUT
+ {0x081B, 0x0823, prCM, gcMn}, // [9] SAMARITAN MARK EPENTHETIC YUT..SAMARITAN VOWEL SIGN A
+ {0x0824, 0x0824, prAL, gcLm}, // SAMARITAN MODIFIER LETTER SHORT A
+ {0x0825, 0x0827, prCM, gcMn}, // [3] SAMARITAN VOWEL SIGN SHORT A..SAMARITAN VOWEL SIGN U
+ {0x0828, 0x0828, prAL, gcLm}, // SAMARITAN MODIFIER LETTER I
+ {0x0829, 0x082D, prCM, gcMn}, // [5] SAMARITAN VOWEL SIGN LONG I..SAMARITAN MARK NEQUDAA
+ {0x0830, 0x083E, prAL, gcPo}, // [15] SAMARITAN PUNCTUATION NEQUDAA..SAMARITAN PUNCTUATION ANNAAU
+ {0x0840, 0x0858, prAL, gcLo}, // [25] MANDAIC LETTER HALQA..MANDAIC LETTER AIN
+ {0x0859, 0x085B, prCM, gcMn}, // [3] MANDAIC AFFRICATION MARK..MANDAIC GEMINATION MARK
+ {0x085E, 0x085E, prAL, gcPo}, // MANDAIC PUNCTUATION
+ {0x0860, 0x086A, prAL, gcLo}, // [11] SYRIAC LETTER MALAYALAM NGA..SYRIAC LETTER MALAYALAM SSA
+ {0x0870, 0x0887, prAL, gcLo}, // [24] ARABIC LETTER ALEF WITH ATTACHED FATHA..ARABIC BASELINE ROUND DOT
+ {0x0888, 0x0888, prAL, gcSk}, // ARABIC RAISED ROUND DOT
+ {0x0889, 0x088E, prAL, gcLo}, // [6] ARABIC LETTER NOON WITH INVERTED SMALL V..ARABIC VERTICAL TAIL
+ {0x0890, 0x0891, prAL, gcCf}, // [2] ARABIC POUND MARK ABOVE..ARABIC PIASTRE MARK ABOVE
+ {0x0898, 0x089F, prCM, gcMn}, // [8] ARABIC SMALL HIGH WORD AL-JUZ..ARABIC HALF MADDA OVER MADDA
+ {0x08A0, 0x08C8, prAL, gcLo}, // [41] ARABIC LETTER BEH WITH SMALL V BELOW..ARABIC LETTER GRAF
+ {0x08C9, 0x08C9, prAL, gcLm}, // ARABIC SMALL FARSI YEH
+ {0x08CA, 0x08E1, prCM, gcMn}, // [24] ARABIC SMALL HIGH FARSI YEH..ARABIC SMALL HIGH SIGN SAFHA
+ {0x08E2, 0x08E2, prAL, gcCf}, // ARABIC DISPUTED END OF AYAH
+ {0x08E3, 0x08FF, prCM, gcMn}, // [29] ARABIC TURNED DAMMA BELOW..ARABIC MARK SIDEWAYS NOON GHUNNA
+ {0x0900, 0x0902, prCM, gcMn}, // [3] DEVANAGARI SIGN INVERTED CANDRABINDU..DEVANAGARI SIGN ANUSVARA
+ {0x0903, 0x0903, prCM, gcMc}, // DEVANAGARI SIGN VISARGA
+ {0x0904, 0x0939, prAL, gcLo}, // [54] DEVANAGARI LETTER SHORT A..DEVANAGARI LETTER HA
+ {0x093A, 0x093A, prCM, gcMn}, // DEVANAGARI VOWEL SIGN OE
+ {0x093B, 0x093B, prCM, gcMc}, // DEVANAGARI VOWEL SIGN OOE
+ {0x093C, 0x093C, prCM, gcMn}, // DEVANAGARI SIGN NUKTA
+ {0x093D, 0x093D, prAL, gcLo}, // DEVANAGARI SIGN AVAGRAHA
+ {0x093E, 0x0940, prCM, gcMc}, // [3] DEVANAGARI VOWEL SIGN AA..DEVANAGARI VOWEL SIGN II
+ {0x0941, 0x0948, prCM, gcMn}, // [8] DEVANAGARI VOWEL SIGN U..DEVANAGARI VOWEL SIGN AI
+ {0x0949, 0x094C, prCM, gcMc}, // [4] DEVANAGARI VOWEL SIGN CANDRA O..DEVANAGARI VOWEL SIGN AU
+ {0x094D, 0x094D, prCM, gcMn}, // DEVANAGARI SIGN VIRAMA
+ {0x094E, 0x094F, prCM, gcMc}, // [2] DEVANAGARI VOWEL SIGN PRISHTHAMATRA E..DEVANAGARI VOWEL SIGN AW
+ {0x0950, 0x0950, prAL, gcLo}, // DEVANAGARI OM
+ {0x0951, 0x0957, prCM, gcMn}, // [7] DEVANAGARI STRESS SIGN UDATTA..DEVANAGARI VOWEL SIGN UUE
+ {0x0958, 0x0961, prAL, gcLo}, // [10] DEVANAGARI LETTER QA..DEVANAGARI LETTER VOCALIC LL
+ {0x0962, 0x0963, prCM, gcMn}, // [2] DEVANAGARI VOWEL SIGN VOCALIC L..DEVANAGARI VOWEL SIGN VOCALIC LL
+ {0x0964, 0x0965, prBA, gcPo}, // [2] DEVANAGARI DANDA..DEVANAGARI DOUBLE DANDA
+ {0x0966, 0x096F, prNU, gcNd}, // [10] DEVANAGARI DIGIT ZERO..DEVANAGARI DIGIT NINE
+ {0x0970, 0x0970, prAL, gcPo}, // DEVANAGARI ABBREVIATION SIGN
+ {0x0971, 0x0971, prAL, gcLm}, // DEVANAGARI SIGN HIGH SPACING DOT
+ {0x0972, 0x097F, prAL, gcLo}, // [14] DEVANAGARI LETTER CANDRA A..DEVANAGARI LETTER BBA
+ {0x0980, 0x0980, prAL, gcLo}, // BENGALI ANJI
+ {0x0981, 0x0981, prCM, gcMn}, // BENGALI SIGN CANDRABINDU
+ {0x0982, 0x0983, prCM, gcMc}, // [2] BENGALI SIGN ANUSVARA..BENGALI SIGN VISARGA
+ {0x0985, 0x098C, prAL, gcLo}, // [8] BENGALI LETTER A..BENGALI LETTER VOCALIC L
+ {0x098F, 0x0990, prAL, gcLo}, // [2] BENGALI LETTER E..BENGALI LETTER AI
+ {0x0993, 0x09A8, prAL, gcLo}, // [22] BENGALI LETTER O..BENGALI LETTER NA
+ {0x09AA, 0x09B0, prAL, gcLo}, // [7] BENGALI LETTER PA..BENGALI LETTER RA
+ {0x09B2, 0x09B2, prAL, gcLo}, // BENGALI LETTER LA
+ {0x09B6, 0x09B9, prAL, gcLo}, // [4] BENGALI LETTER SHA..BENGALI LETTER HA
+ {0x09BC, 0x09BC, prCM, gcMn}, // BENGALI SIGN NUKTA
+ {0x09BD, 0x09BD, prAL, gcLo}, // BENGALI SIGN AVAGRAHA
+ {0x09BE, 0x09C0, prCM, gcMc}, // [3] BENGALI VOWEL SIGN AA..BENGALI VOWEL SIGN II
+ {0x09C1, 0x09C4, prCM, gcMn}, // [4] BENGALI VOWEL SIGN U..BENGALI VOWEL SIGN VOCALIC RR
+ {0x09C7, 0x09C8, prCM, gcMc}, // [2] BENGALI VOWEL SIGN E..BENGALI VOWEL SIGN AI
+ {0x09CB, 0x09CC, prCM, gcMc}, // [2] BENGALI VOWEL SIGN O..BENGALI VOWEL SIGN AU
+ {0x09CD, 0x09CD, prCM, gcMn}, // BENGALI SIGN VIRAMA
+ {0x09CE, 0x09CE, prAL, gcLo}, // BENGALI LETTER KHANDA TA
+ {0x09D7, 0x09D7, prCM, gcMc}, // BENGALI AU LENGTH MARK
+ {0x09DC, 0x09DD, prAL, gcLo}, // [2] BENGALI LETTER RRA..BENGALI LETTER RHA
+ {0x09DF, 0x09E1, prAL, gcLo}, // [3] BENGALI LETTER YYA..BENGALI LETTER VOCALIC LL
+ {0x09E2, 0x09E3, prCM, gcMn}, // [2] BENGALI VOWEL SIGN VOCALIC L..BENGALI VOWEL SIGN VOCALIC LL
+ {0x09E6, 0x09EF, prNU, gcNd}, // [10] BENGALI DIGIT ZERO..BENGALI DIGIT NINE
+ {0x09F0, 0x09F1, prAL, gcLo}, // [2] BENGALI LETTER RA WITH MIDDLE DIAGONAL..BENGALI LETTER RA WITH LOWER DIAGONAL
+ {0x09F2, 0x09F3, prPO, gcSc}, // [2] BENGALI RUPEE MARK..BENGALI RUPEE SIGN
+ {0x09F4, 0x09F8, prAL, gcNo}, // [5] BENGALI CURRENCY NUMERATOR ONE..BENGALI CURRENCY NUMERATOR ONE LESS THAN THE DENOMINATOR
+ {0x09F9, 0x09F9, prPO, gcNo}, // BENGALI CURRENCY DENOMINATOR SIXTEEN
+ {0x09FA, 0x09FA, prAL, gcSo}, // BENGALI ISSHAR
+ {0x09FB, 0x09FB, prPR, gcSc}, // BENGALI GANDA MARK
+ {0x09FC, 0x09FC, prAL, gcLo}, // BENGALI LETTER VEDIC ANUSVARA
+ {0x09FD, 0x09FD, prAL, gcPo}, // BENGALI ABBREVIATION SIGN
+ {0x09FE, 0x09FE, prCM, gcMn}, // BENGALI SANDHI MARK
+ {0x0A01, 0x0A02, prCM, gcMn}, // [2] GURMUKHI SIGN ADAK BINDI..GURMUKHI SIGN BINDI
+ {0x0A03, 0x0A03, prCM, gcMc}, // GURMUKHI SIGN VISARGA
+ {0x0A05, 0x0A0A, prAL, gcLo}, // [6] GURMUKHI LETTER A..GURMUKHI LETTER UU
+ {0x0A0F, 0x0A10, prAL, gcLo}, // [2] GURMUKHI LETTER EE..GURMUKHI LETTER AI
+ {0x0A13, 0x0A28, prAL, gcLo}, // [22] GURMUKHI LETTER OO..GURMUKHI LETTER NA
+ {0x0A2A, 0x0A30, prAL, gcLo}, // [7] GURMUKHI LETTER PA..GURMUKHI LETTER RA
+ {0x0A32, 0x0A33, prAL, gcLo}, // [2] GURMUKHI LETTER LA..GURMUKHI LETTER LLA
+ {0x0A35, 0x0A36, prAL, gcLo}, // [2] GURMUKHI LETTER VA..GURMUKHI LETTER SHA
+ {0x0A38, 0x0A39, prAL, gcLo}, // [2] GURMUKHI LETTER SA..GURMUKHI LETTER HA
+ {0x0A3C, 0x0A3C, prCM, gcMn}, // GURMUKHI SIGN NUKTA
+ {0x0A3E, 0x0A40, prCM, gcMc}, // [3] GURMUKHI VOWEL SIGN AA..GURMUKHI VOWEL SIGN II
+ {0x0A41, 0x0A42, prCM, gcMn}, // [2] GURMUKHI VOWEL SIGN U..GURMUKHI VOWEL SIGN UU
+ {0x0A47, 0x0A48, prCM, gcMn}, // [2] GURMUKHI VOWEL SIGN EE..GURMUKHI VOWEL SIGN AI
+ {0x0A4B, 0x0A4D, prCM, gcMn}, // [3] GURMUKHI VOWEL SIGN OO..GURMUKHI SIGN VIRAMA
+ {0x0A51, 0x0A51, prCM, gcMn}, // GURMUKHI SIGN UDAAT
+ {0x0A59, 0x0A5C, prAL, gcLo}, // [4] GURMUKHI LETTER KHHA..GURMUKHI LETTER RRA
+ {0x0A5E, 0x0A5E, prAL, gcLo}, // GURMUKHI LETTER FA
+ {0x0A66, 0x0A6F, prNU, gcNd}, // [10] GURMUKHI DIGIT ZERO..GURMUKHI DIGIT NINE
+ {0x0A70, 0x0A71, prCM, gcMn}, // [2] GURMUKHI TIPPI..GURMUKHI ADDAK
+ {0x0A72, 0x0A74, prAL, gcLo}, // [3] GURMUKHI IRI..GURMUKHI EK ONKAR
+ {0x0A75, 0x0A75, prCM, gcMn}, // GURMUKHI SIGN YAKASH
+ {0x0A76, 0x0A76, prAL, gcPo}, // GURMUKHI ABBREVIATION SIGN
+ {0x0A81, 0x0A82, prCM, gcMn}, // [2] GUJARATI SIGN CANDRABINDU..GUJARATI SIGN ANUSVARA
+ {0x0A83, 0x0A83, prCM, gcMc}, // GUJARATI SIGN VISARGA
+ {0x0A85, 0x0A8D, prAL, gcLo}, // [9] GUJARATI LETTER A..GUJARATI VOWEL CANDRA E
+ {0x0A8F, 0x0A91, prAL, gcLo}, // [3] GUJARATI LETTER E..GUJARATI VOWEL CANDRA O
+ {0x0A93, 0x0AA8, prAL, gcLo}, // [22] GUJARATI LETTER O..GUJARATI LETTER NA
+ {0x0AAA, 0x0AB0, prAL, gcLo}, // [7] GUJARATI LETTER PA..GUJARATI LETTER RA
+ {0x0AB2, 0x0AB3, prAL, gcLo}, // [2] GUJARATI LETTER LA..GUJARATI LETTER LLA
+ {0x0AB5, 0x0AB9, prAL, gcLo}, // [5] GUJARATI LETTER VA..GUJARATI LETTER HA
+ {0x0ABC, 0x0ABC, prCM, gcMn}, // GUJARATI SIGN NUKTA
+ {0x0ABD, 0x0ABD, prAL, gcLo}, // GUJARATI SIGN AVAGRAHA
+ {0x0ABE, 0x0AC0, prCM, gcMc}, // [3] GUJARATI VOWEL SIGN AA..GUJARATI VOWEL SIGN II
+ {0x0AC1, 0x0AC5, prCM, gcMn}, // [5] GUJARATI VOWEL SIGN U..GUJARATI VOWEL SIGN CANDRA E
+ {0x0AC7, 0x0AC8, prCM, gcMn}, // [2] GUJARATI VOWEL SIGN E..GUJARATI VOWEL SIGN AI
+ {0x0AC9, 0x0AC9, prCM, gcMc}, // GUJARATI VOWEL SIGN CANDRA O
+ {0x0ACB, 0x0ACC, prCM, gcMc}, // [2] GUJARATI VOWEL SIGN O..GUJARATI VOWEL SIGN AU
+ {0x0ACD, 0x0ACD, prCM, gcMn}, // GUJARATI SIGN VIRAMA
+ {0x0AD0, 0x0AD0, prAL, gcLo}, // GUJARATI OM
+ {0x0AE0, 0x0AE1, prAL, gcLo}, // [2] GUJARATI LETTER VOCALIC RR..GUJARATI LETTER VOCALIC LL
+ {0x0AE2, 0x0AE3, prCM, gcMn}, // [2] GUJARATI VOWEL SIGN VOCALIC L..GUJARATI VOWEL SIGN VOCALIC LL
+ {0x0AE6, 0x0AEF, prNU, gcNd}, // [10] GUJARATI DIGIT ZERO..GUJARATI DIGIT NINE
+ {0x0AF0, 0x0AF0, prAL, gcPo}, // GUJARATI ABBREVIATION SIGN
+ {0x0AF1, 0x0AF1, prPR, gcSc}, // GUJARATI RUPEE SIGN
+ {0x0AF9, 0x0AF9, prAL, gcLo}, // GUJARATI LETTER ZHA
+ {0x0AFA, 0x0AFF, prCM, gcMn}, // [6] GUJARATI SIGN SUKUN..GUJARATI SIGN TWO-CIRCLE NUKTA ABOVE
+ {0x0B01, 0x0B01, prCM, gcMn}, // ORIYA SIGN CANDRABINDU
+ {0x0B02, 0x0B03, prCM, gcMc}, // [2] ORIYA SIGN ANUSVARA..ORIYA SIGN VISARGA
+ {0x0B05, 0x0B0C, prAL, gcLo}, // [8] ORIYA LETTER A..ORIYA LETTER VOCALIC L
+ {0x0B0F, 0x0B10, prAL, gcLo}, // [2] ORIYA LETTER E..ORIYA LETTER AI
+ {0x0B13, 0x0B28, prAL, gcLo}, // [22] ORIYA LETTER O..ORIYA LETTER NA
+ {0x0B2A, 0x0B30, prAL, gcLo}, // [7] ORIYA LETTER PA..ORIYA LETTER RA
+ {0x0B32, 0x0B33, prAL, gcLo}, // [2] ORIYA LETTER LA..ORIYA LETTER LLA
+ {0x0B35, 0x0B39, prAL, gcLo}, // [5] ORIYA LETTER VA..ORIYA LETTER HA
+ {0x0B3C, 0x0B3C, prCM, gcMn}, // ORIYA SIGN NUKTA
+ {0x0B3D, 0x0B3D, prAL, gcLo}, // ORIYA SIGN AVAGRAHA
+ {0x0B3E, 0x0B3E, prCM, gcMc}, // ORIYA VOWEL SIGN AA
+ {0x0B3F, 0x0B3F, prCM, gcMn}, // ORIYA VOWEL SIGN I
+ {0x0B40, 0x0B40, prCM, gcMc}, // ORIYA VOWEL SIGN II
+ {0x0B41, 0x0B44, prCM, gcMn}, // [4] ORIYA VOWEL SIGN U..ORIYA VOWEL SIGN VOCALIC RR
+ {0x0B47, 0x0B48, prCM, gcMc}, // [2] ORIYA VOWEL SIGN E..ORIYA VOWEL SIGN AI
+ {0x0B4B, 0x0B4C, prCM, gcMc}, // [2] ORIYA VOWEL SIGN O..ORIYA VOWEL SIGN AU
+ {0x0B4D, 0x0B4D, prCM, gcMn}, // ORIYA SIGN VIRAMA
+ {0x0B55, 0x0B56, prCM, gcMn}, // [2] ORIYA SIGN OVERLINE..ORIYA AI LENGTH MARK
+ {0x0B57, 0x0B57, prCM, gcMc}, // ORIYA AU LENGTH MARK
+ {0x0B5C, 0x0B5D, prAL, gcLo}, // [2] ORIYA LETTER RRA..ORIYA LETTER RHA
+ {0x0B5F, 0x0B61, prAL, gcLo}, // [3] ORIYA LETTER YYA..ORIYA LETTER VOCALIC LL
+ {0x0B62, 0x0B63, prCM, gcMn}, // [2] ORIYA VOWEL SIGN VOCALIC L..ORIYA VOWEL SIGN VOCALIC LL
+ {0x0B66, 0x0B6F, prNU, gcNd}, // [10] ORIYA DIGIT ZERO..ORIYA DIGIT NINE
+ {0x0B70, 0x0B70, prAL, gcSo}, // ORIYA ISSHAR
+ {0x0B71, 0x0B71, prAL, gcLo}, // ORIYA LETTER WA
+ {0x0B72, 0x0B77, prAL, gcNo}, // [6] ORIYA FRACTION ONE QUARTER..ORIYA FRACTION THREE SIXTEENTHS
+ {0x0B82, 0x0B82, prCM, gcMn}, // TAMIL SIGN ANUSVARA
+ {0x0B83, 0x0B83, prAL, gcLo}, // TAMIL SIGN VISARGA
+ {0x0B85, 0x0B8A, prAL, gcLo}, // [6] TAMIL LETTER A..TAMIL LETTER UU
+ {0x0B8E, 0x0B90, prAL, gcLo}, // [3] TAMIL LETTER E..TAMIL LETTER AI
+ {0x0B92, 0x0B95, prAL, gcLo}, // [4] TAMIL LETTER O..TAMIL LETTER KA
+ {0x0B99, 0x0B9A, prAL, gcLo}, // [2] TAMIL LETTER NGA..TAMIL LETTER CA
+ {0x0B9C, 0x0B9C, prAL, gcLo}, // TAMIL LETTER JA
+ {0x0B9E, 0x0B9F, prAL, gcLo}, // [2] TAMIL LETTER NYA..TAMIL LETTER TTA
+ {0x0BA3, 0x0BA4, prAL, gcLo}, // [2] TAMIL LETTER NNA..TAMIL LETTER TA
+ {0x0BA8, 0x0BAA, prAL, gcLo}, // [3] TAMIL LETTER NA..TAMIL LETTER PA
+ {0x0BAE, 0x0BB9, prAL, gcLo}, // [12] TAMIL LETTER MA..TAMIL LETTER HA
+ {0x0BBE, 0x0BBF, prCM, gcMc}, // [2] TAMIL VOWEL SIGN AA..TAMIL VOWEL SIGN I
+ {0x0BC0, 0x0BC0, prCM, gcMn}, // TAMIL VOWEL SIGN II
+ {0x0BC1, 0x0BC2, prCM, gcMc}, // [2] TAMIL VOWEL SIGN U..TAMIL VOWEL SIGN UU
+ {0x0BC6, 0x0BC8, prCM, gcMc}, // [3] TAMIL VOWEL SIGN E..TAMIL VOWEL SIGN AI
+ {0x0BCA, 0x0BCC, prCM, gcMc}, // [3] TAMIL VOWEL SIGN O..TAMIL VOWEL SIGN AU
+ {0x0BCD, 0x0BCD, prCM, gcMn}, // TAMIL SIGN VIRAMA
+ {0x0BD0, 0x0BD0, prAL, gcLo}, // TAMIL OM
+ {0x0BD7, 0x0BD7, prCM, gcMc}, // TAMIL AU LENGTH MARK
+ {0x0BE6, 0x0BEF, prNU, gcNd}, // [10] TAMIL DIGIT ZERO..TAMIL DIGIT NINE
+ {0x0BF0, 0x0BF2, prAL, gcNo}, // [3] TAMIL NUMBER TEN..TAMIL NUMBER ONE THOUSAND
+ {0x0BF3, 0x0BF8, prAL, gcSo}, // [6] TAMIL DAY SIGN..TAMIL AS ABOVE SIGN
+ {0x0BF9, 0x0BF9, prPR, gcSc}, // TAMIL RUPEE SIGN
+ {0x0BFA, 0x0BFA, prAL, gcSo}, // TAMIL NUMBER SIGN
+ {0x0C00, 0x0C00, prCM, gcMn}, // TELUGU SIGN COMBINING CANDRABINDU ABOVE
+ {0x0C01, 0x0C03, prCM, gcMc}, // [3] TELUGU SIGN CANDRABINDU..TELUGU SIGN VISARGA
+ {0x0C04, 0x0C04, prCM, gcMn}, // TELUGU SIGN COMBINING ANUSVARA ABOVE
+ {0x0C05, 0x0C0C, prAL, gcLo}, // [8] TELUGU LETTER A..TELUGU LETTER VOCALIC L
+ {0x0C0E, 0x0C10, prAL, gcLo}, // [3] TELUGU LETTER E..TELUGU LETTER AI
+ {0x0C12, 0x0C28, prAL, gcLo}, // [23] TELUGU LETTER O..TELUGU LETTER NA
+ {0x0C2A, 0x0C39, prAL, gcLo}, // [16] TELUGU LETTER PA..TELUGU LETTER HA
+ {0x0C3C, 0x0C3C, prCM, gcMn}, // TELUGU SIGN NUKTA
+ {0x0C3D, 0x0C3D, prAL, gcLo}, // TELUGU SIGN AVAGRAHA
+ {0x0C3E, 0x0C40, prCM, gcMn}, // [3] TELUGU VOWEL SIGN AA..TELUGU VOWEL SIGN II
+ {0x0C41, 0x0C44, prCM, gcMc}, // [4] TELUGU VOWEL SIGN U..TELUGU VOWEL SIGN VOCALIC RR
+ {0x0C46, 0x0C48, prCM, gcMn}, // [3] TELUGU VOWEL SIGN E..TELUGU VOWEL SIGN AI
+ {0x0C4A, 0x0C4D, prCM, gcMn}, // [4] TELUGU VOWEL SIGN O..TELUGU SIGN VIRAMA
+ {0x0C55, 0x0C56, prCM, gcMn}, // [2] TELUGU LENGTH MARK..TELUGU AI LENGTH MARK
+ {0x0C58, 0x0C5A, prAL, gcLo}, // [3] TELUGU LETTER TSA..TELUGU LETTER RRRA
+ {0x0C5D, 0x0C5D, prAL, gcLo}, // TELUGU LETTER NAKAARA POLLU
+ {0x0C60, 0x0C61, prAL, gcLo}, // [2] TELUGU LETTER VOCALIC RR..TELUGU LETTER VOCALIC LL
+ {0x0C62, 0x0C63, prCM, gcMn}, // [2] TELUGU VOWEL SIGN VOCALIC L..TELUGU VOWEL SIGN VOCALIC LL
+ {0x0C66, 0x0C6F, prNU, gcNd}, // [10] TELUGU DIGIT ZERO..TELUGU DIGIT NINE
+ {0x0C77, 0x0C77, prBB, gcPo}, // TELUGU SIGN SIDDHAM
+ {0x0C78, 0x0C7E, prAL, gcNo}, // [7] TELUGU FRACTION DIGIT ZERO FOR ODD POWERS OF FOUR..TELUGU FRACTION DIGIT THREE FOR EVEN POWERS OF FOUR
+ {0x0C7F, 0x0C7F, prAL, gcSo}, // TELUGU SIGN TUUMU
+ {0x0C80, 0x0C80, prAL, gcLo}, // KANNADA SIGN SPACING CANDRABINDU
+ {0x0C81, 0x0C81, prCM, gcMn}, // KANNADA SIGN CANDRABINDU
+ {0x0C82, 0x0C83, prCM, gcMc}, // [2] KANNADA SIGN ANUSVARA..KANNADA SIGN VISARGA
+ {0x0C84, 0x0C84, prBB, gcPo}, // KANNADA SIGN SIDDHAM
+ {0x0C85, 0x0C8C, prAL, gcLo}, // [8] KANNADA LETTER A..KANNADA LETTER VOCALIC L
+ {0x0C8E, 0x0C90, prAL, gcLo}, // [3] KANNADA LETTER E..KANNADA LETTER AI
+ {0x0C92, 0x0CA8, prAL, gcLo}, // [23] KANNADA LETTER O..KANNADA LETTER NA
+ {0x0CAA, 0x0CB3, prAL, gcLo}, // [10] KANNADA LETTER PA..KANNADA LETTER LLA
+ {0x0CB5, 0x0CB9, prAL, gcLo}, // [5] KANNADA LETTER VA..KANNADA LETTER HA
+ {0x0CBC, 0x0CBC, prCM, gcMn}, // KANNADA SIGN NUKTA
+ {0x0CBD, 0x0CBD, prAL, gcLo}, // KANNADA SIGN AVAGRAHA
+ {0x0CBE, 0x0CBE, prCM, gcMc}, // KANNADA VOWEL SIGN AA
+ {0x0CBF, 0x0CBF, prCM, gcMn}, // KANNADA VOWEL SIGN I
+ {0x0CC0, 0x0CC4, prCM, gcMc}, // [5] KANNADA VOWEL SIGN II..KANNADA VOWEL SIGN VOCALIC RR
+ {0x0CC6, 0x0CC6, prCM, gcMn}, // KANNADA VOWEL SIGN E
+ {0x0CC7, 0x0CC8, prCM, gcMc}, // [2] KANNADA VOWEL SIGN EE..KANNADA VOWEL SIGN AI
+ {0x0CCA, 0x0CCB, prCM, gcMc}, // [2] KANNADA VOWEL SIGN O..KANNADA VOWEL SIGN OO
+ {0x0CCC, 0x0CCD, prCM, gcMn}, // [2] KANNADA VOWEL SIGN AU..KANNADA SIGN VIRAMA
+ {0x0CD5, 0x0CD6, prCM, gcMc}, // [2] KANNADA LENGTH MARK..KANNADA AI LENGTH MARK
+ {0x0CDD, 0x0CDE, prAL, gcLo}, // [2] KANNADA LETTER NAKAARA POLLU..KANNADA LETTER FA
+ {0x0CE0, 0x0CE1, prAL, gcLo}, // [2] KANNADA LETTER VOCALIC RR..KANNADA LETTER VOCALIC LL
+ {0x0CE2, 0x0CE3, prCM, gcMn}, // [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL
+ {0x0CE6, 0x0CEF, prNU, gcNd}, // [10] KANNADA DIGIT ZERO..KANNADA DIGIT NINE
+ {0x0CF1, 0x0CF2, prAL, gcLo}, // [2] KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA
+ {0x0CF3, 0x0CF3, prCM, gcMc}, // KANNADA SIGN COMBINING ANUSVARA ABOVE RIGHT
+ {0x0D00, 0x0D01, prCM, gcMn}, // [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU
+ {0x0D02, 0x0D03, prCM, gcMc}, // [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA
+ {0x0D04, 0x0D0C, prAL, gcLo}, // [9] MALAYALAM LETTER VEDIC ANUSVARA..MALAYALAM LETTER VOCALIC L
+ {0x0D0E, 0x0D10, prAL, gcLo}, // [3] MALAYALAM LETTER E..MALAYALAM LETTER AI
+ {0x0D12, 0x0D3A, prAL, gcLo}, // [41] MALAYALAM LETTER O..MALAYALAM LETTER TTTA
+ {0x0D3B, 0x0D3C, prCM, gcMn}, // [2] MALAYALAM SIGN VERTICAL BAR VIRAMA..MALAYALAM SIGN CIRCULAR VIRAMA
+ {0x0D3D, 0x0D3D, prAL, gcLo}, // MALAYALAM SIGN AVAGRAHA
+ {0x0D3E, 0x0D40, prCM, gcMc}, // [3] MALAYALAM VOWEL SIGN AA..MALAYALAM VOWEL SIGN II
+ {0x0D41, 0x0D44, prCM, gcMn}, // [4] MALAYALAM VOWEL SIGN U..MALAYALAM VOWEL SIGN VOCALIC RR
+ {0x0D46, 0x0D48, prCM, gcMc}, // [3] MALAYALAM VOWEL SIGN E..MALAYALAM VOWEL SIGN AI
+ {0x0D4A, 0x0D4C, prCM, gcMc}, // [3] MALAYALAM VOWEL SIGN O..MALAYALAM VOWEL SIGN AU
+ {0x0D4D, 0x0D4D, prCM, gcMn}, // MALAYALAM SIGN VIRAMA
+ {0x0D4E, 0x0D4E, prAL, gcLo}, // MALAYALAM LETTER DOT REPH
+ {0x0D4F, 0x0D4F, prAL, gcSo}, // MALAYALAM SIGN PARA
+ {0x0D54, 0x0D56, prAL, gcLo}, // [3] MALAYALAM LETTER CHILLU M..MALAYALAM LETTER CHILLU LLL
+ {0x0D57, 0x0D57, prCM, gcMc}, // MALAYALAM AU LENGTH MARK
+ {0x0D58, 0x0D5E, prAL, gcNo}, // [7] MALAYALAM FRACTION ONE ONE-HUNDRED-AND-SIXTIETH..MALAYALAM FRACTION ONE FIFTH
+ {0x0D5F, 0x0D61, prAL, gcLo}, // [3] MALAYALAM LETTER ARCHAIC II..MALAYALAM LETTER VOCALIC LL
+ {0x0D62, 0x0D63, prCM, gcMn}, // [2] MALAYALAM VOWEL SIGN VOCALIC L..MALAYALAM VOWEL SIGN VOCALIC LL
+ {0x0D66, 0x0D6F, prNU, gcNd}, // [10] MALAYALAM DIGIT ZERO..MALAYALAM DIGIT NINE
+ {0x0D70, 0x0D78, prAL, gcNo}, // [9] MALAYALAM NUMBER TEN..MALAYALAM FRACTION THREE SIXTEENTHS
+ {0x0D79, 0x0D79, prPO, gcSo}, // MALAYALAM DATE MARK
+ {0x0D7A, 0x0D7F, prAL, gcLo}, // [6] MALAYALAM LETTER CHILLU NN..MALAYALAM LETTER CHILLU K
+ {0x0D81, 0x0D81, prCM, gcMn}, // SINHALA SIGN CANDRABINDU
+ {0x0D82, 0x0D83, prCM, gcMc}, // [2] SINHALA SIGN ANUSVARAYA..SINHALA SIGN VISARGAYA
+ {0x0D85, 0x0D96, prAL, gcLo}, // [18] SINHALA LETTER AYANNA..SINHALA LETTER AUYANNA
+ {0x0D9A, 0x0DB1, prAL, gcLo}, // [24] SINHALA LETTER ALPAPRAANA KAYANNA..SINHALA LETTER DANTAJA NAYANNA
+ {0x0DB3, 0x0DBB, prAL, gcLo}, // [9] SINHALA LETTER SANYAKA DAYANNA..SINHALA LETTER RAYANNA
+ {0x0DBD, 0x0DBD, prAL, gcLo}, // SINHALA LETTER DANTAJA LAYANNA
+ {0x0DC0, 0x0DC6, prAL, gcLo}, // [7] SINHALA LETTER VAYANNA..SINHALA LETTER FAYANNA
+ {0x0DCA, 0x0DCA, prCM, gcMn}, // SINHALA SIGN AL-LAKUNA
+ {0x0DCF, 0x0DD1, prCM, gcMc}, // [3] SINHALA VOWEL SIGN AELA-PILLA..SINHALA VOWEL SIGN DIGA AEDA-PILLA
+ {0x0DD2, 0x0DD4, prCM, gcMn}, // [3] SINHALA VOWEL SIGN KETTI IS-PILLA..SINHALA VOWEL SIGN KETTI PAA-PILLA
+ {0x0DD6, 0x0DD6, prCM, gcMn}, // SINHALA VOWEL SIGN DIGA PAA-PILLA
+ {0x0DD8, 0x0DDF, prCM, gcMc}, // [8] SINHALA VOWEL SIGN GAETTA-PILLA..SINHALA VOWEL SIGN GAYANUKITTA
+ {0x0DE6, 0x0DEF, prNU, gcNd}, // [10] SINHALA LITH DIGIT ZERO..SINHALA LITH DIGIT NINE
+ {0x0DF2, 0x0DF3, prCM, gcMc}, // [2] SINHALA VOWEL SIGN DIGA GAETTA-PILLA..SINHALA VOWEL SIGN DIGA GAYANUKITTA
+ {0x0DF4, 0x0DF4, prAL, gcPo}, // SINHALA PUNCTUATION KUNDDALIYA
+ {0x0E01, 0x0E30, prSA, gcLo}, // [48] THAI CHARACTER KO KAI..THAI CHARACTER SARA A
+ {0x0E31, 0x0E31, prSA, gcMn}, // THAI CHARACTER MAI HAN-AKAT
+ {0x0E32, 0x0E33, prSA, gcLo}, // [2] THAI CHARACTER SARA AA..THAI CHARACTER SARA AM
+ {0x0E34, 0x0E3A, prSA, gcMn}, // [7] THAI CHARACTER SARA I..THAI CHARACTER PHINTHU
+ {0x0E3F, 0x0E3F, prPR, gcSc}, // THAI CURRENCY SYMBOL BAHT
+ {0x0E40, 0x0E45, prSA, gcLo}, // [6] THAI CHARACTER SARA E..THAI CHARACTER LAKKHANGYAO
+ {0x0E46, 0x0E46, prSA, gcLm}, // THAI CHARACTER MAIYAMOK
+ {0x0E47, 0x0E4E, prSA, gcMn}, // [8] THAI CHARACTER MAITAIKHU..THAI CHARACTER YAMAKKAN
+ {0x0E4F, 0x0E4F, prAL, gcPo}, // THAI CHARACTER FONGMAN
+ {0x0E50, 0x0E59, prNU, gcNd}, // [10] THAI DIGIT ZERO..THAI DIGIT NINE
+ {0x0E5A, 0x0E5B, prBA, gcPo}, // [2] THAI CHARACTER ANGKHANKHU..THAI CHARACTER KHOMUT
+ {0x0E81, 0x0E82, prSA, gcLo}, // [2] LAO LETTER KO..LAO LETTER KHO SUNG
+ {0x0E84, 0x0E84, prSA, gcLo}, // LAO LETTER KHO TAM
+ {0x0E86, 0x0E8A, prSA, gcLo}, // [5] LAO LETTER PALI GHA..LAO LETTER SO TAM
+ {0x0E8C, 0x0EA3, prSA, gcLo}, // [24] LAO LETTER PALI JHA..LAO LETTER LO LING
+ {0x0EA5, 0x0EA5, prSA, gcLo}, // LAO LETTER LO LOOT
+ {0x0EA7, 0x0EB0, prSA, gcLo}, // [10] LAO LETTER WO..LAO VOWEL SIGN A
+ {0x0EB1, 0x0EB1, prSA, gcMn}, // LAO VOWEL SIGN MAI KAN
+ {0x0EB2, 0x0EB3, prSA, gcLo}, // [2] LAO VOWEL SIGN AA..LAO VOWEL SIGN AM
+ {0x0EB4, 0x0EBC, prSA, gcMn}, // [9] LAO VOWEL SIGN I..LAO SEMIVOWEL SIGN LO
+ {0x0EBD, 0x0EBD, prSA, gcLo}, // LAO SEMIVOWEL SIGN NYO
+ {0x0EC0, 0x0EC4, prSA, gcLo}, // [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI
+ {0x0EC6, 0x0EC6, prSA, gcLm}, // LAO KO LA
+ {0x0EC8, 0x0ECE, prSA, gcMn}, // [7] LAO TONE MAI EK..LAO YAMAKKAN
+ {0x0ED0, 0x0ED9, prNU, gcNd}, // [10] LAO DIGIT ZERO..LAO DIGIT NINE
+ {0x0EDC, 0x0EDF, prSA, gcLo}, // [4] LAO HO NO..LAO LETTER KHMU NYO
+ {0x0F00, 0x0F00, prAL, gcLo}, // TIBETAN SYLLABLE OM
+ {0x0F01, 0x0F03, prBB, gcSo}, // [3] TIBETAN MARK GTER YIG MGO TRUNCATED A..TIBETAN MARK GTER YIG MGO -UM GTER TSHEG MA
+ {0x0F04, 0x0F04, prBB, gcPo}, // TIBETAN MARK INITIAL YIG MGO MDUN MA
+ {0x0F05, 0x0F05, prAL, gcPo}, // TIBETAN MARK CLOSING YIG MGO SGAB MA
+ {0x0F06, 0x0F07, prBB, gcPo}, // [2] TIBETAN MARK CARET YIG MGO PHUR SHAD MA..TIBETAN MARK YIG MGO TSHEG SHAD MA
+ {0x0F08, 0x0F08, prGL, gcPo}, // TIBETAN MARK SBRUL SHAD
+ {0x0F09, 0x0F0A, prBB, gcPo}, // [2] TIBETAN MARK BSKUR YIG MGO..TIBETAN MARK BKA- SHOG YIG MGO
+ {0x0F0B, 0x0F0B, prBA, gcPo}, // TIBETAN MARK INTERSYLLABIC TSHEG
+ {0x0F0C, 0x0F0C, prGL, gcPo}, // TIBETAN MARK DELIMITER TSHEG BSTAR
+ {0x0F0D, 0x0F11, prEX, gcPo}, // [5] TIBETAN MARK SHAD..TIBETAN MARK RIN CHEN SPUNGS SHAD
+ {0x0F12, 0x0F12, prGL, gcPo}, // TIBETAN MARK RGYA GRAM SHAD
+ {0x0F13, 0x0F13, prAL, gcSo}, // TIBETAN MARK CARET -DZUD RTAGS ME LONG CAN
+ {0x0F14, 0x0F14, prEX, gcPo}, // TIBETAN MARK GTER TSHEG
+ {0x0F15, 0x0F17, prAL, gcSo}, // [3] TIBETAN LOGOTYPE SIGN CHAD RTAGS..TIBETAN ASTROLOGICAL SIGN SGRA GCAN -CHAR RTAGS
+ {0x0F18, 0x0F19, prCM, gcMn}, // [2] TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS
+ {0x0F1A, 0x0F1F, prAL, gcSo}, // [6] TIBETAN SIGN RDEL DKAR GCIG..TIBETAN SIGN RDEL DKAR RDEL NAG
+ {0x0F20, 0x0F29, prNU, gcNd}, // [10] TIBETAN DIGIT ZERO..TIBETAN DIGIT NINE
+ {0x0F2A, 0x0F33, prAL, gcNo}, // [10] TIBETAN DIGIT HALF ONE..TIBETAN DIGIT HALF ZERO
+ {0x0F34, 0x0F34, prBA, gcSo}, // TIBETAN MARK BSDUS RTAGS
+ {0x0F35, 0x0F35, prCM, gcMn}, // TIBETAN MARK NGAS BZUNG NYI ZLA
+ {0x0F36, 0x0F36, prAL, gcSo}, // TIBETAN MARK CARET -DZUD RTAGS BZHI MIG CAN
+ {0x0F37, 0x0F37, prCM, gcMn}, // TIBETAN MARK NGAS BZUNG SGOR RTAGS
+ {0x0F38, 0x0F38, prAL, gcSo}, // TIBETAN MARK CHE MGO
+ {0x0F39, 0x0F39, prCM, gcMn}, // TIBETAN MARK TSA -PHRU
+ {0x0F3A, 0x0F3A, prOP, gcPs}, // TIBETAN MARK GUG RTAGS GYON
+ {0x0F3B, 0x0F3B, prCL, gcPe}, // TIBETAN MARK GUG RTAGS GYAS
+ {0x0F3C, 0x0F3C, prOP, gcPs}, // TIBETAN MARK ANG KHANG GYON
+ {0x0F3D, 0x0F3D, prCL, gcPe}, // TIBETAN MARK ANG KHANG GYAS
+ {0x0F3E, 0x0F3F, prCM, gcMc}, // [2] TIBETAN SIGN YAR TSHES..TIBETAN SIGN MAR TSHES
+ {0x0F40, 0x0F47, prAL, gcLo}, // [8] TIBETAN LETTER KA..TIBETAN LETTER JA
+ {0x0F49, 0x0F6C, prAL, gcLo}, // [36] TIBETAN LETTER NYA..TIBETAN LETTER RRA
+ {0x0F71, 0x0F7E, prCM, gcMn}, // [14] TIBETAN VOWEL SIGN AA..TIBETAN SIGN RJES SU NGA RO
+ {0x0F7F, 0x0F7F, prBA, gcMc}, // TIBETAN SIGN RNAM BCAD
+ {0x0F80, 0x0F84, prCM, gcMn}, // [5] TIBETAN VOWEL SIGN REVERSED I..TIBETAN MARK HALANTA
+ {0x0F85, 0x0F85, prBA, gcPo}, // TIBETAN MARK PALUTA
+ {0x0F86, 0x0F87, prCM, gcMn}, // [2] TIBETAN SIGN LCI RTAGS..TIBETAN SIGN YANG RTAGS
+ {0x0F88, 0x0F8C, prAL, gcLo}, // [5] TIBETAN SIGN LCE TSA CAN..TIBETAN SIGN INVERTED MCHU CAN
+ {0x0F8D, 0x0F97, prCM, gcMn}, // [11] TIBETAN SUBJOINED SIGN LCE TSA CAN..TIBETAN SUBJOINED LETTER JA
+ {0x0F99, 0x0FBC, prCM, gcMn}, // [36] TIBETAN SUBJOINED LETTER NYA..TIBETAN SUBJOINED LETTER FIXED-FORM RA
+ {0x0FBE, 0x0FBF, prBA, gcSo}, // [2] TIBETAN KU RU KHA..TIBETAN KU RU KHA BZHI MIG CAN
+ {0x0FC0, 0x0FC5, prAL, gcSo}, // [6] TIBETAN CANTILLATION SIGN HEAVY BEAT..TIBETAN SYMBOL RDO RJE
+ {0x0FC6, 0x0FC6, prCM, gcMn}, // TIBETAN SYMBOL PADMA GDAN
+ {0x0FC7, 0x0FCC, prAL, gcSo}, // [6] TIBETAN SYMBOL RDO RJE RGYA GRAM..TIBETAN SYMBOL NOR BU BZHI -KHYIL
+ {0x0FCE, 0x0FCF, prAL, gcSo}, // [2] TIBETAN SIGN RDEL NAG RDEL DKAR..TIBETAN SIGN RDEL NAG GSUM
+ {0x0FD0, 0x0FD1, prBB, gcPo}, // [2] TIBETAN MARK BSKA- SHOG GI MGO RGYAN..TIBETAN MARK MNYAM YIG GI MGO RGYAN
+ {0x0FD2, 0x0FD2, prBA, gcPo}, // TIBETAN MARK NYIS TSHEG
+ {0x0FD3, 0x0FD3, prBB, gcPo}, // TIBETAN MARK INITIAL BRDA RNYING YIG MGO MDUN MA
+ {0x0FD4, 0x0FD4, prAL, gcPo}, // TIBETAN MARK CLOSING BRDA RNYING YIG MGO SGAB MA
+ {0x0FD5, 0x0FD8, prAL, gcSo}, // [4] RIGHT-FACING SVASTI SIGN..LEFT-FACING SVASTI SIGN WITH DOTS
+ {0x0FD9, 0x0FDA, prGL, gcPo}, // [2] TIBETAN MARK LEADING MCHAN RTAGS..TIBETAN MARK TRAILING MCHAN RTAGS
+ {0x1000, 0x102A, prSA, gcLo}, // [43] MYANMAR LETTER KA..MYANMAR LETTER AU
+ {0x102B, 0x102C, prSA, gcMc}, // [2] MYANMAR VOWEL SIGN TALL AA..MYANMAR VOWEL SIGN AA
+ {0x102D, 0x1030, prSA, gcMn}, // [4] MYANMAR VOWEL SIGN I..MYANMAR VOWEL SIGN UU
+ {0x1031, 0x1031, prSA, gcMc}, // MYANMAR VOWEL SIGN E
+ {0x1032, 0x1037, prSA, gcMn}, // [6] MYANMAR VOWEL SIGN AI..MYANMAR SIGN DOT BELOW
+ {0x1038, 0x1038, prSA, gcMc}, // MYANMAR SIGN VISARGA
+ {0x1039, 0x103A, prSA, gcMn}, // [2] MYANMAR SIGN VIRAMA..MYANMAR SIGN ASAT
+ {0x103B, 0x103C, prSA, gcMc}, // [2] MYANMAR CONSONANT SIGN MEDIAL YA..MYANMAR CONSONANT SIGN MEDIAL RA
+ {0x103D, 0x103E, prSA, gcMn}, // [2] MYANMAR CONSONANT SIGN MEDIAL WA..MYANMAR CONSONANT SIGN MEDIAL HA
+ {0x103F, 0x103F, prSA, gcLo}, // MYANMAR LETTER GREAT SA
+ {0x1040, 0x1049, prNU, gcNd}, // [10] MYANMAR DIGIT ZERO..MYANMAR DIGIT NINE
+ {0x104A, 0x104B, prBA, gcPo}, // [2] MYANMAR SIGN LITTLE SECTION..MYANMAR SIGN SECTION
+ {0x104C, 0x104F, prAL, gcPo}, // [4] MYANMAR SYMBOL LOCATIVE..MYANMAR SYMBOL GENITIVE
+ {0x1050, 0x1055, prSA, gcLo}, // [6] MYANMAR LETTER SHA..MYANMAR LETTER VOCALIC LL
+ {0x1056, 0x1057, prSA, gcMc}, // [2] MYANMAR VOWEL SIGN VOCALIC R..MYANMAR VOWEL SIGN VOCALIC RR
+ {0x1058, 0x1059, prSA, gcMn}, // [2] MYANMAR VOWEL SIGN VOCALIC L..MYANMAR VOWEL SIGN VOCALIC LL
+ {0x105A, 0x105D, prSA, gcLo}, // [4] MYANMAR LETTER MON NGA..MYANMAR LETTER MON BBE
+ {0x105E, 0x1060, prSA, gcMn}, // [3] MYANMAR CONSONANT SIGN MON MEDIAL NA..MYANMAR CONSONANT SIGN MON MEDIAL LA
+ {0x1061, 0x1061, prSA, gcLo}, // MYANMAR LETTER SGAW KAREN SHA
+ {0x1062, 0x1064, prSA, gcMc}, // [3] MYANMAR VOWEL SIGN SGAW KAREN EU..MYANMAR TONE MARK SGAW KAREN KE PHO
+ {0x1065, 0x1066, prSA, gcLo}, // [2] MYANMAR LETTER WESTERN PWO KAREN THA..MYANMAR LETTER WESTERN PWO KAREN PWA
+ {0x1067, 0x106D, prSA, gcMc}, // [7] MYANMAR VOWEL SIGN WESTERN PWO KAREN EU..MYANMAR SIGN WESTERN PWO KAREN TONE-5
+ {0x106E, 0x1070, prSA, gcLo}, // [3] MYANMAR LETTER EASTERN PWO KAREN NNA..MYANMAR LETTER EASTERN PWO KAREN GHWA
+ {0x1071, 0x1074, prSA, gcMn}, // [4] MYANMAR VOWEL SIGN GEBA KAREN I..MYANMAR VOWEL SIGN KAYAH EE
+ {0x1075, 0x1081, prSA, gcLo}, // [13] MYANMAR LETTER SHAN KA..MYANMAR LETTER SHAN HA
+ {0x1082, 0x1082, prSA, gcMn}, // MYANMAR CONSONANT SIGN SHAN MEDIAL WA
+ {0x1083, 0x1084, prSA, gcMc}, // [2] MYANMAR VOWEL SIGN SHAN AA..MYANMAR VOWEL SIGN SHAN E
+ {0x1085, 0x1086, prSA, gcMn}, // [2] MYANMAR VOWEL SIGN SHAN E ABOVE..MYANMAR VOWEL SIGN SHAN FINAL Y
+ {0x1087, 0x108C, prSA, gcMc}, // [6] MYANMAR SIGN SHAN TONE-2..MYANMAR SIGN SHAN COUNCIL TONE-3
+ {0x108D, 0x108D, prSA, gcMn}, // MYANMAR SIGN SHAN COUNCIL EMPHATIC TONE
+ {0x108E, 0x108E, prSA, gcLo}, // MYANMAR LETTER RUMAI PALAUNG FA
+ {0x108F, 0x108F, prSA, gcMc}, // MYANMAR SIGN RUMAI PALAUNG TONE-5
+ {0x1090, 0x1099, prNU, gcNd}, // [10] MYANMAR SHAN DIGIT ZERO..MYANMAR SHAN DIGIT NINE
+ {0x109A, 0x109C, prSA, gcMc}, // [3] MYANMAR SIGN KHAMTI TONE-1..MYANMAR VOWEL SIGN AITON A
+ {0x109D, 0x109D, prSA, gcMn}, // MYANMAR VOWEL SIGN AITON AI
+ {0x109E, 0x109F, prSA, gcSo}, // [2] MYANMAR SYMBOL SHAN ONE..MYANMAR SYMBOL SHAN EXCLAMATION
+ {0x10A0, 0x10C5, prAL, gcLu}, // [38] GEORGIAN CAPITAL LETTER AN..GEORGIAN CAPITAL LETTER HOE
+ {0x10C7, 0x10C7, prAL, gcLu}, // GEORGIAN CAPITAL LETTER YN
+ {0x10CD, 0x10CD, prAL, gcLu}, // GEORGIAN CAPITAL LETTER AEN
+ {0x10D0, 0x10FA, prAL, gcLl}, // [43] GEORGIAN LETTER AN..GEORGIAN LETTER AIN
+ {0x10FB, 0x10FB, prAL, gcPo}, // GEORGIAN PARAGRAPH SEPARATOR
+ {0x10FC, 0x10FC, prAL, gcLm}, // MODIFIER LETTER GEORGIAN NAR
+ {0x10FD, 0x10FF, prAL, gcLl}, // [3] GEORGIAN LETTER AEN..GEORGIAN LETTER LABIAL SIGN
+ {0x1100, 0x115F, prJL, gcLo}, // [96] HANGUL CHOSEONG KIYEOK..HANGUL CHOSEONG FILLER
+ {0x1160, 0x11A7, prJV, gcLo}, // [72] HANGUL JUNGSEONG FILLER..HANGUL JUNGSEONG O-YAE
+ {0x11A8, 0x11FF, prJT, gcLo}, // [88] HANGUL JONGSEONG KIYEOK..HANGUL JONGSEONG SSANGNIEUN
+ {0x1200, 0x1248, prAL, gcLo}, // [73] ETHIOPIC SYLLABLE HA..ETHIOPIC SYLLABLE QWA
+ {0x124A, 0x124D, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE QWI..ETHIOPIC SYLLABLE QWE
+ {0x1250, 0x1256, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE QHA..ETHIOPIC SYLLABLE QHO
+ {0x1258, 0x1258, prAL, gcLo}, // ETHIOPIC SYLLABLE QHWA
+ {0x125A, 0x125D, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE QHWI..ETHIOPIC SYLLABLE QHWE
+ {0x1260, 0x1288, prAL, gcLo}, // [41] ETHIOPIC SYLLABLE BA..ETHIOPIC SYLLABLE XWA
+ {0x128A, 0x128D, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE XWI..ETHIOPIC SYLLABLE XWE
+ {0x1290, 0x12B0, prAL, gcLo}, // [33] ETHIOPIC SYLLABLE NA..ETHIOPIC SYLLABLE KWA
+ {0x12B2, 0x12B5, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE KWI..ETHIOPIC SYLLABLE KWE
+ {0x12B8, 0x12BE, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE KXA..ETHIOPIC SYLLABLE KXO
+ {0x12C0, 0x12C0, prAL, gcLo}, // ETHIOPIC SYLLABLE KXWA
+ {0x12C2, 0x12C5, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE KXWI..ETHIOPIC SYLLABLE KXWE
+ {0x12C8, 0x12D6, prAL, gcLo}, // [15] ETHIOPIC SYLLABLE WA..ETHIOPIC SYLLABLE PHARYNGEAL O
+ {0x12D8, 0x1310, prAL, gcLo}, // [57] ETHIOPIC SYLLABLE ZA..ETHIOPIC SYLLABLE GWA
+ {0x1312, 0x1315, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE GWI..ETHIOPIC SYLLABLE GWE
+ {0x1318, 0x135A, prAL, gcLo}, // [67] ETHIOPIC SYLLABLE GGA..ETHIOPIC SYLLABLE FYA
+ {0x135D, 0x135F, prCM, gcMn}, // [3] ETHIOPIC COMBINING GEMINATION AND VOWEL LENGTH MARK..ETHIOPIC COMBINING GEMINATION MARK
+ {0x1360, 0x1360, prAL, gcPo}, // ETHIOPIC SECTION MARK
+ {0x1361, 0x1361, prBA, gcPo}, // ETHIOPIC WORDSPACE
+ {0x1362, 0x1368, prAL, gcPo}, // [7] ETHIOPIC FULL STOP..ETHIOPIC PARAGRAPH SEPARATOR
+ {0x1369, 0x137C, prAL, gcNo}, // [20] ETHIOPIC DIGIT ONE..ETHIOPIC NUMBER TEN THOUSAND
+ {0x1380, 0x138F, prAL, gcLo}, // [16] ETHIOPIC SYLLABLE SEBATBEIT MWA..ETHIOPIC SYLLABLE PWE
+ {0x1390, 0x1399, prAL, gcSo}, // [10] ETHIOPIC TONAL MARK YIZET..ETHIOPIC TONAL MARK KURT
+ {0x13A0, 0x13F5, prAL, gcLu}, // [86] CHEROKEE LETTER A..CHEROKEE LETTER MV
+ {0x13F8, 0x13FD, prAL, gcLl}, // [6] CHEROKEE SMALL LETTER YE..CHEROKEE SMALL LETTER MV
+ {0x1400, 0x1400, prBA, gcPd}, // CANADIAN SYLLABICS HYPHEN
+ {0x1401, 0x166C, prAL, gcLo}, // [620] CANADIAN SYLLABICS E..CANADIAN SYLLABICS CARRIER TTSA
+ {0x166D, 0x166D, prAL, gcSo}, // CANADIAN SYLLABICS CHI SIGN
+ {0x166E, 0x166E, prAL, gcPo}, // CANADIAN SYLLABICS FULL STOP
+ {0x166F, 0x167F, prAL, gcLo}, // [17] CANADIAN SYLLABICS QAI..CANADIAN SYLLABICS BLACKFOOT W
+ {0x1680, 0x1680, prBA, gcZs}, // OGHAM SPACE MARK
+ {0x1681, 0x169A, prAL, gcLo}, // [26] OGHAM LETTER BEITH..OGHAM LETTER PEITH
+ {0x169B, 0x169B, prOP, gcPs}, // OGHAM FEATHER MARK
+ {0x169C, 0x169C, prCL, gcPe}, // OGHAM REVERSED FEATHER MARK
+ {0x16A0, 0x16EA, prAL, gcLo}, // [75] RUNIC LETTER FEHU FEOH FE F..RUNIC LETTER X
+ {0x16EB, 0x16ED, prBA, gcPo}, // [3] RUNIC SINGLE PUNCTUATION..RUNIC CROSS PUNCTUATION
+ {0x16EE, 0x16F0, prAL, gcNl}, // [3] RUNIC ARLAUG SYMBOL..RUNIC BELGTHOR SYMBOL
+ {0x16F1, 0x16F8, prAL, gcLo}, // [8] RUNIC LETTER K..RUNIC LETTER FRANKS CASKET AESC
+ {0x1700, 0x1711, prAL, gcLo}, // [18] TAGALOG LETTER A..TAGALOG LETTER HA
+ {0x1712, 0x1714, prCM, gcMn}, // [3] TAGALOG VOWEL SIGN I..TAGALOG SIGN VIRAMA
+ {0x1715, 0x1715, prCM, gcMc}, // TAGALOG SIGN PAMUDPOD
+ {0x171F, 0x171F, prAL, gcLo}, // TAGALOG LETTER ARCHAIC RA
+ {0x1720, 0x1731, prAL, gcLo}, // [18] HANUNOO LETTER A..HANUNOO LETTER HA
+ {0x1732, 0x1733, prCM, gcMn}, // [2] HANUNOO VOWEL SIGN I..HANUNOO VOWEL SIGN U
+ {0x1734, 0x1734, prCM, gcMc}, // HANUNOO SIGN PAMUDPOD
+ {0x1735, 0x1736, prBA, gcPo}, // [2] PHILIPPINE SINGLE PUNCTUATION..PHILIPPINE DOUBLE PUNCTUATION
+ {0x1740, 0x1751, prAL, gcLo}, // [18] BUHID LETTER A..BUHID LETTER HA
+ {0x1752, 0x1753, prCM, gcMn}, // [2] BUHID VOWEL SIGN I..BUHID VOWEL SIGN U
+ {0x1760, 0x176C, prAL, gcLo}, // [13] TAGBANWA LETTER A..TAGBANWA LETTER YA
+ {0x176E, 0x1770, prAL, gcLo}, // [3] TAGBANWA LETTER LA..TAGBANWA LETTER SA
+ {0x1772, 0x1773, prCM, gcMn}, // [2] TAGBANWA VOWEL SIGN I..TAGBANWA VOWEL SIGN U
+ {0x1780, 0x17B3, prSA, gcLo}, // [52] KHMER LETTER KA..KHMER INDEPENDENT VOWEL QAU
+ {0x17B4, 0x17B5, prSA, gcMn}, // [2] KHMER VOWEL INHERENT AQ..KHMER VOWEL INHERENT AA
+ {0x17B6, 0x17B6, prSA, gcMc}, // KHMER VOWEL SIGN AA
+ {0x17B7, 0x17BD, prSA, gcMn}, // [7] KHMER VOWEL SIGN I..KHMER VOWEL SIGN UA
+ {0x17BE, 0x17C5, prSA, gcMc}, // [8] KHMER VOWEL SIGN OE..KHMER VOWEL SIGN AU
+ {0x17C6, 0x17C6, prSA, gcMn}, // KHMER SIGN NIKAHIT
+ {0x17C7, 0x17C8, prSA, gcMc}, // [2] KHMER SIGN REAHMUK..KHMER SIGN YUUKALEAPINTU
+ {0x17C9, 0x17D3, prSA, gcMn}, // [11] KHMER SIGN MUUSIKATOAN..KHMER SIGN BATHAMASAT
+ {0x17D4, 0x17D5, prBA, gcPo}, // [2] KHMER SIGN KHAN..KHMER SIGN BARIYOOSAN
+ {0x17D6, 0x17D6, prNS, gcPo}, // KHMER SIGN CAMNUC PII KUUH
+ {0x17D7, 0x17D7, prSA, gcLm}, // KHMER SIGN LEK TOO
+ {0x17D8, 0x17D8, prBA, gcPo}, // KHMER SIGN BEYYAL
+ {0x17D9, 0x17D9, prAL, gcPo}, // KHMER SIGN PHNAEK MUAN
+ {0x17DA, 0x17DA, prBA, gcPo}, // KHMER SIGN KOOMUUT
+ {0x17DB, 0x17DB, prPR, gcSc}, // KHMER CURRENCY SYMBOL RIEL
+ {0x17DC, 0x17DC, prSA, gcLo}, // KHMER SIGN AVAKRAHASANYA
+ {0x17DD, 0x17DD, prSA, gcMn}, // KHMER SIGN ATTHACAN
+ {0x17E0, 0x17E9, prNU, gcNd}, // [10] KHMER DIGIT ZERO..KHMER DIGIT NINE
+ {0x17F0, 0x17F9, prAL, gcNo}, // [10] KHMER SYMBOL LEK ATTAK SON..KHMER SYMBOL LEK ATTAK PRAM-BUON
+ {0x1800, 0x1801, prAL, gcPo}, // [2] MONGOLIAN BIRGA..MONGOLIAN ELLIPSIS
+ {0x1802, 0x1803, prEX, gcPo}, // [2] MONGOLIAN COMMA..MONGOLIAN FULL STOP
+ {0x1804, 0x1805, prBA, gcPo}, // [2] MONGOLIAN COLON..MONGOLIAN FOUR DOTS
+ {0x1806, 0x1806, prBB, gcPd}, // MONGOLIAN TODO SOFT HYPHEN
+ {0x1807, 0x1807, prAL, gcPo}, // MONGOLIAN SIBE SYLLABLE BOUNDARY MARKER
+ {0x1808, 0x1809, prEX, gcPo}, // [2] MONGOLIAN MANCHU COMMA..MONGOLIAN MANCHU FULL STOP
+ {0x180A, 0x180A, prAL, gcPo}, // MONGOLIAN NIRUGU
+ {0x180B, 0x180D, prCM, gcMn}, // [3] MONGOLIAN FREE VARIATION SELECTOR ONE..MONGOLIAN FREE VARIATION SELECTOR THREE
+ {0x180E, 0x180E, prGL, gcCf}, // MONGOLIAN VOWEL SEPARATOR
+ {0x180F, 0x180F, prCM, gcMn}, // MONGOLIAN FREE VARIATION SELECTOR FOUR
+ {0x1810, 0x1819, prNU, gcNd}, // [10] MONGOLIAN DIGIT ZERO..MONGOLIAN DIGIT NINE
+ {0x1820, 0x1842, prAL, gcLo}, // [35] MONGOLIAN LETTER A..MONGOLIAN LETTER CHI
+ {0x1843, 0x1843, prAL, gcLm}, // MONGOLIAN LETTER TODO LONG VOWEL SIGN
+ {0x1844, 0x1878, prAL, gcLo}, // [53] MONGOLIAN LETTER TODO E..MONGOLIAN LETTER CHA WITH TWO DOTS
+ {0x1880, 0x1884, prAL, gcLo}, // [5] MONGOLIAN LETTER ALI GALI ANUSVARA ONE..MONGOLIAN LETTER ALI GALI INVERTED UBADAMA
+ {0x1885, 0x1886, prCM, gcMn}, // [2] MONGOLIAN LETTER ALI GALI BALUDA..MONGOLIAN LETTER ALI GALI THREE BALUDA
+ {0x1887, 0x18A8, prAL, gcLo}, // [34] MONGOLIAN LETTER ALI GALI A..MONGOLIAN LETTER MANCHU ALI GALI BHA
+ {0x18A9, 0x18A9, prCM, gcMn}, // MONGOLIAN LETTER ALI GALI DAGALGA
+ {0x18AA, 0x18AA, prAL, gcLo}, // MONGOLIAN LETTER MANCHU ALI GALI LHA
+ {0x18B0, 0x18F5, prAL, gcLo}, // [70] CANADIAN SYLLABICS OY..CANADIAN SYLLABICS CARRIER DENTAL S
+ {0x1900, 0x191E, prAL, gcLo}, // [31] LIMBU VOWEL-CARRIER LETTER..LIMBU LETTER TRA
+ {0x1920, 0x1922, prCM, gcMn}, // [3] LIMBU VOWEL SIGN A..LIMBU VOWEL SIGN U
+ {0x1923, 0x1926, prCM, gcMc}, // [4] LIMBU VOWEL SIGN EE..LIMBU VOWEL SIGN AU
+ {0x1927, 0x1928, prCM, gcMn}, // [2] LIMBU VOWEL SIGN E..LIMBU VOWEL SIGN O
+ {0x1929, 0x192B, prCM, gcMc}, // [3] LIMBU SUBJOINED LETTER YA..LIMBU SUBJOINED LETTER WA
+ {0x1930, 0x1931, prCM, gcMc}, // [2] LIMBU SMALL LETTER KA..LIMBU SMALL LETTER NGA
+ {0x1932, 0x1932, prCM, gcMn}, // LIMBU SMALL LETTER ANUSVARA
+ {0x1933, 0x1938, prCM, gcMc}, // [6] LIMBU SMALL LETTER TA..LIMBU SMALL LETTER LA
+ {0x1939, 0x193B, prCM, gcMn}, // [3] LIMBU SIGN MUKPHRENG..LIMBU SIGN SA-I
+ {0x1940, 0x1940, prAL, gcSo}, // LIMBU SIGN LOO
+ {0x1944, 0x1945, prEX, gcPo}, // [2] LIMBU EXCLAMATION MARK..LIMBU QUESTION MARK
+ {0x1946, 0x194F, prNU, gcNd}, // [10] LIMBU DIGIT ZERO..LIMBU DIGIT NINE
+ {0x1950, 0x196D, prSA, gcLo}, // [30] TAI LE LETTER KA..TAI LE LETTER AI
+ {0x1970, 0x1974, prSA, gcLo}, // [5] TAI LE LETTER TONE-2..TAI LE LETTER TONE-6
+ {0x1980, 0x19AB, prSA, gcLo}, // [44] NEW TAI LUE LETTER HIGH QA..NEW TAI LUE LETTER LOW SUA
+ {0x19B0, 0x19C9, prSA, gcLo}, // [26] NEW TAI LUE VOWEL SIGN VOWEL SHORTENER..NEW TAI LUE TONE MARK-2
+ {0x19D0, 0x19D9, prNU, gcNd}, // [10] NEW TAI LUE DIGIT ZERO..NEW TAI LUE DIGIT NINE
+ {0x19DA, 0x19DA, prSA, gcNo}, // NEW TAI LUE THAM DIGIT ONE
+ {0x19DE, 0x19DF, prSA, gcSo}, // [2] NEW TAI LUE SIGN LAE..NEW TAI LUE SIGN LAEV
+ {0x19E0, 0x19FF, prAL, gcSo}, // [32] KHMER SYMBOL PATHAMASAT..KHMER SYMBOL DAP-PRAM ROC
+ {0x1A00, 0x1A16, prAL, gcLo}, // [23] BUGINESE LETTER KA..BUGINESE LETTER HA
+ {0x1A17, 0x1A18, prCM, gcMn}, // [2] BUGINESE VOWEL SIGN I..BUGINESE VOWEL SIGN U
+ {0x1A19, 0x1A1A, prCM, gcMc}, // [2] BUGINESE VOWEL SIGN E..BUGINESE VOWEL SIGN O
+ {0x1A1B, 0x1A1B, prCM, gcMn}, // BUGINESE VOWEL SIGN AE
+ {0x1A1E, 0x1A1F, prAL, gcPo}, // [2] BUGINESE PALLAWA..BUGINESE END OF SECTION
+ {0x1A20, 0x1A54, prSA, gcLo}, // [53] TAI THAM LETTER HIGH KA..TAI THAM LETTER GREAT SA
+ {0x1A55, 0x1A55, prSA, gcMc}, // TAI THAM CONSONANT SIGN MEDIAL RA
+ {0x1A56, 0x1A56, prSA, gcMn}, // TAI THAM CONSONANT SIGN MEDIAL LA
+ {0x1A57, 0x1A57, prSA, gcMc}, // TAI THAM CONSONANT SIGN LA TANG LAI
+ {0x1A58, 0x1A5E, prSA, gcMn}, // [7] TAI THAM SIGN MAI KANG LAI..TAI THAM CONSONANT SIGN SA
+ {0x1A60, 0x1A60, prSA, gcMn}, // TAI THAM SIGN SAKOT
+ {0x1A61, 0x1A61, prSA, gcMc}, // TAI THAM VOWEL SIGN A
+ {0x1A62, 0x1A62, prSA, gcMn}, // TAI THAM VOWEL SIGN MAI SAT
+ {0x1A63, 0x1A64, prSA, gcMc}, // [2] TAI THAM VOWEL SIGN AA..TAI THAM VOWEL SIGN TALL AA
+ {0x1A65, 0x1A6C, prSA, gcMn}, // [8] TAI THAM VOWEL SIGN I..TAI THAM VOWEL SIGN OA BELOW
+ {0x1A6D, 0x1A72, prSA, gcMc}, // [6] TAI THAM VOWEL SIGN OY..TAI THAM VOWEL SIGN THAM AI
+ {0x1A73, 0x1A7C, prSA, gcMn}, // [10] TAI THAM VOWEL SIGN OA ABOVE..TAI THAM SIGN KHUEN-LUE KARAN
+ {0x1A7F, 0x1A7F, prCM, gcMn}, // TAI THAM COMBINING CRYPTOGRAMMIC DOT
+ {0x1A80, 0x1A89, prNU, gcNd}, // [10] TAI THAM HORA DIGIT ZERO..TAI THAM HORA DIGIT NINE
+ {0x1A90, 0x1A99, prNU, gcNd}, // [10] TAI THAM THAM DIGIT ZERO..TAI THAM THAM DIGIT NINE
+ {0x1AA0, 0x1AA6, prSA, gcPo}, // [7] TAI THAM SIGN WIANG..TAI THAM SIGN REVERSED ROTATED RANA
+ {0x1AA7, 0x1AA7, prSA, gcLm}, // TAI THAM SIGN MAI YAMOK
+ {0x1AA8, 0x1AAD, prSA, gcPo}, // [6] TAI THAM SIGN KAAN..TAI THAM SIGN CAANG
+ {0x1AB0, 0x1ABD, prCM, gcMn}, // [14] COMBINING DOUBLED CIRCUMFLEX ACCENT..COMBINING PARENTHESES BELOW
+ {0x1ABE, 0x1ABE, prCM, gcMe}, // COMBINING PARENTHESES OVERLAY
+ {0x1ABF, 0x1ACE, prCM, gcMn}, // [16] COMBINING LATIN SMALL LETTER W BELOW..COMBINING LATIN SMALL LETTER INSULAR T
+ {0x1B00, 0x1B03, prCM, gcMn}, // [4] BALINESE SIGN ULU RICEM..BALINESE SIGN SURANG
+ {0x1B04, 0x1B04, prCM, gcMc}, // BALINESE SIGN BISAH
+ {0x1B05, 0x1B33, prAL, gcLo}, // [47] BALINESE LETTER AKARA..BALINESE LETTER HA
+ {0x1B34, 0x1B34, prCM, gcMn}, // BALINESE SIGN REREKAN
+ {0x1B35, 0x1B35, prCM, gcMc}, // BALINESE VOWEL SIGN TEDUNG
+ {0x1B36, 0x1B3A, prCM, gcMn}, // [5] BALINESE VOWEL SIGN ULU..BALINESE VOWEL SIGN RA REPA
+ {0x1B3B, 0x1B3B, prCM, gcMc}, // BALINESE VOWEL SIGN RA REPA TEDUNG
+ {0x1B3C, 0x1B3C, prCM, gcMn}, // BALINESE VOWEL SIGN LA LENGA
+ {0x1B3D, 0x1B41, prCM, gcMc}, // [5] BALINESE VOWEL SIGN LA LENGA TEDUNG..BALINESE VOWEL SIGN TALING REPA TEDUNG
+ {0x1B42, 0x1B42, prCM, gcMn}, // BALINESE VOWEL SIGN PEPET
+ {0x1B43, 0x1B44, prCM, gcMc}, // [2] BALINESE VOWEL SIGN PEPET TEDUNG..BALINESE ADEG ADEG
+ {0x1B45, 0x1B4C, prAL, gcLo}, // [8] BALINESE LETTER KAF SASAK..BALINESE LETTER ARCHAIC JNYA
+ {0x1B50, 0x1B59, prNU, gcNd}, // [10] BALINESE DIGIT ZERO..BALINESE DIGIT NINE
+ {0x1B5A, 0x1B5B, prBA, gcPo}, // [2] BALINESE PANTI..BALINESE PAMADA
+ {0x1B5C, 0x1B5C, prAL, gcPo}, // BALINESE WINDU
+ {0x1B5D, 0x1B60, prBA, gcPo}, // [4] BALINESE CARIK PAMUNGKAH..BALINESE PAMENENG
+ {0x1B61, 0x1B6A, prAL, gcSo}, // [10] BALINESE MUSICAL SYMBOL DONG..BALINESE MUSICAL SYMBOL DANG GEDE
+ {0x1B6B, 0x1B73, prCM, gcMn}, // [9] BALINESE MUSICAL SYMBOL COMBINING TEGEH..BALINESE MUSICAL SYMBOL COMBINING GONG
+ {0x1B74, 0x1B7C, prAL, gcSo}, // [9] BALINESE MUSICAL SYMBOL RIGHT-HAND OPEN DUG..BALINESE MUSICAL SYMBOL LEFT-HAND OPEN PING
+ {0x1B7D, 0x1B7E, prBA, gcPo}, // [2] BALINESE PANTI LANTANG..BALINESE PAMADA LANTANG
+ {0x1B80, 0x1B81, prCM, gcMn}, // [2] SUNDANESE SIGN PANYECEK..SUNDANESE SIGN PANGLAYAR
+ {0x1B82, 0x1B82, prCM, gcMc}, // SUNDANESE SIGN PANGWISAD
+ {0x1B83, 0x1BA0, prAL, gcLo}, // [30] SUNDANESE LETTER A..SUNDANESE LETTER HA
+ {0x1BA1, 0x1BA1, prCM, gcMc}, // SUNDANESE CONSONANT SIGN PAMINGKAL
+ {0x1BA2, 0x1BA5, prCM, gcMn}, // [4] SUNDANESE CONSONANT SIGN PANYAKRA..SUNDANESE VOWEL SIGN PANYUKU
+ {0x1BA6, 0x1BA7, prCM, gcMc}, // [2] SUNDANESE VOWEL SIGN PANAELAENG..SUNDANESE VOWEL SIGN PANOLONG
+ {0x1BA8, 0x1BA9, prCM, gcMn}, // [2] SUNDANESE VOWEL SIGN PAMEPET..SUNDANESE VOWEL SIGN PANEULEUNG
+ {0x1BAA, 0x1BAA, prCM, gcMc}, // SUNDANESE SIGN PAMAAEH
+ {0x1BAB, 0x1BAD, prCM, gcMn}, // [3] SUNDANESE SIGN VIRAMA..SUNDANESE CONSONANT SIGN PASANGAN WA
+ {0x1BAE, 0x1BAF, prAL, gcLo}, // [2] SUNDANESE LETTER KHA..SUNDANESE LETTER SYA
+ {0x1BB0, 0x1BB9, prNU, gcNd}, // [10] SUNDANESE DIGIT ZERO..SUNDANESE DIGIT NINE
+ {0x1BBA, 0x1BBF, prAL, gcLo}, // [6] SUNDANESE AVAGRAHA..SUNDANESE LETTER FINAL M
+ {0x1BC0, 0x1BE5, prAL, gcLo}, // [38] BATAK LETTER A..BATAK LETTER U
+ {0x1BE6, 0x1BE6, prCM, gcMn}, // BATAK SIGN TOMPI
+ {0x1BE7, 0x1BE7, prCM, gcMc}, // BATAK VOWEL SIGN E
+ {0x1BE8, 0x1BE9, prCM, gcMn}, // [2] BATAK VOWEL SIGN PAKPAK E..BATAK VOWEL SIGN EE
+ {0x1BEA, 0x1BEC, prCM, gcMc}, // [3] BATAK VOWEL SIGN I..BATAK VOWEL SIGN O
+ {0x1BED, 0x1BED, prCM, gcMn}, // BATAK VOWEL SIGN KARO O
+ {0x1BEE, 0x1BEE, prCM, gcMc}, // BATAK VOWEL SIGN U
+ {0x1BEF, 0x1BF1, prCM, gcMn}, // [3] BATAK VOWEL SIGN U FOR SIMALUNGUN SA..BATAK CONSONANT SIGN H
+ {0x1BF2, 0x1BF3, prCM, gcMc}, // [2] BATAK PANGOLAT..BATAK PANONGONAN
+ {0x1BFC, 0x1BFF, prAL, gcPo}, // [4] BATAK SYMBOL BINDU NA METEK..BATAK SYMBOL BINDU PANGOLAT
+ {0x1C00, 0x1C23, prAL, gcLo}, // [36] LEPCHA LETTER KA..LEPCHA LETTER A
+ {0x1C24, 0x1C2B, prCM, gcMc}, // [8] LEPCHA SUBJOINED LETTER YA..LEPCHA VOWEL SIGN UU
+ {0x1C2C, 0x1C33, prCM, gcMn}, // [8] LEPCHA VOWEL SIGN E..LEPCHA CONSONANT SIGN T
+ {0x1C34, 0x1C35, prCM, gcMc}, // [2] LEPCHA CONSONANT SIGN NYIN-DO..LEPCHA CONSONANT SIGN KANG
+ {0x1C36, 0x1C37, prCM, gcMn}, // [2] LEPCHA SIGN RAN..LEPCHA SIGN NUKTA
+ {0x1C3B, 0x1C3F, prBA, gcPo}, // [5] LEPCHA PUNCTUATION TA-ROL..LEPCHA PUNCTUATION TSHOOK
+ {0x1C40, 0x1C49, prNU, gcNd}, // [10] LEPCHA DIGIT ZERO..LEPCHA DIGIT NINE
+ {0x1C4D, 0x1C4F, prAL, gcLo}, // [3] LEPCHA LETTER TTA..LEPCHA LETTER DDA
+ {0x1C50, 0x1C59, prNU, gcNd}, // [10] OL CHIKI DIGIT ZERO..OL CHIKI DIGIT NINE
+ {0x1C5A, 0x1C77, prAL, gcLo}, // [30] OL CHIKI LETTER LA..OL CHIKI LETTER OH
+ {0x1C78, 0x1C7D, prAL, gcLm}, // [6] OL CHIKI MU TTUDDAG..OL CHIKI AHAD
+ {0x1C7E, 0x1C7F, prBA, gcPo}, // [2] OL CHIKI PUNCTUATION MUCAAD..OL CHIKI PUNCTUATION DOUBLE MUCAAD
+ {0x1C80, 0x1C88, prAL, gcLl}, // [9] CYRILLIC SMALL LETTER ROUNDED VE..CYRILLIC SMALL LETTER UNBLENDED UK
+ {0x1C90, 0x1CBA, prAL, gcLu}, // [43] GEORGIAN MTAVRULI CAPITAL LETTER AN..GEORGIAN MTAVRULI CAPITAL LETTER AIN
+ {0x1CBD, 0x1CBF, prAL, gcLu}, // [3] GEORGIAN MTAVRULI CAPITAL LETTER AEN..GEORGIAN MTAVRULI CAPITAL LETTER LABIAL SIGN
+ {0x1CC0, 0x1CC7, prAL, gcPo}, // [8] SUNDANESE PUNCTUATION BINDU SURYA..SUNDANESE PUNCTUATION BINDU BA SATANGA
+ {0x1CD0, 0x1CD2, prCM, gcMn}, // [3] VEDIC TONE KARSHANA..VEDIC TONE PRENKHA
+ {0x1CD3, 0x1CD3, prAL, gcPo}, // VEDIC SIGN NIHSHVASA
+ {0x1CD4, 0x1CE0, prCM, gcMn}, // [13] VEDIC SIGN YAJURVEDIC MIDLINE SVARITA..VEDIC TONE RIGVEDIC KASHMIRI INDEPENDENT SVARITA
+ {0x1CE1, 0x1CE1, prCM, gcMc}, // VEDIC TONE ATHARVAVEDIC INDEPENDENT SVARITA
+ {0x1CE2, 0x1CE8, prCM, gcMn}, // [7] VEDIC SIGN VISARGA SVARITA..VEDIC SIGN VISARGA ANUDATTA WITH TAIL
+ {0x1CE9, 0x1CEC, prAL, gcLo}, // [4] VEDIC SIGN ANUSVARA ANTARGOMUKHA..VEDIC SIGN ANUSVARA VAMAGOMUKHA WITH TAIL
+ {0x1CED, 0x1CED, prCM, gcMn}, // VEDIC SIGN TIRYAK
+ {0x1CEE, 0x1CF3, prAL, gcLo}, // [6] VEDIC SIGN HEXIFORM LONG ANUSVARA..VEDIC SIGN ROTATED ARDHAVISARGA
+ {0x1CF4, 0x1CF4, prCM, gcMn}, // VEDIC TONE CANDRA ABOVE
+ {0x1CF5, 0x1CF6, prAL, gcLo}, // [2] VEDIC SIGN JIHVAMULIYA..VEDIC SIGN UPADHMANIYA
+ {0x1CF7, 0x1CF7, prCM, gcMc}, // VEDIC SIGN ATIKRAMA
+ {0x1CF8, 0x1CF9, prCM, gcMn}, // [2] VEDIC TONE RING ABOVE..VEDIC TONE DOUBLE RING ABOVE
+ {0x1CFA, 0x1CFA, prAL, gcLo}, // VEDIC SIGN DOUBLE ANUSVARA ANTARGOMUKHA
+ {0x1D00, 0x1D2B, prAL, gcLl}, // [44] LATIN LETTER SMALL CAPITAL A..CYRILLIC LETTER SMALL CAPITAL EL
+ {0x1D2C, 0x1D6A, prAL, gcLm}, // [63] MODIFIER LETTER CAPITAL A..GREEK SUBSCRIPT SMALL LETTER CHI
+ {0x1D6B, 0x1D77, prAL, gcLl}, // [13] LATIN SMALL LETTER UE..LATIN SMALL LETTER TURNED G
+ {0x1D78, 0x1D78, prAL, gcLm}, // MODIFIER LETTER CYRILLIC EN
+ {0x1D79, 0x1D7F, prAL, gcLl}, // [7] LATIN SMALL LETTER INSULAR G..LATIN SMALL LETTER UPSILON WITH STROKE
+ {0x1D80, 0x1D9A, prAL, gcLl}, // [27] LATIN SMALL LETTER B WITH PALATAL HOOK..LATIN SMALL LETTER EZH WITH RETROFLEX HOOK
+ {0x1D9B, 0x1DBF, prAL, gcLm}, // [37] MODIFIER LETTER SMALL TURNED ALPHA..MODIFIER LETTER SMALL THETA
+ {0x1DC0, 0x1DCC, prCM, gcMn}, // [13] COMBINING DOTTED GRAVE ACCENT..COMBINING MACRON-BREVE
+ {0x1DCD, 0x1DCD, prGL, gcMn}, // COMBINING DOUBLE CIRCUMFLEX ABOVE
+ {0x1DCE, 0x1DFB, prCM, gcMn}, // [46] COMBINING OGONEK ABOVE..COMBINING DELETION MARK
+ {0x1DFC, 0x1DFC, prGL, gcMn}, // COMBINING DOUBLE INVERTED BREVE BELOW
+ {0x1DFD, 0x1DFF, prCM, gcMn}, // [3] COMBINING ALMOST EQUAL TO BELOW..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW
+ {0x1E00, 0x1EFF, prAL, gcLC}, // [256] LATIN CAPITAL LETTER A WITH RING BELOW..LATIN SMALL LETTER Y WITH LOOP
+ {0x1F00, 0x1F15, prAL, gcLC}, // [22] GREEK SMALL LETTER ALPHA WITH PSILI..GREEK SMALL LETTER EPSILON WITH DASIA AND OXIA
+ {0x1F18, 0x1F1D, prAL, gcLu}, // [6] GREEK CAPITAL LETTER EPSILON WITH PSILI..GREEK CAPITAL LETTER EPSILON WITH DASIA AND OXIA
+ {0x1F20, 0x1F45, prAL, gcLC}, // [38] GREEK SMALL LETTER ETA WITH PSILI..GREEK SMALL LETTER OMICRON WITH DASIA AND OXIA
+ {0x1F48, 0x1F4D, prAL, gcLu}, // [6] GREEK CAPITAL LETTER OMICRON WITH PSILI..GREEK CAPITAL LETTER OMICRON WITH DASIA AND OXIA
+ {0x1F50, 0x1F57, prAL, gcLl}, // [8] GREEK SMALL LETTER UPSILON WITH PSILI..GREEK SMALL LETTER UPSILON WITH DASIA AND PERISPOMENI
+ {0x1F59, 0x1F59, prAL, gcLu}, // GREEK CAPITAL LETTER UPSILON WITH DASIA
+ {0x1F5B, 0x1F5B, prAL, gcLu}, // GREEK CAPITAL LETTER UPSILON WITH DASIA AND VARIA
+ {0x1F5D, 0x1F5D, prAL, gcLu}, // GREEK CAPITAL LETTER UPSILON WITH DASIA AND OXIA
+ {0x1F5F, 0x1F7D, prAL, gcLC}, // [31] GREEK CAPITAL LETTER UPSILON WITH DASIA AND PERISPOMENI..GREEK SMALL LETTER OMEGA WITH OXIA
+ {0x1F80, 0x1FB4, prAL, gcLC}, // [53] GREEK SMALL LETTER ALPHA WITH PSILI AND YPOGEGRAMMENI..GREEK SMALL LETTER ALPHA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FB6, 0x1FBC, prAL, gcLC}, // [7] GREEK SMALL LETTER ALPHA WITH PERISPOMENI..GREEK CAPITAL LETTER ALPHA WITH PROSGEGRAMMENI
+ {0x1FBD, 0x1FBD, prAL, gcSk}, // GREEK KORONIS
+ {0x1FBE, 0x1FBE, prAL, gcLl}, // GREEK PROSGEGRAMMENI
+ {0x1FBF, 0x1FC1, prAL, gcSk}, // [3] GREEK PSILI..GREEK DIALYTIKA AND PERISPOMENI
+ {0x1FC2, 0x1FC4, prAL, gcLl}, // [3] GREEK SMALL LETTER ETA WITH VARIA AND YPOGEGRAMMENI..GREEK SMALL LETTER ETA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FC6, 0x1FCC, prAL, gcLC}, // [7] GREEK SMALL LETTER ETA WITH PERISPOMENI..GREEK CAPITAL LETTER ETA WITH PROSGEGRAMMENI
+ {0x1FCD, 0x1FCF, prAL, gcSk}, // [3] GREEK PSILI AND VARIA..GREEK PSILI AND PERISPOMENI
+ {0x1FD0, 0x1FD3, prAL, gcLl}, // [4] GREEK SMALL LETTER IOTA WITH VRACHY..GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA
+ {0x1FD6, 0x1FDB, prAL, gcLC}, // [6] GREEK SMALL LETTER IOTA WITH PERISPOMENI..GREEK CAPITAL LETTER IOTA WITH OXIA
+ {0x1FDD, 0x1FDF, prAL, gcSk}, // [3] GREEK DASIA AND VARIA..GREEK DASIA AND PERISPOMENI
+ {0x1FE0, 0x1FEC, prAL, gcLC}, // [13] GREEK SMALL LETTER UPSILON WITH VRACHY..GREEK CAPITAL LETTER RHO WITH DASIA
+ {0x1FED, 0x1FEF, prAL, gcSk}, // [3] GREEK DIALYTIKA AND VARIA..GREEK VARIA
+ {0x1FF2, 0x1FF4, prAL, gcLl}, // [3] GREEK SMALL LETTER OMEGA WITH VARIA AND YPOGEGRAMMENI..GREEK SMALL LETTER OMEGA WITH OXIA AND YPOGEGRAMMENI
+ {0x1FF6, 0x1FFC, prAL, gcLC}, // [7] GREEK SMALL LETTER OMEGA WITH PERISPOMENI..GREEK CAPITAL LETTER OMEGA WITH PROSGEGRAMMENI
+ {0x1FFD, 0x1FFD, prBB, gcSk}, // GREEK OXIA
+ {0x1FFE, 0x1FFE, prAL, gcSk}, // GREEK DASIA
+ {0x2000, 0x2006, prBA, gcZs}, // [7] EN QUAD..SIX-PER-EM SPACE
+ {0x2007, 0x2007, prGL, gcZs}, // FIGURE SPACE
+ {0x2008, 0x200A, prBA, gcZs}, // [3] PUNCTUATION SPACE..HAIR SPACE
+ {0x200B, 0x200B, prZW, gcCf}, // ZERO WIDTH SPACE
+ {0x200C, 0x200C, prCM, gcCf}, // ZERO WIDTH NON-JOINER
+ {0x200D, 0x200D, prZWJ, gcCf}, // ZERO WIDTH JOINER
+ {0x200E, 0x200F, prCM, gcCf}, // [2] LEFT-TO-RIGHT MARK..RIGHT-TO-LEFT MARK
+ {0x2010, 0x2010, prBA, gcPd}, // HYPHEN
+ {0x2011, 0x2011, prGL, gcPd}, // NON-BREAKING HYPHEN
+ {0x2012, 0x2013, prBA, gcPd}, // [2] FIGURE DASH..EN DASH
+ {0x2014, 0x2014, prB2, gcPd}, // EM DASH
+ {0x2015, 0x2015, prAI, gcPd}, // HORIZONTAL BAR
+ {0x2016, 0x2016, prAI, gcPo}, // DOUBLE VERTICAL LINE
+ {0x2017, 0x2017, prAL, gcPo}, // DOUBLE LOW LINE
+ {0x2018, 0x2018, prQU, gcPi}, // LEFT SINGLE QUOTATION MARK
+ {0x2019, 0x2019, prQU, gcPf}, // RIGHT SINGLE QUOTATION MARK
+ {0x201A, 0x201A, prOP, gcPs}, // SINGLE LOW-9 QUOTATION MARK
+ {0x201B, 0x201C, prQU, gcPi}, // [2] SINGLE HIGH-REVERSED-9 QUOTATION MARK..LEFT DOUBLE QUOTATION MARK
+ {0x201D, 0x201D, prQU, gcPf}, // RIGHT DOUBLE QUOTATION MARK
+ {0x201E, 0x201E, prOP, gcPs}, // DOUBLE LOW-9 QUOTATION MARK
+ {0x201F, 0x201F, prQU, gcPi}, // DOUBLE HIGH-REVERSED-9 QUOTATION MARK
+ {0x2020, 0x2021, prAI, gcPo}, // [2] DAGGER..DOUBLE DAGGER
+ {0x2022, 0x2023, prAL, gcPo}, // [2] BULLET..TRIANGULAR BULLET
+ {0x2024, 0x2026, prIN, gcPo}, // [3] ONE DOT LEADER..HORIZONTAL ELLIPSIS
+ {0x2027, 0x2027, prBA, gcPo}, // HYPHENATION POINT
+ {0x2028, 0x2028, prBK, gcZl}, // LINE SEPARATOR
+ {0x2029, 0x2029, prBK, gcZp}, // PARAGRAPH SEPARATOR
+ {0x202A, 0x202E, prCM, gcCf}, // [5] LEFT-TO-RIGHT EMBEDDING..RIGHT-TO-LEFT OVERRIDE
+ {0x202F, 0x202F, prGL, gcZs}, // NARROW NO-BREAK SPACE
+ {0x2030, 0x2037, prPO, gcPo}, // [8] PER MILLE SIGN..REVERSED TRIPLE PRIME
+ {0x2038, 0x2038, prAL, gcPo}, // CARET
+ {0x2039, 0x2039, prQU, gcPi}, // SINGLE LEFT-POINTING ANGLE QUOTATION MARK
+ {0x203A, 0x203A, prQU, gcPf}, // SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
+ {0x203B, 0x203B, prAI, gcPo}, // REFERENCE MARK
+ {0x203C, 0x203D, prNS, gcPo}, // [2] DOUBLE EXCLAMATION MARK..INTERROBANG
+ {0x203E, 0x203E, prAL, gcPo}, // OVERLINE
+ {0x203F, 0x2040, prAL, gcPc}, // [2] UNDERTIE..CHARACTER TIE
+ {0x2041, 0x2043, prAL, gcPo}, // [3] CARET INSERTION POINT..HYPHEN BULLET
+ {0x2044, 0x2044, prIS, gcSm}, // FRACTION SLASH
+ {0x2045, 0x2045, prOP, gcPs}, // LEFT SQUARE BRACKET WITH QUILL
+ {0x2046, 0x2046, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH QUILL
+ {0x2047, 0x2049, prNS, gcPo}, // [3] DOUBLE QUESTION MARK..EXCLAMATION QUESTION MARK
+ {0x204A, 0x2051, prAL, gcPo}, // [8] TIRONIAN SIGN ET..TWO ASTERISKS ALIGNED VERTICALLY
+ {0x2052, 0x2052, prAL, gcSm}, // COMMERCIAL MINUS SIGN
+ {0x2053, 0x2053, prAL, gcPo}, // SWUNG DASH
+ {0x2054, 0x2054, prAL, gcPc}, // INVERTED UNDERTIE
+ {0x2055, 0x2055, prAL, gcPo}, // FLOWER PUNCTUATION MARK
+ {0x2056, 0x2056, prBA, gcPo}, // THREE DOT PUNCTUATION
+ {0x2057, 0x2057, prPO, gcPo}, // QUADRUPLE PRIME
+ {0x2058, 0x205B, prBA, gcPo}, // [4] FOUR DOT PUNCTUATION..FOUR DOT MARK
+ {0x205C, 0x205C, prAL, gcPo}, // DOTTED CROSS
+ {0x205D, 0x205E, prBA, gcPo}, // [2] TRICOLON..VERTICAL FOUR DOTS
+ {0x205F, 0x205F, prBA, gcZs}, // MEDIUM MATHEMATICAL SPACE
+ {0x2060, 0x2060, prWJ, gcCf}, // WORD JOINER
+ {0x2061, 0x2064, prAL, gcCf}, // [4] FUNCTION APPLICATION..INVISIBLE PLUS
+ {0x2066, 0x206F, prCM, gcCf}, // [10] LEFT-TO-RIGHT ISOLATE..NOMINAL DIGIT SHAPES
+ {0x2070, 0x2070, prAL, gcNo}, // SUPERSCRIPT ZERO
+ {0x2071, 0x2071, prAL, gcLm}, // SUPERSCRIPT LATIN SMALL LETTER I
+ {0x2074, 0x2074, prAI, gcNo}, // SUPERSCRIPT FOUR
+ {0x2075, 0x2079, prAL, gcNo}, // [5] SUPERSCRIPT FIVE..SUPERSCRIPT NINE
+ {0x207A, 0x207C, prAL, gcSm}, // [3] SUPERSCRIPT PLUS SIGN..SUPERSCRIPT EQUALS SIGN
+ {0x207D, 0x207D, prOP, gcPs}, // SUPERSCRIPT LEFT PARENTHESIS
+ {0x207E, 0x207E, prCL, gcPe}, // SUPERSCRIPT RIGHT PARENTHESIS
+ {0x207F, 0x207F, prAI, gcLm}, // SUPERSCRIPT LATIN SMALL LETTER N
+ {0x2080, 0x2080, prAL, gcNo}, // SUBSCRIPT ZERO
+ {0x2081, 0x2084, prAI, gcNo}, // [4] SUBSCRIPT ONE..SUBSCRIPT FOUR
+ {0x2085, 0x2089, prAL, gcNo}, // [5] SUBSCRIPT FIVE..SUBSCRIPT NINE
+ {0x208A, 0x208C, prAL, gcSm}, // [3] SUBSCRIPT PLUS SIGN..SUBSCRIPT EQUALS SIGN
+ {0x208D, 0x208D, prOP, gcPs}, // SUBSCRIPT LEFT PARENTHESIS
+ {0x208E, 0x208E, prCL, gcPe}, // SUBSCRIPT RIGHT PARENTHESIS
+ {0x2090, 0x209C, prAL, gcLm}, // [13] LATIN SUBSCRIPT SMALL LETTER A..LATIN SUBSCRIPT SMALL LETTER T
+ {0x20A0, 0x20A6, prPR, gcSc}, // [7] EURO-CURRENCY SIGN..NAIRA SIGN
+ {0x20A7, 0x20A7, prPO, gcSc}, // PESETA SIGN
+ {0x20A8, 0x20B5, prPR, gcSc}, // [14] RUPEE SIGN..CEDI SIGN
+ {0x20B6, 0x20B6, prPO, gcSc}, // LIVRE TOURNOIS SIGN
+ {0x20B7, 0x20BA, prPR, gcSc}, // [4] SPESMILO SIGN..TURKISH LIRA SIGN
+ {0x20BB, 0x20BB, prPO, gcSc}, // NORDIC MARK SIGN
+ {0x20BC, 0x20BD, prPR, gcSc}, // [2] MANAT SIGN..RUBLE SIGN
+ {0x20BE, 0x20BE, prPO, gcSc}, // LARI SIGN
+ {0x20BF, 0x20BF, prPR, gcSc}, // BITCOIN SIGN
+ {0x20C0, 0x20C0, prPO, gcSc}, // SOM SIGN
+ {0x20C1, 0x20CF, prPR, gcCn}, // [15] ..
+ {0x20D0, 0x20DC, prCM, gcMn}, // [13] COMBINING LEFT HARPOON ABOVE..COMBINING FOUR DOTS ABOVE
+ {0x20DD, 0x20E0, prCM, gcMe}, // [4] COMBINING ENCLOSING CIRCLE..COMBINING ENCLOSING CIRCLE BACKSLASH
+ {0x20E1, 0x20E1, prCM, gcMn}, // COMBINING LEFT RIGHT ARROW ABOVE
+ {0x20E2, 0x20E4, prCM, gcMe}, // [3] COMBINING ENCLOSING SCREEN..COMBINING ENCLOSING UPWARD POINTING TRIANGLE
+ {0x20E5, 0x20F0, prCM, gcMn}, // [12] COMBINING REVERSE SOLIDUS OVERLAY..COMBINING ASTERISK ABOVE
+ {0x2100, 0x2101, prAL, gcSo}, // [2] ACCOUNT OF..ADDRESSED TO THE SUBJECT
+ {0x2102, 0x2102, prAL, gcLu}, // DOUBLE-STRUCK CAPITAL C
+ {0x2103, 0x2103, prPO, gcSo}, // DEGREE CELSIUS
+ {0x2104, 0x2104, prAL, gcSo}, // CENTRE LINE SYMBOL
+ {0x2105, 0x2105, prAI, gcSo}, // CARE OF
+ {0x2106, 0x2106, prAL, gcSo}, // CADA UNA
+ {0x2107, 0x2107, prAL, gcLu}, // EULER CONSTANT
+ {0x2108, 0x2108, prAL, gcSo}, // SCRUPLE
+ {0x2109, 0x2109, prPO, gcSo}, // DEGREE FAHRENHEIT
+ {0x210A, 0x2112, prAL, gcLC}, // [9] SCRIPT SMALL G..SCRIPT CAPITAL L
+ {0x2113, 0x2113, prAI, gcLl}, // SCRIPT SMALL L
+ {0x2114, 0x2114, prAL, gcSo}, // L B BAR SYMBOL
+ {0x2115, 0x2115, prAL, gcLu}, // DOUBLE-STRUCK CAPITAL N
+ {0x2116, 0x2116, prPR, gcSo}, // NUMERO SIGN
+ {0x2117, 0x2117, prAL, gcSo}, // SOUND RECORDING COPYRIGHT
+ {0x2118, 0x2118, prAL, gcSm}, // SCRIPT CAPITAL P
+ {0x2119, 0x211D, prAL, gcLu}, // [5] DOUBLE-STRUCK CAPITAL P..DOUBLE-STRUCK CAPITAL R
+ {0x211E, 0x2120, prAL, gcSo}, // [3] PRESCRIPTION TAKE..SERVICE MARK
+ {0x2121, 0x2122, prAI, gcSo}, // [2] TELEPHONE SIGN..TRADE MARK SIGN
+ {0x2123, 0x2123, prAL, gcSo}, // VERSICLE
+ {0x2124, 0x2124, prAL, gcLu}, // DOUBLE-STRUCK CAPITAL Z
+ {0x2125, 0x2125, prAL, gcSo}, // OUNCE SIGN
+ {0x2126, 0x2126, prAL, gcLu}, // OHM SIGN
+ {0x2127, 0x2127, prAL, gcSo}, // INVERTED OHM SIGN
+ {0x2128, 0x2128, prAL, gcLu}, // BLACK-LETTER CAPITAL Z
+ {0x2129, 0x2129, prAL, gcSo}, // TURNED GREEK SMALL LETTER IOTA
+ {0x212A, 0x212A, prAL, gcLu}, // KELVIN SIGN
+ {0x212B, 0x212B, prAI, gcLu}, // ANGSTROM SIGN
+ {0x212C, 0x212D, prAL, gcLu}, // [2] SCRIPT CAPITAL B..BLACK-LETTER CAPITAL C
+ {0x212E, 0x212E, prAL, gcSo}, // ESTIMATED SYMBOL
+ {0x212F, 0x2134, prAL, gcLC}, // [6] SCRIPT SMALL E..SCRIPT SMALL O
+ {0x2135, 0x2138, prAL, gcLo}, // [4] ALEF SYMBOL..DALET SYMBOL
+ {0x2139, 0x2139, prAL, gcLl}, // INFORMATION SOURCE
+ {0x213A, 0x213B, prAL, gcSo}, // [2] ROTATED CAPITAL Q..FACSIMILE SIGN
+ {0x213C, 0x213F, prAL, gcLC}, // [4] DOUBLE-STRUCK SMALL PI..DOUBLE-STRUCK CAPITAL PI
+ {0x2140, 0x2144, prAL, gcSm}, // [5] DOUBLE-STRUCK N-ARY SUMMATION..TURNED SANS-SERIF CAPITAL Y
+ {0x2145, 0x2149, prAL, gcLC}, // [5] DOUBLE-STRUCK ITALIC CAPITAL D..DOUBLE-STRUCK ITALIC SMALL J
+ {0x214A, 0x214A, prAL, gcSo}, // PROPERTY LINE
+ {0x214B, 0x214B, prAL, gcSm}, // TURNED AMPERSAND
+ {0x214C, 0x214D, prAL, gcSo}, // [2] PER SIGN..AKTIESELSKAB
+ {0x214E, 0x214E, prAL, gcLl}, // TURNED SMALL F
+ {0x214F, 0x214F, prAL, gcSo}, // SYMBOL FOR SAMARITAN SOURCE
+ {0x2150, 0x2153, prAL, gcNo}, // [4] VULGAR FRACTION ONE SEVENTH..VULGAR FRACTION ONE THIRD
+ {0x2154, 0x2155, prAI, gcNo}, // [2] VULGAR FRACTION TWO THIRDS..VULGAR FRACTION ONE FIFTH
+ {0x2156, 0x215A, prAL, gcNo}, // [5] VULGAR FRACTION TWO FIFTHS..VULGAR FRACTION FIVE SIXTHS
+ {0x215B, 0x215B, prAI, gcNo}, // VULGAR FRACTION ONE EIGHTH
+ {0x215C, 0x215D, prAL, gcNo}, // [2] VULGAR FRACTION THREE EIGHTHS..VULGAR FRACTION FIVE EIGHTHS
+ {0x215E, 0x215E, prAI, gcNo}, // VULGAR FRACTION SEVEN EIGHTHS
+ {0x215F, 0x215F, prAL, gcNo}, // FRACTION NUMERATOR ONE
+ {0x2160, 0x216B, prAI, gcNl}, // [12] ROMAN NUMERAL ONE..ROMAN NUMERAL TWELVE
+ {0x216C, 0x216F, prAL, gcNl}, // [4] ROMAN NUMERAL FIFTY..ROMAN NUMERAL ONE THOUSAND
+ {0x2170, 0x2179, prAI, gcNl}, // [10] SMALL ROMAN NUMERAL ONE..SMALL ROMAN NUMERAL TEN
+ {0x217A, 0x2182, prAL, gcNl}, // [9] SMALL ROMAN NUMERAL ELEVEN..ROMAN NUMERAL TEN THOUSAND
+ {0x2183, 0x2184, prAL, gcLC}, // [2] ROMAN NUMERAL REVERSED ONE HUNDRED..LATIN SMALL LETTER REVERSED C
+ {0x2185, 0x2188, prAL, gcNl}, // [4] ROMAN NUMERAL SIX LATE FORM..ROMAN NUMERAL ONE HUNDRED THOUSAND
+ {0x2189, 0x2189, prAI, gcNo}, // VULGAR FRACTION ZERO THIRDS
+ {0x218A, 0x218B, prAL, gcSo}, // [2] TURNED DIGIT TWO..TURNED DIGIT THREE
+ {0x2190, 0x2194, prAI, gcSm}, // [5] LEFTWARDS ARROW..LEFT RIGHT ARROW
+ {0x2195, 0x2199, prAI, gcSo}, // [5] UP DOWN ARROW..SOUTH WEST ARROW
+ {0x219A, 0x219B, prAL, gcSm}, // [2] LEFTWARDS ARROW WITH STROKE..RIGHTWARDS ARROW WITH STROKE
+ {0x219C, 0x219F, prAL, gcSo}, // [4] LEFTWARDS WAVE ARROW..UPWARDS TWO HEADED ARROW
+ {0x21A0, 0x21A0, prAL, gcSm}, // RIGHTWARDS TWO HEADED ARROW
+ {0x21A1, 0x21A2, prAL, gcSo}, // [2] DOWNWARDS TWO HEADED ARROW..LEFTWARDS ARROW WITH TAIL
+ {0x21A3, 0x21A3, prAL, gcSm}, // RIGHTWARDS ARROW WITH TAIL
+ {0x21A4, 0x21A5, prAL, gcSo}, // [2] LEFTWARDS ARROW FROM BAR..UPWARDS ARROW FROM BAR
+ {0x21A6, 0x21A6, prAL, gcSm}, // RIGHTWARDS ARROW FROM BAR
+ {0x21A7, 0x21AD, prAL, gcSo}, // [7] DOWNWARDS ARROW FROM BAR..LEFT RIGHT WAVE ARROW
+ {0x21AE, 0x21AE, prAL, gcSm}, // LEFT RIGHT ARROW WITH STROKE
+ {0x21AF, 0x21CD, prAL, gcSo}, // [31] DOWNWARDS ZIGZAG ARROW..LEFTWARDS DOUBLE ARROW WITH STROKE
+ {0x21CE, 0x21CF, prAL, gcSm}, // [2] LEFT RIGHT DOUBLE ARROW WITH STROKE..RIGHTWARDS DOUBLE ARROW WITH STROKE
+ {0x21D0, 0x21D1, prAL, gcSo}, // [2] LEFTWARDS DOUBLE ARROW..UPWARDS DOUBLE ARROW
+ {0x21D2, 0x21D2, prAI, gcSm}, // RIGHTWARDS DOUBLE ARROW
+ {0x21D3, 0x21D3, prAL, gcSo}, // DOWNWARDS DOUBLE ARROW
+ {0x21D4, 0x21D4, prAI, gcSm}, // LEFT RIGHT DOUBLE ARROW
+ {0x21D5, 0x21F3, prAL, gcSo}, // [31] UP DOWN DOUBLE ARROW..UP DOWN WHITE ARROW
+ {0x21F4, 0x21FF, prAL, gcSm}, // [12] RIGHT ARROW WITH SMALL CIRCLE..LEFT RIGHT OPEN-HEADED ARROW
+ {0x2200, 0x2200, prAI, gcSm}, // FOR ALL
+ {0x2201, 0x2201, prAL, gcSm}, // COMPLEMENT
+ {0x2202, 0x2203, prAI, gcSm}, // [2] PARTIAL DIFFERENTIAL..THERE EXISTS
+ {0x2204, 0x2206, prAL, gcSm}, // [3] THERE DOES NOT EXIST..INCREMENT
+ {0x2207, 0x2208, prAI, gcSm}, // [2] NABLA..ELEMENT OF
+ {0x2209, 0x220A, prAL, gcSm}, // [2] NOT AN ELEMENT OF..SMALL ELEMENT OF
+ {0x220B, 0x220B, prAI, gcSm}, // CONTAINS AS MEMBER
+ {0x220C, 0x220E, prAL, gcSm}, // [3] DOES NOT CONTAIN AS MEMBER..END OF PROOF
+ {0x220F, 0x220F, prAI, gcSm}, // N-ARY PRODUCT
+ {0x2210, 0x2210, prAL, gcSm}, // N-ARY COPRODUCT
+ {0x2211, 0x2211, prAI, gcSm}, // N-ARY SUMMATION
+ {0x2212, 0x2213, prPR, gcSm}, // [2] MINUS SIGN..MINUS-OR-PLUS SIGN
+ {0x2214, 0x2214, prAL, gcSm}, // DOT PLUS
+ {0x2215, 0x2215, prAI, gcSm}, // DIVISION SLASH
+ {0x2216, 0x2219, prAL, gcSm}, // [4] SET MINUS..BULLET OPERATOR
+ {0x221A, 0x221A, prAI, gcSm}, // SQUARE ROOT
+ {0x221B, 0x221C, prAL, gcSm}, // [2] CUBE ROOT..FOURTH ROOT
+ {0x221D, 0x2220, prAI, gcSm}, // [4] PROPORTIONAL TO..ANGLE
+ {0x2221, 0x2222, prAL, gcSm}, // [2] MEASURED ANGLE..SPHERICAL ANGLE
+ {0x2223, 0x2223, prAI, gcSm}, // DIVIDES
+ {0x2224, 0x2224, prAL, gcSm}, // DOES NOT DIVIDE
+ {0x2225, 0x2225, prAI, gcSm}, // PARALLEL TO
+ {0x2226, 0x2226, prAL, gcSm}, // NOT PARALLEL TO
+ {0x2227, 0x222C, prAI, gcSm}, // [6] LOGICAL AND..DOUBLE INTEGRAL
+ {0x222D, 0x222D, prAL, gcSm}, // TRIPLE INTEGRAL
+ {0x222E, 0x222E, prAI, gcSm}, // CONTOUR INTEGRAL
+ {0x222F, 0x2233, prAL, gcSm}, // [5] SURFACE INTEGRAL..ANTICLOCKWISE CONTOUR INTEGRAL
+ {0x2234, 0x2237, prAI, gcSm}, // [4] THEREFORE..PROPORTION
+ {0x2238, 0x223B, prAL, gcSm}, // [4] DOT MINUS..HOMOTHETIC
+ {0x223C, 0x223D, prAI, gcSm}, // [2] TILDE OPERATOR..REVERSED TILDE
+ {0x223E, 0x2247, prAL, gcSm}, // [10] INVERTED LAZY S..NEITHER APPROXIMATELY NOR ACTUALLY EQUAL TO
+ {0x2248, 0x2248, prAI, gcSm}, // ALMOST EQUAL TO
+ {0x2249, 0x224B, prAL, gcSm}, // [3] NOT ALMOST EQUAL TO..TRIPLE TILDE
+ {0x224C, 0x224C, prAI, gcSm}, // ALL EQUAL TO
+ {0x224D, 0x2251, prAL, gcSm}, // [5] EQUIVALENT TO..GEOMETRICALLY EQUAL TO
+ {0x2252, 0x2252, prAI, gcSm}, // APPROXIMATELY EQUAL TO OR THE IMAGE OF
+ {0x2253, 0x225F, prAL, gcSm}, // [13] IMAGE OF OR APPROXIMATELY EQUAL TO..QUESTIONED EQUAL TO
+ {0x2260, 0x2261, prAI, gcSm}, // [2] NOT EQUAL TO..IDENTICAL TO
+ {0x2262, 0x2263, prAL, gcSm}, // [2] NOT IDENTICAL TO..STRICTLY EQUIVALENT TO
+ {0x2264, 0x2267, prAI, gcSm}, // [4] LESS-THAN OR EQUAL TO..GREATER-THAN OVER EQUAL TO
+ {0x2268, 0x2269, prAL, gcSm}, // [2] LESS-THAN BUT NOT EQUAL TO..GREATER-THAN BUT NOT EQUAL TO
+ {0x226A, 0x226B, prAI, gcSm}, // [2] MUCH LESS-THAN..MUCH GREATER-THAN
+ {0x226C, 0x226D, prAL, gcSm}, // [2] BETWEEN..NOT EQUIVALENT TO
+ {0x226E, 0x226F, prAI, gcSm}, // [2] NOT LESS-THAN..NOT GREATER-THAN
+ {0x2270, 0x2281, prAL, gcSm}, // [18] NEITHER LESS-THAN NOR EQUAL TO..DOES NOT SUCCEED
+ {0x2282, 0x2283, prAI, gcSm}, // [2] SUBSET OF..SUPERSET OF
+ {0x2284, 0x2285, prAL, gcSm}, // [2] NOT A SUBSET OF..NOT A SUPERSET OF
+ {0x2286, 0x2287, prAI, gcSm}, // [2] SUBSET OF OR EQUAL TO..SUPERSET OF OR EQUAL TO
+ {0x2288, 0x2294, prAL, gcSm}, // [13] NEITHER A SUBSET OF NOR EQUAL TO..SQUARE CUP
+ {0x2295, 0x2295, prAI, gcSm}, // CIRCLED PLUS
+ {0x2296, 0x2298, prAL, gcSm}, // [3] CIRCLED MINUS..CIRCLED DIVISION SLASH
+ {0x2299, 0x2299, prAI, gcSm}, // CIRCLED DOT OPERATOR
+ {0x229A, 0x22A4, prAL, gcSm}, // [11] CIRCLED RING OPERATOR..DOWN TACK
+ {0x22A5, 0x22A5, prAI, gcSm}, // UP TACK
+ {0x22A6, 0x22BE, prAL, gcSm}, // [25] ASSERTION..RIGHT ANGLE WITH ARC
+ {0x22BF, 0x22BF, prAI, gcSm}, // RIGHT TRIANGLE
+ {0x22C0, 0x22EE, prAL, gcSm}, // [47] N-ARY LOGICAL AND..VERTICAL ELLIPSIS
+ {0x22EF, 0x22EF, prIN, gcSm}, // MIDLINE HORIZONTAL ELLIPSIS
+ {0x22F0, 0x22FF, prAL, gcSm}, // [16] UP RIGHT DIAGONAL ELLIPSIS..Z NOTATION BAG MEMBERSHIP
+ {0x2300, 0x2307, prAL, gcSo}, // [8] DIAMETER SIGN..WAVY LINE
+ {0x2308, 0x2308, prOP, gcPs}, // LEFT CEILING
+ {0x2309, 0x2309, prCL, gcPe}, // RIGHT CEILING
+ {0x230A, 0x230A, prOP, gcPs}, // LEFT FLOOR
+ {0x230B, 0x230B, prCL, gcPe}, // RIGHT FLOOR
+ {0x230C, 0x2311, prAL, gcSo}, // [6] BOTTOM RIGHT CROP..SQUARE LOZENGE
+ {0x2312, 0x2312, prAI, gcSo}, // ARC
+ {0x2313, 0x2319, prAL, gcSo}, // [7] SEGMENT..TURNED NOT SIGN
+ {0x231A, 0x231B, prID, gcSo}, // [2] WATCH..HOURGLASS
+ {0x231C, 0x231F, prAL, gcSo}, // [4] TOP LEFT CORNER..BOTTOM RIGHT CORNER
+ {0x2320, 0x2321, prAL, gcSm}, // [2] TOP HALF INTEGRAL..BOTTOM HALF INTEGRAL
+ {0x2322, 0x2328, prAL, gcSo}, // [7] FROWN..KEYBOARD
+ {0x2329, 0x2329, prOP, gcPs}, // LEFT-POINTING ANGLE BRACKET
+ {0x232A, 0x232A, prCL, gcPe}, // RIGHT-POINTING ANGLE BRACKET
+ {0x232B, 0x237B, prAL, gcSo}, // [81] ERASE TO THE LEFT..NOT CHECK MARK
+ {0x237C, 0x237C, prAL, gcSm}, // RIGHT ANGLE WITH DOWNWARDS ZIGZAG ARROW
+ {0x237D, 0x239A, prAL, gcSo}, // [30] SHOULDERED OPEN BOX..CLEAR SCREEN SYMBOL
+ {0x239B, 0x23B3, prAL, gcSm}, // [25] LEFT PARENTHESIS UPPER HOOK..SUMMATION BOTTOM
+ {0x23B4, 0x23DB, prAL, gcSo}, // [40] TOP SQUARE BRACKET..FUSE
+ {0x23DC, 0x23E1, prAL, gcSm}, // [6] TOP PARENTHESIS..BOTTOM TORTOISE SHELL BRACKET
+ {0x23E2, 0x23EF, prAL, gcSo}, // [14] WHITE TRAPEZIUM..BLACK RIGHT-POINTING TRIANGLE WITH DOUBLE VERTICAL BAR
+ {0x23F0, 0x23F3, prID, gcSo}, // [4] ALARM CLOCK..HOURGLASS WITH FLOWING SAND
+ {0x23F4, 0x23FF, prAL, gcSo}, // [12] BLACK MEDIUM LEFT-POINTING TRIANGLE..OBSERVER EYE SYMBOL
+ {0x2400, 0x2426, prAL, gcSo}, // [39] SYMBOL FOR NULL..SYMBOL FOR SUBSTITUTE FORM TWO
+ {0x2440, 0x244A, prAL, gcSo}, // [11] OCR HOOK..OCR DOUBLE BACKSLASH
+ {0x2460, 0x249B, prAI, gcNo}, // [60] CIRCLED DIGIT ONE..NUMBER TWENTY FULL STOP
+ {0x249C, 0x24E9, prAI, gcSo}, // [78] PARENTHESIZED LATIN SMALL LETTER A..CIRCLED LATIN SMALL LETTER Z
+ {0x24EA, 0x24FE, prAI, gcNo}, // [21] CIRCLED DIGIT ZERO..DOUBLE CIRCLED NUMBER TEN
+ {0x24FF, 0x24FF, prAL, gcNo}, // NEGATIVE CIRCLED DIGIT ZERO
+ {0x2500, 0x254B, prAI, gcSo}, // [76] BOX DRAWINGS LIGHT HORIZONTAL..BOX DRAWINGS HEAVY VERTICAL AND HORIZONTAL
+ {0x254C, 0x254F, prAL, gcSo}, // [4] BOX DRAWINGS LIGHT DOUBLE DASH HORIZONTAL..BOX DRAWINGS HEAVY DOUBLE DASH VERTICAL
+ {0x2550, 0x2574, prAI, gcSo}, // [37] BOX DRAWINGS DOUBLE HORIZONTAL..BOX DRAWINGS LIGHT LEFT
+ {0x2575, 0x257F, prAL, gcSo}, // [11] BOX DRAWINGS LIGHT UP..BOX DRAWINGS HEAVY UP AND LIGHT DOWN
+ {0x2580, 0x258F, prAI, gcSo}, // [16] UPPER HALF BLOCK..LEFT ONE EIGHTH BLOCK
+ {0x2590, 0x2591, prAL, gcSo}, // [2] RIGHT HALF BLOCK..LIGHT SHADE
+ {0x2592, 0x2595, prAI, gcSo}, // [4] MEDIUM SHADE..RIGHT ONE EIGHTH BLOCK
+ {0x2596, 0x259F, prAL, gcSo}, // [10] QUADRANT LOWER LEFT..QUADRANT UPPER RIGHT AND LOWER LEFT AND LOWER RIGHT
+ {0x25A0, 0x25A1, prAI, gcSo}, // [2] BLACK SQUARE..WHITE SQUARE
+ {0x25A2, 0x25A2, prAL, gcSo}, // WHITE SQUARE WITH ROUNDED CORNERS
+ {0x25A3, 0x25A9, prAI, gcSo}, // [7] WHITE SQUARE CONTAINING BLACK SMALL SQUARE..SQUARE WITH DIAGONAL CROSSHATCH FILL
+ {0x25AA, 0x25B1, prAL, gcSo}, // [8] BLACK SMALL SQUARE..WHITE PARALLELOGRAM
+ {0x25B2, 0x25B3, prAI, gcSo}, // [2] BLACK UP-POINTING TRIANGLE..WHITE UP-POINTING TRIANGLE
+ {0x25B4, 0x25B5, prAL, gcSo}, // [2] BLACK UP-POINTING SMALL TRIANGLE..WHITE UP-POINTING SMALL TRIANGLE
+ {0x25B6, 0x25B6, prAI, gcSo}, // BLACK RIGHT-POINTING TRIANGLE
+ {0x25B7, 0x25B7, prAI, gcSm}, // WHITE RIGHT-POINTING TRIANGLE
+ {0x25B8, 0x25BB, prAL, gcSo}, // [4] BLACK RIGHT-POINTING SMALL TRIANGLE..WHITE RIGHT-POINTING POINTER
+ {0x25BC, 0x25BD, prAI, gcSo}, // [2] BLACK DOWN-POINTING TRIANGLE..WHITE DOWN-POINTING TRIANGLE
+ {0x25BE, 0x25BF, prAL, gcSo}, // [2] BLACK DOWN-POINTING SMALL TRIANGLE..WHITE DOWN-POINTING SMALL TRIANGLE
+ {0x25C0, 0x25C0, prAI, gcSo}, // BLACK LEFT-POINTING TRIANGLE
+ {0x25C1, 0x25C1, prAI, gcSm}, // WHITE LEFT-POINTING TRIANGLE
+ {0x25C2, 0x25C5, prAL, gcSo}, // [4] BLACK LEFT-POINTING SMALL TRIANGLE..WHITE LEFT-POINTING POINTER
+ {0x25C6, 0x25C8, prAI, gcSo}, // [3] BLACK DIAMOND..WHITE DIAMOND CONTAINING BLACK SMALL DIAMOND
+ {0x25C9, 0x25CA, prAL, gcSo}, // [2] FISHEYE..LOZENGE
+ {0x25CB, 0x25CB, prAI, gcSo}, // WHITE CIRCLE
+ {0x25CC, 0x25CD, prAL, gcSo}, // [2] DOTTED CIRCLE..CIRCLE WITH VERTICAL FILL
+ {0x25CE, 0x25D1, prAI, gcSo}, // [4] BULLSEYE..CIRCLE WITH RIGHT HALF BLACK
+ {0x25D2, 0x25E1, prAL, gcSo}, // [16] CIRCLE WITH LOWER HALF BLACK..LOWER HALF CIRCLE
+ {0x25E2, 0x25E5, prAI, gcSo}, // [4] BLACK LOWER RIGHT TRIANGLE..BLACK UPPER RIGHT TRIANGLE
+ {0x25E6, 0x25EE, prAL, gcSo}, // [9] WHITE BULLET..UP-POINTING TRIANGLE WITH RIGHT HALF BLACK
+ {0x25EF, 0x25EF, prAI, gcSo}, // LARGE CIRCLE
+ {0x25F0, 0x25F7, prAL, gcSo}, // [8] WHITE SQUARE WITH UPPER LEFT QUADRANT..WHITE CIRCLE WITH UPPER RIGHT QUADRANT
+ {0x25F8, 0x25FF, prAL, gcSm}, // [8] UPPER LEFT TRIANGLE..LOWER RIGHT TRIANGLE
+ {0x2600, 0x2603, prID, gcSo}, // [4] BLACK SUN WITH RAYS..SNOWMAN
+ {0x2604, 0x2604, prAL, gcSo}, // COMET
+ {0x2605, 0x2606, prAI, gcSo}, // [2] BLACK STAR..WHITE STAR
+ {0x2607, 0x2608, prAL, gcSo}, // [2] LIGHTNING..THUNDERSTORM
+ {0x2609, 0x2609, prAI, gcSo}, // SUN
+ {0x260A, 0x260D, prAL, gcSo}, // [4] ASCENDING NODE..OPPOSITION
+ {0x260E, 0x260F, prAI, gcSo}, // [2] BLACK TELEPHONE..WHITE TELEPHONE
+ {0x2610, 0x2613, prAL, gcSo}, // [4] BALLOT BOX..SALTIRE
+ {0x2614, 0x2615, prID, gcSo}, // [2] UMBRELLA WITH RAIN DROPS..HOT BEVERAGE
+ {0x2616, 0x2617, prAI, gcSo}, // [2] WHITE SHOGI PIECE..BLACK SHOGI PIECE
+ {0x2618, 0x2618, prID, gcSo}, // SHAMROCK
+ {0x2619, 0x2619, prAL, gcSo}, // REVERSED ROTATED FLORAL HEART BULLET
+ {0x261A, 0x261C, prID, gcSo}, // [3] BLACK LEFT POINTING INDEX..WHITE LEFT POINTING INDEX
+ {0x261D, 0x261D, prEB, gcSo}, // WHITE UP POINTING INDEX
+ {0x261E, 0x261F, prID, gcSo}, // [2] WHITE RIGHT POINTING INDEX..WHITE DOWN POINTING INDEX
+ {0x2620, 0x2638, prAL, gcSo}, // [25] SKULL AND CROSSBONES..WHEEL OF DHARMA
+ {0x2639, 0x263B, prID, gcSo}, // [3] WHITE FROWNING FACE..BLACK SMILING FACE
+ {0x263C, 0x263F, prAL, gcSo}, // [4] WHITE SUN WITH RAYS..MERCURY
+ {0x2640, 0x2640, prAI, gcSo}, // FEMALE SIGN
+ {0x2641, 0x2641, prAL, gcSo}, // EARTH
+ {0x2642, 0x2642, prAI, gcSo}, // MALE SIGN
+ {0x2643, 0x265F, prAL, gcSo}, // [29] JUPITER..BLACK CHESS PAWN
+ {0x2660, 0x2661, prAI, gcSo}, // [2] BLACK SPADE SUIT..WHITE HEART SUIT
+ {0x2662, 0x2662, prAL, gcSo}, // WHITE DIAMOND SUIT
+ {0x2663, 0x2665, prAI, gcSo}, // [3] BLACK CLUB SUIT..BLACK HEART SUIT
+ {0x2666, 0x2666, prAL, gcSo}, // BLACK DIAMOND SUIT
+ {0x2667, 0x2667, prAI, gcSo}, // WHITE CLUB SUIT
+ {0x2668, 0x2668, prID, gcSo}, // HOT SPRINGS
+ {0x2669, 0x266A, prAI, gcSo}, // [2] QUARTER NOTE..EIGHTH NOTE
+ {0x266B, 0x266B, prAL, gcSo}, // BEAMED EIGHTH NOTES
+ {0x266C, 0x266D, prAI, gcSo}, // [2] BEAMED SIXTEENTH NOTES..MUSIC FLAT SIGN
+ {0x266E, 0x266E, prAL, gcSo}, // MUSIC NATURAL SIGN
+ {0x266F, 0x266F, prAI, gcSm}, // MUSIC SHARP SIGN
+ {0x2670, 0x267E, prAL, gcSo}, // [15] WEST SYRIAC CROSS..PERMANENT PAPER SIGN
+ {0x267F, 0x267F, prID, gcSo}, // WHEELCHAIR SYMBOL
+ {0x2680, 0x269D, prAL, gcSo}, // [30] DIE FACE-1..OUTLINED WHITE STAR
+ {0x269E, 0x269F, prAI, gcSo}, // [2] THREE LINES CONVERGING RIGHT..THREE LINES CONVERGING LEFT
+ {0x26A0, 0x26BC, prAL, gcSo}, // [29] WARNING SIGN..SESQUIQUADRATE
+ {0x26BD, 0x26C8, prID, gcSo}, // [12] SOCCER BALL..THUNDER CLOUD AND RAIN
+ {0x26C9, 0x26CC, prAI, gcSo}, // [4] TURNED WHITE SHOGI PIECE..CROSSING LANES
+ {0x26CD, 0x26CD, prID, gcSo}, // DISABLED CAR
+ {0x26CE, 0x26CE, prAL, gcSo}, // OPHIUCHUS
+ {0x26CF, 0x26D1, prID, gcSo}, // [3] PICK..HELMET WITH WHITE CROSS
+ {0x26D2, 0x26D2, prAI, gcSo}, // CIRCLED CROSSING LANES
+ {0x26D3, 0x26D4, prID, gcSo}, // [2] CHAINS..NO ENTRY
+ {0x26D5, 0x26D7, prAI, gcSo}, // [3] ALTERNATE ONE-WAY LEFT WAY TRAFFIC..WHITE TWO-WAY LEFT WAY TRAFFIC
+ {0x26D8, 0x26D9, prID, gcSo}, // [2] BLACK LEFT LANE MERGE..WHITE LEFT LANE MERGE
+ {0x26DA, 0x26DB, prAI, gcSo}, // [2] DRIVE SLOW SIGN..HEAVY WHITE DOWN-POINTING TRIANGLE
+ {0x26DC, 0x26DC, prID, gcSo}, // LEFT CLOSED ENTRY
+ {0x26DD, 0x26DE, prAI, gcSo}, // [2] SQUARED SALTIRE..FALLING DIAGONAL IN WHITE CIRCLE IN BLACK SQUARE
+ {0x26DF, 0x26E1, prID, gcSo}, // [3] BLACK TRUCK..RESTRICTED LEFT ENTRY-2
+ {0x26E2, 0x26E2, prAL, gcSo}, // ASTRONOMICAL SYMBOL FOR URANUS
+ {0x26E3, 0x26E3, prAI, gcSo}, // HEAVY CIRCLE WITH STROKE AND TWO DOTS ABOVE
+ {0x26E4, 0x26E7, prAL, gcSo}, // [4] PENTAGRAM..INVERTED PENTAGRAM
+ {0x26E8, 0x26E9, prAI, gcSo}, // [2] BLACK CROSS ON SHIELD..SHINTO SHRINE
+ {0x26EA, 0x26EA, prID, gcSo}, // CHURCH
+ {0x26EB, 0x26F0, prAI, gcSo}, // [6] CASTLE..MOUNTAIN
+ {0x26F1, 0x26F5, prID, gcSo}, // [5] UMBRELLA ON GROUND..SAILBOAT
+ {0x26F6, 0x26F6, prAI, gcSo}, // SQUARE FOUR CORNERS
+ {0x26F7, 0x26F8, prID, gcSo}, // [2] SKIER..ICE SKATE
+ {0x26F9, 0x26F9, prEB, gcSo}, // PERSON WITH BALL
+ {0x26FA, 0x26FA, prID, gcSo}, // TENT
+ {0x26FB, 0x26FC, prAI, gcSo}, // [2] JAPANESE BANK SYMBOL..HEADSTONE GRAVEYARD SYMBOL
+ {0x26FD, 0x26FF, prID, gcSo}, // [3] FUEL PUMP..WHITE FLAG WITH HORIZONTAL MIDDLE BLACK STRIPE
+ {0x2700, 0x2704, prID, gcSo}, // [5] BLACK SAFETY SCISSORS..WHITE SCISSORS
+ {0x2705, 0x2707, prAL, gcSo}, // [3] WHITE HEAVY CHECK MARK..TAPE DRIVE
+ {0x2708, 0x2709, prID, gcSo}, // [2] AIRPLANE..ENVELOPE
+ {0x270A, 0x270D, prEB, gcSo}, // [4] RAISED FIST..WRITING HAND
+ {0x270E, 0x2756, prAL, gcSo}, // [73] LOWER RIGHT PENCIL..BLACK DIAMOND MINUS WHITE X
+ {0x2757, 0x2757, prAI, gcSo}, // HEAVY EXCLAMATION MARK SYMBOL
+ {0x2758, 0x275A, prAL, gcSo}, // [3] LIGHT VERTICAL BAR..HEAVY VERTICAL BAR
+ {0x275B, 0x2760, prQU, gcSo}, // [6] HEAVY SINGLE TURNED COMMA QUOTATION MARK ORNAMENT..HEAVY LOW DOUBLE COMMA QUOTATION MARK ORNAMENT
+ {0x2761, 0x2761, prAL, gcSo}, // CURVED STEM PARAGRAPH SIGN ORNAMENT
+ {0x2762, 0x2763, prEX, gcSo}, // [2] HEAVY EXCLAMATION MARK ORNAMENT..HEAVY HEART EXCLAMATION MARK ORNAMENT
+ {0x2764, 0x2764, prID, gcSo}, // HEAVY BLACK HEART
+ {0x2765, 0x2767, prAL, gcSo}, // [3] ROTATED HEAVY BLACK HEART BULLET..ROTATED FLORAL HEART BULLET
+ {0x2768, 0x2768, prOP, gcPs}, // MEDIUM LEFT PARENTHESIS ORNAMENT
+ {0x2769, 0x2769, prCL, gcPe}, // MEDIUM RIGHT PARENTHESIS ORNAMENT
+ {0x276A, 0x276A, prOP, gcPs}, // MEDIUM FLATTENED LEFT PARENTHESIS ORNAMENT
+ {0x276B, 0x276B, prCL, gcPe}, // MEDIUM FLATTENED RIGHT PARENTHESIS ORNAMENT
+ {0x276C, 0x276C, prOP, gcPs}, // MEDIUM LEFT-POINTING ANGLE BRACKET ORNAMENT
+ {0x276D, 0x276D, prCL, gcPe}, // MEDIUM RIGHT-POINTING ANGLE BRACKET ORNAMENT
+ {0x276E, 0x276E, prOP, gcPs}, // HEAVY LEFT-POINTING ANGLE QUOTATION MARK ORNAMENT
+ {0x276F, 0x276F, prCL, gcPe}, // HEAVY RIGHT-POINTING ANGLE QUOTATION MARK ORNAMENT
+ {0x2770, 0x2770, prOP, gcPs}, // HEAVY LEFT-POINTING ANGLE BRACKET ORNAMENT
+ {0x2771, 0x2771, prCL, gcPe}, // HEAVY RIGHT-POINTING ANGLE BRACKET ORNAMENT
+ {0x2772, 0x2772, prOP, gcPs}, // LIGHT LEFT TORTOISE SHELL BRACKET ORNAMENT
+ {0x2773, 0x2773, prCL, gcPe}, // LIGHT RIGHT TORTOISE SHELL BRACKET ORNAMENT
+ {0x2774, 0x2774, prOP, gcPs}, // MEDIUM LEFT CURLY BRACKET ORNAMENT
+ {0x2775, 0x2775, prCL, gcPe}, // MEDIUM RIGHT CURLY BRACKET ORNAMENT
+ {0x2776, 0x2793, prAI, gcNo}, // [30] DINGBAT NEGATIVE CIRCLED DIGIT ONE..DINGBAT NEGATIVE CIRCLED SANS-SERIF NUMBER TEN
+ {0x2794, 0x27BF, prAL, gcSo}, // [44] HEAVY WIDE-HEADED RIGHTWARDS ARROW..DOUBLE CURLY LOOP
+ {0x27C0, 0x27C4, prAL, gcSm}, // [5] THREE DIMENSIONAL ANGLE..OPEN SUPERSET
+ {0x27C5, 0x27C5, prOP, gcPs}, // LEFT S-SHAPED BAG DELIMITER
+ {0x27C6, 0x27C6, prCL, gcPe}, // RIGHT S-SHAPED BAG DELIMITER
+ {0x27C7, 0x27E5, prAL, gcSm}, // [31] OR WITH DOT INSIDE..WHITE SQUARE WITH RIGHTWARDS TICK
+ {0x27E6, 0x27E6, prOP, gcPs}, // MATHEMATICAL LEFT WHITE SQUARE BRACKET
+ {0x27E7, 0x27E7, prCL, gcPe}, // MATHEMATICAL RIGHT WHITE SQUARE BRACKET
+ {0x27E8, 0x27E8, prOP, gcPs}, // MATHEMATICAL LEFT ANGLE BRACKET
+ {0x27E9, 0x27E9, prCL, gcPe}, // MATHEMATICAL RIGHT ANGLE BRACKET
+ {0x27EA, 0x27EA, prOP, gcPs}, // MATHEMATICAL LEFT DOUBLE ANGLE BRACKET
+ {0x27EB, 0x27EB, prCL, gcPe}, // MATHEMATICAL RIGHT DOUBLE ANGLE BRACKET
+ {0x27EC, 0x27EC, prOP, gcPs}, // MATHEMATICAL LEFT WHITE TORTOISE SHELL BRACKET
+ {0x27ED, 0x27ED, prCL, gcPe}, // MATHEMATICAL RIGHT WHITE TORTOISE SHELL BRACKET
+ {0x27EE, 0x27EE, prOP, gcPs}, // MATHEMATICAL LEFT FLATTENED PARENTHESIS
+ {0x27EF, 0x27EF, prCL, gcPe}, // MATHEMATICAL RIGHT FLATTENED PARENTHESIS
+ {0x27F0, 0x27FF, prAL, gcSm}, // [16] UPWARDS QUADRUPLE ARROW..LONG RIGHTWARDS SQUIGGLE ARROW
+ {0x2800, 0x28FF, prAL, gcSo}, // [256] BRAILLE PATTERN BLANK..BRAILLE PATTERN DOTS-12345678
+ {0x2900, 0x297F, prAL, gcSm}, // [128] RIGHTWARDS TWO-HEADED ARROW WITH VERTICAL STROKE..DOWN FISH TAIL
+ {0x2980, 0x2982, prAL, gcSm}, // [3] TRIPLE VERTICAL BAR DELIMITER..Z NOTATION TYPE COLON
+ {0x2983, 0x2983, prOP, gcPs}, // LEFT WHITE CURLY BRACKET
+ {0x2984, 0x2984, prCL, gcPe}, // RIGHT WHITE CURLY BRACKET
+ {0x2985, 0x2985, prOP, gcPs}, // LEFT WHITE PARENTHESIS
+ {0x2986, 0x2986, prCL, gcPe}, // RIGHT WHITE PARENTHESIS
+ {0x2987, 0x2987, prOP, gcPs}, // Z NOTATION LEFT IMAGE BRACKET
+ {0x2988, 0x2988, prCL, gcPe}, // Z NOTATION RIGHT IMAGE BRACKET
+ {0x2989, 0x2989, prOP, gcPs}, // Z NOTATION LEFT BINDING BRACKET
+ {0x298A, 0x298A, prCL, gcPe}, // Z NOTATION RIGHT BINDING BRACKET
+ {0x298B, 0x298B, prOP, gcPs}, // LEFT SQUARE BRACKET WITH UNDERBAR
+ {0x298C, 0x298C, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH UNDERBAR
+ {0x298D, 0x298D, prOP, gcPs}, // LEFT SQUARE BRACKET WITH TICK IN TOP CORNER
+ {0x298E, 0x298E, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH TICK IN BOTTOM CORNER
+ {0x298F, 0x298F, prOP, gcPs}, // LEFT SQUARE BRACKET WITH TICK IN BOTTOM CORNER
+ {0x2990, 0x2990, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH TICK IN TOP CORNER
+ {0x2991, 0x2991, prOP, gcPs}, // LEFT ANGLE BRACKET WITH DOT
+ {0x2992, 0x2992, prCL, gcPe}, // RIGHT ANGLE BRACKET WITH DOT
+ {0x2993, 0x2993, prOP, gcPs}, // LEFT ARC LESS-THAN BRACKET
+ {0x2994, 0x2994, prCL, gcPe}, // RIGHT ARC GREATER-THAN BRACKET
+ {0x2995, 0x2995, prOP, gcPs}, // DOUBLE LEFT ARC GREATER-THAN BRACKET
+ {0x2996, 0x2996, prCL, gcPe}, // DOUBLE RIGHT ARC LESS-THAN BRACKET
+ {0x2997, 0x2997, prOP, gcPs}, // LEFT BLACK TORTOISE SHELL BRACKET
+ {0x2998, 0x2998, prCL, gcPe}, // RIGHT BLACK TORTOISE SHELL BRACKET
+ {0x2999, 0x29D7, prAL, gcSm}, // [63] DOTTED FENCE..BLACK HOURGLASS
+ {0x29D8, 0x29D8, prOP, gcPs}, // LEFT WIGGLY FENCE
+ {0x29D9, 0x29D9, prCL, gcPe}, // RIGHT WIGGLY FENCE
+ {0x29DA, 0x29DA, prOP, gcPs}, // LEFT DOUBLE WIGGLY FENCE
+ {0x29DB, 0x29DB, prCL, gcPe}, // RIGHT DOUBLE WIGGLY FENCE
+ {0x29DC, 0x29FB, prAL, gcSm}, // [32] INCOMPLETE INFINITY..TRIPLE PLUS
+ {0x29FC, 0x29FC, prOP, gcPs}, // LEFT-POINTING CURVED ANGLE BRACKET
+ {0x29FD, 0x29FD, prCL, gcPe}, // RIGHT-POINTING CURVED ANGLE BRACKET
+ {0x29FE, 0x29FF, prAL, gcSm}, // [2] TINY..MINY
+ {0x2A00, 0x2AFF, prAL, gcSm}, // [256] N-ARY CIRCLED DOT OPERATOR..N-ARY WHITE VERTICAL BAR
+ {0x2B00, 0x2B2F, prAL, gcSo}, // [48] NORTH EAST WHITE ARROW..WHITE VERTICAL ELLIPSE
+ {0x2B30, 0x2B44, prAL, gcSm}, // [21] LEFT ARROW WITH SMALL CIRCLE..RIGHTWARDS ARROW THROUGH SUPERSET
+ {0x2B45, 0x2B46, prAL, gcSo}, // [2] LEFTWARDS QUADRUPLE ARROW..RIGHTWARDS QUADRUPLE ARROW
+ {0x2B47, 0x2B4C, prAL, gcSm}, // [6] REVERSE TILDE OPERATOR ABOVE RIGHTWARDS ARROW..RIGHTWARDS ARROW ABOVE REVERSE TILDE OPERATOR
+ {0x2B4D, 0x2B54, prAL, gcSo}, // [8] DOWNWARDS TRIANGLE-HEADED ZIGZAG ARROW..WHITE RIGHT-POINTING PENTAGON
+ {0x2B55, 0x2B59, prAI, gcSo}, // [5] HEAVY LARGE CIRCLE..HEAVY CIRCLED SALTIRE
+ {0x2B5A, 0x2B73, prAL, gcSo}, // [26] SLANTED NORTH ARROW WITH HOOKED HEAD..DOWNWARDS TRIANGLE-HEADED ARROW TO BAR
+ {0x2B76, 0x2B95, prAL, gcSo}, // [32] NORTH WEST TRIANGLE-HEADED ARROW TO BAR..RIGHTWARDS BLACK ARROW
+ {0x2B97, 0x2BFF, prAL, gcSo}, // [105] SYMBOL FOR TYPE A ELECTRONICS..HELLSCHREIBER PAUSE SYMBOL
+ {0x2C00, 0x2C5F, prAL, gcLC}, // [96] GLAGOLITIC CAPITAL LETTER AZU..GLAGOLITIC SMALL LETTER CAUDATE CHRIVI
+ {0x2C60, 0x2C7B, prAL, gcLC}, // [28] LATIN CAPITAL LETTER L WITH DOUBLE BAR..LATIN LETTER SMALL CAPITAL TURNED E
+ {0x2C7C, 0x2C7D, prAL, gcLm}, // [2] LATIN SUBSCRIPT SMALL LETTER J..MODIFIER LETTER CAPITAL V
+ {0x2C7E, 0x2C7F, prAL, gcLu}, // [2] LATIN CAPITAL LETTER S WITH SWASH TAIL..LATIN CAPITAL LETTER Z WITH SWASH TAIL
+ {0x2C80, 0x2CE4, prAL, gcLC}, // [101] COPTIC CAPITAL LETTER ALFA..COPTIC SYMBOL KAI
+ {0x2CE5, 0x2CEA, prAL, gcSo}, // [6] COPTIC SYMBOL MI RO..COPTIC SYMBOL SHIMA SIMA
+ {0x2CEB, 0x2CEE, prAL, gcLC}, // [4] COPTIC CAPITAL LETTER CRYPTOGRAMMIC SHEI..COPTIC SMALL LETTER CRYPTOGRAMMIC GANGIA
+ {0x2CEF, 0x2CF1, prCM, gcMn}, // [3] COPTIC COMBINING NI ABOVE..COPTIC COMBINING SPIRITUS LENIS
+ {0x2CF2, 0x2CF3, prAL, gcLC}, // [2] COPTIC CAPITAL LETTER BOHAIRIC KHEI..COPTIC SMALL LETTER BOHAIRIC KHEI
+ {0x2CF9, 0x2CF9, prEX, gcPo}, // COPTIC OLD NUBIAN FULL STOP
+ {0x2CFA, 0x2CFC, prBA, gcPo}, // [3] COPTIC OLD NUBIAN DIRECT QUESTION MARK..COPTIC OLD NUBIAN VERSE DIVIDER
+ {0x2CFD, 0x2CFD, prAL, gcNo}, // COPTIC FRACTION ONE HALF
+ {0x2CFE, 0x2CFE, prEX, gcPo}, // COPTIC FULL STOP
+ {0x2CFF, 0x2CFF, prBA, gcPo}, // COPTIC MORPHOLOGICAL DIVIDER
+ {0x2D00, 0x2D25, prAL, gcLl}, // [38] GEORGIAN SMALL LETTER AN..GEORGIAN SMALL LETTER HOE
+ {0x2D27, 0x2D27, prAL, gcLl}, // GEORGIAN SMALL LETTER YN
+ {0x2D2D, 0x2D2D, prAL, gcLl}, // GEORGIAN SMALL LETTER AEN
+ {0x2D30, 0x2D67, prAL, gcLo}, // [56] TIFINAGH LETTER YA..TIFINAGH LETTER YO
+ {0x2D6F, 0x2D6F, prAL, gcLm}, // TIFINAGH MODIFIER LETTER LABIALIZATION MARK
+ {0x2D70, 0x2D70, prBA, gcPo}, // TIFINAGH SEPARATOR MARK
+ {0x2D7F, 0x2D7F, prCM, gcMn}, // TIFINAGH CONSONANT JOINER
+ {0x2D80, 0x2D96, prAL, gcLo}, // [23] ETHIOPIC SYLLABLE LOA..ETHIOPIC SYLLABLE GGWE
+ {0x2DA0, 0x2DA6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE SSA..ETHIOPIC SYLLABLE SSO
+ {0x2DA8, 0x2DAE, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE CCA..ETHIOPIC SYLLABLE CCO
+ {0x2DB0, 0x2DB6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE ZZA..ETHIOPIC SYLLABLE ZZO
+ {0x2DB8, 0x2DBE, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE CCHA..ETHIOPIC SYLLABLE CCHO
+ {0x2DC0, 0x2DC6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE QYA..ETHIOPIC SYLLABLE QYO
+ {0x2DC8, 0x2DCE, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE KYA..ETHIOPIC SYLLABLE KYO
+ {0x2DD0, 0x2DD6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE XYA..ETHIOPIC SYLLABLE XYO
+ {0x2DD8, 0x2DDE, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE GYA..ETHIOPIC SYLLABLE GYO
+ {0x2DE0, 0x2DFF, prCM, gcMn}, // [32] COMBINING CYRILLIC LETTER BE..COMBINING CYRILLIC LETTER IOTIFIED BIG YUS
+ {0x2E00, 0x2E01, prQU, gcPo}, // [2] RIGHT ANGLE SUBSTITUTION MARKER..RIGHT ANGLE DOTTED SUBSTITUTION MARKER
+ {0x2E02, 0x2E02, prQU, gcPi}, // LEFT SUBSTITUTION BRACKET
+ {0x2E03, 0x2E03, prQU, gcPf}, // RIGHT SUBSTITUTION BRACKET
+ {0x2E04, 0x2E04, prQU, gcPi}, // LEFT DOTTED SUBSTITUTION BRACKET
+ {0x2E05, 0x2E05, prQU, gcPf}, // RIGHT DOTTED SUBSTITUTION BRACKET
+ {0x2E06, 0x2E08, prQU, gcPo}, // [3] RAISED INTERPOLATION MARKER..DOTTED TRANSPOSITION MARKER
+ {0x2E09, 0x2E09, prQU, gcPi}, // LEFT TRANSPOSITION BRACKET
+ {0x2E0A, 0x2E0A, prQU, gcPf}, // RIGHT TRANSPOSITION BRACKET
+ {0x2E0B, 0x2E0B, prQU, gcPo}, // RAISED SQUARE
+ {0x2E0C, 0x2E0C, prQU, gcPi}, // LEFT RAISED OMISSION BRACKET
+ {0x2E0D, 0x2E0D, prQU, gcPf}, // RIGHT RAISED OMISSION BRACKET
+ {0x2E0E, 0x2E15, prBA, gcPo}, // [8] EDITORIAL CORONIS..UPWARDS ANCORA
+ {0x2E16, 0x2E16, prAL, gcPo}, // DOTTED RIGHT-POINTING ANGLE
+ {0x2E17, 0x2E17, prBA, gcPd}, // DOUBLE OBLIQUE HYPHEN
+ {0x2E18, 0x2E18, prOP, gcPo}, // INVERTED INTERROBANG
+ {0x2E19, 0x2E19, prBA, gcPo}, // PALM BRANCH
+ {0x2E1A, 0x2E1A, prAL, gcPd}, // HYPHEN WITH DIAERESIS
+ {0x2E1B, 0x2E1B, prAL, gcPo}, // TILDE WITH RING ABOVE
+ {0x2E1C, 0x2E1C, prQU, gcPi}, // LEFT LOW PARAPHRASE BRACKET
+ {0x2E1D, 0x2E1D, prQU, gcPf}, // RIGHT LOW PARAPHRASE BRACKET
+ {0x2E1E, 0x2E1F, prAL, gcPo}, // [2] TILDE WITH DOT ABOVE..TILDE WITH DOT BELOW
+ {0x2E20, 0x2E20, prQU, gcPi}, // LEFT VERTICAL BAR WITH QUILL
+ {0x2E21, 0x2E21, prQU, gcPf}, // RIGHT VERTICAL BAR WITH QUILL
+ {0x2E22, 0x2E22, prOP, gcPs}, // TOP LEFT HALF BRACKET
+ {0x2E23, 0x2E23, prCL, gcPe}, // TOP RIGHT HALF BRACKET
+ {0x2E24, 0x2E24, prOP, gcPs}, // BOTTOM LEFT HALF BRACKET
+ {0x2E25, 0x2E25, prCL, gcPe}, // BOTTOM RIGHT HALF BRACKET
+ {0x2E26, 0x2E26, prOP, gcPs}, // LEFT SIDEWAYS U BRACKET
+ {0x2E27, 0x2E27, prCL, gcPe}, // RIGHT SIDEWAYS U BRACKET
+ {0x2E28, 0x2E28, prOP, gcPs}, // LEFT DOUBLE PARENTHESIS
+ {0x2E29, 0x2E29, prCL, gcPe}, // RIGHT DOUBLE PARENTHESIS
+ {0x2E2A, 0x2E2D, prBA, gcPo}, // [4] TWO DOTS OVER ONE DOT PUNCTUATION..FIVE DOT MARK
+ {0x2E2E, 0x2E2E, prEX, gcPo}, // REVERSED QUESTION MARK
+ {0x2E2F, 0x2E2F, prAL, gcLm}, // VERTICAL TILDE
+ {0x2E30, 0x2E31, prBA, gcPo}, // [2] RING POINT..WORD SEPARATOR MIDDLE DOT
+ {0x2E32, 0x2E32, prAL, gcPo}, // TURNED COMMA
+ {0x2E33, 0x2E34, prBA, gcPo}, // [2] RAISED DOT..RAISED COMMA
+ {0x2E35, 0x2E39, prAL, gcPo}, // [5] TURNED SEMICOLON..TOP HALF SECTION SIGN
+ {0x2E3A, 0x2E3B, prB2, gcPd}, // [2] TWO-EM DASH..THREE-EM DASH
+ {0x2E3C, 0x2E3E, prBA, gcPo}, // [3] STENOGRAPHIC FULL STOP..WIGGLY VERTICAL LINE
+ {0x2E3F, 0x2E3F, prAL, gcPo}, // CAPITULUM
+ {0x2E40, 0x2E40, prBA, gcPd}, // DOUBLE HYPHEN
+ {0x2E41, 0x2E41, prBA, gcPo}, // REVERSED COMMA
+ {0x2E42, 0x2E42, prOP, gcPs}, // DOUBLE LOW-REVERSED-9 QUOTATION MARK
+ {0x2E43, 0x2E4A, prBA, gcPo}, // [8] DASH WITH LEFT UPTURN..DOTTED SOLIDUS
+ {0x2E4B, 0x2E4B, prAL, gcPo}, // TRIPLE DAGGER
+ {0x2E4C, 0x2E4C, prBA, gcPo}, // MEDIEVAL COMMA
+ {0x2E4D, 0x2E4D, prAL, gcPo}, // PARAGRAPHUS MARK
+ {0x2E4E, 0x2E4F, prBA, gcPo}, // [2] PUNCTUS ELEVATUS MARK..CORNISH VERSE DIVIDER
+ {0x2E50, 0x2E51, prAL, gcSo}, // [2] CROSS PATTY WITH RIGHT CROSSBAR..CROSS PATTY WITH LEFT CROSSBAR
+ {0x2E52, 0x2E52, prAL, gcPo}, // TIRONIAN SIGN CAPITAL ET
+ {0x2E53, 0x2E54, prEX, gcPo}, // [2] MEDIEVAL EXCLAMATION MARK..MEDIEVAL QUESTION MARK
+ {0x2E55, 0x2E55, prOP, gcPs}, // LEFT SQUARE BRACKET WITH STROKE
+ {0x2E56, 0x2E56, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH STROKE
+ {0x2E57, 0x2E57, prOP, gcPs}, // LEFT SQUARE BRACKET WITH DOUBLE STROKE
+ {0x2E58, 0x2E58, prCL, gcPe}, // RIGHT SQUARE BRACKET WITH DOUBLE STROKE
+ {0x2E59, 0x2E59, prOP, gcPs}, // TOP HALF LEFT PARENTHESIS
+ {0x2E5A, 0x2E5A, prCL, gcPe}, // TOP HALF RIGHT PARENTHESIS
+ {0x2E5B, 0x2E5B, prOP, gcPs}, // BOTTOM HALF LEFT PARENTHESIS
+ {0x2E5C, 0x2E5C, prCL, gcPe}, // BOTTOM HALF RIGHT PARENTHESIS
+ {0x2E5D, 0x2E5D, prBA, gcPd}, // OBLIQUE HYPHEN
+ {0x2E80, 0x2E99, prID, gcSo}, // [26] CJK RADICAL REPEAT..CJK RADICAL RAP
+ {0x2E9B, 0x2EF3, prID, gcSo}, // [89] CJK RADICAL CHOKE..CJK RADICAL C-SIMPLIFIED TURTLE
+ {0x2F00, 0x2FD5, prID, gcSo}, // [214] KANGXI RADICAL ONE..KANGXI RADICAL FLUTE
+ {0x2FF0, 0x2FFB, prID, gcSo}, // [12] IDEOGRAPHIC DESCRIPTION CHARACTER LEFT TO RIGHT..IDEOGRAPHIC DESCRIPTION CHARACTER OVERLAID
+ {0x3000, 0x3000, prBA, gcZs}, // IDEOGRAPHIC SPACE
+ {0x3001, 0x3002, prCL, gcPo}, // [2] IDEOGRAPHIC COMMA..IDEOGRAPHIC FULL STOP
+ {0x3003, 0x3003, prID, gcPo}, // DITTO MARK
+ {0x3004, 0x3004, prID, gcSo}, // JAPANESE INDUSTRIAL STANDARD SYMBOL
+ {0x3005, 0x3005, prNS, gcLm}, // IDEOGRAPHIC ITERATION MARK
+ {0x3006, 0x3006, prID, gcLo}, // IDEOGRAPHIC CLOSING MARK
+ {0x3007, 0x3007, prID, gcNl}, // IDEOGRAPHIC NUMBER ZERO
+ {0x3008, 0x3008, prOP, gcPs}, // LEFT ANGLE BRACKET
+ {0x3009, 0x3009, prCL, gcPe}, // RIGHT ANGLE BRACKET
+ {0x300A, 0x300A, prOP, gcPs}, // LEFT DOUBLE ANGLE BRACKET
+ {0x300B, 0x300B, prCL, gcPe}, // RIGHT DOUBLE ANGLE BRACKET
+ {0x300C, 0x300C, prOP, gcPs}, // LEFT CORNER BRACKET
+ {0x300D, 0x300D, prCL, gcPe}, // RIGHT CORNER BRACKET
+ {0x300E, 0x300E, prOP, gcPs}, // LEFT WHITE CORNER BRACKET
+ {0x300F, 0x300F, prCL, gcPe}, // RIGHT WHITE CORNER BRACKET
+ {0x3010, 0x3010, prOP, gcPs}, // LEFT BLACK LENTICULAR BRACKET
+ {0x3011, 0x3011, prCL, gcPe}, // RIGHT BLACK LENTICULAR BRACKET
+ {0x3012, 0x3013, prID, gcSo}, // [2] POSTAL MARK..GETA MARK
+ {0x3014, 0x3014, prOP, gcPs}, // LEFT TORTOISE SHELL BRACKET
+ {0x3015, 0x3015, prCL, gcPe}, // RIGHT TORTOISE SHELL BRACKET
+ {0x3016, 0x3016, prOP, gcPs}, // LEFT WHITE LENTICULAR BRACKET
+ {0x3017, 0x3017, prCL, gcPe}, // RIGHT WHITE LENTICULAR BRACKET
+ {0x3018, 0x3018, prOP, gcPs}, // LEFT WHITE TORTOISE SHELL BRACKET
+ {0x3019, 0x3019, prCL, gcPe}, // RIGHT WHITE TORTOISE SHELL BRACKET
+ {0x301A, 0x301A, prOP, gcPs}, // LEFT WHITE SQUARE BRACKET
+ {0x301B, 0x301B, prCL, gcPe}, // RIGHT WHITE SQUARE BRACKET
+ {0x301C, 0x301C, prNS, gcPd}, // WAVE DASH
+ {0x301D, 0x301D, prOP, gcPs}, // REVERSED DOUBLE PRIME QUOTATION MARK
+ {0x301E, 0x301F, prCL, gcPe}, // [2] DOUBLE PRIME QUOTATION MARK..LOW DOUBLE PRIME QUOTATION MARK
+ {0x3020, 0x3020, prID, gcSo}, // POSTAL MARK FACE
+ {0x3021, 0x3029, prID, gcNl}, // [9] HANGZHOU NUMERAL ONE..HANGZHOU NUMERAL NINE
+ {0x302A, 0x302D, prCM, gcMn}, // [4] IDEOGRAPHIC LEVEL TONE MARK..IDEOGRAPHIC ENTERING TONE MARK
+ {0x302E, 0x302F, prCM, gcMc}, // [2] HANGUL SINGLE DOT TONE MARK..HANGUL DOUBLE DOT TONE MARK
+ {0x3030, 0x3030, prID, gcPd}, // WAVY DASH
+ {0x3031, 0x3034, prID, gcLm}, // [4] VERTICAL KANA REPEAT MARK..VERTICAL KANA REPEAT WITH VOICED SOUND MARK UPPER HALF
+ {0x3035, 0x3035, prCM, gcLm}, // VERTICAL KANA REPEAT MARK LOWER HALF
+ {0x3036, 0x3037, prID, gcSo}, // [2] CIRCLED POSTAL MARK..IDEOGRAPHIC TELEGRAPH LINE FEED SEPARATOR SYMBOL
+ {0x3038, 0x303A, prID, gcNl}, // [3] HANGZHOU NUMERAL TEN..HANGZHOU NUMERAL THIRTY
+ {0x303B, 0x303B, prNS, gcLm}, // VERTICAL IDEOGRAPHIC ITERATION MARK
+ {0x303C, 0x303C, prNS, gcLo}, // MASU MARK
+ {0x303D, 0x303D, prID, gcPo}, // PART ALTERNATION MARK
+ {0x303E, 0x303F, prID, gcSo}, // [2] IDEOGRAPHIC VARIATION INDICATOR..IDEOGRAPHIC HALF FILL SPACE
+ {0x3041, 0x3041, prCJ, gcLo}, // HIRAGANA LETTER SMALL A
+ {0x3042, 0x3042, prID, gcLo}, // HIRAGANA LETTER A
+ {0x3043, 0x3043, prCJ, gcLo}, // HIRAGANA LETTER SMALL I
+ {0x3044, 0x3044, prID, gcLo}, // HIRAGANA LETTER I
+ {0x3045, 0x3045, prCJ, gcLo}, // HIRAGANA LETTER SMALL U
+ {0x3046, 0x3046, prID, gcLo}, // HIRAGANA LETTER U
+ {0x3047, 0x3047, prCJ, gcLo}, // HIRAGANA LETTER SMALL E
+ {0x3048, 0x3048, prID, gcLo}, // HIRAGANA LETTER E
+ {0x3049, 0x3049, prCJ, gcLo}, // HIRAGANA LETTER SMALL O
+ {0x304A, 0x3062, prID, gcLo}, // [25] HIRAGANA LETTER O..HIRAGANA LETTER DI
+ {0x3063, 0x3063, prCJ, gcLo}, // HIRAGANA LETTER SMALL TU
+ {0x3064, 0x3082, prID, gcLo}, // [31] HIRAGANA LETTER TU..HIRAGANA LETTER MO
+ {0x3083, 0x3083, prCJ, gcLo}, // HIRAGANA LETTER SMALL YA
+ {0x3084, 0x3084, prID, gcLo}, // HIRAGANA LETTER YA
+ {0x3085, 0x3085, prCJ, gcLo}, // HIRAGANA LETTER SMALL YU
+ {0x3086, 0x3086, prID, gcLo}, // HIRAGANA LETTER YU
+ {0x3087, 0x3087, prCJ, gcLo}, // HIRAGANA LETTER SMALL YO
+ {0x3088, 0x308D, prID, gcLo}, // [6] HIRAGANA LETTER YO..HIRAGANA LETTER RO
+ {0x308E, 0x308E, prCJ, gcLo}, // HIRAGANA LETTER SMALL WA
+ {0x308F, 0x3094, prID, gcLo}, // [6] HIRAGANA LETTER WA..HIRAGANA LETTER VU
+ {0x3095, 0x3096, prCJ, gcLo}, // [2] HIRAGANA LETTER SMALL KA..HIRAGANA LETTER SMALL KE
+ {0x3099, 0x309A, prCM, gcMn}, // [2] COMBINING KATAKANA-HIRAGANA VOICED SOUND MARK..COMBINING KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK
+ {0x309B, 0x309C, prNS, gcSk}, // [2] KATAKANA-HIRAGANA VOICED SOUND MARK..KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK
+ {0x309D, 0x309E, prNS, gcLm}, // [2] HIRAGANA ITERATION MARK..HIRAGANA VOICED ITERATION MARK
+ {0x309F, 0x309F, prID, gcLo}, // HIRAGANA DIGRAPH YORI
+ {0x30A0, 0x30A0, prNS, gcPd}, // KATAKANA-HIRAGANA DOUBLE HYPHEN
+ {0x30A1, 0x30A1, prCJ, gcLo}, // KATAKANA LETTER SMALL A
+ {0x30A2, 0x30A2, prID, gcLo}, // KATAKANA LETTER A
+ {0x30A3, 0x30A3, prCJ, gcLo}, // KATAKANA LETTER SMALL I
+ {0x30A4, 0x30A4, prID, gcLo}, // KATAKANA LETTER I
+ {0x30A5, 0x30A5, prCJ, gcLo}, // KATAKANA LETTER SMALL U
+ {0x30A6, 0x30A6, prID, gcLo}, // KATAKANA LETTER U
+ {0x30A7, 0x30A7, prCJ, gcLo}, // KATAKANA LETTER SMALL E
+ {0x30A8, 0x30A8, prID, gcLo}, // KATAKANA LETTER E
+ {0x30A9, 0x30A9, prCJ, gcLo}, // KATAKANA LETTER SMALL O
+ {0x30AA, 0x30C2, prID, gcLo}, // [25] KATAKANA LETTER O..KATAKANA LETTER DI
+ {0x30C3, 0x30C3, prCJ, gcLo}, // KATAKANA LETTER SMALL TU
+ {0x30C4, 0x30E2, prID, gcLo}, // [31] KATAKANA LETTER TU..KATAKANA LETTER MO
+ {0x30E3, 0x30E3, prCJ, gcLo}, // KATAKANA LETTER SMALL YA
+ {0x30E4, 0x30E4, prID, gcLo}, // KATAKANA LETTER YA
+ {0x30E5, 0x30E5, prCJ, gcLo}, // KATAKANA LETTER SMALL YU
+ {0x30E6, 0x30E6, prID, gcLo}, // KATAKANA LETTER YU
+ {0x30E7, 0x30E7, prCJ, gcLo}, // KATAKANA LETTER SMALL YO
+ {0x30E8, 0x30ED, prID, gcLo}, // [6] KATAKANA LETTER YO..KATAKANA LETTER RO
+ {0x30EE, 0x30EE, prCJ, gcLo}, // KATAKANA LETTER SMALL WA
+ {0x30EF, 0x30F4, prID, gcLo}, // [6] KATAKANA LETTER WA..KATAKANA LETTER VU
+ {0x30F5, 0x30F6, prCJ, gcLo}, // [2] KATAKANA LETTER SMALL KA..KATAKANA LETTER SMALL KE
+ {0x30F7, 0x30FA, prID, gcLo}, // [4] KATAKANA LETTER VA..KATAKANA LETTER VO
+ {0x30FB, 0x30FB, prNS, gcPo}, // KATAKANA MIDDLE DOT
+ {0x30FC, 0x30FC, prCJ, gcLm}, // KATAKANA-HIRAGANA PROLONGED SOUND MARK
+ {0x30FD, 0x30FE, prNS, gcLm}, // [2] KATAKANA ITERATION MARK..KATAKANA VOICED ITERATION MARK
+ {0x30FF, 0x30FF, prID, gcLo}, // KATAKANA DIGRAPH KOTO
+ {0x3105, 0x312F, prID, gcLo}, // [43] BOPOMOFO LETTER B..BOPOMOFO LETTER NN
+ {0x3131, 0x318E, prID, gcLo}, // [94] HANGUL LETTER KIYEOK..HANGUL LETTER ARAEAE
+ {0x3190, 0x3191, prID, gcSo}, // [2] IDEOGRAPHIC ANNOTATION LINKING MARK..IDEOGRAPHIC ANNOTATION REVERSE MARK
+ {0x3192, 0x3195, prID, gcNo}, // [4] IDEOGRAPHIC ANNOTATION ONE MARK..IDEOGRAPHIC ANNOTATION FOUR MARK
+ {0x3196, 0x319F, prID, gcSo}, // [10] IDEOGRAPHIC ANNOTATION TOP MARK..IDEOGRAPHIC ANNOTATION MAN MARK
+ {0x31A0, 0x31BF, prID, gcLo}, // [32] BOPOMOFO LETTER BU..BOPOMOFO LETTER AH
+ {0x31C0, 0x31E3, prID, gcSo}, // [36] CJK STROKE T..CJK STROKE Q
+ {0x31F0, 0x31FF, prCJ, gcLo}, // [16] KATAKANA LETTER SMALL KU..KATAKANA LETTER SMALL RO
+ {0x3200, 0x321E, prID, gcSo}, // [31] PARENTHESIZED HANGUL KIYEOK..PARENTHESIZED KOREAN CHARACTER O HU
+ {0x3220, 0x3229, prID, gcNo}, // [10] PARENTHESIZED IDEOGRAPH ONE..PARENTHESIZED IDEOGRAPH TEN
+ {0x322A, 0x3247, prID, gcSo}, // [30] PARENTHESIZED IDEOGRAPH MOON..CIRCLED IDEOGRAPH KOTO
+ {0x3248, 0x324F, prAI, gcNo}, // [8] CIRCLED NUMBER TEN ON BLACK SQUARE..CIRCLED NUMBER EIGHTY ON BLACK SQUARE
+ {0x3250, 0x3250, prID, gcSo}, // PARTNERSHIP SIGN
+ {0x3251, 0x325F, prID, gcNo}, // [15] CIRCLED NUMBER TWENTY ONE..CIRCLED NUMBER THIRTY FIVE
+ {0x3260, 0x327F, prID, gcSo}, // [32] CIRCLED HANGUL KIYEOK..KOREAN STANDARD SYMBOL
+ {0x3280, 0x3289, prID, gcNo}, // [10] CIRCLED IDEOGRAPH ONE..CIRCLED IDEOGRAPH TEN
+ {0x328A, 0x32B0, prID, gcSo}, // [39] CIRCLED IDEOGRAPH MOON..CIRCLED IDEOGRAPH NIGHT
+ {0x32B1, 0x32BF, prID, gcNo}, // [15] CIRCLED NUMBER THIRTY SIX..CIRCLED NUMBER FIFTY
+ {0x32C0, 0x32FF, prID, gcSo}, // [64] IDEOGRAPHIC TELEGRAPH SYMBOL FOR JANUARY..SQUARE ERA NAME REIWA
+ {0x3300, 0x33FF, prID, gcSo}, // [256] SQUARE APAATO..SQUARE GAL
+ {0x3400, 0x4DBF, prID, gcLo}, // [6592] CJK UNIFIED IDEOGRAPH-3400..CJK UNIFIED IDEOGRAPH-4DBF
+ {0x4DC0, 0x4DFF, prAL, gcSo}, // [64] HEXAGRAM FOR THE CREATIVE HEAVEN..HEXAGRAM FOR BEFORE COMPLETION
+ {0x4E00, 0x9FFF, prID, gcLo}, // [20992] CJK UNIFIED IDEOGRAPH-4E00..CJK UNIFIED IDEOGRAPH-9FFF
+ {0xA000, 0xA014, prID, gcLo}, // [21] YI SYLLABLE IT..YI SYLLABLE E
+ {0xA015, 0xA015, prNS, gcLm}, // YI SYLLABLE WU
+ {0xA016, 0xA48C, prID, gcLo}, // [1143] YI SYLLABLE BIT..YI SYLLABLE YYR
+ {0xA490, 0xA4C6, prID, gcSo}, // [55] YI RADICAL QOT..YI RADICAL KE
+ {0xA4D0, 0xA4F7, prAL, gcLo}, // [40] LISU LETTER BA..LISU LETTER OE
+ {0xA4F8, 0xA4FD, prAL, gcLm}, // [6] LISU LETTER TONE MYA TI..LISU LETTER TONE MYA JEU
+ {0xA4FE, 0xA4FF, prBA, gcPo}, // [2] LISU PUNCTUATION COMMA..LISU PUNCTUATION FULL STOP
+ {0xA500, 0xA60B, prAL, gcLo}, // [268] VAI SYLLABLE EE..VAI SYLLABLE NG
+ {0xA60C, 0xA60C, prAL, gcLm}, // VAI SYLLABLE LENGTHENER
+ {0xA60D, 0xA60D, prBA, gcPo}, // VAI COMMA
+ {0xA60E, 0xA60E, prEX, gcPo}, // VAI FULL STOP
+ {0xA60F, 0xA60F, prBA, gcPo}, // VAI QUESTION MARK
+ {0xA610, 0xA61F, prAL, gcLo}, // [16] VAI SYLLABLE NDOLE FA..VAI SYMBOL JONG
+ {0xA620, 0xA629, prNU, gcNd}, // [10] VAI DIGIT ZERO..VAI DIGIT NINE
+ {0xA62A, 0xA62B, prAL, gcLo}, // [2] VAI SYLLABLE NDOLE MA..VAI SYLLABLE NDOLE DO
+ {0xA640, 0xA66D, prAL, gcLC}, // [46] CYRILLIC CAPITAL LETTER ZEMLYA..CYRILLIC SMALL LETTER DOUBLE MONOCULAR O
+ {0xA66E, 0xA66E, prAL, gcLo}, // CYRILLIC LETTER MULTIOCULAR O
+ {0xA66F, 0xA66F, prCM, gcMn}, // COMBINING CYRILLIC VZMET
+ {0xA670, 0xA672, prCM, gcMe}, // [3] COMBINING CYRILLIC TEN MILLIONS SIGN..COMBINING CYRILLIC THOUSAND MILLIONS SIGN
+ {0xA673, 0xA673, prAL, gcPo}, // SLAVONIC ASTERISK
+ {0xA674, 0xA67D, prCM, gcMn}, // [10] COMBINING CYRILLIC LETTER UKRAINIAN IE..COMBINING CYRILLIC PAYEROK
+ {0xA67E, 0xA67E, prAL, gcPo}, // CYRILLIC KAVYKA
+ {0xA67F, 0xA67F, prAL, gcLm}, // CYRILLIC PAYEROK
+ {0xA680, 0xA69B, prAL, gcLC}, // [28] CYRILLIC CAPITAL LETTER DWE..CYRILLIC SMALL LETTER CROSSED O
+ {0xA69C, 0xA69D, prAL, gcLm}, // [2] MODIFIER LETTER CYRILLIC HARD SIGN..MODIFIER LETTER CYRILLIC SOFT SIGN
+ {0xA69E, 0xA69F, prCM, gcMn}, // [2] COMBINING CYRILLIC LETTER EF..COMBINING CYRILLIC LETTER IOTIFIED E
+ {0xA6A0, 0xA6E5, prAL, gcLo}, // [70] BAMUM LETTER A..BAMUM LETTER KI
+ {0xA6E6, 0xA6EF, prAL, gcNl}, // [10] BAMUM LETTER MO..BAMUM LETTER KOGHOM
+ {0xA6F0, 0xA6F1, prCM, gcMn}, // [2] BAMUM COMBINING MARK KOQNDON..BAMUM COMBINING MARK TUKWENTIS
+ {0xA6F2, 0xA6F2, prAL, gcPo}, // BAMUM NJAEMLI
+ {0xA6F3, 0xA6F7, prBA, gcPo}, // [5] BAMUM FULL STOP..BAMUM QUESTION MARK
+ {0xA700, 0xA716, prAL, gcSk}, // [23] MODIFIER LETTER CHINESE TONE YIN PING..MODIFIER LETTER EXTRA-LOW LEFT-STEM TONE BAR
+ {0xA717, 0xA71F, prAL, gcLm}, // [9] MODIFIER LETTER DOT VERTICAL BAR..MODIFIER LETTER LOW INVERTED EXCLAMATION MARK
+ {0xA720, 0xA721, prAL, gcSk}, // [2] MODIFIER LETTER STRESS AND HIGH TONE..MODIFIER LETTER STRESS AND LOW TONE
+ {0xA722, 0xA76F, prAL, gcLC}, // [78] LATIN CAPITAL LETTER EGYPTOLOGICAL ALEF..LATIN SMALL LETTER CON
+ {0xA770, 0xA770, prAL, gcLm}, // MODIFIER LETTER US
+ {0xA771, 0xA787, prAL, gcLC}, // [23] LATIN SMALL LETTER DUM..LATIN SMALL LETTER INSULAR T
+ {0xA788, 0xA788, prAL, gcLm}, // MODIFIER LETTER LOW CIRCUMFLEX ACCENT
+ {0xA789, 0xA78A, prAL, gcSk}, // [2] MODIFIER LETTER COLON..MODIFIER LETTER SHORT EQUALS SIGN
+ {0xA78B, 0xA78E, prAL, gcLC}, // [4] LATIN CAPITAL LETTER SALTILLO..LATIN SMALL LETTER L WITH RETROFLEX HOOK AND BELT
+ {0xA78F, 0xA78F, prAL, gcLo}, // LATIN LETTER SINOLOGICAL DOT
+ {0xA790, 0xA7CA, prAL, gcLC}, // [59] LATIN CAPITAL LETTER N WITH DESCENDER..LATIN SMALL LETTER S WITH SHORT STROKE OVERLAY
+ {0xA7D0, 0xA7D1, prAL, gcLC}, // [2] LATIN CAPITAL LETTER CLOSED INSULAR G..LATIN SMALL LETTER CLOSED INSULAR G
+ {0xA7D3, 0xA7D3, prAL, gcLl}, // LATIN SMALL LETTER DOUBLE THORN
+ {0xA7D5, 0xA7D9, prAL, gcLC}, // [5] LATIN SMALL LETTER DOUBLE WYNN..LATIN SMALL LETTER SIGMOID S
+ {0xA7F2, 0xA7F4, prAL, gcLm}, // [3] MODIFIER LETTER CAPITAL C..MODIFIER LETTER CAPITAL Q
+ {0xA7F5, 0xA7F6, prAL, gcLC}, // [2] LATIN CAPITAL LETTER REVERSED HALF H..LATIN SMALL LETTER REVERSED HALF H
+ {0xA7F7, 0xA7F7, prAL, gcLo}, // LATIN EPIGRAPHIC LETTER SIDEWAYS I
+ {0xA7F8, 0xA7F9, prAL, gcLm}, // [2] MODIFIER LETTER CAPITAL H WITH STROKE..MODIFIER LETTER SMALL LIGATURE OE
+ {0xA7FA, 0xA7FA, prAL, gcLl}, // LATIN LETTER SMALL CAPITAL TURNED M
+ {0xA7FB, 0xA7FF, prAL, gcLo}, // [5] LATIN EPIGRAPHIC LETTER REVERSED F..LATIN EPIGRAPHIC LETTER ARCHAIC M
+ {0xA800, 0xA801, prAL, gcLo}, // [2] SYLOTI NAGRI LETTER A..SYLOTI NAGRI LETTER I
+ {0xA802, 0xA802, prCM, gcMn}, // SYLOTI NAGRI SIGN DVISVARA
+ {0xA803, 0xA805, prAL, gcLo}, // [3] SYLOTI NAGRI LETTER U..SYLOTI NAGRI LETTER O
+ {0xA806, 0xA806, prCM, gcMn}, // SYLOTI NAGRI SIGN HASANTA
+ {0xA807, 0xA80A, prAL, gcLo}, // [4] SYLOTI NAGRI LETTER KO..SYLOTI NAGRI LETTER GHO
+ {0xA80B, 0xA80B, prCM, gcMn}, // SYLOTI NAGRI SIGN ANUSVARA
+ {0xA80C, 0xA822, prAL, gcLo}, // [23] SYLOTI NAGRI LETTER CO..SYLOTI NAGRI LETTER HO
+ {0xA823, 0xA824, prCM, gcMc}, // [2] SYLOTI NAGRI VOWEL SIGN A..SYLOTI NAGRI VOWEL SIGN I
+ {0xA825, 0xA826, prCM, gcMn}, // [2] SYLOTI NAGRI VOWEL SIGN U..SYLOTI NAGRI VOWEL SIGN E
+ {0xA827, 0xA827, prCM, gcMc}, // SYLOTI NAGRI VOWEL SIGN OO
+ {0xA828, 0xA82B, prAL, gcSo}, // [4] SYLOTI NAGRI POETRY MARK-1..SYLOTI NAGRI POETRY MARK-4
+ {0xA82C, 0xA82C, prCM, gcMn}, // SYLOTI NAGRI SIGN ALTERNATE HASANTA
+ {0xA830, 0xA835, prAL, gcNo}, // [6] NORTH INDIC FRACTION ONE QUARTER..NORTH INDIC FRACTION THREE SIXTEENTHS
+ {0xA836, 0xA837, prAL, gcSo}, // [2] NORTH INDIC QUARTER MARK..NORTH INDIC PLACEHOLDER MARK
+ {0xA838, 0xA838, prPO, gcSc}, // NORTH INDIC RUPEE MARK
+ {0xA839, 0xA839, prAL, gcSo}, // NORTH INDIC QUANTITY MARK
+ {0xA840, 0xA873, prAL, gcLo}, // [52] PHAGS-PA LETTER KA..PHAGS-PA LETTER CANDRABINDU
+ {0xA874, 0xA875, prBB, gcPo}, // [2] PHAGS-PA SINGLE HEAD MARK..PHAGS-PA DOUBLE HEAD MARK
+ {0xA876, 0xA877, prEX, gcPo}, // [2] PHAGS-PA MARK SHAD..PHAGS-PA MARK DOUBLE SHAD
+ {0xA880, 0xA881, prCM, gcMc}, // [2] SAURASHTRA SIGN ANUSVARA..SAURASHTRA SIGN VISARGA
+ {0xA882, 0xA8B3, prAL, gcLo}, // [50] SAURASHTRA LETTER A..SAURASHTRA LETTER LLA
+ {0xA8B4, 0xA8C3, prCM, gcMc}, // [16] SAURASHTRA CONSONANT SIGN HAARU..SAURASHTRA VOWEL SIGN AU
+ {0xA8C4, 0xA8C5, prCM, gcMn}, // [2] SAURASHTRA SIGN VIRAMA..SAURASHTRA SIGN CANDRABINDU
+ {0xA8CE, 0xA8CF, prBA, gcPo}, // [2] SAURASHTRA DANDA..SAURASHTRA DOUBLE DANDA
+ {0xA8D0, 0xA8D9, prNU, gcNd}, // [10] SAURASHTRA DIGIT ZERO..SAURASHTRA DIGIT NINE
+ {0xA8E0, 0xA8F1, prCM, gcMn}, // [18] COMBINING DEVANAGARI DIGIT ZERO..COMBINING DEVANAGARI SIGN AVAGRAHA
+ {0xA8F2, 0xA8F7, prAL, gcLo}, // [6] DEVANAGARI SIGN SPACING CANDRABINDU..DEVANAGARI SIGN CANDRABINDU AVAGRAHA
+ {0xA8F8, 0xA8FA, prAL, gcPo}, // [3] DEVANAGARI SIGN PUSHPIKA..DEVANAGARI CARET
+ {0xA8FB, 0xA8FB, prAL, gcLo}, // DEVANAGARI HEADSTROKE
+ {0xA8FC, 0xA8FC, prBB, gcPo}, // DEVANAGARI SIGN SIDDHAM
+ {0xA8FD, 0xA8FE, prAL, gcLo}, // [2] DEVANAGARI JAIN OM..DEVANAGARI LETTER AY
+ {0xA8FF, 0xA8FF, prCM, gcMn}, // DEVANAGARI VOWEL SIGN AY
+ {0xA900, 0xA909, prNU, gcNd}, // [10] KAYAH LI DIGIT ZERO..KAYAH LI DIGIT NINE
+ {0xA90A, 0xA925, prAL, gcLo}, // [28] KAYAH LI LETTER KA..KAYAH LI LETTER OO
+ {0xA926, 0xA92D, prCM, gcMn}, // [8] KAYAH LI VOWEL UE..KAYAH LI TONE CALYA PLOPHU
+ {0xA92E, 0xA92F, prBA, gcPo}, // [2] KAYAH LI SIGN CWI..KAYAH LI SIGN SHYA
+ {0xA930, 0xA946, prAL, gcLo}, // [23] REJANG LETTER KA..REJANG LETTER A
+ {0xA947, 0xA951, prCM, gcMn}, // [11] REJANG VOWEL SIGN I..REJANG CONSONANT SIGN R
+ {0xA952, 0xA953, prCM, gcMc}, // [2] REJANG CONSONANT SIGN H..REJANG VIRAMA
+ {0xA95F, 0xA95F, prAL, gcPo}, // REJANG SECTION MARK
+ {0xA960, 0xA97C, prJL, gcLo}, // [29] HANGUL CHOSEONG TIKEUT-MIEUM..HANGUL CHOSEONG SSANGYEORINHIEUH
+ {0xA980, 0xA982, prCM, gcMn}, // [3] JAVANESE SIGN PANYANGGA..JAVANESE SIGN LAYAR
+ {0xA983, 0xA983, prCM, gcMc}, // JAVANESE SIGN WIGNYAN
+ {0xA984, 0xA9B2, prAL, gcLo}, // [47] JAVANESE LETTER A..JAVANESE LETTER HA
+ {0xA9B3, 0xA9B3, prCM, gcMn}, // JAVANESE SIGN CECAK TELU
+ {0xA9B4, 0xA9B5, prCM, gcMc}, // [2] JAVANESE VOWEL SIGN TARUNG..JAVANESE VOWEL SIGN TOLONG
+ {0xA9B6, 0xA9B9, prCM, gcMn}, // [4] JAVANESE VOWEL SIGN WULU..JAVANESE VOWEL SIGN SUKU MENDUT
+ {0xA9BA, 0xA9BB, prCM, gcMc}, // [2] JAVANESE VOWEL SIGN TALING..JAVANESE VOWEL SIGN DIRGA MURE
+ {0xA9BC, 0xA9BD, prCM, gcMn}, // [2] JAVANESE VOWEL SIGN PEPET..JAVANESE CONSONANT SIGN KERET
+ {0xA9BE, 0xA9C0, prCM, gcMc}, // [3] JAVANESE CONSONANT SIGN PENGKAL..JAVANESE PANGKON
+ {0xA9C1, 0xA9C6, prAL, gcPo}, // [6] JAVANESE LEFT RERENGGAN..JAVANESE PADA WINDU
+ {0xA9C7, 0xA9C9, prBA, gcPo}, // [3] JAVANESE PADA PANGKAT..JAVANESE PADA LUNGSI
+ {0xA9CA, 0xA9CD, prAL, gcPo}, // [4] JAVANESE PADA ADEG..JAVANESE TURNED PADA PISELEH
+ {0xA9CF, 0xA9CF, prAL, gcLm}, // JAVANESE PANGRANGKEP
+ {0xA9D0, 0xA9D9, prNU, gcNd}, // [10] JAVANESE DIGIT ZERO..JAVANESE DIGIT NINE
+ {0xA9DE, 0xA9DF, prAL, gcPo}, // [2] JAVANESE PADA TIRTA TUMETES..JAVANESE PADA ISEN-ISEN
+ {0xA9E0, 0xA9E4, prSA, gcLo}, // [5] MYANMAR LETTER SHAN GHA..MYANMAR LETTER SHAN BHA
+ {0xA9E5, 0xA9E5, prSA, gcMn}, // MYANMAR SIGN SHAN SAW
+ {0xA9E6, 0xA9E6, prSA, gcLm}, // MYANMAR MODIFIER LETTER SHAN REDUPLICATION
+ {0xA9E7, 0xA9EF, prSA, gcLo}, // [9] MYANMAR LETTER TAI LAING NYA..MYANMAR LETTER TAI LAING NNA
+ {0xA9F0, 0xA9F9, prNU, gcNd}, // [10] MYANMAR TAI LAING DIGIT ZERO..MYANMAR TAI LAING DIGIT NINE
+ {0xA9FA, 0xA9FE, prSA, gcLo}, // [5] MYANMAR LETTER TAI LAING LLA..MYANMAR LETTER TAI LAING BHA
+ {0xAA00, 0xAA28, prAL, gcLo}, // [41] CHAM LETTER A..CHAM LETTER HA
+ {0xAA29, 0xAA2E, prCM, gcMn}, // [6] CHAM VOWEL SIGN AA..CHAM VOWEL SIGN OE
+ {0xAA2F, 0xAA30, prCM, gcMc}, // [2] CHAM VOWEL SIGN O..CHAM VOWEL SIGN AI
+ {0xAA31, 0xAA32, prCM, gcMn}, // [2] CHAM VOWEL SIGN AU..CHAM VOWEL SIGN UE
+ {0xAA33, 0xAA34, prCM, gcMc}, // [2] CHAM CONSONANT SIGN YA..CHAM CONSONANT SIGN RA
+ {0xAA35, 0xAA36, prCM, gcMn}, // [2] CHAM CONSONANT SIGN LA..CHAM CONSONANT SIGN WA
+ {0xAA40, 0xAA42, prAL, gcLo}, // [3] CHAM LETTER FINAL K..CHAM LETTER FINAL NG
+ {0xAA43, 0xAA43, prCM, gcMn}, // CHAM CONSONANT SIGN FINAL NG
+ {0xAA44, 0xAA4B, prAL, gcLo}, // [8] CHAM LETTER FINAL CH..CHAM LETTER FINAL SS
+ {0xAA4C, 0xAA4C, prCM, gcMn}, // CHAM CONSONANT SIGN FINAL M
+ {0xAA4D, 0xAA4D, prCM, gcMc}, // CHAM CONSONANT SIGN FINAL H
+ {0xAA50, 0xAA59, prNU, gcNd}, // [10] CHAM DIGIT ZERO..CHAM DIGIT NINE
+ {0xAA5C, 0xAA5C, prAL, gcPo}, // CHAM PUNCTUATION SPIRAL
+ {0xAA5D, 0xAA5F, prBA, gcPo}, // [3] CHAM PUNCTUATION DANDA..CHAM PUNCTUATION TRIPLE DANDA
+ {0xAA60, 0xAA6F, prSA, gcLo}, // [16] MYANMAR LETTER KHAMTI GA..MYANMAR LETTER KHAMTI FA
+ {0xAA70, 0xAA70, prSA, gcLm}, // MYANMAR MODIFIER LETTER KHAMTI REDUPLICATION
+ {0xAA71, 0xAA76, prSA, gcLo}, // [6] MYANMAR LETTER KHAMTI XA..MYANMAR LOGOGRAM KHAMTI HM
+ {0xAA77, 0xAA79, prSA, gcSo}, // [3] MYANMAR SYMBOL AITON EXCLAMATION..MYANMAR SYMBOL AITON TWO
+ {0xAA7A, 0xAA7A, prSA, gcLo}, // MYANMAR LETTER AITON RA
+ {0xAA7B, 0xAA7B, prSA, gcMc}, // MYANMAR SIGN PAO KAREN TONE
+ {0xAA7C, 0xAA7C, prSA, gcMn}, // MYANMAR SIGN TAI LAING TONE-2
+ {0xAA7D, 0xAA7D, prSA, gcMc}, // MYANMAR SIGN TAI LAING TONE-5
+ {0xAA7E, 0xAA7F, prSA, gcLo}, // [2] MYANMAR LETTER SHWE PALAUNG CHA..MYANMAR LETTER SHWE PALAUNG SHA
+ {0xAA80, 0xAAAF, prSA, gcLo}, // [48] TAI VIET LETTER LOW KO..TAI VIET LETTER HIGH O
+ {0xAAB0, 0xAAB0, prSA, gcMn}, // TAI VIET MAI KANG
+ {0xAAB1, 0xAAB1, prSA, gcLo}, // TAI VIET VOWEL AA
+ {0xAAB2, 0xAAB4, prSA, gcMn}, // [3] TAI VIET VOWEL I..TAI VIET VOWEL U
+ {0xAAB5, 0xAAB6, prSA, gcLo}, // [2] TAI VIET VOWEL E..TAI VIET VOWEL O
+ {0xAAB7, 0xAAB8, prSA, gcMn}, // [2] TAI VIET MAI KHIT..TAI VIET VOWEL IA
+ {0xAAB9, 0xAABD, prSA, gcLo}, // [5] TAI VIET VOWEL UEA..TAI VIET VOWEL AN
+ {0xAABE, 0xAABF, prSA, gcMn}, // [2] TAI VIET VOWEL AM..TAI VIET TONE MAI EK
+ {0xAAC0, 0xAAC0, prSA, gcLo}, // TAI VIET TONE MAI NUENG
+ {0xAAC1, 0xAAC1, prSA, gcMn}, // TAI VIET TONE MAI THO
+ {0xAAC2, 0xAAC2, prSA, gcLo}, // TAI VIET TONE MAI SONG
+ {0xAADB, 0xAADC, prSA, gcLo}, // [2] TAI VIET SYMBOL KON..TAI VIET SYMBOL NUENG
+ {0xAADD, 0xAADD, prSA, gcLm}, // TAI VIET SYMBOL SAM
+ {0xAADE, 0xAADF, prSA, gcPo}, // [2] TAI VIET SYMBOL HO HOI..TAI VIET SYMBOL KOI KOI
+ {0xAAE0, 0xAAEA, prAL, gcLo}, // [11] MEETEI MAYEK LETTER E..MEETEI MAYEK LETTER SSA
+ {0xAAEB, 0xAAEB, prCM, gcMc}, // MEETEI MAYEK VOWEL SIGN II
+ {0xAAEC, 0xAAED, prCM, gcMn}, // [2] MEETEI MAYEK VOWEL SIGN UU..MEETEI MAYEK VOWEL SIGN AAI
+ {0xAAEE, 0xAAEF, prCM, gcMc}, // [2] MEETEI MAYEK VOWEL SIGN AU..MEETEI MAYEK VOWEL SIGN AAU
+ {0xAAF0, 0xAAF1, prBA, gcPo}, // [2] MEETEI MAYEK CHEIKHAN..MEETEI MAYEK AHANG KHUDAM
+ {0xAAF2, 0xAAF2, prAL, gcLo}, // MEETEI MAYEK ANJI
+ {0xAAF3, 0xAAF4, prAL, gcLm}, // [2] MEETEI MAYEK SYLLABLE REPETITION MARK..MEETEI MAYEK WORD REPETITION MARK
+ {0xAAF5, 0xAAF5, prCM, gcMc}, // MEETEI MAYEK VOWEL SIGN VISARGA
+ {0xAAF6, 0xAAF6, prCM, gcMn}, // MEETEI MAYEK VIRAMA
+ {0xAB01, 0xAB06, prAL, gcLo}, // [6] ETHIOPIC SYLLABLE TTHU..ETHIOPIC SYLLABLE TTHO
+ {0xAB09, 0xAB0E, prAL, gcLo}, // [6] ETHIOPIC SYLLABLE DDHU..ETHIOPIC SYLLABLE DDHO
+ {0xAB11, 0xAB16, prAL, gcLo}, // [6] ETHIOPIC SYLLABLE DZU..ETHIOPIC SYLLABLE DZO
+ {0xAB20, 0xAB26, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE CCHHA..ETHIOPIC SYLLABLE CCHHO
+ {0xAB28, 0xAB2E, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE BBA..ETHIOPIC SYLLABLE BBO
+ {0xAB30, 0xAB5A, prAL, gcLl}, // [43] LATIN SMALL LETTER BARRED ALPHA..LATIN SMALL LETTER Y WITH SHORT RIGHT LEG
+ {0xAB5B, 0xAB5B, prAL, gcSk}, // MODIFIER BREVE WITH INVERTED BREVE
+ {0xAB5C, 0xAB5F, prAL, gcLm}, // [4] MODIFIER LETTER SMALL HENG..MODIFIER LETTER SMALL U WITH LEFT HOOK
+ {0xAB60, 0xAB68, prAL, gcLl}, // [9] LATIN SMALL LETTER SAKHA YAT..LATIN SMALL LETTER TURNED R WITH MIDDLE TILDE
+ {0xAB69, 0xAB69, prAL, gcLm}, // MODIFIER LETTER SMALL TURNED W
+ {0xAB6A, 0xAB6B, prAL, gcSk}, // [2] MODIFIER LETTER LEFT TACK..MODIFIER LETTER RIGHT TACK
+ {0xAB70, 0xABBF, prAL, gcLl}, // [80] CHEROKEE SMALL LETTER A..CHEROKEE SMALL LETTER YA
+ {0xABC0, 0xABE2, prAL, gcLo}, // [35] MEETEI MAYEK LETTER KOK..MEETEI MAYEK LETTER I LONSUM
+ {0xABE3, 0xABE4, prCM, gcMc}, // [2] MEETEI MAYEK VOWEL SIGN ONAP..MEETEI MAYEK VOWEL SIGN INAP
+ {0xABE5, 0xABE5, prCM, gcMn}, // MEETEI MAYEK VOWEL SIGN ANAP
+ {0xABE6, 0xABE7, prCM, gcMc}, // [2] MEETEI MAYEK VOWEL SIGN YENAP..MEETEI MAYEK VOWEL SIGN SOUNAP
+ {0xABE8, 0xABE8, prCM, gcMn}, // MEETEI MAYEK VOWEL SIGN UNAP
+ {0xABE9, 0xABEA, prCM, gcMc}, // [2] MEETEI MAYEK VOWEL SIGN CHEINAP..MEETEI MAYEK VOWEL SIGN NUNG
+ {0xABEB, 0xABEB, prBA, gcPo}, // MEETEI MAYEK CHEIKHEI
+ {0xABEC, 0xABEC, prCM, gcMc}, // MEETEI MAYEK LUM IYEK
+ {0xABED, 0xABED, prCM, gcMn}, // MEETEI MAYEK APUN IYEK
+ {0xABF0, 0xABF9, prNU, gcNd}, // [10] MEETEI MAYEK DIGIT ZERO..MEETEI MAYEK DIGIT NINE
+ {0xAC00, 0xAC00, prH2, gcLo}, // HANGUL SYLLABLE GA
+ {0xAC01, 0xAC1B, prH3, gcLo}, // [27] HANGUL SYLLABLE GAG..HANGUL SYLLABLE GAH
+ {0xAC1C, 0xAC1C, prH2, gcLo}, // HANGUL SYLLABLE GAE
+ {0xAC1D, 0xAC37, prH3, gcLo}, // [27] HANGUL SYLLABLE GAEG..HANGUL SYLLABLE GAEH
+ {0xAC38, 0xAC38, prH2, gcLo}, // HANGUL SYLLABLE GYA
+ {0xAC39, 0xAC53, prH3, gcLo}, // [27] HANGUL SYLLABLE GYAG..HANGUL SYLLABLE GYAH
+ {0xAC54, 0xAC54, prH2, gcLo}, // HANGUL SYLLABLE GYAE
+ {0xAC55, 0xAC6F, prH3, gcLo}, // [27] HANGUL SYLLABLE GYAEG..HANGUL SYLLABLE GYAEH
+ {0xAC70, 0xAC70, prH2, gcLo}, // HANGUL SYLLABLE GEO
+ {0xAC71, 0xAC8B, prH3, gcLo}, // [27] HANGUL SYLLABLE GEOG..HANGUL SYLLABLE GEOH
+ {0xAC8C, 0xAC8C, prH2, gcLo}, // HANGUL SYLLABLE GE
+ {0xAC8D, 0xACA7, prH3, gcLo}, // [27] HANGUL SYLLABLE GEG..HANGUL SYLLABLE GEH
+ {0xACA8, 0xACA8, prH2, gcLo}, // HANGUL SYLLABLE GYEO
+ {0xACA9, 0xACC3, prH3, gcLo}, // [27] HANGUL SYLLABLE GYEOG..HANGUL SYLLABLE GYEOH
+ {0xACC4, 0xACC4, prH2, gcLo}, // HANGUL SYLLABLE GYE
+ {0xACC5, 0xACDF, prH3, gcLo}, // [27] HANGUL SYLLABLE GYEG..HANGUL SYLLABLE GYEH
+ {0xACE0, 0xACE0, prH2, gcLo}, // HANGUL SYLLABLE GO
+ {0xACE1, 0xACFB, prH3, gcLo}, // [27] HANGUL SYLLABLE GOG..HANGUL SYLLABLE GOH
+ {0xACFC, 0xACFC, prH2, gcLo}, // HANGUL SYLLABLE GWA
+ {0xACFD, 0xAD17, prH3, gcLo}, // [27] HANGUL SYLLABLE GWAG..HANGUL SYLLABLE GWAH
+ {0xAD18, 0xAD18, prH2, gcLo}, // HANGUL SYLLABLE GWAE
+ {0xAD19, 0xAD33, prH3, gcLo}, // [27] HANGUL SYLLABLE GWAEG..HANGUL SYLLABLE GWAEH
+ {0xAD34, 0xAD34, prH2, gcLo}, // HANGUL SYLLABLE GOE
+ {0xAD35, 0xAD4F, prH3, gcLo}, // [27] HANGUL SYLLABLE GOEG..HANGUL SYLLABLE GOEH
+ {0xAD50, 0xAD50, prH2, gcLo}, // HANGUL SYLLABLE GYO
+ {0xAD51, 0xAD6B, prH3, gcLo}, // [27] HANGUL SYLLABLE GYOG..HANGUL SYLLABLE GYOH
+ {0xAD6C, 0xAD6C, prH2, gcLo}, // HANGUL SYLLABLE GU
+ {0xAD6D, 0xAD87, prH3, gcLo}, // [27] HANGUL SYLLABLE GUG..HANGUL SYLLABLE GUH
+ {0xAD88, 0xAD88, prH2, gcLo}, // HANGUL SYLLABLE GWEO
+ {0xAD89, 0xADA3, prH3, gcLo}, // [27] HANGUL SYLLABLE GWEOG..HANGUL SYLLABLE GWEOH
+ {0xADA4, 0xADA4, prH2, gcLo}, // HANGUL SYLLABLE GWE
+ {0xADA5, 0xADBF, prH3, gcLo}, // [27] HANGUL SYLLABLE GWEG..HANGUL SYLLABLE GWEH
+ {0xADC0, 0xADC0, prH2, gcLo}, // HANGUL SYLLABLE GWI
+ {0xADC1, 0xADDB, prH3, gcLo}, // [27] HANGUL SYLLABLE GWIG..HANGUL SYLLABLE GWIH
+ {0xADDC, 0xADDC, prH2, gcLo}, // HANGUL SYLLABLE GYU
+ {0xADDD, 0xADF7, prH3, gcLo}, // [27] HANGUL SYLLABLE GYUG..HANGUL SYLLABLE GYUH
+ {0xADF8, 0xADF8, prH2, gcLo}, // HANGUL SYLLABLE GEU
+ {0xADF9, 0xAE13, prH3, gcLo}, // [27] HANGUL SYLLABLE GEUG..HANGUL SYLLABLE GEUH
+ {0xAE14, 0xAE14, prH2, gcLo}, // HANGUL SYLLABLE GYI
+ {0xAE15, 0xAE2F, prH3, gcLo}, // [27] HANGUL SYLLABLE GYIG..HANGUL SYLLABLE GYIH
+ {0xAE30, 0xAE30, prH2, gcLo}, // HANGUL SYLLABLE GI
+ {0xAE31, 0xAE4B, prH3, gcLo}, // [27] HANGUL SYLLABLE GIG..HANGUL SYLLABLE GIH
+ {0xAE4C, 0xAE4C, prH2, gcLo}, // HANGUL SYLLABLE GGA
+ {0xAE4D, 0xAE67, prH3, gcLo}, // [27] HANGUL SYLLABLE GGAG..HANGUL SYLLABLE GGAH
+ {0xAE68, 0xAE68, prH2, gcLo}, // HANGUL SYLLABLE GGAE
+ {0xAE69, 0xAE83, prH3, gcLo}, // [27] HANGUL SYLLABLE GGAEG..HANGUL SYLLABLE GGAEH
+ {0xAE84, 0xAE84, prH2, gcLo}, // HANGUL SYLLABLE GGYA
+ {0xAE85, 0xAE9F, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYAG..HANGUL SYLLABLE GGYAH
+ {0xAEA0, 0xAEA0, prH2, gcLo}, // HANGUL SYLLABLE GGYAE
+ {0xAEA1, 0xAEBB, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYAEG..HANGUL SYLLABLE GGYAEH
+ {0xAEBC, 0xAEBC, prH2, gcLo}, // HANGUL SYLLABLE GGEO
+ {0xAEBD, 0xAED7, prH3, gcLo}, // [27] HANGUL SYLLABLE GGEOG..HANGUL SYLLABLE GGEOH
+ {0xAED8, 0xAED8, prH2, gcLo}, // HANGUL SYLLABLE GGE
+ {0xAED9, 0xAEF3, prH3, gcLo}, // [27] HANGUL SYLLABLE GGEG..HANGUL SYLLABLE GGEH
+ {0xAEF4, 0xAEF4, prH2, gcLo}, // HANGUL SYLLABLE GGYEO
+ {0xAEF5, 0xAF0F, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYEOG..HANGUL SYLLABLE GGYEOH
+ {0xAF10, 0xAF10, prH2, gcLo}, // HANGUL SYLLABLE GGYE
+ {0xAF11, 0xAF2B, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYEG..HANGUL SYLLABLE GGYEH
+ {0xAF2C, 0xAF2C, prH2, gcLo}, // HANGUL SYLLABLE GGO
+ {0xAF2D, 0xAF47, prH3, gcLo}, // [27] HANGUL SYLLABLE GGOG..HANGUL SYLLABLE GGOH
+ {0xAF48, 0xAF48, prH2, gcLo}, // HANGUL SYLLABLE GGWA
+ {0xAF49, 0xAF63, prH3, gcLo}, // [27] HANGUL SYLLABLE GGWAG..HANGUL SYLLABLE GGWAH
+ {0xAF64, 0xAF64, prH2, gcLo}, // HANGUL SYLLABLE GGWAE
+ {0xAF65, 0xAF7F, prH3, gcLo}, // [27] HANGUL SYLLABLE GGWAEG..HANGUL SYLLABLE GGWAEH
+ {0xAF80, 0xAF80, prH2, gcLo}, // HANGUL SYLLABLE GGOE
+ {0xAF81, 0xAF9B, prH3, gcLo}, // [27] HANGUL SYLLABLE GGOEG..HANGUL SYLLABLE GGOEH
+ {0xAF9C, 0xAF9C, prH2, gcLo}, // HANGUL SYLLABLE GGYO
+ {0xAF9D, 0xAFB7, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYOG..HANGUL SYLLABLE GGYOH
+ {0xAFB8, 0xAFB8, prH2, gcLo}, // HANGUL SYLLABLE GGU
+ {0xAFB9, 0xAFD3, prH3, gcLo}, // [27] HANGUL SYLLABLE GGUG..HANGUL SYLLABLE GGUH
+ {0xAFD4, 0xAFD4, prH2, gcLo}, // HANGUL SYLLABLE GGWEO
+ {0xAFD5, 0xAFEF, prH3, gcLo}, // [27] HANGUL SYLLABLE GGWEOG..HANGUL SYLLABLE GGWEOH
+ {0xAFF0, 0xAFF0, prH2, gcLo}, // HANGUL SYLLABLE GGWE
+ {0xAFF1, 0xB00B, prH3, gcLo}, // [27] HANGUL SYLLABLE GGWEG..HANGUL SYLLABLE GGWEH
+ {0xB00C, 0xB00C, prH2, gcLo}, // HANGUL SYLLABLE GGWI
+ {0xB00D, 0xB027, prH3, gcLo}, // [27] HANGUL SYLLABLE GGWIG..HANGUL SYLLABLE GGWIH
+ {0xB028, 0xB028, prH2, gcLo}, // HANGUL SYLLABLE GGYU
+ {0xB029, 0xB043, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYUG..HANGUL SYLLABLE GGYUH
+ {0xB044, 0xB044, prH2, gcLo}, // HANGUL SYLLABLE GGEU
+ {0xB045, 0xB05F, prH3, gcLo}, // [27] HANGUL SYLLABLE GGEUG..HANGUL SYLLABLE GGEUH
+ {0xB060, 0xB060, prH2, gcLo}, // HANGUL SYLLABLE GGYI
+ {0xB061, 0xB07B, prH3, gcLo}, // [27] HANGUL SYLLABLE GGYIG..HANGUL SYLLABLE GGYIH
+ {0xB07C, 0xB07C, prH2, gcLo}, // HANGUL SYLLABLE GGI
+ {0xB07D, 0xB097, prH3, gcLo}, // [27] HANGUL SYLLABLE GGIG..HANGUL SYLLABLE GGIH
+ {0xB098, 0xB098, prH2, gcLo}, // HANGUL SYLLABLE NA
+ {0xB099, 0xB0B3, prH3, gcLo}, // [27] HANGUL SYLLABLE NAG..HANGUL SYLLABLE NAH
+ {0xB0B4, 0xB0B4, prH2, gcLo}, // HANGUL SYLLABLE NAE
+ {0xB0B5, 0xB0CF, prH3, gcLo}, // [27] HANGUL SYLLABLE NAEG..HANGUL SYLLABLE NAEH
+ {0xB0D0, 0xB0D0, prH2, gcLo}, // HANGUL SYLLABLE NYA
+ {0xB0D1, 0xB0EB, prH3, gcLo}, // [27] HANGUL SYLLABLE NYAG..HANGUL SYLLABLE NYAH
+ {0xB0EC, 0xB0EC, prH2, gcLo}, // HANGUL SYLLABLE NYAE
+ {0xB0ED, 0xB107, prH3, gcLo}, // [27] HANGUL SYLLABLE NYAEG..HANGUL SYLLABLE NYAEH
+ {0xB108, 0xB108, prH2, gcLo}, // HANGUL SYLLABLE NEO
+ {0xB109, 0xB123, prH3, gcLo}, // [27] HANGUL SYLLABLE NEOG..HANGUL SYLLABLE NEOH
+ {0xB124, 0xB124, prH2, gcLo}, // HANGUL SYLLABLE NE
+ {0xB125, 0xB13F, prH3, gcLo}, // [27] HANGUL SYLLABLE NEG..HANGUL SYLLABLE NEH
+ {0xB140, 0xB140, prH2, gcLo}, // HANGUL SYLLABLE NYEO
+ {0xB141, 0xB15B, prH3, gcLo}, // [27] HANGUL SYLLABLE NYEOG..HANGUL SYLLABLE NYEOH
+ {0xB15C, 0xB15C, prH2, gcLo}, // HANGUL SYLLABLE NYE
+ {0xB15D, 0xB177, prH3, gcLo}, // [27] HANGUL SYLLABLE NYEG..HANGUL SYLLABLE NYEH
+ {0xB178, 0xB178, prH2, gcLo}, // HANGUL SYLLABLE NO
+ {0xB179, 0xB193, prH3, gcLo}, // [27] HANGUL SYLLABLE NOG..HANGUL SYLLABLE NOH
+ {0xB194, 0xB194, prH2, gcLo}, // HANGUL SYLLABLE NWA
+ {0xB195, 0xB1AF, prH3, gcLo}, // [27] HANGUL SYLLABLE NWAG..HANGUL SYLLABLE NWAH
+ {0xB1B0, 0xB1B0, prH2, gcLo}, // HANGUL SYLLABLE NWAE
+ {0xB1B1, 0xB1CB, prH3, gcLo}, // [27] HANGUL SYLLABLE NWAEG..HANGUL SYLLABLE NWAEH
+ {0xB1CC, 0xB1CC, prH2, gcLo}, // HANGUL SYLLABLE NOE
+ {0xB1CD, 0xB1E7, prH3, gcLo}, // [27] HANGUL SYLLABLE NOEG..HANGUL SYLLABLE NOEH
+ {0xB1E8, 0xB1E8, prH2, gcLo}, // HANGUL SYLLABLE NYO
+ {0xB1E9, 0xB203, prH3, gcLo}, // [27] HANGUL SYLLABLE NYOG..HANGUL SYLLABLE NYOH
+ {0xB204, 0xB204, prH2, gcLo}, // HANGUL SYLLABLE NU
+ {0xB205, 0xB21F, prH3, gcLo}, // [27] HANGUL SYLLABLE NUG..HANGUL SYLLABLE NUH
+ {0xB220, 0xB220, prH2, gcLo}, // HANGUL SYLLABLE NWEO
+ {0xB221, 0xB23B, prH3, gcLo}, // [27] HANGUL SYLLABLE NWEOG..HANGUL SYLLABLE NWEOH
+ {0xB23C, 0xB23C, prH2, gcLo}, // HANGUL SYLLABLE NWE
+ {0xB23D, 0xB257, prH3, gcLo}, // [27] HANGUL SYLLABLE NWEG..HANGUL SYLLABLE NWEH
+ {0xB258, 0xB258, prH2, gcLo}, // HANGUL SYLLABLE NWI
+ {0xB259, 0xB273, prH3, gcLo}, // [27] HANGUL SYLLABLE NWIG..HANGUL SYLLABLE NWIH
+ {0xB274, 0xB274, prH2, gcLo}, // HANGUL SYLLABLE NYU
+ {0xB275, 0xB28F, prH3, gcLo}, // [27] HANGUL SYLLABLE NYUG..HANGUL SYLLABLE NYUH
+ {0xB290, 0xB290, prH2, gcLo}, // HANGUL SYLLABLE NEU
+ {0xB291, 0xB2AB, prH3, gcLo}, // [27] HANGUL SYLLABLE NEUG..HANGUL SYLLABLE NEUH
+ {0xB2AC, 0xB2AC, prH2, gcLo}, // HANGUL SYLLABLE NYI
+ {0xB2AD, 0xB2C7, prH3, gcLo}, // [27] HANGUL SYLLABLE NYIG..HANGUL SYLLABLE NYIH
+ {0xB2C8, 0xB2C8, prH2, gcLo}, // HANGUL SYLLABLE NI
+ {0xB2C9, 0xB2E3, prH3, gcLo}, // [27] HANGUL SYLLABLE NIG..HANGUL SYLLABLE NIH
+ {0xB2E4, 0xB2E4, prH2, gcLo}, // HANGUL SYLLABLE DA
+ {0xB2E5, 0xB2FF, prH3, gcLo}, // [27] HANGUL SYLLABLE DAG..HANGUL SYLLABLE DAH
+ {0xB300, 0xB300, prH2, gcLo}, // HANGUL SYLLABLE DAE
+ {0xB301, 0xB31B, prH3, gcLo}, // [27] HANGUL SYLLABLE DAEG..HANGUL SYLLABLE DAEH
+ {0xB31C, 0xB31C, prH2, gcLo}, // HANGUL SYLLABLE DYA
+ {0xB31D, 0xB337, prH3, gcLo}, // [27] HANGUL SYLLABLE DYAG..HANGUL SYLLABLE DYAH
+ {0xB338, 0xB338, prH2, gcLo}, // HANGUL SYLLABLE DYAE
+ {0xB339, 0xB353, prH3, gcLo}, // [27] HANGUL SYLLABLE DYAEG..HANGUL SYLLABLE DYAEH
+ {0xB354, 0xB354, prH2, gcLo}, // HANGUL SYLLABLE DEO
+ {0xB355, 0xB36F, prH3, gcLo}, // [27] HANGUL SYLLABLE DEOG..HANGUL SYLLABLE DEOH
+ {0xB370, 0xB370, prH2, gcLo}, // HANGUL SYLLABLE DE
+ {0xB371, 0xB38B, prH3, gcLo}, // [27] HANGUL SYLLABLE DEG..HANGUL SYLLABLE DEH
+ {0xB38C, 0xB38C, prH2, gcLo}, // HANGUL SYLLABLE DYEO
+ {0xB38D, 0xB3A7, prH3, gcLo}, // [27] HANGUL SYLLABLE DYEOG..HANGUL SYLLABLE DYEOH
+ {0xB3A8, 0xB3A8, prH2, gcLo}, // HANGUL SYLLABLE DYE
+ {0xB3A9, 0xB3C3, prH3, gcLo}, // [27] HANGUL SYLLABLE DYEG..HANGUL SYLLABLE DYEH
+ {0xB3C4, 0xB3C4, prH2, gcLo}, // HANGUL SYLLABLE DO
+ {0xB3C5, 0xB3DF, prH3, gcLo}, // [27] HANGUL SYLLABLE DOG..HANGUL SYLLABLE DOH
+ {0xB3E0, 0xB3E0, prH2, gcLo}, // HANGUL SYLLABLE DWA
+ {0xB3E1, 0xB3FB, prH3, gcLo}, // [27] HANGUL SYLLABLE DWAG..HANGUL SYLLABLE DWAH
+ {0xB3FC, 0xB3FC, prH2, gcLo}, // HANGUL SYLLABLE DWAE
+ {0xB3FD, 0xB417, prH3, gcLo}, // [27] HANGUL SYLLABLE DWAEG..HANGUL SYLLABLE DWAEH
+ {0xB418, 0xB418, prH2, gcLo}, // HANGUL SYLLABLE DOE
+ {0xB419, 0xB433, prH3, gcLo}, // [27] HANGUL SYLLABLE DOEG..HANGUL SYLLABLE DOEH
+ {0xB434, 0xB434, prH2, gcLo}, // HANGUL SYLLABLE DYO
+ {0xB435, 0xB44F, prH3, gcLo}, // [27] HANGUL SYLLABLE DYOG..HANGUL SYLLABLE DYOH
+ {0xB450, 0xB450, prH2, gcLo}, // HANGUL SYLLABLE DU
+ {0xB451, 0xB46B, prH3, gcLo}, // [27] HANGUL SYLLABLE DUG..HANGUL SYLLABLE DUH
+ {0xB46C, 0xB46C, prH2, gcLo}, // HANGUL SYLLABLE DWEO
+ {0xB46D, 0xB487, prH3, gcLo}, // [27] HANGUL SYLLABLE DWEOG..HANGUL SYLLABLE DWEOH
+ {0xB488, 0xB488, prH2, gcLo}, // HANGUL SYLLABLE DWE
+ {0xB489, 0xB4A3, prH3, gcLo}, // [27] HANGUL SYLLABLE DWEG..HANGUL SYLLABLE DWEH
+ {0xB4A4, 0xB4A4, prH2, gcLo}, // HANGUL SYLLABLE DWI
+ {0xB4A5, 0xB4BF, prH3, gcLo}, // [27] HANGUL SYLLABLE DWIG..HANGUL SYLLABLE DWIH
+ {0xB4C0, 0xB4C0, prH2, gcLo}, // HANGUL SYLLABLE DYU
+ {0xB4C1, 0xB4DB, prH3, gcLo}, // [27] HANGUL SYLLABLE DYUG..HANGUL SYLLABLE DYUH
+ {0xB4DC, 0xB4DC, prH2, gcLo}, // HANGUL SYLLABLE DEU
+ {0xB4DD, 0xB4F7, prH3, gcLo}, // [27] HANGUL SYLLABLE DEUG..HANGUL SYLLABLE DEUH
+ {0xB4F8, 0xB4F8, prH2, gcLo}, // HANGUL SYLLABLE DYI
+ {0xB4F9, 0xB513, prH3, gcLo}, // [27] HANGUL SYLLABLE DYIG..HANGUL SYLLABLE DYIH
+ {0xB514, 0xB514, prH2, gcLo}, // HANGUL SYLLABLE DI
+ {0xB515, 0xB52F, prH3, gcLo}, // [27] HANGUL SYLLABLE DIG..HANGUL SYLLABLE DIH
+ {0xB530, 0xB530, prH2, gcLo}, // HANGUL SYLLABLE DDA
+ {0xB531, 0xB54B, prH3, gcLo}, // [27] HANGUL SYLLABLE DDAG..HANGUL SYLLABLE DDAH
+ {0xB54C, 0xB54C, prH2, gcLo}, // HANGUL SYLLABLE DDAE
+ {0xB54D, 0xB567, prH3, gcLo}, // [27] HANGUL SYLLABLE DDAEG..HANGUL SYLLABLE DDAEH
+ {0xB568, 0xB568, prH2, gcLo}, // HANGUL SYLLABLE DDYA
+ {0xB569, 0xB583, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYAG..HANGUL SYLLABLE DDYAH
+ {0xB584, 0xB584, prH2, gcLo}, // HANGUL SYLLABLE DDYAE
+ {0xB585, 0xB59F, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYAEG..HANGUL SYLLABLE DDYAEH
+ {0xB5A0, 0xB5A0, prH2, gcLo}, // HANGUL SYLLABLE DDEO
+ {0xB5A1, 0xB5BB, prH3, gcLo}, // [27] HANGUL SYLLABLE DDEOG..HANGUL SYLLABLE DDEOH
+ {0xB5BC, 0xB5BC, prH2, gcLo}, // HANGUL SYLLABLE DDE
+ {0xB5BD, 0xB5D7, prH3, gcLo}, // [27] HANGUL SYLLABLE DDEG..HANGUL SYLLABLE DDEH
+ {0xB5D8, 0xB5D8, prH2, gcLo}, // HANGUL SYLLABLE DDYEO
+ {0xB5D9, 0xB5F3, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYEOG..HANGUL SYLLABLE DDYEOH
+ {0xB5F4, 0xB5F4, prH2, gcLo}, // HANGUL SYLLABLE DDYE
+ {0xB5F5, 0xB60F, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYEG..HANGUL SYLLABLE DDYEH
+ {0xB610, 0xB610, prH2, gcLo}, // HANGUL SYLLABLE DDO
+ {0xB611, 0xB62B, prH3, gcLo}, // [27] HANGUL SYLLABLE DDOG..HANGUL SYLLABLE DDOH
+ {0xB62C, 0xB62C, prH2, gcLo}, // HANGUL SYLLABLE DDWA
+ {0xB62D, 0xB647, prH3, gcLo}, // [27] HANGUL SYLLABLE DDWAG..HANGUL SYLLABLE DDWAH
+ {0xB648, 0xB648, prH2, gcLo}, // HANGUL SYLLABLE DDWAE
+ {0xB649, 0xB663, prH3, gcLo}, // [27] HANGUL SYLLABLE DDWAEG..HANGUL SYLLABLE DDWAEH
+ {0xB664, 0xB664, prH2, gcLo}, // HANGUL SYLLABLE DDOE
+ {0xB665, 0xB67F, prH3, gcLo}, // [27] HANGUL SYLLABLE DDOEG..HANGUL SYLLABLE DDOEH
+ {0xB680, 0xB680, prH2, gcLo}, // HANGUL SYLLABLE DDYO
+ {0xB681, 0xB69B, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYOG..HANGUL SYLLABLE DDYOH
+ {0xB69C, 0xB69C, prH2, gcLo}, // HANGUL SYLLABLE DDU
+ {0xB69D, 0xB6B7, prH3, gcLo}, // [27] HANGUL SYLLABLE DDUG..HANGUL SYLLABLE DDUH
+ {0xB6B8, 0xB6B8, prH2, gcLo}, // HANGUL SYLLABLE DDWEO
+ {0xB6B9, 0xB6D3, prH3, gcLo}, // [27] HANGUL SYLLABLE DDWEOG..HANGUL SYLLABLE DDWEOH
+ {0xB6D4, 0xB6D4, prH2, gcLo}, // HANGUL SYLLABLE DDWE
+ {0xB6D5, 0xB6EF, prH3, gcLo}, // [27] HANGUL SYLLABLE DDWEG..HANGUL SYLLABLE DDWEH
+ {0xB6F0, 0xB6F0, prH2, gcLo}, // HANGUL SYLLABLE DDWI
+ {0xB6F1, 0xB70B, prH3, gcLo}, // [27] HANGUL SYLLABLE DDWIG..HANGUL SYLLABLE DDWIH
+ {0xB70C, 0xB70C, prH2, gcLo}, // HANGUL SYLLABLE DDYU
+ {0xB70D, 0xB727, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYUG..HANGUL SYLLABLE DDYUH
+ {0xB728, 0xB728, prH2, gcLo}, // HANGUL SYLLABLE DDEU
+ {0xB729, 0xB743, prH3, gcLo}, // [27] HANGUL SYLLABLE DDEUG..HANGUL SYLLABLE DDEUH
+ {0xB744, 0xB744, prH2, gcLo}, // HANGUL SYLLABLE DDYI
+ {0xB745, 0xB75F, prH3, gcLo}, // [27] HANGUL SYLLABLE DDYIG..HANGUL SYLLABLE DDYIH
+ {0xB760, 0xB760, prH2, gcLo}, // HANGUL SYLLABLE DDI
+ {0xB761, 0xB77B, prH3, gcLo}, // [27] HANGUL SYLLABLE DDIG..HANGUL SYLLABLE DDIH
+ {0xB77C, 0xB77C, prH2, gcLo}, // HANGUL SYLLABLE RA
+ {0xB77D, 0xB797, prH3, gcLo}, // [27] HANGUL SYLLABLE RAG..HANGUL SYLLABLE RAH
+ {0xB798, 0xB798, prH2, gcLo}, // HANGUL SYLLABLE RAE
+ {0xB799, 0xB7B3, prH3, gcLo}, // [27] HANGUL SYLLABLE RAEG..HANGUL SYLLABLE RAEH
+ {0xB7B4, 0xB7B4, prH2, gcLo}, // HANGUL SYLLABLE RYA
+ {0xB7B5, 0xB7CF, prH3, gcLo}, // [27] HANGUL SYLLABLE RYAG..HANGUL SYLLABLE RYAH
+ {0xB7D0, 0xB7D0, prH2, gcLo}, // HANGUL SYLLABLE RYAE
+ {0xB7D1, 0xB7EB, prH3, gcLo}, // [27] HANGUL SYLLABLE RYAEG..HANGUL SYLLABLE RYAEH
+ {0xB7EC, 0xB7EC, prH2, gcLo}, // HANGUL SYLLABLE REO
+ {0xB7ED, 0xB807, prH3, gcLo}, // [27] HANGUL SYLLABLE REOG..HANGUL SYLLABLE REOH
+ {0xB808, 0xB808, prH2, gcLo}, // HANGUL SYLLABLE RE
+ {0xB809, 0xB823, prH3, gcLo}, // [27] HANGUL SYLLABLE REG..HANGUL SYLLABLE REH
+ {0xB824, 0xB824, prH2, gcLo}, // HANGUL SYLLABLE RYEO
+ {0xB825, 0xB83F, prH3, gcLo}, // [27] HANGUL SYLLABLE RYEOG..HANGUL SYLLABLE RYEOH
+ {0xB840, 0xB840, prH2, gcLo}, // HANGUL SYLLABLE RYE
+ {0xB841, 0xB85B, prH3, gcLo}, // [27] HANGUL SYLLABLE RYEG..HANGUL SYLLABLE RYEH
+ {0xB85C, 0xB85C, prH2, gcLo}, // HANGUL SYLLABLE RO
+ {0xB85D, 0xB877, prH3, gcLo}, // [27] HANGUL SYLLABLE ROG..HANGUL SYLLABLE ROH
+ {0xB878, 0xB878, prH2, gcLo}, // HANGUL SYLLABLE RWA
+ {0xB879, 0xB893, prH3, gcLo}, // [27] HANGUL SYLLABLE RWAG..HANGUL SYLLABLE RWAH
+ {0xB894, 0xB894, prH2, gcLo}, // HANGUL SYLLABLE RWAE
+ {0xB895, 0xB8AF, prH3, gcLo}, // [27] HANGUL SYLLABLE RWAEG..HANGUL SYLLABLE RWAEH
+ {0xB8B0, 0xB8B0, prH2, gcLo}, // HANGUL SYLLABLE ROE
+ {0xB8B1, 0xB8CB, prH3, gcLo}, // [27] HANGUL SYLLABLE ROEG..HANGUL SYLLABLE ROEH
+ {0xB8CC, 0xB8CC, prH2, gcLo}, // HANGUL SYLLABLE RYO
+ {0xB8CD, 0xB8E7, prH3, gcLo}, // [27] HANGUL SYLLABLE RYOG..HANGUL SYLLABLE RYOH
+ {0xB8E8, 0xB8E8, prH2, gcLo}, // HANGUL SYLLABLE RU
+ {0xB8E9, 0xB903, prH3, gcLo}, // [27] HANGUL SYLLABLE RUG..HANGUL SYLLABLE RUH
+ {0xB904, 0xB904, prH2, gcLo}, // HANGUL SYLLABLE RWEO
+ {0xB905, 0xB91F, prH3, gcLo}, // [27] HANGUL SYLLABLE RWEOG..HANGUL SYLLABLE RWEOH
+ {0xB920, 0xB920, prH2, gcLo}, // HANGUL SYLLABLE RWE
+ {0xB921, 0xB93B, prH3, gcLo}, // [27] HANGUL SYLLABLE RWEG..HANGUL SYLLABLE RWEH
+ {0xB93C, 0xB93C, prH2, gcLo}, // HANGUL SYLLABLE RWI
+ {0xB93D, 0xB957, prH3, gcLo}, // [27] HANGUL SYLLABLE RWIG..HANGUL SYLLABLE RWIH
+ {0xB958, 0xB958, prH2, gcLo}, // HANGUL SYLLABLE RYU
+ {0xB959, 0xB973, prH3, gcLo}, // [27] HANGUL SYLLABLE RYUG..HANGUL SYLLABLE RYUH
+ {0xB974, 0xB974, prH2, gcLo}, // HANGUL SYLLABLE REU
+ {0xB975, 0xB98F, prH3, gcLo}, // [27] HANGUL SYLLABLE REUG..HANGUL SYLLABLE REUH
+ {0xB990, 0xB990, prH2, gcLo}, // HANGUL SYLLABLE RYI
+ {0xB991, 0xB9AB, prH3, gcLo}, // [27] HANGUL SYLLABLE RYIG..HANGUL SYLLABLE RYIH
+ {0xB9AC, 0xB9AC, prH2, gcLo}, // HANGUL SYLLABLE RI
+ {0xB9AD, 0xB9C7, prH3, gcLo}, // [27] HANGUL SYLLABLE RIG..HANGUL SYLLABLE RIH
+ {0xB9C8, 0xB9C8, prH2, gcLo}, // HANGUL SYLLABLE MA
+ {0xB9C9, 0xB9E3, prH3, gcLo}, // [27] HANGUL SYLLABLE MAG..HANGUL SYLLABLE MAH
+ {0xB9E4, 0xB9E4, prH2, gcLo}, // HANGUL SYLLABLE MAE
+ {0xB9E5, 0xB9FF, prH3, gcLo}, // [27] HANGUL SYLLABLE MAEG..HANGUL SYLLABLE MAEH
+ {0xBA00, 0xBA00, prH2, gcLo}, // HANGUL SYLLABLE MYA
+ {0xBA01, 0xBA1B, prH3, gcLo}, // [27] HANGUL SYLLABLE MYAG..HANGUL SYLLABLE MYAH
+ {0xBA1C, 0xBA1C, prH2, gcLo}, // HANGUL SYLLABLE MYAE
+ {0xBA1D, 0xBA37, prH3, gcLo}, // [27] HANGUL SYLLABLE MYAEG..HANGUL SYLLABLE MYAEH
+ {0xBA38, 0xBA38, prH2, gcLo}, // HANGUL SYLLABLE MEO
+ {0xBA39, 0xBA53, prH3, gcLo}, // [27] HANGUL SYLLABLE MEOG..HANGUL SYLLABLE MEOH
+ {0xBA54, 0xBA54, prH2, gcLo}, // HANGUL SYLLABLE ME
+ {0xBA55, 0xBA6F, prH3, gcLo}, // [27] HANGUL SYLLABLE MEG..HANGUL SYLLABLE MEH
+ {0xBA70, 0xBA70, prH2, gcLo}, // HANGUL SYLLABLE MYEO
+ {0xBA71, 0xBA8B, prH3, gcLo}, // [27] HANGUL SYLLABLE MYEOG..HANGUL SYLLABLE MYEOH
+ {0xBA8C, 0xBA8C, prH2, gcLo}, // HANGUL SYLLABLE MYE
+ {0xBA8D, 0xBAA7, prH3, gcLo}, // [27] HANGUL SYLLABLE MYEG..HANGUL SYLLABLE MYEH
+ {0xBAA8, 0xBAA8, prH2, gcLo}, // HANGUL SYLLABLE MO
+ {0xBAA9, 0xBAC3, prH3, gcLo}, // [27] HANGUL SYLLABLE MOG..HANGUL SYLLABLE MOH
+ {0xBAC4, 0xBAC4, prH2, gcLo}, // HANGUL SYLLABLE MWA
+ {0xBAC5, 0xBADF, prH3, gcLo}, // [27] HANGUL SYLLABLE MWAG..HANGUL SYLLABLE MWAH
+ {0xBAE0, 0xBAE0, prH2, gcLo}, // HANGUL SYLLABLE MWAE
+ {0xBAE1, 0xBAFB, prH3, gcLo}, // [27] HANGUL SYLLABLE MWAEG..HANGUL SYLLABLE MWAEH
+ {0xBAFC, 0xBAFC, prH2, gcLo}, // HANGUL SYLLABLE MOE
+ {0xBAFD, 0xBB17, prH3, gcLo}, // [27] HANGUL SYLLABLE MOEG..HANGUL SYLLABLE MOEH
+ {0xBB18, 0xBB18, prH2, gcLo}, // HANGUL SYLLABLE MYO
+ {0xBB19, 0xBB33, prH3, gcLo}, // [27] HANGUL SYLLABLE MYOG..HANGUL SYLLABLE MYOH
+ {0xBB34, 0xBB34, prH2, gcLo}, // HANGUL SYLLABLE MU
+ {0xBB35, 0xBB4F, prH3, gcLo}, // [27] HANGUL SYLLABLE MUG..HANGUL SYLLABLE MUH
+ {0xBB50, 0xBB50, prH2, gcLo}, // HANGUL SYLLABLE MWEO
+ {0xBB51, 0xBB6B, prH3, gcLo}, // [27] HANGUL SYLLABLE MWEOG..HANGUL SYLLABLE MWEOH
+ {0xBB6C, 0xBB6C, prH2, gcLo}, // HANGUL SYLLABLE MWE
+ {0xBB6D, 0xBB87, prH3, gcLo}, // [27] HANGUL SYLLABLE MWEG..HANGUL SYLLABLE MWEH
+ {0xBB88, 0xBB88, prH2, gcLo}, // HANGUL SYLLABLE MWI
+ {0xBB89, 0xBBA3, prH3, gcLo}, // [27] HANGUL SYLLABLE MWIG..HANGUL SYLLABLE MWIH
+ {0xBBA4, 0xBBA4, prH2, gcLo}, // HANGUL SYLLABLE MYU
+ {0xBBA5, 0xBBBF, prH3, gcLo}, // [27] HANGUL SYLLABLE MYUG..HANGUL SYLLABLE MYUH
+ {0xBBC0, 0xBBC0, prH2, gcLo}, // HANGUL SYLLABLE MEU
+ {0xBBC1, 0xBBDB, prH3, gcLo}, // [27] HANGUL SYLLABLE MEUG..HANGUL SYLLABLE MEUH
+ {0xBBDC, 0xBBDC, prH2, gcLo}, // HANGUL SYLLABLE MYI
+ {0xBBDD, 0xBBF7, prH3, gcLo}, // [27] HANGUL SYLLABLE MYIG..HANGUL SYLLABLE MYIH
+ {0xBBF8, 0xBBF8, prH2, gcLo}, // HANGUL SYLLABLE MI
+ {0xBBF9, 0xBC13, prH3, gcLo}, // [27] HANGUL SYLLABLE MIG..HANGUL SYLLABLE MIH
+ {0xBC14, 0xBC14, prH2, gcLo}, // HANGUL SYLLABLE BA
+ {0xBC15, 0xBC2F, prH3, gcLo}, // [27] HANGUL SYLLABLE BAG..HANGUL SYLLABLE BAH
+ {0xBC30, 0xBC30, prH2, gcLo}, // HANGUL SYLLABLE BAE
+ {0xBC31, 0xBC4B, prH3, gcLo}, // [27] HANGUL SYLLABLE BAEG..HANGUL SYLLABLE BAEH
+ {0xBC4C, 0xBC4C, prH2, gcLo}, // HANGUL SYLLABLE BYA
+ {0xBC4D, 0xBC67, prH3, gcLo}, // [27] HANGUL SYLLABLE BYAG..HANGUL SYLLABLE BYAH
+ {0xBC68, 0xBC68, prH2, gcLo}, // HANGUL SYLLABLE BYAE
+ {0xBC69, 0xBC83, prH3, gcLo}, // [27] HANGUL SYLLABLE BYAEG..HANGUL SYLLABLE BYAEH
+ {0xBC84, 0xBC84, prH2, gcLo}, // HANGUL SYLLABLE BEO
+ {0xBC85, 0xBC9F, prH3, gcLo}, // [27] HANGUL SYLLABLE BEOG..HANGUL SYLLABLE BEOH
+ {0xBCA0, 0xBCA0, prH2, gcLo}, // HANGUL SYLLABLE BE
+ {0xBCA1, 0xBCBB, prH3, gcLo}, // [27] HANGUL SYLLABLE BEG..HANGUL SYLLABLE BEH
+ {0xBCBC, 0xBCBC, prH2, gcLo}, // HANGUL SYLLABLE BYEO
+ {0xBCBD, 0xBCD7, prH3, gcLo}, // [27] HANGUL SYLLABLE BYEOG..HANGUL SYLLABLE BYEOH
+ {0xBCD8, 0xBCD8, prH2, gcLo}, // HANGUL SYLLABLE BYE
+ {0xBCD9, 0xBCF3, prH3, gcLo}, // [27] HANGUL SYLLABLE BYEG..HANGUL SYLLABLE BYEH
+ {0xBCF4, 0xBCF4, prH2, gcLo}, // HANGUL SYLLABLE BO
+ {0xBCF5, 0xBD0F, prH3, gcLo}, // [27] HANGUL SYLLABLE BOG..HANGUL SYLLABLE BOH
+ {0xBD10, 0xBD10, prH2, gcLo}, // HANGUL SYLLABLE BWA
+ {0xBD11, 0xBD2B, prH3, gcLo}, // [27] HANGUL SYLLABLE BWAG..HANGUL SYLLABLE BWAH
+ {0xBD2C, 0xBD2C, prH2, gcLo}, // HANGUL SYLLABLE BWAE
+ {0xBD2D, 0xBD47, prH3, gcLo}, // [27] HANGUL SYLLABLE BWAEG..HANGUL SYLLABLE BWAEH
+ {0xBD48, 0xBD48, prH2, gcLo}, // HANGUL SYLLABLE BOE
+ {0xBD49, 0xBD63, prH3, gcLo}, // [27] HANGUL SYLLABLE BOEG..HANGUL SYLLABLE BOEH
+ {0xBD64, 0xBD64, prH2, gcLo}, // HANGUL SYLLABLE BYO
+ {0xBD65, 0xBD7F, prH3, gcLo}, // [27] HANGUL SYLLABLE BYOG..HANGUL SYLLABLE BYOH
+ {0xBD80, 0xBD80, prH2, gcLo}, // HANGUL SYLLABLE BU
+ {0xBD81, 0xBD9B, prH3, gcLo}, // [27] HANGUL SYLLABLE BUG..HANGUL SYLLABLE BUH
+ {0xBD9C, 0xBD9C, prH2, gcLo}, // HANGUL SYLLABLE BWEO
+ {0xBD9D, 0xBDB7, prH3, gcLo}, // [27] HANGUL SYLLABLE BWEOG..HANGUL SYLLABLE BWEOH
+ {0xBDB8, 0xBDB8, prH2, gcLo}, // HANGUL SYLLABLE BWE
+ {0xBDB9, 0xBDD3, prH3, gcLo}, // [27] HANGUL SYLLABLE BWEG..HANGUL SYLLABLE BWEH
+ {0xBDD4, 0xBDD4, prH2, gcLo}, // HANGUL SYLLABLE BWI
+ {0xBDD5, 0xBDEF, prH3, gcLo}, // [27] HANGUL SYLLABLE BWIG..HANGUL SYLLABLE BWIH
+ {0xBDF0, 0xBDF0, prH2, gcLo}, // HANGUL SYLLABLE BYU
+ {0xBDF1, 0xBE0B, prH3, gcLo}, // [27] HANGUL SYLLABLE BYUG..HANGUL SYLLABLE BYUH
+ {0xBE0C, 0xBE0C, prH2, gcLo}, // HANGUL SYLLABLE BEU
+ {0xBE0D, 0xBE27, prH3, gcLo}, // [27] HANGUL SYLLABLE BEUG..HANGUL SYLLABLE BEUH
+ {0xBE28, 0xBE28, prH2, gcLo}, // HANGUL SYLLABLE BYI
+ {0xBE29, 0xBE43, prH3, gcLo}, // [27] HANGUL SYLLABLE BYIG..HANGUL SYLLABLE BYIH
+ {0xBE44, 0xBE44, prH2, gcLo}, // HANGUL SYLLABLE BI
+ {0xBE45, 0xBE5F, prH3, gcLo}, // [27] HANGUL SYLLABLE BIG..HANGUL SYLLABLE BIH
+ {0xBE60, 0xBE60, prH2, gcLo}, // HANGUL SYLLABLE BBA
+ {0xBE61, 0xBE7B, prH3, gcLo}, // [27] HANGUL SYLLABLE BBAG..HANGUL SYLLABLE BBAH
+ {0xBE7C, 0xBE7C, prH2, gcLo}, // HANGUL SYLLABLE BBAE
+ {0xBE7D, 0xBE97, prH3, gcLo}, // [27] HANGUL SYLLABLE BBAEG..HANGUL SYLLABLE BBAEH
+ {0xBE98, 0xBE98, prH2, gcLo}, // HANGUL SYLLABLE BBYA
+ {0xBE99, 0xBEB3, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYAG..HANGUL SYLLABLE BBYAH
+ {0xBEB4, 0xBEB4, prH2, gcLo}, // HANGUL SYLLABLE BBYAE
+ {0xBEB5, 0xBECF, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYAEG..HANGUL SYLLABLE BBYAEH
+ {0xBED0, 0xBED0, prH2, gcLo}, // HANGUL SYLLABLE BBEO
+ {0xBED1, 0xBEEB, prH3, gcLo}, // [27] HANGUL SYLLABLE BBEOG..HANGUL SYLLABLE BBEOH
+ {0xBEEC, 0xBEEC, prH2, gcLo}, // HANGUL SYLLABLE BBE
+ {0xBEED, 0xBF07, prH3, gcLo}, // [27] HANGUL SYLLABLE BBEG..HANGUL SYLLABLE BBEH
+ {0xBF08, 0xBF08, prH2, gcLo}, // HANGUL SYLLABLE BBYEO
+ {0xBF09, 0xBF23, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYEOG..HANGUL SYLLABLE BBYEOH
+ {0xBF24, 0xBF24, prH2, gcLo}, // HANGUL SYLLABLE BBYE
+ {0xBF25, 0xBF3F, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYEG..HANGUL SYLLABLE BBYEH
+ {0xBF40, 0xBF40, prH2, gcLo}, // HANGUL SYLLABLE BBO
+ {0xBF41, 0xBF5B, prH3, gcLo}, // [27] HANGUL SYLLABLE BBOG..HANGUL SYLLABLE BBOH
+ {0xBF5C, 0xBF5C, prH2, gcLo}, // HANGUL SYLLABLE BBWA
+ {0xBF5D, 0xBF77, prH3, gcLo}, // [27] HANGUL SYLLABLE BBWAG..HANGUL SYLLABLE BBWAH
+ {0xBF78, 0xBF78, prH2, gcLo}, // HANGUL SYLLABLE BBWAE
+ {0xBF79, 0xBF93, prH3, gcLo}, // [27] HANGUL SYLLABLE BBWAEG..HANGUL SYLLABLE BBWAEH
+ {0xBF94, 0xBF94, prH2, gcLo}, // HANGUL SYLLABLE BBOE
+ {0xBF95, 0xBFAF, prH3, gcLo}, // [27] HANGUL SYLLABLE BBOEG..HANGUL SYLLABLE BBOEH
+ {0xBFB0, 0xBFB0, prH2, gcLo}, // HANGUL SYLLABLE BBYO
+ {0xBFB1, 0xBFCB, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYOG..HANGUL SYLLABLE BBYOH
+ {0xBFCC, 0xBFCC, prH2, gcLo}, // HANGUL SYLLABLE BBU
+ {0xBFCD, 0xBFE7, prH3, gcLo}, // [27] HANGUL SYLLABLE BBUG..HANGUL SYLLABLE BBUH
+ {0xBFE8, 0xBFE8, prH2, gcLo}, // HANGUL SYLLABLE BBWEO
+ {0xBFE9, 0xC003, prH3, gcLo}, // [27] HANGUL SYLLABLE BBWEOG..HANGUL SYLLABLE BBWEOH
+ {0xC004, 0xC004, prH2, gcLo}, // HANGUL SYLLABLE BBWE
+ {0xC005, 0xC01F, prH3, gcLo}, // [27] HANGUL SYLLABLE BBWEG..HANGUL SYLLABLE BBWEH
+ {0xC020, 0xC020, prH2, gcLo}, // HANGUL SYLLABLE BBWI
+ {0xC021, 0xC03B, prH3, gcLo}, // [27] HANGUL SYLLABLE BBWIG..HANGUL SYLLABLE BBWIH
+ {0xC03C, 0xC03C, prH2, gcLo}, // HANGUL SYLLABLE BBYU
+ {0xC03D, 0xC057, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYUG..HANGUL SYLLABLE BBYUH
+ {0xC058, 0xC058, prH2, gcLo}, // HANGUL SYLLABLE BBEU
+ {0xC059, 0xC073, prH3, gcLo}, // [27] HANGUL SYLLABLE BBEUG..HANGUL SYLLABLE BBEUH
+ {0xC074, 0xC074, prH2, gcLo}, // HANGUL SYLLABLE BBYI
+ {0xC075, 0xC08F, prH3, gcLo}, // [27] HANGUL SYLLABLE BBYIG..HANGUL SYLLABLE BBYIH
+ {0xC090, 0xC090, prH2, gcLo}, // HANGUL SYLLABLE BBI
+ {0xC091, 0xC0AB, prH3, gcLo}, // [27] HANGUL SYLLABLE BBIG..HANGUL SYLLABLE BBIH
+ {0xC0AC, 0xC0AC, prH2, gcLo}, // HANGUL SYLLABLE SA
+ {0xC0AD, 0xC0C7, prH3, gcLo}, // [27] HANGUL SYLLABLE SAG..HANGUL SYLLABLE SAH
+ {0xC0C8, 0xC0C8, prH2, gcLo}, // HANGUL SYLLABLE SAE
+ {0xC0C9, 0xC0E3, prH3, gcLo}, // [27] HANGUL SYLLABLE SAEG..HANGUL SYLLABLE SAEH
+ {0xC0E4, 0xC0E4, prH2, gcLo}, // HANGUL SYLLABLE SYA
+ {0xC0E5, 0xC0FF, prH3, gcLo}, // [27] HANGUL SYLLABLE SYAG..HANGUL SYLLABLE SYAH
+ {0xC100, 0xC100, prH2, gcLo}, // HANGUL SYLLABLE SYAE
+ {0xC101, 0xC11B, prH3, gcLo}, // [27] HANGUL SYLLABLE SYAEG..HANGUL SYLLABLE SYAEH
+ {0xC11C, 0xC11C, prH2, gcLo}, // HANGUL SYLLABLE SEO
+ {0xC11D, 0xC137, prH3, gcLo}, // [27] HANGUL SYLLABLE SEOG..HANGUL SYLLABLE SEOH
+ {0xC138, 0xC138, prH2, gcLo}, // HANGUL SYLLABLE SE
+ {0xC139, 0xC153, prH3, gcLo}, // [27] HANGUL SYLLABLE SEG..HANGUL SYLLABLE SEH
+ {0xC154, 0xC154, prH2, gcLo}, // HANGUL SYLLABLE SYEO
+ {0xC155, 0xC16F, prH3, gcLo}, // [27] HANGUL SYLLABLE SYEOG..HANGUL SYLLABLE SYEOH
+ {0xC170, 0xC170, prH2, gcLo}, // HANGUL SYLLABLE SYE
+ {0xC171, 0xC18B, prH3, gcLo}, // [27] HANGUL SYLLABLE SYEG..HANGUL SYLLABLE SYEH
+ {0xC18C, 0xC18C, prH2, gcLo}, // HANGUL SYLLABLE SO
+ {0xC18D, 0xC1A7, prH3, gcLo}, // [27] HANGUL SYLLABLE SOG..HANGUL SYLLABLE SOH
+ {0xC1A8, 0xC1A8, prH2, gcLo}, // HANGUL SYLLABLE SWA
+ {0xC1A9, 0xC1C3, prH3, gcLo}, // [27] HANGUL SYLLABLE SWAG..HANGUL SYLLABLE SWAH
+ {0xC1C4, 0xC1C4, prH2, gcLo}, // HANGUL SYLLABLE SWAE
+ {0xC1C5, 0xC1DF, prH3, gcLo}, // [27] HANGUL SYLLABLE SWAEG..HANGUL SYLLABLE SWAEH
+ {0xC1E0, 0xC1E0, prH2, gcLo}, // HANGUL SYLLABLE SOE
+ {0xC1E1, 0xC1FB, prH3, gcLo}, // [27] HANGUL SYLLABLE SOEG..HANGUL SYLLABLE SOEH
+ {0xC1FC, 0xC1FC, prH2, gcLo}, // HANGUL SYLLABLE SYO
+ {0xC1FD, 0xC217, prH3, gcLo}, // [27] HANGUL SYLLABLE SYOG..HANGUL SYLLABLE SYOH
+ {0xC218, 0xC218, prH2, gcLo}, // HANGUL SYLLABLE SU
+ {0xC219, 0xC233, prH3, gcLo}, // [27] HANGUL SYLLABLE SUG..HANGUL SYLLABLE SUH
+ {0xC234, 0xC234, prH2, gcLo}, // HANGUL SYLLABLE SWEO
+ {0xC235, 0xC24F, prH3, gcLo}, // [27] HANGUL SYLLABLE SWEOG..HANGUL SYLLABLE SWEOH
+ {0xC250, 0xC250, prH2, gcLo}, // HANGUL SYLLABLE SWE
+ {0xC251, 0xC26B, prH3, gcLo}, // [27] HANGUL SYLLABLE SWEG..HANGUL SYLLABLE SWEH
+ {0xC26C, 0xC26C, prH2, gcLo}, // HANGUL SYLLABLE SWI
+ {0xC26D, 0xC287, prH3, gcLo}, // [27] HANGUL SYLLABLE SWIG..HANGUL SYLLABLE SWIH
+ {0xC288, 0xC288, prH2, gcLo}, // HANGUL SYLLABLE SYU
+ {0xC289, 0xC2A3, prH3, gcLo}, // [27] HANGUL SYLLABLE SYUG..HANGUL SYLLABLE SYUH
+ {0xC2A4, 0xC2A4, prH2, gcLo}, // HANGUL SYLLABLE SEU
+ {0xC2A5, 0xC2BF, prH3, gcLo}, // [27] HANGUL SYLLABLE SEUG..HANGUL SYLLABLE SEUH
+ {0xC2C0, 0xC2C0, prH2, gcLo}, // HANGUL SYLLABLE SYI
+ {0xC2C1, 0xC2DB, prH3, gcLo}, // [27] HANGUL SYLLABLE SYIG..HANGUL SYLLABLE SYIH
+ {0xC2DC, 0xC2DC, prH2, gcLo}, // HANGUL SYLLABLE SI
+ {0xC2DD, 0xC2F7, prH3, gcLo}, // [27] HANGUL SYLLABLE SIG..HANGUL SYLLABLE SIH
+ {0xC2F8, 0xC2F8, prH2, gcLo}, // HANGUL SYLLABLE SSA
+ {0xC2F9, 0xC313, prH3, gcLo}, // [27] HANGUL SYLLABLE SSAG..HANGUL SYLLABLE SSAH
+ {0xC314, 0xC314, prH2, gcLo}, // HANGUL SYLLABLE SSAE
+ {0xC315, 0xC32F, prH3, gcLo}, // [27] HANGUL SYLLABLE SSAEG..HANGUL SYLLABLE SSAEH
+ {0xC330, 0xC330, prH2, gcLo}, // HANGUL SYLLABLE SSYA
+ {0xC331, 0xC34B, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYAG..HANGUL SYLLABLE SSYAH
+ {0xC34C, 0xC34C, prH2, gcLo}, // HANGUL SYLLABLE SSYAE
+ {0xC34D, 0xC367, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYAEG..HANGUL SYLLABLE SSYAEH
+ {0xC368, 0xC368, prH2, gcLo}, // HANGUL SYLLABLE SSEO
+ {0xC369, 0xC383, prH3, gcLo}, // [27] HANGUL SYLLABLE SSEOG..HANGUL SYLLABLE SSEOH
+ {0xC384, 0xC384, prH2, gcLo}, // HANGUL SYLLABLE SSE
+ {0xC385, 0xC39F, prH3, gcLo}, // [27] HANGUL SYLLABLE SSEG..HANGUL SYLLABLE SSEH
+ {0xC3A0, 0xC3A0, prH2, gcLo}, // HANGUL SYLLABLE SSYEO
+ {0xC3A1, 0xC3BB, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYEOG..HANGUL SYLLABLE SSYEOH
+ {0xC3BC, 0xC3BC, prH2, gcLo}, // HANGUL SYLLABLE SSYE
+ {0xC3BD, 0xC3D7, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYEG..HANGUL SYLLABLE SSYEH
+ {0xC3D8, 0xC3D8, prH2, gcLo}, // HANGUL SYLLABLE SSO
+ {0xC3D9, 0xC3F3, prH3, gcLo}, // [27] HANGUL SYLLABLE SSOG..HANGUL SYLLABLE SSOH
+ {0xC3F4, 0xC3F4, prH2, gcLo}, // HANGUL SYLLABLE SSWA
+ {0xC3F5, 0xC40F, prH3, gcLo}, // [27] HANGUL SYLLABLE SSWAG..HANGUL SYLLABLE SSWAH
+ {0xC410, 0xC410, prH2, gcLo}, // HANGUL SYLLABLE SSWAE
+ {0xC411, 0xC42B, prH3, gcLo}, // [27] HANGUL SYLLABLE SSWAEG..HANGUL SYLLABLE SSWAEH
+ {0xC42C, 0xC42C, prH2, gcLo}, // HANGUL SYLLABLE SSOE
+ {0xC42D, 0xC447, prH3, gcLo}, // [27] HANGUL SYLLABLE SSOEG..HANGUL SYLLABLE SSOEH
+ {0xC448, 0xC448, prH2, gcLo}, // HANGUL SYLLABLE SSYO
+ {0xC449, 0xC463, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYOG..HANGUL SYLLABLE SSYOH
+ {0xC464, 0xC464, prH2, gcLo}, // HANGUL SYLLABLE SSU
+ {0xC465, 0xC47F, prH3, gcLo}, // [27] HANGUL SYLLABLE SSUG..HANGUL SYLLABLE SSUH
+ {0xC480, 0xC480, prH2, gcLo}, // HANGUL SYLLABLE SSWEO
+ {0xC481, 0xC49B, prH3, gcLo}, // [27] HANGUL SYLLABLE SSWEOG..HANGUL SYLLABLE SSWEOH
+ {0xC49C, 0xC49C, prH2, gcLo}, // HANGUL SYLLABLE SSWE
+ {0xC49D, 0xC4B7, prH3, gcLo}, // [27] HANGUL SYLLABLE SSWEG..HANGUL SYLLABLE SSWEH
+ {0xC4B8, 0xC4B8, prH2, gcLo}, // HANGUL SYLLABLE SSWI
+ {0xC4B9, 0xC4D3, prH3, gcLo}, // [27] HANGUL SYLLABLE SSWIG..HANGUL SYLLABLE SSWIH
+ {0xC4D4, 0xC4D4, prH2, gcLo}, // HANGUL SYLLABLE SSYU
+ {0xC4D5, 0xC4EF, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYUG..HANGUL SYLLABLE SSYUH
+ {0xC4F0, 0xC4F0, prH2, gcLo}, // HANGUL SYLLABLE SSEU
+ {0xC4F1, 0xC50B, prH3, gcLo}, // [27] HANGUL SYLLABLE SSEUG..HANGUL SYLLABLE SSEUH
+ {0xC50C, 0xC50C, prH2, gcLo}, // HANGUL SYLLABLE SSYI
+ {0xC50D, 0xC527, prH3, gcLo}, // [27] HANGUL SYLLABLE SSYIG..HANGUL SYLLABLE SSYIH
+ {0xC528, 0xC528, prH2, gcLo}, // HANGUL SYLLABLE SSI
+ {0xC529, 0xC543, prH3, gcLo}, // [27] HANGUL SYLLABLE SSIG..HANGUL SYLLABLE SSIH
+ {0xC544, 0xC544, prH2, gcLo}, // HANGUL SYLLABLE A
+ {0xC545, 0xC55F, prH3, gcLo}, // [27] HANGUL SYLLABLE AG..HANGUL SYLLABLE AH
+ {0xC560, 0xC560, prH2, gcLo}, // HANGUL SYLLABLE AE
+ {0xC561, 0xC57B, prH3, gcLo}, // [27] HANGUL SYLLABLE AEG..HANGUL SYLLABLE AEH
+ {0xC57C, 0xC57C, prH2, gcLo}, // HANGUL SYLLABLE YA
+ {0xC57D, 0xC597, prH3, gcLo}, // [27] HANGUL SYLLABLE YAG..HANGUL SYLLABLE YAH
+ {0xC598, 0xC598, prH2, gcLo}, // HANGUL SYLLABLE YAE
+ {0xC599, 0xC5B3, prH3, gcLo}, // [27] HANGUL SYLLABLE YAEG..HANGUL SYLLABLE YAEH
+ {0xC5B4, 0xC5B4, prH2, gcLo}, // HANGUL SYLLABLE EO
+ {0xC5B5, 0xC5CF, prH3, gcLo}, // [27] HANGUL SYLLABLE EOG..HANGUL SYLLABLE EOH
+ {0xC5D0, 0xC5D0, prH2, gcLo}, // HANGUL SYLLABLE E
+ {0xC5D1, 0xC5EB, prH3, gcLo}, // [27] HANGUL SYLLABLE EG..HANGUL SYLLABLE EH
+ {0xC5EC, 0xC5EC, prH2, gcLo}, // HANGUL SYLLABLE YEO
+ {0xC5ED, 0xC607, prH3, gcLo}, // [27] HANGUL SYLLABLE YEOG..HANGUL SYLLABLE YEOH
+ {0xC608, 0xC608, prH2, gcLo}, // HANGUL SYLLABLE YE
+ {0xC609, 0xC623, prH3, gcLo}, // [27] HANGUL SYLLABLE YEG..HANGUL SYLLABLE YEH
+ {0xC624, 0xC624, prH2, gcLo}, // HANGUL SYLLABLE O
+ {0xC625, 0xC63F, prH3, gcLo}, // [27] HANGUL SYLLABLE OG..HANGUL SYLLABLE OH
+ {0xC640, 0xC640, prH2, gcLo}, // HANGUL SYLLABLE WA
+ {0xC641, 0xC65B, prH3, gcLo}, // [27] HANGUL SYLLABLE WAG..HANGUL SYLLABLE WAH
+ {0xC65C, 0xC65C, prH2, gcLo}, // HANGUL SYLLABLE WAE
+ {0xC65D, 0xC677, prH3, gcLo}, // [27] HANGUL SYLLABLE WAEG..HANGUL SYLLABLE WAEH
+ {0xC678, 0xC678, prH2, gcLo}, // HANGUL SYLLABLE OE
+ {0xC679, 0xC693, prH3, gcLo}, // [27] HANGUL SYLLABLE OEG..HANGUL SYLLABLE OEH
+ {0xC694, 0xC694, prH2, gcLo}, // HANGUL SYLLABLE YO
+ {0xC695, 0xC6AF, prH3, gcLo}, // [27] HANGUL SYLLABLE YOG..HANGUL SYLLABLE YOH
+ {0xC6B0, 0xC6B0, prH2, gcLo}, // HANGUL SYLLABLE U
+ {0xC6B1, 0xC6CB, prH3, gcLo}, // [27] HANGUL SYLLABLE UG..HANGUL SYLLABLE UH
+ {0xC6CC, 0xC6CC, prH2, gcLo}, // HANGUL SYLLABLE WEO
+ {0xC6CD, 0xC6E7, prH3, gcLo}, // [27] HANGUL SYLLABLE WEOG..HANGUL SYLLABLE WEOH
+ {0xC6E8, 0xC6E8, prH2, gcLo}, // HANGUL SYLLABLE WE
+ {0xC6E9, 0xC703, prH3, gcLo}, // [27] HANGUL SYLLABLE WEG..HANGUL SYLLABLE WEH
+ {0xC704, 0xC704, prH2, gcLo}, // HANGUL SYLLABLE WI
+ {0xC705, 0xC71F, prH3, gcLo}, // [27] HANGUL SYLLABLE WIG..HANGUL SYLLABLE WIH
+ {0xC720, 0xC720, prH2, gcLo}, // HANGUL SYLLABLE YU
+ {0xC721, 0xC73B, prH3, gcLo}, // [27] HANGUL SYLLABLE YUG..HANGUL SYLLABLE YUH
+ {0xC73C, 0xC73C, prH2, gcLo}, // HANGUL SYLLABLE EU
+ {0xC73D, 0xC757, prH3, gcLo}, // [27] HANGUL SYLLABLE EUG..HANGUL SYLLABLE EUH
+ {0xC758, 0xC758, prH2, gcLo}, // HANGUL SYLLABLE YI
+ {0xC759, 0xC773, prH3, gcLo}, // [27] HANGUL SYLLABLE YIG..HANGUL SYLLABLE YIH
+ {0xC774, 0xC774, prH2, gcLo}, // HANGUL SYLLABLE I
+ {0xC775, 0xC78F, prH3, gcLo}, // [27] HANGUL SYLLABLE IG..HANGUL SYLLABLE IH
+ {0xC790, 0xC790, prH2, gcLo}, // HANGUL SYLLABLE JA
+ {0xC791, 0xC7AB, prH3, gcLo}, // [27] HANGUL SYLLABLE JAG..HANGUL SYLLABLE JAH
+ {0xC7AC, 0xC7AC, prH2, gcLo}, // HANGUL SYLLABLE JAE
+ {0xC7AD, 0xC7C7, prH3, gcLo}, // [27] HANGUL SYLLABLE JAEG..HANGUL SYLLABLE JAEH
+ {0xC7C8, 0xC7C8, prH2, gcLo}, // HANGUL SYLLABLE JYA
+ {0xC7C9, 0xC7E3, prH3, gcLo}, // [27] HANGUL SYLLABLE JYAG..HANGUL SYLLABLE JYAH
+ {0xC7E4, 0xC7E4, prH2, gcLo}, // HANGUL SYLLABLE JYAE
+ {0xC7E5, 0xC7FF, prH3, gcLo}, // [27] HANGUL SYLLABLE JYAEG..HANGUL SYLLABLE JYAEH
+ {0xC800, 0xC800, prH2, gcLo}, // HANGUL SYLLABLE JEO
+ {0xC801, 0xC81B, prH3, gcLo}, // [27] HANGUL SYLLABLE JEOG..HANGUL SYLLABLE JEOH
+ {0xC81C, 0xC81C, prH2, gcLo}, // HANGUL SYLLABLE JE
+ {0xC81D, 0xC837, prH3, gcLo}, // [27] HANGUL SYLLABLE JEG..HANGUL SYLLABLE JEH
+ {0xC838, 0xC838, prH2, gcLo}, // HANGUL SYLLABLE JYEO
+ {0xC839, 0xC853, prH3, gcLo}, // [27] HANGUL SYLLABLE JYEOG..HANGUL SYLLABLE JYEOH
+ {0xC854, 0xC854, prH2, gcLo}, // HANGUL SYLLABLE JYE
+ {0xC855, 0xC86F, prH3, gcLo}, // [27] HANGUL SYLLABLE JYEG..HANGUL SYLLABLE JYEH
+ {0xC870, 0xC870, prH2, gcLo}, // HANGUL SYLLABLE JO
+ {0xC871, 0xC88B, prH3, gcLo}, // [27] HANGUL SYLLABLE JOG..HANGUL SYLLABLE JOH
+ {0xC88C, 0xC88C, prH2, gcLo}, // HANGUL SYLLABLE JWA
+ {0xC88D, 0xC8A7, prH3, gcLo}, // [27] HANGUL SYLLABLE JWAG..HANGUL SYLLABLE JWAH
+ {0xC8A8, 0xC8A8, prH2, gcLo}, // HANGUL SYLLABLE JWAE
+ {0xC8A9, 0xC8C3, prH3, gcLo}, // [27] HANGUL SYLLABLE JWAEG..HANGUL SYLLABLE JWAEH
+ {0xC8C4, 0xC8C4, prH2, gcLo}, // HANGUL SYLLABLE JOE
+ {0xC8C5, 0xC8DF, prH3, gcLo}, // [27] HANGUL SYLLABLE JOEG..HANGUL SYLLABLE JOEH
+ {0xC8E0, 0xC8E0, prH2, gcLo}, // HANGUL SYLLABLE JYO
+ {0xC8E1, 0xC8FB, prH3, gcLo}, // [27] HANGUL SYLLABLE JYOG..HANGUL SYLLABLE JYOH
+ {0xC8FC, 0xC8FC, prH2, gcLo}, // HANGUL SYLLABLE JU
+ {0xC8FD, 0xC917, prH3, gcLo}, // [27] HANGUL SYLLABLE JUG..HANGUL SYLLABLE JUH
+ {0xC918, 0xC918, prH2, gcLo}, // HANGUL SYLLABLE JWEO
+ {0xC919, 0xC933, prH3, gcLo}, // [27] HANGUL SYLLABLE JWEOG..HANGUL SYLLABLE JWEOH
+ {0xC934, 0xC934, prH2, gcLo}, // HANGUL SYLLABLE JWE
+ {0xC935, 0xC94F, prH3, gcLo}, // [27] HANGUL SYLLABLE JWEG..HANGUL SYLLABLE JWEH
+ {0xC950, 0xC950, prH2, gcLo}, // HANGUL SYLLABLE JWI
+ {0xC951, 0xC96B, prH3, gcLo}, // [27] HANGUL SYLLABLE JWIG..HANGUL SYLLABLE JWIH
+ {0xC96C, 0xC96C, prH2, gcLo}, // HANGUL SYLLABLE JYU
+ {0xC96D, 0xC987, prH3, gcLo}, // [27] HANGUL SYLLABLE JYUG..HANGUL SYLLABLE JYUH
+ {0xC988, 0xC988, prH2, gcLo}, // HANGUL SYLLABLE JEU
+ {0xC989, 0xC9A3, prH3, gcLo}, // [27] HANGUL SYLLABLE JEUG..HANGUL SYLLABLE JEUH
+ {0xC9A4, 0xC9A4, prH2, gcLo}, // HANGUL SYLLABLE JYI
+ {0xC9A5, 0xC9BF, prH3, gcLo}, // [27] HANGUL SYLLABLE JYIG..HANGUL SYLLABLE JYIH
+ {0xC9C0, 0xC9C0, prH2, gcLo}, // HANGUL SYLLABLE JI
+ {0xC9C1, 0xC9DB, prH3, gcLo}, // [27] HANGUL SYLLABLE JIG..HANGUL SYLLABLE JIH
+ {0xC9DC, 0xC9DC, prH2, gcLo}, // HANGUL SYLLABLE JJA
+ {0xC9DD, 0xC9F7, prH3, gcLo}, // [27] HANGUL SYLLABLE JJAG..HANGUL SYLLABLE JJAH
+ {0xC9F8, 0xC9F8, prH2, gcLo}, // HANGUL SYLLABLE JJAE
+ {0xC9F9, 0xCA13, prH3, gcLo}, // [27] HANGUL SYLLABLE JJAEG..HANGUL SYLLABLE JJAEH
+ {0xCA14, 0xCA14, prH2, gcLo}, // HANGUL SYLLABLE JJYA
+ {0xCA15, 0xCA2F, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYAG..HANGUL SYLLABLE JJYAH
+ {0xCA30, 0xCA30, prH2, gcLo}, // HANGUL SYLLABLE JJYAE
+ {0xCA31, 0xCA4B, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYAEG..HANGUL SYLLABLE JJYAEH
+ {0xCA4C, 0xCA4C, prH2, gcLo}, // HANGUL SYLLABLE JJEO
+ {0xCA4D, 0xCA67, prH3, gcLo}, // [27] HANGUL SYLLABLE JJEOG..HANGUL SYLLABLE JJEOH
+ {0xCA68, 0xCA68, prH2, gcLo}, // HANGUL SYLLABLE JJE
+ {0xCA69, 0xCA83, prH3, gcLo}, // [27] HANGUL SYLLABLE JJEG..HANGUL SYLLABLE JJEH
+ {0xCA84, 0xCA84, prH2, gcLo}, // HANGUL SYLLABLE JJYEO
+ {0xCA85, 0xCA9F, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYEOG..HANGUL SYLLABLE JJYEOH
+ {0xCAA0, 0xCAA0, prH2, gcLo}, // HANGUL SYLLABLE JJYE
+ {0xCAA1, 0xCABB, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYEG..HANGUL SYLLABLE JJYEH
+ {0xCABC, 0xCABC, prH2, gcLo}, // HANGUL SYLLABLE JJO
+ {0xCABD, 0xCAD7, prH3, gcLo}, // [27] HANGUL SYLLABLE JJOG..HANGUL SYLLABLE JJOH
+ {0xCAD8, 0xCAD8, prH2, gcLo}, // HANGUL SYLLABLE JJWA
+ {0xCAD9, 0xCAF3, prH3, gcLo}, // [27] HANGUL SYLLABLE JJWAG..HANGUL SYLLABLE JJWAH
+ {0xCAF4, 0xCAF4, prH2, gcLo}, // HANGUL SYLLABLE JJWAE
+ {0xCAF5, 0xCB0F, prH3, gcLo}, // [27] HANGUL SYLLABLE JJWAEG..HANGUL SYLLABLE JJWAEH
+ {0xCB10, 0xCB10, prH2, gcLo}, // HANGUL SYLLABLE JJOE
+ {0xCB11, 0xCB2B, prH3, gcLo}, // [27] HANGUL SYLLABLE JJOEG..HANGUL SYLLABLE JJOEH
+ {0xCB2C, 0xCB2C, prH2, gcLo}, // HANGUL SYLLABLE JJYO
+ {0xCB2D, 0xCB47, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYOG..HANGUL SYLLABLE JJYOH
+ {0xCB48, 0xCB48, prH2, gcLo}, // HANGUL SYLLABLE JJU
+ {0xCB49, 0xCB63, prH3, gcLo}, // [27] HANGUL SYLLABLE JJUG..HANGUL SYLLABLE JJUH
+ {0xCB64, 0xCB64, prH2, gcLo}, // HANGUL SYLLABLE JJWEO
+ {0xCB65, 0xCB7F, prH3, gcLo}, // [27] HANGUL SYLLABLE JJWEOG..HANGUL SYLLABLE JJWEOH
+ {0xCB80, 0xCB80, prH2, gcLo}, // HANGUL SYLLABLE JJWE
+ {0xCB81, 0xCB9B, prH3, gcLo}, // [27] HANGUL SYLLABLE JJWEG..HANGUL SYLLABLE JJWEH
+ {0xCB9C, 0xCB9C, prH2, gcLo}, // HANGUL SYLLABLE JJWI
+ {0xCB9D, 0xCBB7, prH3, gcLo}, // [27] HANGUL SYLLABLE JJWIG..HANGUL SYLLABLE JJWIH
+ {0xCBB8, 0xCBB8, prH2, gcLo}, // HANGUL SYLLABLE JJYU
+ {0xCBB9, 0xCBD3, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYUG..HANGUL SYLLABLE JJYUH
+ {0xCBD4, 0xCBD4, prH2, gcLo}, // HANGUL SYLLABLE JJEU
+ {0xCBD5, 0xCBEF, prH3, gcLo}, // [27] HANGUL SYLLABLE JJEUG..HANGUL SYLLABLE JJEUH
+ {0xCBF0, 0xCBF0, prH2, gcLo}, // HANGUL SYLLABLE JJYI
+ {0xCBF1, 0xCC0B, prH3, gcLo}, // [27] HANGUL SYLLABLE JJYIG..HANGUL SYLLABLE JJYIH
+ {0xCC0C, 0xCC0C, prH2, gcLo}, // HANGUL SYLLABLE JJI
+ {0xCC0D, 0xCC27, prH3, gcLo}, // [27] HANGUL SYLLABLE JJIG..HANGUL SYLLABLE JJIH
+ {0xCC28, 0xCC28, prH2, gcLo}, // HANGUL SYLLABLE CA
+ {0xCC29, 0xCC43, prH3, gcLo}, // [27] HANGUL SYLLABLE CAG..HANGUL SYLLABLE CAH
+ {0xCC44, 0xCC44, prH2, gcLo}, // HANGUL SYLLABLE CAE
+ {0xCC45, 0xCC5F, prH3, gcLo}, // [27] HANGUL SYLLABLE CAEG..HANGUL SYLLABLE CAEH
+ {0xCC60, 0xCC60, prH2, gcLo}, // HANGUL SYLLABLE CYA
+ {0xCC61, 0xCC7B, prH3, gcLo}, // [27] HANGUL SYLLABLE CYAG..HANGUL SYLLABLE CYAH
+ {0xCC7C, 0xCC7C, prH2, gcLo}, // HANGUL SYLLABLE CYAE
+ {0xCC7D, 0xCC97, prH3, gcLo}, // [27] HANGUL SYLLABLE CYAEG..HANGUL SYLLABLE CYAEH
+ {0xCC98, 0xCC98, prH2, gcLo}, // HANGUL SYLLABLE CEO
+ {0xCC99, 0xCCB3, prH3, gcLo}, // [27] HANGUL SYLLABLE CEOG..HANGUL SYLLABLE CEOH
+ {0xCCB4, 0xCCB4, prH2, gcLo}, // HANGUL SYLLABLE CE
+ {0xCCB5, 0xCCCF, prH3, gcLo}, // [27] HANGUL SYLLABLE CEG..HANGUL SYLLABLE CEH
+ {0xCCD0, 0xCCD0, prH2, gcLo}, // HANGUL SYLLABLE CYEO
+ {0xCCD1, 0xCCEB, prH3, gcLo}, // [27] HANGUL SYLLABLE CYEOG..HANGUL SYLLABLE CYEOH
+ {0xCCEC, 0xCCEC, prH2, gcLo}, // HANGUL SYLLABLE CYE
+ {0xCCED, 0xCD07, prH3, gcLo}, // [27] HANGUL SYLLABLE CYEG..HANGUL SYLLABLE CYEH
+ {0xCD08, 0xCD08, prH2, gcLo}, // HANGUL SYLLABLE CO
+ {0xCD09, 0xCD23, prH3, gcLo}, // [27] HANGUL SYLLABLE COG..HANGUL SYLLABLE COH
+ {0xCD24, 0xCD24, prH2, gcLo}, // HANGUL SYLLABLE CWA
+ {0xCD25, 0xCD3F, prH3, gcLo}, // [27] HANGUL SYLLABLE CWAG..HANGUL SYLLABLE CWAH
+ {0xCD40, 0xCD40, prH2, gcLo}, // HANGUL SYLLABLE CWAE
+ {0xCD41, 0xCD5B, prH3, gcLo}, // [27] HANGUL SYLLABLE CWAEG..HANGUL SYLLABLE CWAEH
+ {0xCD5C, 0xCD5C, prH2, gcLo}, // HANGUL SYLLABLE COE
+ {0xCD5D, 0xCD77, prH3, gcLo}, // [27] HANGUL SYLLABLE COEG..HANGUL SYLLABLE COEH
+ {0xCD78, 0xCD78, prH2, gcLo}, // HANGUL SYLLABLE CYO
+ {0xCD79, 0xCD93, prH3, gcLo}, // [27] HANGUL SYLLABLE CYOG..HANGUL SYLLABLE CYOH
+ {0xCD94, 0xCD94, prH2, gcLo}, // HANGUL SYLLABLE CU
+ {0xCD95, 0xCDAF, prH3, gcLo}, // [27] HANGUL SYLLABLE CUG..HANGUL SYLLABLE CUH
+ {0xCDB0, 0xCDB0, prH2, gcLo}, // HANGUL SYLLABLE CWEO
+ {0xCDB1, 0xCDCB, prH3, gcLo}, // [27] HANGUL SYLLABLE CWEOG..HANGUL SYLLABLE CWEOH
+ {0xCDCC, 0xCDCC, prH2, gcLo}, // HANGUL SYLLABLE CWE
+ {0xCDCD, 0xCDE7, prH3, gcLo}, // [27] HANGUL SYLLABLE CWEG..HANGUL SYLLABLE CWEH
+ {0xCDE8, 0xCDE8, prH2, gcLo}, // HANGUL SYLLABLE CWI
+ {0xCDE9, 0xCE03, prH3, gcLo}, // [27] HANGUL SYLLABLE CWIG..HANGUL SYLLABLE CWIH
+ {0xCE04, 0xCE04, prH2, gcLo}, // HANGUL SYLLABLE CYU
+ {0xCE05, 0xCE1F, prH3, gcLo}, // [27] HANGUL SYLLABLE CYUG..HANGUL SYLLABLE CYUH
+ {0xCE20, 0xCE20, prH2, gcLo}, // HANGUL SYLLABLE CEU
+ {0xCE21, 0xCE3B, prH3, gcLo}, // [27] HANGUL SYLLABLE CEUG..HANGUL SYLLABLE CEUH
+ {0xCE3C, 0xCE3C, prH2, gcLo}, // HANGUL SYLLABLE CYI
+ {0xCE3D, 0xCE57, prH3, gcLo}, // [27] HANGUL SYLLABLE CYIG..HANGUL SYLLABLE CYIH
+ {0xCE58, 0xCE58, prH2, gcLo}, // HANGUL SYLLABLE CI
+ {0xCE59, 0xCE73, prH3, gcLo}, // [27] HANGUL SYLLABLE CIG..HANGUL SYLLABLE CIH
+ {0xCE74, 0xCE74, prH2, gcLo}, // HANGUL SYLLABLE KA
+ {0xCE75, 0xCE8F, prH3, gcLo}, // [27] HANGUL SYLLABLE KAG..HANGUL SYLLABLE KAH
+ {0xCE90, 0xCE90, prH2, gcLo}, // HANGUL SYLLABLE KAE
+ {0xCE91, 0xCEAB, prH3, gcLo}, // [27] HANGUL SYLLABLE KAEG..HANGUL SYLLABLE KAEH
+ {0xCEAC, 0xCEAC, prH2, gcLo}, // HANGUL SYLLABLE KYA
+ {0xCEAD, 0xCEC7, prH3, gcLo}, // [27] HANGUL SYLLABLE KYAG..HANGUL SYLLABLE KYAH
+ {0xCEC8, 0xCEC8, prH2, gcLo}, // HANGUL SYLLABLE KYAE
+ {0xCEC9, 0xCEE3, prH3, gcLo}, // [27] HANGUL SYLLABLE KYAEG..HANGUL SYLLABLE KYAEH
+ {0xCEE4, 0xCEE4, prH2, gcLo}, // HANGUL SYLLABLE KEO
+ {0xCEE5, 0xCEFF, prH3, gcLo}, // [27] HANGUL SYLLABLE KEOG..HANGUL SYLLABLE KEOH
+ {0xCF00, 0xCF00, prH2, gcLo}, // HANGUL SYLLABLE KE
+ {0xCF01, 0xCF1B, prH3, gcLo}, // [27] HANGUL SYLLABLE KEG..HANGUL SYLLABLE KEH
+ {0xCF1C, 0xCF1C, prH2, gcLo}, // HANGUL SYLLABLE KYEO
+ {0xCF1D, 0xCF37, prH3, gcLo}, // [27] HANGUL SYLLABLE KYEOG..HANGUL SYLLABLE KYEOH
+ {0xCF38, 0xCF38, prH2, gcLo}, // HANGUL SYLLABLE KYE
+ {0xCF39, 0xCF53, prH3, gcLo}, // [27] HANGUL SYLLABLE KYEG..HANGUL SYLLABLE KYEH
+ {0xCF54, 0xCF54, prH2, gcLo}, // HANGUL SYLLABLE KO
+ {0xCF55, 0xCF6F, prH3, gcLo}, // [27] HANGUL SYLLABLE KOG..HANGUL SYLLABLE KOH
+ {0xCF70, 0xCF70, prH2, gcLo}, // HANGUL SYLLABLE KWA
+ {0xCF71, 0xCF8B, prH3, gcLo}, // [27] HANGUL SYLLABLE KWAG..HANGUL SYLLABLE KWAH
+ {0xCF8C, 0xCF8C, prH2, gcLo}, // HANGUL SYLLABLE KWAE
+ {0xCF8D, 0xCFA7, prH3, gcLo}, // [27] HANGUL SYLLABLE KWAEG..HANGUL SYLLABLE KWAEH
+ {0xCFA8, 0xCFA8, prH2, gcLo}, // HANGUL SYLLABLE KOE
+ {0xCFA9, 0xCFC3, prH3, gcLo}, // [27] HANGUL SYLLABLE KOEG..HANGUL SYLLABLE KOEH
+ {0xCFC4, 0xCFC4, prH2, gcLo}, // HANGUL SYLLABLE KYO
+ {0xCFC5, 0xCFDF, prH3, gcLo}, // [27] HANGUL SYLLABLE KYOG..HANGUL SYLLABLE KYOH
+ {0xCFE0, 0xCFE0, prH2, gcLo}, // HANGUL SYLLABLE KU
+ {0xCFE1, 0xCFFB, prH3, gcLo}, // [27] HANGUL SYLLABLE KUG..HANGUL SYLLABLE KUH
+ {0xCFFC, 0xCFFC, prH2, gcLo}, // HANGUL SYLLABLE KWEO
+ {0xCFFD, 0xD017, prH3, gcLo}, // [27] HANGUL SYLLABLE KWEOG..HANGUL SYLLABLE KWEOH
+ {0xD018, 0xD018, prH2, gcLo}, // HANGUL SYLLABLE KWE
+ {0xD019, 0xD033, prH3, gcLo}, // [27] HANGUL SYLLABLE KWEG..HANGUL SYLLABLE KWEH
+ {0xD034, 0xD034, prH2, gcLo}, // HANGUL SYLLABLE KWI
+ {0xD035, 0xD04F, prH3, gcLo}, // [27] HANGUL SYLLABLE KWIG..HANGUL SYLLABLE KWIH
+ {0xD050, 0xD050, prH2, gcLo}, // HANGUL SYLLABLE KYU
+ {0xD051, 0xD06B, prH3, gcLo}, // [27] HANGUL SYLLABLE KYUG..HANGUL SYLLABLE KYUH
+ {0xD06C, 0xD06C, prH2, gcLo}, // HANGUL SYLLABLE KEU
+ {0xD06D, 0xD087, prH3, gcLo}, // [27] HANGUL SYLLABLE KEUG..HANGUL SYLLABLE KEUH
+ {0xD088, 0xD088, prH2, gcLo}, // HANGUL SYLLABLE KYI
+ {0xD089, 0xD0A3, prH3, gcLo}, // [27] HANGUL SYLLABLE KYIG..HANGUL SYLLABLE KYIH
+ {0xD0A4, 0xD0A4, prH2, gcLo}, // HANGUL SYLLABLE KI
+ {0xD0A5, 0xD0BF, prH3, gcLo}, // [27] HANGUL SYLLABLE KIG..HANGUL SYLLABLE KIH
+ {0xD0C0, 0xD0C0, prH2, gcLo}, // HANGUL SYLLABLE TA
+ {0xD0C1, 0xD0DB, prH3, gcLo}, // [27] HANGUL SYLLABLE TAG..HANGUL SYLLABLE TAH
+ {0xD0DC, 0xD0DC, prH2, gcLo}, // HANGUL SYLLABLE TAE
+ {0xD0DD, 0xD0F7, prH3, gcLo}, // [27] HANGUL SYLLABLE TAEG..HANGUL SYLLABLE TAEH
+ {0xD0F8, 0xD0F8, prH2, gcLo}, // HANGUL SYLLABLE TYA
+ {0xD0F9, 0xD113, prH3, gcLo}, // [27] HANGUL SYLLABLE TYAG..HANGUL SYLLABLE TYAH
+ {0xD114, 0xD114, prH2, gcLo}, // HANGUL SYLLABLE TYAE
+ {0xD115, 0xD12F, prH3, gcLo}, // [27] HANGUL SYLLABLE TYAEG..HANGUL SYLLABLE TYAEH
+ {0xD130, 0xD130, prH2, gcLo}, // HANGUL SYLLABLE TEO
+ {0xD131, 0xD14B, prH3, gcLo}, // [27] HANGUL SYLLABLE TEOG..HANGUL SYLLABLE TEOH
+ {0xD14C, 0xD14C, prH2, gcLo}, // HANGUL SYLLABLE TE
+ {0xD14D, 0xD167, prH3, gcLo}, // [27] HANGUL SYLLABLE TEG..HANGUL SYLLABLE TEH
+ {0xD168, 0xD168, prH2, gcLo}, // HANGUL SYLLABLE TYEO
+ {0xD169, 0xD183, prH3, gcLo}, // [27] HANGUL SYLLABLE TYEOG..HANGUL SYLLABLE TYEOH
+ {0xD184, 0xD184, prH2, gcLo}, // HANGUL SYLLABLE TYE
+ {0xD185, 0xD19F, prH3, gcLo}, // [27] HANGUL SYLLABLE TYEG..HANGUL SYLLABLE TYEH
+ {0xD1A0, 0xD1A0, prH2, gcLo}, // HANGUL SYLLABLE TO
+ {0xD1A1, 0xD1BB, prH3, gcLo}, // [27] HANGUL SYLLABLE TOG..HANGUL SYLLABLE TOH
+ {0xD1BC, 0xD1BC, prH2, gcLo}, // HANGUL SYLLABLE TWA
+ {0xD1BD, 0xD1D7, prH3, gcLo}, // [27] HANGUL SYLLABLE TWAG..HANGUL SYLLABLE TWAH
+ {0xD1D8, 0xD1D8, prH2, gcLo}, // HANGUL SYLLABLE TWAE
+ {0xD1D9, 0xD1F3, prH3, gcLo}, // [27] HANGUL SYLLABLE TWAEG..HANGUL SYLLABLE TWAEH
+ {0xD1F4, 0xD1F4, prH2, gcLo}, // HANGUL SYLLABLE TOE
+ {0xD1F5, 0xD20F, prH3, gcLo}, // [27] HANGUL SYLLABLE TOEG..HANGUL SYLLABLE TOEH
+ {0xD210, 0xD210, prH2, gcLo}, // HANGUL SYLLABLE TYO
+ {0xD211, 0xD22B, prH3, gcLo}, // [27] HANGUL SYLLABLE TYOG..HANGUL SYLLABLE TYOH
+ {0xD22C, 0xD22C, prH2, gcLo}, // HANGUL SYLLABLE TU
+ {0xD22D, 0xD247, prH3, gcLo}, // [27] HANGUL SYLLABLE TUG..HANGUL SYLLABLE TUH
+ {0xD248, 0xD248, prH2, gcLo}, // HANGUL SYLLABLE TWEO
+ {0xD249, 0xD263, prH3, gcLo}, // [27] HANGUL SYLLABLE TWEOG..HANGUL SYLLABLE TWEOH
+ {0xD264, 0xD264, prH2, gcLo}, // HANGUL SYLLABLE TWE
+ {0xD265, 0xD27F, prH3, gcLo}, // [27] HANGUL SYLLABLE TWEG..HANGUL SYLLABLE TWEH
+ {0xD280, 0xD280, prH2, gcLo}, // HANGUL SYLLABLE TWI
+ {0xD281, 0xD29B, prH3, gcLo}, // [27] HANGUL SYLLABLE TWIG..HANGUL SYLLABLE TWIH
+ {0xD29C, 0xD29C, prH2, gcLo}, // HANGUL SYLLABLE TYU
+ {0xD29D, 0xD2B7, prH3, gcLo}, // [27] HANGUL SYLLABLE TYUG..HANGUL SYLLABLE TYUH
+ {0xD2B8, 0xD2B8, prH2, gcLo}, // HANGUL SYLLABLE TEU
+ {0xD2B9, 0xD2D3, prH3, gcLo}, // [27] HANGUL SYLLABLE TEUG..HANGUL SYLLABLE TEUH
+ {0xD2D4, 0xD2D4, prH2, gcLo}, // HANGUL SYLLABLE TYI
+ {0xD2D5, 0xD2EF, prH3, gcLo}, // [27] HANGUL SYLLABLE TYIG..HANGUL SYLLABLE TYIH
+ {0xD2F0, 0xD2F0, prH2, gcLo}, // HANGUL SYLLABLE TI
+ {0xD2F1, 0xD30B, prH3, gcLo}, // [27] HANGUL SYLLABLE TIG..HANGUL SYLLABLE TIH
+ {0xD30C, 0xD30C, prH2, gcLo}, // HANGUL SYLLABLE PA
+ {0xD30D, 0xD327, prH3, gcLo}, // [27] HANGUL SYLLABLE PAG..HANGUL SYLLABLE PAH
+ {0xD328, 0xD328, prH2, gcLo}, // HANGUL SYLLABLE PAE
+ {0xD329, 0xD343, prH3, gcLo}, // [27] HANGUL SYLLABLE PAEG..HANGUL SYLLABLE PAEH
+ {0xD344, 0xD344, prH2, gcLo}, // HANGUL SYLLABLE PYA
+ {0xD345, 0xD35F, prH3, gcLo}, // [27] HANGUL SYLLABLE PYAG..HANGUL SYLLABLE PYAH
+ {0xD360, 0xD360, prH2, gcLo}, // HANGUL SYLLABLE PYAE
+ {0xD361, 0xD37B, prH3, gcLo}, // [27] HANGUL SYLLABLE PYAEG..HANGUL SYLLABLE PYAEH
+ {0xD37C, 0xD37C, prH2, gcLo}, // HANGUL SYLLABLE PEO
+ {0xD37D, 0xD397, prH3, gcLo}, // [27] HANGUL SYLLABLE PEOG..HANGUL SYLLABLE PEOH
+ {0xD398, 0xD398, prH2, gcLo}, // HANGUL SYLLABLE PE
+ {0xD399, 0xD3B3, prH3, gcLo}, // [27] HANGUL SYLLABLE PEG..HANGUL SYLLABLE PEH
+ {0xD3B4, 0xD3B4, prH2, gcLo}, // HANGUL SYLLABLE PYEO
+ {0xD3B5, 0xD3CF, prH3, gcLo}, // [27] HANGUL SYLLABLE PYEOG..HANGUL SYLLABLE PYEOH
+ {0xD3D0, 0xD3D0, prH2, gcLo}, // HANGUL SYLLABLE PYE
+ {0xD3D1, 0xD3EB, prH3, gcLo}, // [27] HANGUL SYLLABLE PYEG..HANGUL SYLLABLE PYEH
+ {0xD3EC, 0xD3EC, prH2, gcLo}, // HANGUL SYLLABLE PO
+ {0xD3ED, 0xD407, prH3, gcLo}, // [27] HANGUL SYLLABLE POG..HANGUL SYLLABLE POH
+ {0xD408, 0xD408, prH2, gcLo}, // HANGUL SYLLABLE PWA
+ {0xD409, 0xD423, prH3, gcLo}, // [27] HANGUL SYLLABLE PWAG..HANGUL SYLLABLE PWAH
+ {0xD424, 0xD424, prH2, gcLo}, // HANGUL SYLLABLE PWAE
+ {0xD425, 0xD43F, prH3, gcLo}, // [27] HANGUL SYLLABLE PWAEG..HANGUL SYLLABLE PWAEH
+ {0xD440, 0xD440, prH2, gcLo}, // HANGUL SYLLABLE POE
+ {0xD441, 0xD45B, prH3, gcLo}, // [27] HANGUL SYLLABLE POEG..HANGUL SYLLABLE POEH
+ {0xD45C, 0xD45C, prH2, gcLo}, // HANGUL SYLLABLE PYO
+ {0xD45D, 0xD477, prH3, gcLo}, // [27] HANGUL SYLLABLE PYOG..HANGUL SYLLABLE PYOH
+ {0xD478, 0xD478, prH2, gcLo}, // HANGUL SYLLABLE PU
+ {0xD479, 0xD493, prH3, gcLo}, // [27] HANGUL SYLLABLE PUG..HANGUL SYLLABLE PUH
+ {0xD494, 0xD494, prH2, gcLo}, // HANGUL SYLLABLE PWEO
+ {0xD495, 0xD4AF, prH3, gcLo}, // [27] HANGUL SYLLABLE PWEOG..HANGUL SYLLABLE PWEOH
+ {0xD4B0, 0xD4B0, prH2, gcLo}, // HANGUL SYLLABLE PWE
+ {0xD4B1, 0xD4CB, prH3, gcLo}, // [27] HANGUL SYLLABLE PWEG..HANGUL SYLLABLE PWEH
+ {0xD4CC, 0xD4CC, prH2, gcLo}, // HANGUL SYLLABLE PWI
+ {0xD4CD, 0xD4E7, prH3, gcLo}, // [27] HANGUL SYLLABLE PWIG..HANGUL SYLLABLE PWIH
+ {0xD4E8, 0xD4E8, prH2, gcLo}, // HANGUL SYLLABLE PYU
+ {0xD4E9, 0xD503, prH3, gcLo}, // [27] HANGUL SYLLABLE PYUG..HANGUL SYLLABLE PYUH
+ {0xD504, 0xD504, prH2, gcLo}, // HANGUL SYLLABLE PEU
+ {0xD505, 0xD51F, prH3, gcLo}, // [27] HANGUL SYLLABLE PEUG..HANGUL SYLLABLE PEUH
+ {0xD520, 0xD520, prH2, gcLo}, // HANGUL SYLLABLE PYI
+ {0xD521, 0xD53B, prH3, gcLo}, // [27] HANGUL SYLLABLE PYIG..HANGUL SYLLABLE PYIH
+ {0xD53C, 0xD53C, prH2, gcLo}, // HANGUL SYLLABLE PI
+ {0xD53D, 0xD557, prH3, gcLo}, // [27] HANGUL SYLLABLE PIG..HANGUL SYLLABLE PIH
+ {0xD558, 0xD558, prH2, gcLo}, // HANGUL SYLLABLE HA
+ {0xD559, 0xD573, prH3, gcLo}, // [27] HANGUL SYLLABLE HAG..HANGUL SYLLABLE HAH
+ {0xD574, 0xD574, prH2, gcLo}, // HANGUL SYLLABLE HAE
+ {0xD575, 0xD58F, prH3, gcLo}, // [27] HANGUL SYLLABLE HAEG..HANGUL SYLLABLE HAEH
+ {0xD590, 0xD590, prH2, gcLo}, // HANGUL SYLLABLE HYA
+ {0xD591, 0xD5AB, prH3, gcLo}, // [27] HANGUL SYLLABLE HYAG..HANGUL SYLLABLE HYAH
+ {0xD5AC, 0xD5AC, prH2, gcLo}, // HANGUL SYLLABLE HYAE
+ {0xD5AD, 0xD5C7, prH3, gcLo}, // [27] HANGUL SYLLABLE HYAEG..HANGUL SYLLABLE HYAEH
+ {0xD5C8, 0xD5C8, prH2, gcLo}, // HANGUL SYLLABLE HEO
+ {0xD5C9, 0xD5E3, prH3, gcLo}, // [27] HANGUL SYLLABLE HEOG..HANGUL SYLLABLE HEOH
+ {0xD5E4, 0xD5E4, prH2, gcLo}, // HANGUL SYLLABLE HE
+ {0xD5E5, 0xD5FF, prH3, gcLo}, // [27] HANGUL SYLLABLE HEG..HANGUL SYLLABLE HEH
+ {0xD600, 0xD600, prH2, gcLo}, // HANGUL SYLLABLE HYEO
+ {0xD601, 0xD61B, prH3, gcLo}, // [27] HANGUL SYLLABLE HYEOG..HANGUL SYLLABLE HYEOH
+ {0xD61C, 0xD61C, prH2, gcLo}, // HANGUL SYLLABLE HYE
+ {0xD61D, 0xD637, prH3, gcLo}, // [27] HANGUL SYLLABLE HYEG..HANGUL SYLLABLE HYEH
+ {0xD638, 0xD638, prH2, gcLo}, // HANGUL SYLLABLE HO
+ {0xD639, 0xD653, prH3, gcLo}, // [27] HANGUL SYLLABLE HOG..HANGUL SYLLABLE HOH
+ {0xD654, 0xD654, prH2, gcLo}, // HANGUL SYLLABLE HWA
+ {0xD655, 0xD66F, prH3, gcLo}, // [27] HANGUL SYLLABLE HWAG..HANGUL SYLLABLE HWAH
+ {0xD670, 0xD670, prH2, gcLo}, // HANGUL SYLLABLE HWAE
+ {0xD671, 0xD68B, prH3, gcLo}, // [27] HANGUL SYLLABLE HWAEG..HANGUL SYLLABLE HWAEH
+ {0xD68C, 0xD68C, prH2, gcLo}, // HANGUL SYLLABLE HOE
+ {0xD68D, 0xD6A7, prH3, gcLo}, // [27] HANGUL SYLLABLE HOEG..HANGUL SYLLABLE HOEH
+ {0xD6A8, 0xD6A8, prH2, gcLo}, // HANGUL SYLLABLE HYO
+ {0xD6A9, 0xD6C3, prH3, gcLo}, // [27] HANGUL SYLLABLE HYOG..HANGUL SYLLABLE HYOH
+ {0xD6C4, 0xD6C4, prH2, gcLo}, // HANGUL SYLLABLE HU
+ {0xD6C5, 0xD6DF, prH3, gcLo}, // [27] HANGUL SYLLABLE HUG..HANGUL SYLLABLE HUH
+ {0xD6E0, 0xD6E0, prH2, gcLo}, // HANGUL SYLLABLE HWEO
+ {0xD6E1, 0xD6FB, prH3, gcLo}, // [27] HANGUL SYLLABLE HWEOG..HANGUL SYLLABLE HWEOH
+ {0xD6FC, 0xD6FC, prH2, gcLo}, // HANGUL SYLLABLE HWE
+ {0xD6FD, 0xD717, prH3, gcLo}, // [27] HANGUL SYLLABLE HWEG..HANGUL SYLLABLE HWEH
+ {0xD718, 0xD718, prH2, gcLo}, // HANGUL SYLLABLE HWI
+ {0xD719, 0xD733, prH3, gcLo}, // [27] HANGUL SYLLABLE HWIG..HANGUL SYLLABLE HWIH
+ {0xD734, 0xD734, prH2, gcLo}, // HANGUL SYLLABLE HYU
+ {0xD735, 0xD74F, prH3, gcLo}, // [27] HANGUL SYLLABLE HYUG..HANGUL SYLLABLE HYUH
+ {0xD750, 0xD750, prH2, gcLo}, // HANGUL SYLLABLE HEU
+ {0xD751, 0xD76B, prH3, gcLo}, // [27] HANGUL SYLLABLE HEUG..HANGUL SYLLABLE HEUH
+ {0xD76C, 0xD76C, prH2, gcLo}, // HANGUL SYLLABLE HYI
+ {0xD76D, 0xD787, prH3, gcLo}, // [27] HANGUL SYLLABLE HYIG..HANGUL SYLLABLE HYIH
+ {0xD788, 0xD788, prH2, gcLo}, // HANGUL SYLLABLE HI
+ {0xD789, 0xD7A3, prH3, gcLo}, // [27] HANGUL SYLLABLE HIG..HANGUL SYLLABLE HIH
+ {0xD7B0, 0xD7C6, prJV, gcLo}, // [23] HANGUL JUNGSEONG O-YEO..HANGUL JUNGSEONG ARAEA-E
+ {0xD7CB, 0xD7FB, prJT, gcLo}, // [49] HANGUL JONGSEONG NIEUN-RIEUL..HANGUL JONGSEONG PHIEUPH-THIEUTH
+ {0xD800, 0xDB7F, prSG, gcCs}, // [896] ..
+ {0xDB80, 0xDBFF, prSG, gcCs}, // [128] ..
+ {0xDC00, 0xDFFF, prSG, gcCs}, // [1024] ..
+ {0xE000, 0xF8FF, prXX, gcCo}, // [6400] ..
+ {0xF900, 0xFA6D, prID, gcLo}, // [366] CJK COMPATIBILITY IDEOGRAPH-F900..CJK COMPATIBILITY IDEOGRAPH-FA6D
+ {0xFA6E, 0xFA6F, prID, gcCn}, // [2] ..
+ {0xFA70, 0xFAD9, prID, gcLo}, // [106] CJK COMPATIBILITY IDEOGRAPH-FA70..CJK COMPATIBILITY IDEOGRAPH-FAD9
+ {0xFADA, 0xFAFF, prID, gcCn}, // [38] ..
+ {0xFB00, 0xFB06, prAL, gcLl}, // [7] LATIN SMALL LIGATURE FF..LATIN SMALL LIGATURE ST
+ {0xFB13, 0xFB17, prAL, gcLl}, // [5] ARMENIAN SMALL LIGATURE MEN NOW..ARMENIAN SMALL LIGATURE MEN XEH
+ {0xFB1D, 0xFB1D, prHL, gcLo}, // HEBREW LETTER YOD WITH HIRIQ
+ {0xFB1E, 0xFB1E, prCM, gcMn}, // HEBREW POINT JUDEO-SPANISH VARIKA
+ {0xFB1F, 0xFB28, prHL, gcLo}, // [10] HEBREW LIGATURE YIDDISH YOD YOD PATAH..HEBREW LETTER WIDE TAV
+ {0xFB29, 0xFB29, prAL, gcSm}, // HEBREW LETTER ALTERNATIVE PLUS SIGN
+ {0xFB2A, 0xFB36, prHL, gcLo}, // [13] HEBREW LETTER SHIN WITH SHIN DOT..HEBREW LETTER ZAYIN WITH DAGESH
+ {0xFB38, 0xFB3C, prHL, gcLo}, // [5] HEBREW LETTER TET WITH DAGESH..HEBREW LETTER LAMED WITH DAGESH
+ {0xFB3E, 0xFB3E, prHL, gcLo}, // HEBREW LETTER MEM WITH DAGESH
+ {0xFB40, 0xFB41, prHL, gcLo}, // [2] HEBREW LETTER NUN WITH DAGESH..HEBREW LETTER SAMEKH WITH DAGESH
+ {0xFB43, 0xFB44, prHL, gcLo}, // [2] HEBREW LETTER FINAL PE WITH DAGESH..HEBREW LETTER PE WITH DAGESH
+ {0xFB46, 0xFB4F, prHL, gcLo}, // [10] HEBREW LETTER TSADI WITH DAGESH..HEBREW LIGATURE ALEF LAMED
+ {0xFB50, 0xFBB1, prAL, gcLo}, // [98] ARABIC LETTER ALEF WASLA ISOLATED FORM..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE FINAL FORM
+ {0xFBB2, 0xFBC2, prAL, gcSk}, // [17] ARABIC SYMBOL DOT ABOVE..ARABIC SYMBOL WASLA ABOVE
+ {0xFBD3, 0xFD3D, prAL, gcLo}, // [363] ARABIC LETTER NG ISOLATED FORM..ARABIC LIGATURE ALEF WITH FATHATAN ISOLATED FORM
+ {0xFD3E, 0xFD3E, prCL, gcPe}, // ORNATE LEFT PARENTHESIS
+ {0xFD3F, 0xFD3F, prOP, gcPs}, // ORNATE RIGHT PARENTHESIS
+ {0xFD40, 0xFD4F, prAL, gcSo}, // [16] ARABIC LIGATURE RAHIMAHU ALLAAH..ARABIC LIGATURE RAHIMAHUM ALLAAH
+ {0xFD50, 0xFD8F, prAL, gcLo}, // [64] ARABIC LIGATURE TEH WITH JEEM WITH MEEM INITIAL FORM..ARABIC LIGATURE MEEM WITH KHAH WITH MEEM INITIAL FORM
+ {0xFD92, 0xFDC7, prAL, gcLo}, // [54] ARABIC LIGATURE MEEM WITH JEEM WITH KHAH INITIAL FORM..ARABIC LIGATURE NOON WITH JEEM WITH YEH FINAL FORM
+ {0xFDCF, 0xFDCF, prAL, gcSo}, // ARABIC LIGATURE SALAAMUHU ALAYNAA
+ {0xFDF0, 0xFDFB, prAL, gcLo}, // [12] ARABIC LIGATURE SALLA USED AS KORANIC STOP SIGN ISOLATED FORM..ARABIC LIGATURE JALLAJALALOUHOU
+ {0xFDFC, 0xFDFC, prPO, gcSc}, // RIAL SIGN
+ {0xFDFD, 0xFDFF, prAL, gcSo}, // [3] ARABIC LIGATURE BISMILLAH AR-RAHMAN AR-RAHEEM..ARABIC LIGATURE AZZA WA JALL
+ {0xFE00, 0xFE0F, prCM, gcMn}, // [16] VARIATION SELECTOR-1..VARIATION SELECTOR-16
+ {0xFE10, 0xFE10, prIS, gcPo}, // PRESENTATION FORM FOR VERTICAL COMMA
+ {0xFE11, 0xFE12, prCL, gcPo}, // [2] PRESENTATION FORM FOR VERTICAL IDEOGRAPHIC COMMA..PRESENTATION FORM FOR VERTICAL IDEOGRAPHIC FULL STOP
+ {0xFE13, 0xFE14, prIS, gcPo}, // [2] PRESENTATION FORM FOR VERTICAL COLON..PRESENTATION FORM FOR VERTICAL SEMICOLON
+ {0xFE15, 0xFE16, prEX, gcPo}, // [2] PRESENTATION FORM FOR VERTICAL EXCLAMATION MARK..PRESENTATION FORM FOR VERTICAL QUESTION MARK
+ {0xFE17, 0xFE17, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT WHITE LENTICULAR BRACKET
+ {0xFE18, 0xFE18, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT WHITE LENTICULAR BRAKCET
+ {0xFE19, 0xFE19, prIN, gcPo}, // PRESENTATION FORM FOR VERTICAL HORIZONTAL ELLIPSIS
+ {0xFE20, 0xFE2F, prCM, gcMn}, // [16] COMBINING LIGATURE LEFT HALF..COMBINING CYRILLIC TITLO RIGHT HALF
+ {0xFE30, 0xFE30, prID, gcPo}, // PRESENTATION FORM FOR VERTICAL TWO DOT LEADER
+ {0xFE31, 0xFE32, prID, gcPd}, // [2] PRESENTATION FORM FOR VERTICAL EM DASH..PRESENTATION FORM FOR VERTICAL EN DASH
+ {0xFE33, 0xFE34, prID, gcPc}, // [2] PRESENTATION FORM FOR VERTICAL LOW LINE..PRESENTATION FORM FOR VERTICAL WAVY LOW LINE
+ {0xFE35, 0xFE35, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT PARENTHESIS
+ {0xFE36, 0xFE36, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT PARENTHESIS
+ {0xFE37, 0xFE37, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT CURLY BRACKET
+ {0xFE38, 0xFE38, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT CURLY BRACKET
+ {0xFE39, 0xFE39, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT TORTOISE SHELL BRACKET
+ {0xFE3A, 0xFE3A, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT TORTOISE SHELL BRACKET
+ {0xFE3B, 0xFE3B, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT BLACK LENTICULAR BRACKET
+ {0xFE3C, 0xFE3C, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT BLACK LENTICULAR BRACKET
+ {0xFE3D, 0xFE3D, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT DOUBLE ANGLE BRACKET
+ {0xFE3E, 0xFE3E, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT DOUBLE ANGLE BRACKET
+ {0xFE3F, 0xFE3F, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT ANGLE BRACKET
+ {0xFE40, 0xFE40, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT ANGLE BRACKET
+ {0xFE41, 0xFE41, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT CORNER BRACKET
+ {0xFE42, 0xFE42, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT CORNER BRACKET
+ {0xFE43, 0xFE43, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT WHITE CORNER BRACKET
+ {0xFE44, 0xFE44, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT WHITE CORNER BRACKET
+ {0xFE45, 0xFE46, prID, gcPo}, // [2] SESAME DOT..WHITE SESAME DOT
+ {0xFE47, 0xFE47, prOP, gcPs}, // PRESENTATION FORM FOR VERTICAL LEFT SQUARE BRACKET
+ {0xFE48, 0xFE48, prCL, gcPe}, // PRESENTATION FORM FOR VERTICAL RIGHT SQUARE BRACKET
+ {0xFE49, 0xFE4C, prID, gcPo}, // [4] DASHED OVERLINE..DOUBLE WAVY OVERLINE
+ {0xFE4D, 0xFE4F, prID, gcPc}, // [3] DASHED LOW LINE..WAVY LOW LINE
+ {0xFE50, 0xFE50, prCL, gcPo}, // SMALL COMMA
+ {0xFE51, 0xFE51, prID, gcPo}, // SMALL IDEOGRAPHIC COMMA
+ {0xFE52, 0xFE52, prCL, gcPo}, // SMALL FULL STOP
+ {0xFE54, 0xFE55, prNS, gcPo}, // [2] SMALL SEMICOLON..SMALL COLON
+ {0xFE56, 0xFE57, prEX, gcPo}, // [2] SMALL QUESTION MARK..SMALL EXCLAMATION MARK
+ {0xFE58, 0xFE58, prID, gcPd}, // SMALL EM DASH
+ {0xFE59, 0xFE59, prOP, gcPs}, // SMALL LEFT PARENTHESIS
+ {0xFE5A, 0xFE5A, prCL, gcPe}, // SMALL RIGHT PARENTHESIS
+ {0xFE5B, 0xFE5B, prOP, gcPs}, // SMALL LEFT CURLY BRACKET
+ {0xFE5C, 0xFE5C, prCL, gcPe}, // SMALL RIGHT CURLY BRACKET
+ {0xFE5D, 0xFE5D, prOP, gcPs}, // SMALL LEFT TORTOISE SHELL BRACKET
+ {0xFE5E, 0xFE5E, prCL, gcPe}, // SMALL RIGHT TORTOISE SHELL BRACKET
+ {0xFE5F, 0xFE61, prID, gcPo}, // [3] SMALL NUMBER SIGN..SMALL ASTERISK
+ {0xFE62, 0xFE62, prID, gcSm}, // SMALL PLUS SIGN
+ {0xFE63, 0xFE63, prID, gcPd}, // SMALL HYPHEN-MINUS
+ {0xFE64, 0xFE66, prID, gcSm}, // [3] SMALL LESS-THAN SIGN..SMALL EQUALS SIGN
+ {0xFE68, 0xFE68, prID, gcPo}, // SMALL REVERSE SOLIDUS
+ {0xFE69, 0xFE69, prPR, gcSc}, // SMALL DOLLAR SIGN
+ {0xFE6A, 0xFE6A, prPO, gcPo}, // SMALL PERCENT SIGN
+ {0xFE6B, 0xFE6B, prID, gcPo}, // SMALL COMMERCIAL AT
+ {0xFE70, 0xFE74, prAL, gcLo}, // [5] ARABIC FATHATAN ISOLATED FORM..ARABIC KASRATAN ISOLATED FORM
+ {0xFE76, 0xFEFC, prAL, gcLo}, // [135] ARABIC FATHA ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF FINAL FORM
+ {0xFEFF, 0xFEFF, prWJ, gcCf}, // ZERO WIDTH NO-BREAK SPACE
+ {0xFF01, 0xFF01, prEX, gcPo}, // FULLWIDTH EXCLAMATION MARK
+ {0xFF02, 0xFF03, prID, gcPo}, // [2] FULLWIDTH QUOTATION MARK..FULLWIDTH NUMBER SIGN
+ {0xFF04, 0xFF04, prPR, gcSc}, // FULLWIDTH DOLLAR SIGN
+ {0xFF05, 0xFF05, prPO, gcPo}, // FULLWIDTH PERCENT SIGN
+ {0xFF06, 0xFF07, prID, gcPo}, // [2] FULLWIDTH AMPERSAND..FULLWIDTH APOSTROPHE
+ {0xFF08, 0xFF08, prOP, gcPs}, // FULLWIDTH LEFT PARENTHESIS
+ {0xFF09, 0xFF09, prCL, gcPe}, // FULLWIDTH RIGHT PARENTHESIS
+ {0xFF0A, 0xFF0A, prID, gcPo}, // FULLWIDTH ASTERISK
+ {0xFF0B, 0xFF0B, prID, gcSm}, // FULLWIDTH PLUS SIGN
+ {0xFF0C, 0xFF0C, prCL, gcPo}, // FULLWIDTH COMMA
+ {0xFF0D, 0xFF0D, prID, gcPd}, // FULLWIDTH HYPHEN-MINUS
+ {0xFF0E, 0xFF0E, prCL, gcPo}, // FULLWIDTH FULL STOP
+ {0xFF0F, 0xFF0F, prID, gcPo}, // FULLWIDTH SOLIDUS
+ {0xFF10, 0xFF19, prID, gcNd}, // [10] FULLWIDTH DIGIT ZERO..FULLWIDTH DIGIT NINE
+ {0xFF1A, 0xFF1B, prNS, gcPo}, // [2] FULLWIDTH COLON..FULLWIDTH SEMICOLON
+ {0xFF1C, 0xFF1E, prID, gcSm}, // [3] FULLWIDTH LESS-THAN SIGN..FULLWIDTH GREATER-THAN SIGN
+ {0xFF1F, 0xFF1F, prEX, gcPo}, // FULLWIDTH QUESTION MARK
+ {0xFF20, 0xFF20, prID, gcPo}, // FULLWIDTH COMMERCIAL AT
+ {0xFF21, 0xFF3A, prID, gcLu}, // [26] FULLWIDTH LATIN CAPITAL LETTER A..FULLWIDTH LATIN CAPITAL LETTER Z
+ {0xFF3B, 0xFF3B, prOP, gcPs}, // FULLWIDTH LEFT SQUARE BRACKET
+ {0xFF3C, 0xFF3C, prID, gcPo}, // FULLWIDTH REVERSE SOLIDUS
+ {0xFF3D, 0xFF3D, prCL, gcPe}, // FULLWIDTH RIGHT SQUARE BRACKET
+ {0xFF3E, 0xFF3E, prID, gcSk}, // FULLWIDTH CIRCUMFLEX ACCENT
+ {0xFF3F, 0xFF3F, prID, gcPc}, // FULLWIDTH LOW LINE
+ {0xFF40, 0xFF40, prID, gcSk}, // FULLWIDTH GRAVE ACCENT
+ {0xFF41, 0xFF5A, prID, gcLl}, // [26] FULLWIDTH LATIN SMALL LETTER A..FULLWIDTH LATIN SMALL LETTER Z
+ {0xFF5B, 0xFF5B, prOP, gcPs}, // FULLWIDTH LEFT CURLY BRACKET
+ {0xFF5C, 0xFF5C, prID, gcSm}, // FULLWIDTH VERTICAL LINE
+ {0xFF5D, 0xFF5D, prCL, gcPe}, // FULLWIDTH RIGHT CURLY BRACKET
+ {0xFF5E, 0xFF5E, prID, gcSm}, // FULLWIDTH TILDE
+ {0xFF5F, 0xFF5F, prOP, gcPs}, // FULLWIDTH LEFT WHITE PARENTHESIS
+ {0xFF60, 0xFF60, prCL, gcPe}, // FULLWIDTH RIGHT WHITE PARENTHESIS
+ {0xFF61, 0xFF61, prCL, gcPo}, // HALFWIDTH IDEOGRAPHIC FULL STOP
+ {0xFF62, 0xFF62, prOP, gcPs}, // HALFWIDTH LEFT CORNER BRACKET
+ {0xFF63, 0xFF63, prCL, gcPe}, // HALFWIDTH RIGHT CORNER BRACKET
+ {0xFF64, 0xFF64, prCL, gcPo}, // HALFWIDTH IDEOGRAPHIC COMMA
+ {0xFF65, 0xFF65, prNS, gcPo}, // HALFWIDTH KATAKANA MIDDLE DOT
+ {0xFF66, 0xFF66, prID, gcLo}, // HALFWIDTH KATAKANA LETTER WO
+ {0xFF67, 0xFF6F, prCJ, gcLo}, // [9] HALFWIDTH KATAKANA LETTER SMALL A..HALFWIDTH KATAKANA LETTER SMALL TU
+ {0xFF70, 0xFF70, prCJ, gcLm}, // HALFWIDTH KATAKANA-HIRAGANA PROLONGED SOUND MARK
+ {0xFF71, 0xFF9D, prID, gcLo}, // [45] HALFWIDTH KATAKANA LETTER A..HALFWIDTH KATAKANA LETTER N
+ {0xFF9E, 0xFF9F, prNS, gcLm}, // [2] HALFWIDTH KATAKANA VOICED SOUND MARK..HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK
+ {0xFFA0, 0xFFBE, prID, gcLo}, // [31] HALFWIDTH HANGUL FILLER..HALFWIDTH HANGUL LETTER HIEUH
+ {0xFFC2, 0xFFC7, prID, gcLo}, // [6] HALFWIDTH HANGUL LETTER A..HALFWIDTH HANGUL LETTER E
+ {0xFFCA, 0xFFCF, prID, gcLo}, // [6] HALFWIDTH HANGUL LETTER YEO..HALFWIDTH HANGUL LETTER OE
+ {0xFFD2, 0xFFD7, prID, gcLo}, // [6] HALFWIDTH HANGUL LETTER YO..HALFWIDTH HANGUL LETTER YU
+ {0xFFDA, 0xFFDC, prID, gcLo}, // [3] HALFWIDTH HANGUL LETTER EU..HALFWIDTH HANGUL LETTER I
+ {0xFFE0, 0xFFE0, prPO, gcSc}, // FULLWIDTH CENT SIGN
+ {0xFFE1, 0xFFE1, prPR, gcSc}, // FULLWIDTH POUND SIGN
+ {0xFFE2, 0xFFE2, prID, gcSm}, // FULLWIDTH NOT SIGN
+ {0xFFE3, 0xFFE3, prID, gcSk}, // FULLWIDTH MACRON
+ {0xFFE4, 0xFFE4, prID, gcSo}, // FULLWIDTH BROKEN BAR
+ {0xFFE5, 0xFFE6, prPR, gcSc}, // [2] FULLWIDTH YEN SIGN..FULLWIDTH WON SIGN
+ {0xFFE8, 0xFFE8, prAL, gcSo}, // HALFWIDTH FORMS LIGHT VERTICAL
+ {0xFFE9, 0xFFEC, prAL, gcSm}, // [4] HALFWIDTH LEFTWARDS ARROW..HALFWIDTH DOWNWARDS ARROW
+ {0xFFED, 0xFFEE, prAL, gcSo}, // [2] HALFWIDTH BLACK SQUARE..HALFWIDTH WHITE CIRCLE
+ {0xFFF9, 0xFFFB, prCM, gcCf}, // [3] INTERLINEAR ANNOTATION ANCHOR..INTERLINEAR ANNOTATION TERMINATOR
+ {0xFFFC, 0xFFFC, prCB, gcSo}, // OBJECT REPLACEMENT CHARACTER
+ {0xFFFD, 0xFFFD, prAI, gcSo}, // REPLACEMENT CHARACTER
+ {0x10000, 0x1000B, prAL, gcLo}, // [12] LINEAR B SYLLABLE B008 A..LINEAR B SYLLABLE B046 JE
+ {0x1000D, 0x10026, prAL, gcLo}, // [26] LINEAR B SYLLABLE B036 JO..LINEAR B SYLLABLE B032 QO
+ {0x10028, 0x1003A, prAL, gcLo}, // [19] LINEAR B SYLLABLE B060 RA..LINEAR B SYLLABLE B042 WO
+ {0x1003C, 0x1003D, prAL, gcLo}, // [2] LINEAR B SYLLABLE B017 ZA..LINEAR B SYLLABLE B074 ZE
+ {0x1003F, 0x1004D, prAL, gcLo}, // [15] LINEAR B SYLLABLE B020 ZO..LINEAR B SYLLABLE B091 TWO
+ {0x10050, 0x1005D, prAL, gcLo}, // [14] LINEAR B SYMBOL B018..LINEAR B SYMBOL B089
+ {0x10080, 0x100FA, prAL, gcLo}, // [123] LINEAR B IDEOGRAM B100 MAN..LINEAR B IDEOGRAM VESSEL B305
+ {0x10100, 0x10102, prBA, gcPo}, // [3] AEGEAN WORD SEPARATOR LINE..AEGEAN CHECK MARK
+ {0x10107, 0x10133, prAL, gcNo}, // [45] AEGEAN NUMBER ONE..AEGEAN NUMBER NINETY THOUSAND
+ {0x10137, 0x1013F, prAL, gcSo}, // [9] AEGEAN WEIGHT BASE UNIT..AEGEAN MEASURE THIRD SUBUNIT
+ {0x10140, 0x10174, prAL, gcNl}, // [53] GREEK ACROPHONIC ATTIC ONE QUARTER..GREEK ACROPHONIC STRATIAN FIFTY MNAS
+ {0x10175, 0x10178, prAL, gcNo}, // [4] GREEK ONE HALF SIGN..GREEK THREE QUARTERS SIGN
+ {0x10179, 0x10189, prAL, gcSo}, // [17] GREEK YEAR SIGN..GREEK TRYBLION BASE SIGN
+ {0x1018A, 0x1018B, prAL, gcNo}, // [2] GREEK ZERO SIGN..GREEK ONE QUARTER SIGN
+ {0x1018C, 0x1018E, prAL, gcSo}, // [3] GREEK SINUSOID SIGN..NOMISMA SIGN
+ {0x10190, 0x1019C, prAL, gcSo}, // [13] ROMAN SEXTANS SIGN..ASCIA SYMBOL
+ {0x101A0, 0x101A0, prAL, gcSo}, // GREEK SYMBOL TAU RHO
+ {0x101D0, 0x101FC, prAL, gcSo}, // [45] PHAISTOS DISC SIGN PEDESTRIAN..PHAISTOS DISC SIGN WAVY BAND
+ {0x101FD, 0x101FD, prCM, gcMn}, // PHAISTOS DISC SIGN COMBINING OBLIQUE STROKE
+ {0x10280, 0x1029C, prAL, gcLo}, // [29] LYCIAN LETTER A..LYCIAN LETTER X
+ {0x102A0, 0x102D0, prAL, gcLo}, // [49] CARIAN LETTER A..CARIAN LETTER UUU3
+ {0x102E0, 0x102E0, prCM, gcMn}, // COPTIC EPACT THOUSANDS MARK
+ {0x102E1, 0x102FB, prAL, gcNo}, // [27] COPTIC EPACT DIGIT ONE..COPTIC EPACT NUMBER NINE HUNDRED
+ {0x10300, 0x1031F, prAL, gcLo}, // [32] OLD ITALIC LETTER A..OLD ITALIC LETTER ESS
+ {0x10320, 0x10323, prAL, gcNo}, // [4] OLD ITALIC NUMERAL ONE..OLD ITALIC NUMERAL FIFTY
+ {0x1032D, 0x1032F, prAL, gcLo}, // [3] OLD ITALIC LETTER YE..OLD ITALIC LETTER SOUTHERN TSE
+ {0x10330, 0x10340, prAL, gcLo}, // [17] GOTHIC LETTER AHSA..GOTHIC LETTER PAIRTHRA
+ {0x10341, 0x10341, prAL, gcNl}, // GOTHIC LETTER NINETY
+ {0x10342, 0x10349, prAL, gcLo}, // [8] GOTHIC LETTER RAIDA..GOTHIC LETTER OTHAL
+ {0x1034A, 0x1034A, prAL, gcNl}, // GOTHIC LETTER NINE HUNDRED
+ {0x10350, 0x10375, prAL, gcLo}, // [38] OLD PERMIC LETTER AN..OLD PERMIC LETTER IA
+ {0x10376, 0x1037A, prCM, gcMn}, // [5] COMBINING OLD PERMIC LETTER AN..COMBINING OLD PERMIC LETTER SII
+ {0x10380, 0x1039D, prAL, gcLo}, // [30] UGARITIC LETTER ALPA..UGARITIC LETTER SSU
+ {0x1039F, 0x1039F, prBA, gcPo}, // UGARITIC WORD DIVIDER
+ {0x103A0, 0x103C3, prAL, gcLo}, // [36] OLD PERSIAN SIGN A..OLD PERSIAN SIGN HA
+ {0x103C8, 0x103CF, prAL, gcLo}, // [8] OLD PERSIAN SIGN AURAMAZDAA..OLD PERSIAN SIGN BUUMISH
+ {0x103D0, 0x103D0, prBA, gcPo}, // OLD PERSIAN WORD DIVIDER
+ {0x103D1, 0x103D5, prAL, gcNl}, // [5] OLD PERSIAN NUMBER ONE..OLD PERSIAN NUMBER HUNDRED
+ {0x10400, 0x1044F, prAL, gcLC}, // [80] DESERET CAPITAL LETTER LONG I..DESERET SMALL LETTER EW
+ {0x10450, 0x1047F, prAL, gcLo}, // [48] SHAVIAN LETTER PEEP..SHAVIAN LETTER YEW
+ {0x10480, 0x1049D, prAL, gcLo}, // [30] OSMANYA LETTER ALEF..OSMANYA LETTER OO
+ {0x104A0, 0x104A9, prNU, gcNd}, // [10] OSMANYA DIGIT ZERO..OSMANYA DIGIT NINE
+ {0x104B0, 0x104D3, prAL, gcLu}, // [36] OSAGE CAPITAL LETTER A..OSAGE CAPITAL LETTER ZHA
+ {0x104D8, 0x104FB, prAL, gcLl}, // [36] OSAGE SMALL LETTER A..OSAGE SMALL LETTER ZHA
+ {0x10500, 0x10527, prAL, gcLo}, // [40] ELBASAN LETTER A..ELBASAN LETTER KHE
+ {0x10530, 0x10563, prAL, gcLo}, // [52] CAUCASIAN ALBANIAN LETTER ALT..CAUCASIAN ALBANIAN LETTER KIW
+ {0x1056F, 0x1056F, prAL, gcPo}, // CAUCASIAN ALBANIAN CITATION MARK
+ {0x10570, 0x1057A, prAL, gcLu}, // [11] VITHKUQI CAPITAL LETTER A..VITHKUQI CAPITAL LETTER GA
+ {0x1057C, 0x1058A, prAL, gcLu}, // [15] VITHKUQI CAPITAL LETTER HA..VITHKUQI CAPITAL LETTER RE
+ {0x1058C, 0x10592, prAL, gcLu}, // [7] VITHKUQI CAPITAL LETTER SE..VITHKUQI CAPITAL LETTER XE
+ {0x10594, 0x10595, prAL, gcLu}, // [2] VITHKUQI CAPITAL LETTER Y..VITHKUQI CAPITAL LETTER ZE
+ {0x10597, 0x105A1, prAL, gcLl}, // [11] VITHKUQI SMALL LETTER A..VITHKUQI SMALL LETTER GA
+ {0x105A3, 0x105B1, prAL, gcLl}, // [15] VITHKUQI SMALL LETTER HA..VITHKUQI SMALL LETTER RE
+ {0x105B3, 0x105B9, prAL, gcLl}, // [7] VITHKUQI SMALL LETTER SE..VITHKUQI SMALL LETTER XE
+ {0x105BB, 0x105BC, prAL, gcLl}, // [2] VITHKUQI SMALL LETTER Y..VITHKUQI SMALL LETTER ZE
+ {0x10600, 0x10736, prAL, gcLo}, // [311] LINEAR A SIGN AB001..LINEAR A SIGN A664
+ {0x10740, 0x10755, prAL, gcLo}, // [22] LINEAR A SIGN A701 A..LINEAR A SIGN A732 JE
+ {0x10760, 0x10767, prAL, gcLo}, // [8] LINEAR A SIGN A800..LINEAR A SIGN A807
+ {0x10780, 0x10785, prAL, gcLm}, // [6] MODIFIER LETTER SMALL CAPITAL AA..MODIFIER LETTER SMALL B WITH HOOK
+ {0x10787, 0x107B0, prAL, gcLm}, // [42] MODIFIER LETTER SMALL DZ DIGRAPH..MODIFIER LETTER SMALL V WITH RIGHT HOOK
+ {0x107B2, 0x107BA, prAL, gcLm}, // [9] MODIFIER LETTER SMALL CAPITAL Y..MODIFIER LETTER SMALL S WITH CURL
+ {0x10800, 0x10805, prAL, gcLo}, // [6] CYPRIOT SYLLABLE A..CYPRIOT SYLLABLE JA
+ {0x10808, 0x10808, prAL, gcLo}, // CYPRIOT SYLLABLE JO
+ {0x1080A, 0x10835, prAL, gcLo}, // [44] CYPRIOT SYLLABLE KA..CYPRIOT SYLLABLE WO
+ {0x10837, 0x10838, prAL, gcLo}, // [2] CYPRIOT SYLLABLE XA..CYPRIOT SYLLABLE XE
+ {0x1083C, 0x1083C, prAL, gcLo}, // CYPRIOT SYLLABLE ZA
+ {0x1083F, 0x1083F, prAL, gcLo}, // CYPRIOT SYLLABLE ZO
+ {0x10840, 0x10855, prAL, gcLo}, // [22] IMPERIAL ARAMAIC LETTER ALEPH..IMPERIAL ARAMAIC LETTER TAW
+ {0x10857, 0x10857, prBA, gcPo}, // IMPERIAL ARAMAIC SECTION SIGN
+ {0x10858, 0x1085F, prAL, gcNo}, // [8] IMPERIAL ARAMAIC NUMBER ONE..IMPERIAL ARAMAIC NUMBER TEN THOUSAND
+ {0x10860, 0x10876, prAL, gcLo}, // [23] PALMYRENE LETTER ALEPH..PALMYRENE LETTER TAW
+ {0x10877, 0x10878, prAL, gcSo}, // [2] PALMYRENE LEFT-POINTING FLEURON..PALMYRENE RIGHT-POINTING FLEURON
+ {0x10879, 0x1087F, prAL, gcNo}, // [7] PALMYRENE NUMBER ONE..PALMYRENE NUMBER TWENTY
+ {0x10880, 0x1089E, prAL, gcLo}, // [31] NABATAEAN LETTER FINAL ALEPH..NABATAEAN LETTER TAW
+ {0x108A7, 0x108AF, prAL, gcNo}, // [9] NABATAEAN NUMBER ONE..NABATAEAN NUMBER ONE HUNDRED
+ {0x108E0, 0x108F2, prAL, gcLo}, // [19] HATRAN LETTER ALEPH..HATRAN LETTER QOPH
+ {0x108F4, 0x108F5, prAL, gcLo}, // [2] HATRAN LETTER SHIN..HATRAN LETTER TAW
+ {0x108FB, 0x108FF, prAL, gcNo}, // [5] HATRAN NUMBER ONE..HATRAN NUMBER ONE HUNDRED
+ {0x10900, 0x10915, prAL, gcLo}, // [22] PHOENICIAN LETTER ALF..PHOENICIAN LETTER TAU
+ {0x10916, 0x1091B, prAL, gcNo}, // [6] PHOENICIAN NUMBER ONE..PHOENICIAN NUMBER THREE
+ {0x1091F, 0x1091F, prBA, gcPo}, // PHOENICIAN WORD SEPARATOR
+ {0x10920, 0x10939, prAL, gcLo}, // [26] LYDIAN LETTER A..LYDIAN LETTER C
+ {0x1093F, 0x1093F, prAL, gcPo}, // LYDIAN TRIANGULAR MARK
+ {0x10980, 0x1099F, prAL, gcLo}, // [32] MEROITIC HIEROGLYPHIC LETTER A..MEROITIC HIEROGLYPHIC SYMBOL VIDJ-2
+ {0x109A0, 0x109B7, prAL, gcLo}, // [24] MEROITIC CURSIVE LETTER A..MEROITIC CURSIVE LETTER DA
+ {0x109BC, 0x109BD, prAL, gcNo}, // [2] MEROITIC CURSIVE FRACTION ELEVEN TWELFTHS..MEROITIC CURSIVE FRACTION ONE HALF
+ {0x109BE, 0x109BF, prAL, gcLo}, // [2] MEROITIC CURSIVE LOGOGRAM RMT..MEROITIC CURSIVE LOGOGRAM IMN
+ {0x109C0, 0x109CF, prAL, gcNo}, // [16] MEROITIC CURSIVE NUMBER ONE..MEROITIC CURSIVE NUMBER SEVENTY
+ {0x109D2, 0x109FF, prAL, gcNo}, // [46] MEROITIC CURSIVE NUMBER ONE HUNDRED..MEROITIC CURSIVE FRACTION TEN TWELFTHS
+ {0x10A00, 0x10A00, prAL, gcLo}, // KHAROSHTHI LETTER A
+ {0x10A01, 0x10A03, prCM, gcMn}, // [3] KHAROSHTHI VOWEL SIGN I..KHAROSHTHI VOWEL SIGN VOCALIC R
+ {0x10A05, 0x10A06, prCM, gcMn}, // [2] KHAROSHTHI VOWEL SIGN E..KHAROSHTHI VOWEL SIGN O
+ {0x10A0C, 0x10A0F, prCM, gcMn}, // [4] KHAROSHTHI VOWEL LENGTH MARK..KHAROSHTHI SIGN VISARGA
+ {0x10A10, 0x10A13, prAL, gcLo}, // [4] KHAROSHTHI LETTER KA..KHAROSHTHI LETTER GHA
+ {0x10A15, 0x10A17, prAL, gcLo}, // [3] KHAROSHTHI LETTER CA..KHAROSHTHI LETTER JA
+ {0x10A19, 0x10A35, prAL, gcLo}, // [29] KHAROSHTHI LETTER NYA..KHAROSHTHI LETTER VHA
+ {0x10A38, 0x10A3A, prCM, gcMn}, // [3] KHAROSHTHI SIGN BAR ABOVE..KHAROSHTHI SIGN DOT BELOW
+ {0x10A3F, 0x10A3F, prCM, gcMn}, // KHAROSHTHI VIRAMA
+ {0x10A40, 0x10A48, prAL, gcNo}, // [9] KHAROSHTHI DIGIT ONE..KHAROSHTHI FRACTION ONE HALF
+ {0x10A50, 0x10A57, prBA, gcPo}, // [8] KHAROSHTHI PUNCTUATION DOT..KHAROSHTHI PUNCTUATION DOUBLE DANDA
+ {0x10A58, 0x10A58, prAL, gcPo}, // KHAROSHTHI PUNCTUATION LINES
+ {0x10A60, 0x10A7C, prAL, gcLo}, // [29] OLD SOUTH ARABIAN LETTER HE..OLD SOUTH ARABIAN LETTER THETH
+ {0x10A7D, 0x10A7E, prAL, gcNo}, // [2] OLD SOUTH ARABIAN NUMBER ONE..OLD SOUTH ARABIAN NUMBER FIFTY
+ {0x10A7F, 0x10A7F, prAL, gcPo}, // OLD SOUTH ARABIAN NUMERIC INDICATOR
+ {0x10A80, 0x10A9C, prAL, gcLo}, // [29] OLD NORTH ARABIAN LETTER HEH..OLD NORTH ARABIAN LETTER ZAH
+ {0x10A9D, 0x10A9F, prAL, gcNo}, // [3] OLD NORTH ARABIAN NUMBER ONE..OLD NORTH ARABIAN NUMBER TWENTY
+ {0x10AC0, 0x10AC7, prAL, gcLo}, // [8] MANICHAEAN LETTER ALEPH..MANICHAEAN LETTER WAW
+ {0x10AC8, 0x10AC8, prAL, gcSo}, // MANICHAEAN SIGN UD
+ {0x10AC9, 0x10AE4, prAL, gcLo}, // [28] MANICHAEAN LETTER ZAYIN..MANICHAEAN LETTER TAW
+ {0x10AE5, 0x10AE6, prCM, gcMn}, // [2] MANICHAEAN ABBREVIATION MARK ABOVE..MANICHAEAN ABBREVIATION MARK BELOW
+ {0x10AEB, 0x10AEF, prAL, gcNo}, // [5] MANICHAEAN NUMBER ONE..MANICHAEAN NUMBER ONE HUNDRED
+ {0x10AF0, 0x10AF5, prBA, gcPo}, // [6] MANICHAEAN PUNCTUATION STAR..MANICHAEAN PUNCTUATION TWO DOTS
+ {0x10AF6, 0x10AF6, prIN, gcPo}, // MANICHAEAN PUNCTUATION LINE FILLER
+ {0x10B00, 0x10B35, prAL, gcLo}, // [54] AVESTAN LETTER A..AVESTAN LETTER HE
+ {0x10B39, 0x10B3F, prBA, gcPo}, // [7] AVESTAN ABBREVIATION MARK..LARGE ONE RING OVER TWO RINGS PUNCTUATION
+ {0x10B40, 0x10B55, prAL, gcLo}, // [22] INSCRIPTIONAL PARTHIAN LETTER ALEPH..INSCRIPTIONAL PARTHIAN LETTER TAW
+ {0x10B58, 0x10B5F, prAL, gcNo}, // [8] INSCRIPTIONAL PARTHIAN NUMBER ONE..INSCRIPTIONAL PARTHIAN NUMBER ONE THOUSAND
+ {0x10B60, 0x10B72, prAL, gcLo}, // [19] INSCRIPTIONAL PAHLAVI LETTER ALEPH..INSCRIPTIONAL PAHLAVI LETTER TAW
+ {0x10B78, 0x10B7F, prAL, gcNo}, // [8] INSCRIPTIONAL PAHLAVI NUMBER ONE..INSCRIPTIONAL PAHLAVI NUMBER ONE THOUSAND
+ {0x10B80, 0x10B91, prAL, gcLo}, // [18] PSALTER PAHLAVI LETTER ALEPH..PSALTER PAHLAVI LETTER TAW
+ {0x10B99, 0x10B9C, prAL, gcPo}, // [4] PSALTER PAHLAVI SECTION MARK..PSALTER PAHLAVI FOUR DOTS WITH DOT
+ {0x10BA9, 0x10BAF, prAL, gcNo}, // [7] PSALTER PAHLAVI NUMBER ONE..PSALTER PAHLAVI NUMBER ONE HUNDRED
+ {0x10C00, 0x10C48, prAL, gcLo}, // [73] OLD TURKIC LETTER ORKHON A..OLD TURKIC LETTER ORKHON BASH
+ {0x10C80, 0x10CB2, prAL, gcLu}, // [51] OLD HUNGARIAN CAPITAL LETTER A..OLD HUNGARIAN CAPITAL LETTER US
+ {0x10CC0, 0x10CF2, prAL, gcLl}, // [51] OLD HUNGARIAN SMALL LETTER A..OLD HUNGARIAN SMALL LETTER US
+ {0x10CFA, 0x10CFF, prAL, gcNo}, // [6] OLD HUNGARIAN NUMBER ONE..OLD HUNGARIAN NUMBER ONE THOUSAND
+ {0x10D00, 0x10D23, prAL, gcLo}, // [36] HANIFI ROHINGYA LETTER A..HANIFI ROHINGYA MARK NA KHONNA
+ {0x10D24, 0x10D27, prCM, gcMn}, // [4] HANIFI ROHINGYA SIGN HARBAHAY..HANIFI ROHINGYA SIGN TASSI
+ {0x10D30, 0x10D39, prNU, gcNd}, // [10] HANIFI ROHINGYA DIGIT ZERO..HANIFI ROHINGYA DIGIT NINE
+ {0x10E60, 0x10E7E, prAL, gcNo}, // [31] RUMI DIGIT ONE..RUMI FRACTION TWO THIRDS
+ {0x10E80, 0x10EA9, prAL, gcLo}, // [42] YEZIDI LETTER ELIF..YEZIDI LETTER ET
+ {0x10EAB, 0x10EAC, prCM, gcMn}, // [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK
+ {0x10EAD, 0x10EAD, prBA, gcPd}, // YEZIDI HYPHENATION MARK
+ {0x10EB0, 0x10EB1, prAL, gcLo}, // [2] YEZIDI LETTER LAM WITH DOT ABOVE..YEZIDI LETTER YOT WITH CIRCUMFLEX ABOVE
+ {0x10EFD, 0x10EFF, prCM, gcMn}, // [3] ARABIC SMALL LOW WORD SAKTA..ARABIC SMALL LOW WORD MADDA
+ {0x10F00, 0x10F1C, prAL, gcLo}, // [29] OLD SOGDIAN LETTER ALEPH..OLD SOGDIAN LETTER FINAL TAW WITH VERTICAL TAIL
+ {0x10F1D, 0x10F26, prAL, gcNo}, // [10] OLD SOGDIAN NUMBER ONE..OLD SOGDIAN FRACTION ONE HALF
+ {0x10F27, 0x10F27, prAL, gcLo}, // OLD SOGDIAN LIGATURE AYIN-DALETH
+ {0x10F30, 0x10F45, prAL, gcLo}, // [22] SOGDIAN LETTER ALEPH..SOGDIAN INDEPENDENT SHIN
+ {0x10F46, 0x10F50, prCM, gcMn}, // [11] SOGDIAN COMBINING DOT BELOW..SOGDIAN COMBINING STROKE BELOW
+ {0x10F51, 0x10F54, prAL, gcNo}, // [4] SOGDIAN NUMBER ONE..SOGDIAN NUMBER ONE HUNDRED
+ {0x10F55, 0x10F59, prAL, gcPo}, // [5] SOGDIAN PUNCTUATION TWO VERTICAL BARS..SOGDIAN PUNCTUATION HALF CIRCLE WITH DOT
+ {0x10F70, 0x10F81, prAL, gcLo}, // [18] OLD UYGHUR LETTER ALEPH..OLD UYGHUR LETTER LESH
+ {0x10F82, 0x10F85, prCM, gcMn}, // [4] OLD UYGHUR COMBINING DOT ABOVE..OLD UYGHUR COMBINING TWO DOTS BELOW
+ {0x10F86, 0x10F89, prAL, gcPo}, // [4] OLD UYGHUR PUNCTUATION BAR..OLD UYGHUR PUNCTUATION FOUR DOTS
+ {0x10FB0, 0x10FC4, prAL, gcLo}, // [21] CHORASMIAN LETTER ALEPH..CHORASMIAN LETTER TAW
+ {0x10FC5, 0x10FCB, prAL, gcNo}, // [7] CHORASMIAN NUMBER ONE..CHORASMIAN NUMBER ONE HUNDRED
+ {0x10FE0, 0x10FF6, prAL, gcLo}, // [23] ELYMAIC LETTER ALEPH..ELYMAIC LIGATURE ZAYIN-YODH
+ {0x11000, 0x11000, prCM, gcMc}, // BRAHMI SIGN CANDRABINDU
+ {0x11001, 0x11001, prCM, gcMn}, // BRAHMI SIGN ANUSVARA
+ {0x11002, 0x11002, prCM, gcMc}, // BRAHMI SIGN VISARGA
+ {0x11003, 0x11037, prAL, gcLo}, // [53] BRAHMI SIGN JIHVAMULIYA..BRAHMI LETTER OLD TAMIL NNNA
+ {0x11038, 0x11046, prCM, gcMn}, // [15] BRAHMI VOWEL SIGN AA..BRAHMI VIRAMA
+ {0x11047, 0x11048, prBA, gcPo}, // [2] BRAHMI DANDA..BRAHMI DOUBLE DANDA
+ {0x11049, 0x1104D, prAL, gcPo}, // [5] BRAHMI PUNCTUATION DOT..BRAHMI PUNCTUATION LOTUS
+ {0x11052, 0x11065, prAL, gcNo}, // [20] BRAHMI NUMBER ONE..BRAHMI NUMBER ONE THOUSAND
+ {0x11066, 0x1106F, prNU, gcNd}, // [10] BRAHMI DIGIT ZERO..BRAHMI DIGIT NINE
+ {0x11070, 0x11070, prCM, gcMn}, // BRAHMI SIGN OLD TAMIL VIRAMA
+ {0x11071, 0x11072, prAL, gcLo}, // [2] BRAHMI LETTER OLD TAMIL SHORT E..BRAHMI LETTER OLD TAMIL SHORT O
+ {0x11073, 0x11074, prCM, gcMn}, // [2] BRAHMI VOWEL SIGN OLD TAMIL SHORT E..BRAHMI VOWEL SIGN OLD TAMIL SHORT O
+ {0x11075, 0x11075, prAL, gcLo}, // BRAHMI LETTER OLD TAMIL LLA
+ {0x1107F, 0x1107F, prCM, gcMn}, // BRAHMI NUMBER JOINER
+ {0x11080, 0x11081, prCM, gcMn}, // [2] KAITHI SIGN CANDRABINDU..KAITHI SIGN ANUSVARA
+ {0x11082, 0x11082, prCM, gcMc}, // KAITHI SIGN VISARGA
+ {0x11083, 0x110AF, prAL, gcLo}, // [45] KAITHI LETTER A..KAITHI LETTER HA
+ {0x110B0, 0x110B2, prCM, gcMc}, // [3] KAITHI VOWEL SIGN AA..KAITHI VOWEL SIGN II
+ {0x110B3, 0x110B6, prCM, gcMn}, // [4] KAITHI VOWEL SIGN U..KAITHI VOWEL SIGN AI
+ {0x110B7, 0x110B8, prCM, gcMc}, // [2] KAITHI VOWEL SIGN O..KAITHI VOWEL SIGN AU
+ {0x110B9, 0x110BA, prCM, gcMn}, // [2] KAITHI SIGN VIRAMA..KAITHI SIGN NUKTA
+ {0x110BB, 0x110BC, prAL, gcPo}, // [2] KAITHI ABBREVIATION SIGN..KAITHI ENUMERATION SIGN
+ {0x110BD, 0x110BD, prAL, gcCf}, // KAITHI NUMBER SIGN
+ {0x110BE, 0x110C1, prBA, gcPo}, // [4] KAITHI SECTION MARK..KAITHI DOUBLE DANDA
+ {0x110C2, 0x110C2, prCM, gcMn}, // KAITHI VOWEL SIGN VOCALIC R
+ {0x110CD, 0x110CD, prAL, gcCf}, // KAITHI NUMBER SIGN ABOVE
+ {0x110D0, 0x110E8, prAL, gcLo}, // [25] SORA SOMPENG LETTER SAH..SORA SOMPENG LETTER MAE
+ {0x110F0, 0x110F9, prNU, gcNd}, // [10] SORA SOMPENG DIGIT ZERO..SORA SOMPENG DIGIT NINE
+ {0x11100, 0x11102, prCM, gcMn}, // [3] CHAKMA SIGN CANDRABINDU..CHAKMA SIGN VISARGA
+ {0x11103, 0x11126, prAL, gcLo}, // [36] CHAKMA LETTER AA..CHAKMA LETTER HAA
+ {0x11127, 0x1112B, prCM, gcMn}, // [5] CHAKMA VOWEL SIGN A..CHAKMA VOWEL SIGN UU
+ {0x1112C, 0x1112C, prCM, gcMc}, // CHAKMA VOWEL SIGN E
+ {0x1112D, 0x11134, prCM, gcMn}, // [8] CHAKMA VOWEL SIGN AI..CHAKMA MAAYYAA
+ {0x11136, 0x1113F, prNU, gcNd}, // [10] CHAKMA DIGIT ZERO..CHAKMA DIGIT NINE
+ {0x11140, 0x11143, prBA, gcPo}, // [4] CHAKMA SECTION MARK..CHAKMA QUESTION MARK
+ {0x11144, 0x11144, prAL, gcLo}, // CHAKMA LETTER LHAA
+ {0x11145, 0x11146, prCM, gcMc}, // [2] CHAKMA VOWEL SIGN AA..CHAKMA VOWEL SIGN EI
+ {0x11147, 0x11147, prAL, gcLo}, // CHAKMA LETTER VAA
+ {0x11150, 0x11172, prAL, gcLo}, // [35] MAHAJANI LETTER A..MAHAJANI LETTER RRA
+ {0x11173, 0x11173, prCM, gcMn}, // MAHAJANI SIGN NUKTA
+ {0x11174, 0x11174, prAL, gcPo}, // MAHAJANI ABBREVIATION SIGN
+ {0x11175, 0x11175, prBB, gcPo}, // MAHAJANI SECTION MARK
+ {0x11176, 0x11176, prAL, gcLo}, // MAHAJANI LIGATURE SHRI
+ {0x11180, 0x11181, prCM, gcMn}, // [2] SHARADA SIGN CANDRABINDU..SHARADA SIGN ANUSVARA
+ {0x11182, 0x11182, prCM, gcMc}, // SHARADA SIGN VISARGA
+ {0x11183, 0x111B2, prAL, gcLo}, // [48] SHARADA LETTER A..SHARADA LETTER HA
+ {0x111B3, 0x111B5, prCM, gcMc}, // [3] SHARADA VOWEL SIGN AA..SHARADA VOWEL SIGN II
+ {0x111B6, 0x111BE, prCM, gcMn}, // [9] SHARADA VOWEL SIGN U..SHARADA VOWEL SIGN O
+ {0x111BF, 0x111C0, prCM, gcMc}, // [2] SHARADA VOWEL SIGN AU..SHARADA SIGN VIRAMA
+ {0x111C1, 0x111C4, prAL, gcLo}, // [4] SHARADA SIGN AVAGRAHA..SHARADA OM
+ {0x111C5, 0x111C6, prBA, gcPo}, // [2] SHARADA DANDA..SHARADA DOUBLE DANDA
+ {0x111C7, 0x111C7, prAL, gcPo}, // SHARADA ABBREVIATION SIGN
+ {0x111C8, 0x111C8, prBA, gcPo}, // SHARADA SEPARATOR
+ {0x111C9, 0x111CC, prCM, gcMn}, // [4] SHARADA SANDHI MARK..SHARADA EXTRA SHORT VOWEL MARK
+ {0x111CD, 0x111CD, prAL, gcPo}, // SHARADA SUTRA MARK
+ {0x111CE, 0x111CE, prCM, gcMc}, // SHARADA VOWEL SIGN PRISHTHAMATRA E
+ {0x111CF, 0x111CF, prCM, gcMn}, // SHARADA SIGN INVERTED CANDRABINDU
+ {0x111D0, 0x111D9, prNU, gcNd}, // [10] SHARADA DIGIT ZERO..SHARADA DIGIT NINE
+ {0x111DA, 0x111DA, prAL, gcLo}, // SHARADA EKAM
+ {0x111DB, 0x111DB, prBB, gcPo}, // SHARADA SIGN SIDDHAM
+ {0x111DC, 0x111DC, prAL, gcLo}, // SHARADA HEADSTROKE
+ {0x111DD, 0x111DF, prBA, gcPo}, // [3] SHARADA CONTINUATION SIGN..SHARADA SECTION MARK-2
+ {0x111E1, 0x111F4, prAL, gcNo}, // [20] SINHALA ARCHAIC DIGIT ONE..SINHALA ARCHAIC NUMBER ONE THOUSAND
+ {0x11200, 0x11211, prAL, gcLo}, // [18] KHOJKI LETTER A..KHOJKI LETTER JJA
+ {0x11213, 0x1122B, prAL, gcLo}, // [25] KHOJKI LETTER NYA..KHOJKI LETTER LLA
+ {0x1122C, 0x1122E, prCM, gcMc}, // [3] KHOJKI VOWEL SIGN AA..KHOJKI VOWEL SIGN II
+ {0x1122F, 0x11231, prCM, gcMn}, // [3] KHOJKI VOWEL SIGN U..KHOJKI VOWEL SIGN AI
+ {0x11232, 0x11233, prCM, gcMc}, // [2] KHOJKI VOWEL SIGN O..KHOJKI VOWEL SIGN AU
+ {0x11234, 0x11234, prCM, gcMn}, // KHOJKI SIGN ANUSVARA
+ {0x11235, 0x11235, prCM, gcMc}, // KHOJKI SIGN VIRAMA
+ {0x11236, 0x11237, prCM, gcMn}, // [2] KHOJKI SIGN NUKTA..KHOJKI SIGN SHADDA
+ {0x11238, 0x11239, prBA, gcPo}, // [2] KHOJKI DANDA..KHOJKI DOUBLE DANDA
+ {0x1123A, 0x1123A, prAL, gcPo}, // KHOJKI WORD SEPARATOR
+ {0x1123B, 0x1123C, prBA, gcPo}, // [2] KHOJKI SECTION MARK..KHOJKI DOUBLE SECTION MARK
+ {0x1123D, 0x1123D, prAL, gcPo}, // KHOJKI ABBREVIATION SIGN
+ {0x1123E, 0x1123E, prCM, gcMn}, // KHOJKI SIGN SUKUN
+ {0x1123F, 0x11240, prAL, gcLo}, // [2] KHOJKI LETTER QA..KHOJKI LETTER SHORT I
+ {0x11241, 0x11241, prCM, gcMn}, // KHOJKI VOWEL SIGN VOCALIC R
+ {0x11280, 0x11286, prAL, gcLo}, // [7] MULTANI LETTER A..MULTANI LETTER GA
+ {0x11288, 0x11288, prAL, gcLo}, // MULTANI LETTER GHA
+ {0x1128A, 0x1128D, prAL, gcLo}, // [4] MULTANI LETTER CA..MULTANI LETTER JJA
+ {0x1128F, 0x1129D, prAL, gcLo}, // [15] MULTANI LETTER NYA..MULTANI LETTER BA
+ {0x1129F, 0x112A8, prAL, gcLo}, // [10] MULTANI LETTER BHA..MULTANI LETTER RHA
+ {0x112A9, 0x112A9, prBA, gcPo}, // MULTANI SECTION MARK
+ {0x112B0, 0x112DE, prAL, gcLo}, // [47] KHUDAWADI LETTER A..KHUDAWADI LETTER HA
+ {0x112DF, 0x112DF, prCM, gcMn}, // KHUDAWADI SIGN ANUSVARA
+ {0x112E0, 0x112E2, prCM, gcMc}, // [3] KHUDAWADI VOWEL SIGN AA..KHUDAWADI VOWEL SIGN II
+ {0x112E3, 0x112EA, prCM, gcMn}, // [8] KHUDAWADI VOWEL SIGN U..KHUDAWADI SIGN VIRAMA
+ {0x112F0, 0x112F9, prNU, gcNd}, // [10] KHUDAWADI DIGIT ZERO..KHUDAWADI DIGIT NINE
+ {0x11300, 0x11301, prCM, gcMn}, // [2] GRANTHA SIGN COMBINING ANUSVARA ABOVE..GRANTHA SIGN CANDRABINDU
+ {0x11302, 0x11303, prCM, gcMc}, // [2] GRANTHA SIGN ANUSVARA..GRANTHA SIGN VISARGA
+ {0x11305, 0x1130C, prAL, gcLo}, // [8] GRANTHA LETTER A..GRANTHA LETTER VOCALIC L
+ {0x1130F, 0x11310, prAL, gcLo}, // [2] GRANTHA LETTER EE..GRANTHA LETTER AI
+ {0x11313, 0x11328, prAL, gcLo}, // [22] GRANTHA LETTER OO..GRANTHA LETTER NA
+ {0x1132A, 0x11330, prAL, gcLo}, // [7] GRANTHA LETTER PA..GRANTHA LETTER RA
+ {0x11332, 0x11333, prAL, gcLo}, // [2] GRANTHA LETTER LA..GRANTHA LETTER LLA
+ {0x11335, 0x11339, prAL, gcLo}, // [5] GRANTHA LETTER VA..GRANTHA LETTER HA
+ {0x1133B, 0x1133C, prCM, gcMn}, // [2] COMBINING BINDU BELOW..GRANTHA SIGN NUKTA
+ {0x1133D, 0x1133D, prAL, gcLo}, // GRANTHA SIGN AVAGRAHA
+ {0x1133E, 0x1133F, prCM, gcMc}, // [2] GRANTHA VOWEL SIGN AA..GRANTHA VOWEL SIGN I
+ {0x11340, 0x11340, prCM, gcMn}, // GRANTHA VOWEL SIGN II
+ {0x11341, 0x11344, prCM, gcMc}, // [4] GRANTHA VOWEL SIGN U..GRANTHA VOWEL SIGN VOCALIC RR
+ {0x11347, 0x11348, prCM, gcMc}, // [2] GRANTHA VOWEL SIGN EE..GRANTHA VOWEL SIGN AI
+ {0x1134B, 0x1134D, prCM, gcMc}, // [3] GRANTHA VOWEL SIGN OO..GRANTHA SIGN VIRAMA
+ {0x11350, 0x11350, prAL, gcLo}, // GRANTHA OM
+ {0x11357, 0x11357, prCM, gcMc}, // GRANTHA AU LENGTH MARK
+ {0x1135D, 0x11361, prAL, gcLo}, // [5] GRANTHA SIGN PLUTA..GRANTHA LETTER VOCALIC LL
+ {0x11362, 0x11363, prCM, gcMc}, // [2] GRANTHA VOWEL SIGN VOCALIC L..GRANTHA VOWEL SIGN VOCALIC LL
+ {0x11366, 0x1136C, prCM, gcMn}, // [7] COMBINING GRANTHA DIGIT ZERO..COMBINING GRANTHA DIGIT SIX
+ {0x11370, 0x11374, prCM, gcMn}, // [5] COMBINING GRANTHA LETTER A..COMBINING GRANTHA LETTER PA
+ {0x11400, 0x11434, prAL, gcLo}, // [53] NEWA LETTER A..NEWA LETTER HA
+ {0x11435, 0x11437, prCM, gcMc}, // [3] NEWA VOWEL SIGN AA..NEWA VOWEL SIGN II
+ {0x11438, 0x1143F, prCM, gcMn}, // [8] NEWA VOWEL SIGN U..NEWA VOWEL SIGN AI
+ {0x11440, 0x11441, prCM, gcMc}, // [2] NEWA VOWEL SIGN O..NEWA VOWEL SIGN AU
+ {0x11442, 0x11444, prCM, gcMn}, // [3] NEWA SIGN VIRAMA..NEWA SIGN ANUSVARA
+ {0x11445, 0x11445, prCM, gcMc}, // NEWA SIGN VISARGA
+ {0x11446, 0x11446, prCM, gcMn}, // NEWA SIGN NUKTA
+ {0x11447, 0x1144A, prAL, gcLo}, // [4] NEWA SIGN AVAGRAHA..NEWA SIDDHI
+ {0x1144B, 0x1144E, prBA, gcPo}, // [4] NEWA DANDA..NEWA GAP FILLER
+ {0x1144F, 0x1144F, prAL, gcPo}, // NEWA ABBREVIATION SIGN
+ {0x11450, 0x11459, prNU, gcNd}, // [10] NEWA DIGIT ZERO..NEWA DIGIT NINE
+ {0x1145A, 0x1145B, prBA, gcPo}, // [2] NEWA DOUBLE COMMA..NEWA PLACEHOLDER MARK
+ {0x1145D, 0x1145D, prAL, gcPo}, // NEWA INSERTION SIGN
+ {0x1145E, 0x1145E, prCM, gcMn}, // NEWA SANDHI MARK
+ {0x1145F, 0x11461, prAL, gcLo}, // [3] NEWA LETTER VEDIC ANUSVARA..NEWA SIGN UPADHMANIYA
+ {0x11480, 0x114AF, prAL, gcLo}, // [48] TIRHUTA ANJI..TIRHUTA LETTER HA
+ {0x114B0, 0x114B2, prCM, gcMc}, // [3] TIRHUTA VOWEL SIGN AA..TIRHUTA VOWEL SIGN II
+ {0x114B3, 0x114B8, prCM, gcMn}, // [6] TIRHUTA VOWEL SIGN U..TIRHUTA VOWEL SIGN VOCALIC LL
+ {0x114B9, 0x114B9, prCM, gcMc}, // TIRHUTA VOWEL SIGN E
+ {0x114BA, 0x114BA, prCM, gcMn}, // TIRHUTA VOWEL SIGN SHORT E
+ {0x114BB, 0x114BE, prCM, gcMc}, // [4] TIRHUTA VOWEL SIGN AI..TIRHUTA VOWEL SIGN AU
+ {0x114BF, 0x114C0, prCM, gcMn}, // [2] TIRHUTA SIGN CANDRABINDU..TIRHUTA SIGN ANUSVARA
+ {0x114C1, 0x114C1, prCM, gcMc}, // TIRHUTA SIGN VISARGA
+ {0x114C2, 0x114C3, prCM, gcMn}, // [2] TIRHUTA SIGN VIRAMA..TIRHUTA SIGN NUKTA
+ {0x114C4, 0x114C5, prAL, gcLo}, // [2] TIRHUTA SIGN AVAGRAHA..TIRHUTA GVANG
+ {0x114C6, 0x114C6, prAL, gcPo}, // TIRHUTA ABBREVIATION SIGN
+ {0x114C7, 0x114C7, prAL, gcLo}, // TIRHUTA OM
+ {0x114D0, 0x114D9, prNU, gcNd}, // [10] TIRHUTA DIGIT ZERO..TIRHUTA DIGIT NINE
+ {0x11580, 0x115AE, prAL, gcLo}, // [47] SIDDHAM LETTER A..SIDDHAM LETTER HA
+ {0x115AF, 0x115B1, prCM, gcMc}, // [3] SIDDHAM VOWEL SIGN AA..SIDDHAM VOWEL SIGN II
+ {0x115B2, 0x115B5, prCM, gcMn}, // [4] SIDDHAM VOWEL SIGN U..SIDDHAM VOWEL SIGN VOCALIC RR
+ {0x115B8, 0x115BB, prCM, gcMc}, // [4] SIDDHAM VOWEL SIGN E..SIDDHAM VOWEL SIGN AU
+ {0x115BC, 0x115BD, prCM, gcMn}, // [2] SIDDHAM SIGN CANDRABINDU..SIDDHAM SIGN ANUSVARA
+ {0x115BE, 0x115BE, prCM, gcMc}, // SIDDHAM SIGN VISARGA
+ {0x115BF, 0x115C0, prCM, gcMn}, // [2] SIDDHAM SIGN VIRAMA..SIDDHAM SIGN NUKTA
+ {0x115C1, 0x115C1, prBB, gcPo}, // SIDDHAM SIGN SIDDHAM
+ {0x115C2, 0x115C3, prBA, gcPo}, // [2] SIDDHAM DANDA..SIDDHAM DOUBLE DANDA
+ {0x115C4, 0x115C5, prEX, gcPo}, // [2] SIDDHAM SEPARATOR DOT..SIDDHAM SEPARATOR BAR
+ {0x115C6, 0x115C8, prAL, gcPo}, // [3] SIDDHAM REPETITION MARK-1..SIDDHAM REPETITION MARK-3
+ {0x115C9, 0x115D7, prBA, gcPo}, // [15] SIDDHAM END OF TEXT MARK..SIDDHAM SECTION MARK WITH CIRCLES AND FOUR ENCLOSURES
+ {0x115D8, 0x115DB, prAL, gcLo}, // [4] SIDDHAM LETTER THREE-CIRCLE ALTERNATE I..SIDDHAM LETTER ALTERNATE U
+ {0x115DC, 0x115DD, prCM, gcMn}, // [2] SIDDHAM VOWEL SIGN ALTERNATE U..SIDDHAM VOWEL SIGN ALTERNATE UU
+ {0x11600, 0x1162F, prAL, gcLo}, // [48] MODI LETTER A..MODI LETTER LLA
+ {0x11630, 0x11632, prCM, gcMc}, // [3] MODI VOWEL SIGN AA..MODI VOWEL SIGN II
+ {0x11633, 0x1163A, prCM, gcMn}, // [8] MODI VOWEL SIGN U..MODI VOWEL SIGN AI
+ {0x1163B, 0x1163C, prCM, gcMc}, // [2] MODI VOWEL SIGN O..MODI VOWEL SIGN AU
+ {0x1163D, 0x1163D, prCM, gcMn}, // MODI SIGN ANUSVARA
+ {0x1163E, 0x1163E, prCM, gcMc}, // MODI SIGN VISARGA
+ {0x1163F, 0x11640, prCM, gcMn}, // [2] MODI SIGN VIRAMA..MODI SIGN ARDHACANDRA
+ {0x11641, 0x11642, prBA, gcPo}, // [2] MODI DANDA..MODI DOUBLE DANDA
+ {0x11643, 0x11643, prAL, gcPo}, // MODI ABBREVIATION SIGN
+ {0x11644, 0x11644, prAL, gcLo}, // MODI SIGN HUVA
+ {0x11650, 0x11659, prNU, gcNd}, // [10] MODI DIGIT ZERO..MODI DIGIT NINE
+ {0x11660, 0x1166C, prBB, gcPo}, // [13] MONGOLIAN BIRGA WITH ORNAMENT..MONGOLIAN TURNED SWIRL BIRGA WITH DOUBLE ORNAMENT
+ {0x11680, 0x116AA, prAL, gcLo}, // [43] TAKRI LETTER A..TAKRI LETTER RRA
+ {0x116AB, 0x116AB, prCM, gcMn}, // TAKRI SIGN ANUSVARA
+ {0x116AC, 0x116AC, prCM, gcMc}, // TAKRI SIGN VISARGA
+ {0x116AD, 0x116AD, prCM, gcMn}, // TAKRI VOWEL SIGN AA
+ {0x116AE, 0x116AF, prCM, gcMc}, // [2] TAKRI VOWEL SIGN I..TAKRI VOWEL SIGN II
+ {0x116B0, 0x116B5, prCM, gcMn}, // [6] TAKRI VOWEL SIGN U..TAKRI VOWEL SIGN AU
+ {0x116B6, 0x116B6, prCM, gcMc}, // TAKRI SIGN VIRAMA
+ {0x116B7, 0x116B7, prCM, gcMn}, // TAKRI SIGN NUKTA
+ {0x116B8, 0x116B8, prAL, gcLo}, // TAKRI LETTER ARCHAIC KHA
+ {0x116B9, 0x116B9, prAL, gcPo}, // TAKRI ABBREVIATION SIGN
+ {0x116C0, 0x116C9, prNU, gcNd}, // [10] TAKRI DIGIT ZERO..TAKRI DIGIT NINE
+ {0x11700, 0x1171A, prSA, gcLo}, // [27] AHOM LETTER KA..AHOM LETTER ALTERNATE BA
+ {0x1171D, 0x1171F, prSA, gcMn}, // [3] AHOM CONSONANT SIGN MEDIAL LA..AHOM CONSONANT SIGN MEDIAL LIGATING RA
+ {0x11720, 0x11721, prSA, gcMc}, // [2] AHOM VOWEL SIGN A..AHOM VOWEL SIGN AA
+ {0x11722, 0x11725, prSA, gcMn}, // [4] AHOM VOWEL SIGN I..AHOM VOWEL SIGN UU
+ {0x11726, 0x11726, prSA, gcMc}, // AHOM VOWEL SIGN E
+ {0x11727, 0x1172B, prSA, gcMn}, // [5] AHOM VOWEL SIGN AW..AHOM SIGN KILLER
+ {0x11730, 0x11739, prNU, gcNd}, // [10] AHOM DIGIT ZERO..AHOM DIGIT NINE
+ {0x1173A, 0x1173B, prSA, gcNo}, // [2] AHOM NUMBER TEN..AHOM NUMBER TWENTY
+ {0x1173C, 0x1173E, prBA, gcPo}, // [3] AHOM SIGN SMALL SECTION..AHOM SIGN RULAI
+ {0x1173F, 0x1173F, prSA, gcSo}, // AHOM SYMBOL VI
+ {0x11740, 0x11746, prSA, gcLo}, // [7] AHOM LETTER CA..AHOM LETTER LLA
+ {0x11800, 0x1182B, prAL, gcLo}, // [44] DOGRA LETTER A..DOGRA LETTER RRA
+ {0x1182C, 0x1182E, prCM, gcMc}, // [3] DOGRA VOWEL SIGN AA..DOGRA VOWEL SIGN II
+ {0x1182F, 0x11837, prCM, gcMn}, // [9] DOGRA VOWEL SIGN U..DOGRA SIGN ANUSVARA
+ {0x11838, 0x11838, prCM, gcMc}, // DOGRA SIGN VISARGA
+ {0x11839, 0x1183A, prCM, gcMn}, // [2] DOGRA SIGN VIRAMA..DOGRA SIGN NUKTA
+ {0x1183B, 0x1183B, prAL, gcPo}, // DOGRA ABBREVIATION SIGN
+ {0x118A0, 0x118DF, prAL, gcLC}, // [64] WARANG CITI CAPITAL LETTER NGAA..WARANG CITI SMALL LETTER VIYO
+ {0x118E0, 0x118E9, prNU, gcNd}, // [10] WARANG CITI DIGIT ZERO..WARANG CITI DIGIT NINE
+ {0x118EA, 0x118F2, prAL, gcNo}, // [9] WARANG CITI NUMBER TEN..WARANG CITI NUMBER NINETY
+ {0x118FF, 0x118FF, prAL, gcLo}, // WARANG CITI OM
+ {0x11900, 0x11906, prAL, gcLo}, // [7] DIVES AKURU LETTER A..DIVES AKURU LETTER E
+ {0x11909, 0x11909, prAL, gcLo}, // DIVES AKURU LETTER O
+ {0x1190C, 0x11913, prAL, gcLo}, // [8] DIVES AKURU LETTER KA..DIVES AKURU LETTER JA
+ {0x11915, 0x11916, prAL, gcLo}, // [2] DIVES AKURU LETTER NYA..DIVES AKURU LETTER TTA
+ {0x11918, 0x1192F, prAL, gcLo}, // [24] DIVES AKURU LETTER DDA..DIVES AKURU LETTER ZA
+ {0x11930, 0x11935, prCM, gcMc}, // [6] DIVES AKURU VOWEL SIGN AA..DIVES AKURU VOWEL SIGN E
+ {0x11937, 0x11938, prCM, gcMc}, // [2] DIVES AKURU VOWEL SIGN AI..DIVES AKURU VOWEL SIGN O
+ {0x1193B, 0x1193C, prCM, gcMn}, // [2] DIVES AKURU SIGN ANUSVARA..DIVES AKURU SIGN CANDRABINDU
+ {0x1193D, 0x1193D, prCM, gcMc}, // DIVES AKURU SIGN HALANTA
+ {0x1193E, 0x1193E, prCM, gcMn}, // DIVES AKURU VIRAMA
+ {0x1193F, 0x1193F, prAL, gcLo}, // DIVES AKURU PREFIXED NASAL SIGN
+ {0x11940, 0x11940, prCM, gcMc}, // DIVES AKURU MEDIAL YA
+ {0x11941, 0x11941, prAL, gcLo}, // DIVES AKURU INITIAL RA
+ {0x11942, 0x11942, prCM, gcMc}, // DIVES AKURU MEDIAL RA
+ {0x11943, 0x11943, prCM, gcMn}, // DIVES AKURU SIGN NUKTA
+ {0x11944, 0x11946, prBA, gcPo}, // [3] DIVES AKURU DOUBLE DANDA..DIVES AKURU END OF TEXT MARK
+ {0x11950, 0x11959, prNU, gcNd}, // [10] DIVES AKURU DIGIT ZERO..DIVES AKURU DIGIT NINE
+ {0x119A0, 0x119A7, prAL, gcLo}, // [8] NANDINAGARI LETTER A..NANDINAGARI LETTER VOCALIC RR
+ {0x119AA, 0x119D0, prAL, gcLo}, // [39] NANDINAGARI LETTER E..NANDINAGARI LETTER RRA
+ {0x119D1, 0x119D3, prCM, gcMc}, // [3] NANDINAGARI VOWEL SIGN AA..NANDINAGARI VOWEL SIGN II
+ {0x119D4, 0x119D7, prCM, gcMn}, // [4] NANDINAGARI VOWEL SIGN U..NANDINAGARI VOWEL SIGN VOCALIC RR
+ {0x119DA, 0x119DB, prCM, gcMn}, // [2] NANDINAGARI VOWEL SIGN E..NANDINAGARI VOWEL SIGN AI
+ {0x119DC, 0x119DF, prCM, gcMc}, // [4] NANDINAGARI VOWEL SIGN O..NANDINAGARI SIGN VISARGA
+ {0x119E0, 0x119E0, prCM, gcMn}, // NANDINAGARI SIGN VIRAMA
+ {0x119E1, 0x119E1, prAL, gcLo}, // NANDINAGARI SIGN AVAGRAHA
+ {0x119E2, 0x119E2, prBB, gcPo}, // NANDINAGARI SIGN SIDDHAM
+ {0x119E3, 0x119E3, prAL, gcLo}, // NANDINAGARI HEADSTROKE
+ {0x119E4, 0x119E4, prCM, gcMc}, // NANDINAGARI VOWEL SIGN PRISHTHAMATRA E
+ {0x11A00, 0x11A00, prAL, gcLo}, // ZANABAZAR SQUARE LETTER A
+ {0x11A01, 0x11A0A, prCM, gcMn}, // [10] ZANABAZAR SQUARE VOWEL SIGN I..ZANABAZAR SQUARE VOWEL LENGTH MARK
+ {0x11A0B, 0x11A32, prAL, gcLo}, // [40] ZANABAZAR SQUARE LETTER KA..ZANABAZAR SQUARE LETTER KSSA
+ {0x11A33, 0x11A38, prCM, gcMn}, // [6] ZANABAZAR SQUARE FINAL CONSONANT MARK..ZANABAZAR SQUARE SIGN ANUSVARA
+ {0x11A39, 0x11A39, prCM, gcMc}, // ZANABAZAR SQUARE SIGN VISARGA
+ {0x11A3A, 0x11A3A, prAL, gcLo}, // ZANABAZAR SQUARE CLUSTER-INITIAL LETTER RA
+ {0x11A3B, 0x11A3E, prCM, gcMn}, // [4] ZANABAZAR SQUARE CLUSTER-FINAL LETTER YA..ZANABAZAR SQUARE CLUSTER-FINAL LETTER VA
+ {0x11A3F, 0x11A3F, prBB, gcPo}, // ZANABAZAR SQUARE INITIAL HEAD MARK
+ {0x11A40, 0x11A40, prAL, gcPo}, // ZANABAZAR SQUARE CLOSING HEAD MARK
+ {0x11A41, 0x11A44, prBA, gcPo}, // [4] ZANABAZAR SQUARE MARK TSHEG..ZANABAZAR SQUARE MARK LONG TSHEG
+ {0x11A45, 0x11A45, prBB, gcPo}, // ZANABAZAR SQUARE INITIAL DOUBLE-LINED HEAD MARK
+ {0x11A46, 0x11A46, prAL, gcPo}, // ZANABAZAR SQUARE CLOSING DOUBLE-LINED HEAD MARK
+ {0x11A47, 0x11A47, prCM, gcMn}, // ZANABAZAR SQUARE SUBJOINER
+ {0x11A50, 0x11A50, prAL, gcLo}, // SOYOMBO LETTER A
+ {0x11A51, 0x11A56, prCM, gcMn}, // [6] SOYOMBO VOWEL SIGN I..SOYOMBO VOWEL SIGN OE
+ {0x11A57, 0x11A58, prCM, gcMc}, // [2] SOYOMBO VOWEL SIGN AI..SOYOMBO VOWEL SIGN AU
+ {0x11A59, 0x11A5B, prCM, gcMn}, // [3] SOYOMBO VOWEL SIGN VOCALIC R..SOYOMBO VOWEL LENGTH MARK
+ {0x11A5C, 0x11A89, prAL, gcLo}, // [46] SOYOMBO LETTER KA..SOYOMBO CLUSTER-INITIAL LETTER SA
+ {0x11A8A, 0x11A96, prCM, gcMn}, // [13] SOYOMBO FINAL CONSONANT SIGN G..SOYOMBO SIGN ANUSVARA
+ {0x11A97, 0x11A97, prCM, gcMc}, // SOYOMBO SIGN VISARGA
+ {0x11A98, 0x11A99, prCM, gcMn}, // [2] SOYOMBO GEMINATION MARK..SOYOMBO SUBJOINER
+ {0x11A9A, 0x11A9C, prBA, gcPo}, // [3] SOYOMBO MARK TSHEG..SOYOMBO MARK DOUBLE SHAD
+ {0x11A9D, 0x11A9D, prAL, gcLo}, // SOYOMBO MARK PLUTA
+ {0x11A9E, 0x11AA0, prBB, gcPo}, // [3] SOYOMBO HEAD MARK WITH MOON AND SUN AND TRIPLE FLAME..SOYOMBO HEAD MARK WITH MOON AND SUN
+ {0x11AA1, 0x11AA2, prBA, gcPo}, // [2] SOYOMBO TERMINAL MARK-1..SOYOMBO TERMINAL MARK-2
+ {0x11AB0, 0x11ABF, prAL, gcLo}, // [16] CANADIAN SYLLABICS NATTILIK HI..CANADIAN SYLLABICS SPA
+ {0x11AC0, 0x11AF8, prAL, gcLo}, // [57] PAU CIN HAU LETTER PA..PAU CIN HAU GLOTTAL STOP FINAL
+ {0x11B00, 0x11B09, prBB, gcPo}, // [10] DEVANAGARI HEAD MARK..DEVANAGARI SIGN MINDU
+ {0x11C00, 0x11C08, prAL, gcLo}, // [9] BHAIKSUKI LETTER A..BHAIKSUKI LETTER VOCALIC L
+ {0x11C0A, 0x11C2E, prAL, gcLo}, // [37] BHAIKSUKI LETTER E..BHAIKSUKI LETTER HA
+ {0x11C2F, 0x11C2F, prCM, gcMc}, // BHAIKSUKI VOWEL SIGN AA
+ {0x11C30, 0x11C36, prCM, gcMn}, // [7] BHAIKSUKI VOWEL SIGN I..BHAIKSUKI VOWEL SIGN VOCALIC L
+ {0x11C38, 0x11C3D, prCM, gcMn}, // [6] BHAIKSUKI VOWEL SIGN E..BHAIKSUKI SIGN ANUSVARA
+ {0x11C3E, 0x11C3E, prCM, gcMc}, // BHAIKSUKI SIGN VISARGA
+ {0x11C3F, 0x11C3F, prCM, gcMn}, // BHAIKSUKI SIGN VIRAMA
+ {0x11C40, 0x11C40, prAL, gcLo}, // BHAIKSUKI SIGN AVAGRAHA
+ {0x11C41, 0x11C45, prBA, gcPo}, // [5] BHAIKSUKI DANDA..BHAIKSUKI GAP FILLER-2
+ {0x11C50, 0x11C59, prNU, gcNd}, // [10] BHAIKSUKI DIGIT ZERO..BHAIKSUKI DIGIT NINE
+ {0x11C5A, 0x11C6C, prAL, gcNo}, // [19] BHAIKSUKI NUMBER ONE..BHAIKSUKI HUNDREDS UNIT MARK
+ {0x11C70, 0x11C70, prBB, gcPo}, // MARCHEN HEAD MARK
+ {0x11C71, 0x11C71, prEX, gcPo}, // MARCHEN MARK SHAD
+ {0x11C72, 0x11C8F, prAL, gcLo}, // [30] MARCHEN LETTER KA..MARCHEN LETTER A
+ {0x11C92, 0x11CA7, prCM, gcMn}, // [22] MARCHEN SUBJOINED LETTER KA..MARCHEN SUBJOINED LETTER ZA
+ {0x11CA9, 0x11CA9, prCM, gcMc}, // MARCHEN SUBJOINED LETTER YA
+ {0x11CAA, 0x11CB0, prCM, gcMn}, // [7] MARCHEN SUBJOINED LETTER RA..MARCHEN VOWEL SIGN AA
+ {0x11CB1, 0x11CB1, prCM, gcMc}, // MARCHEN VOWEL SIGN I
+ {0x11CB2, 0x11CB3, prCM, gcMn}, // [2] MARCHEN VOWEL SIGN U..MARCHEN VOWEL SIGN E
+ {0x11CB4, 0x11CB4, prCM, gcMc}, // MARCHEN VOWEL SIGN O
+ {0x11CB5, 0x11CB6, prCM, gcMn}, // [2] MARCHEN SIGN ANUSVARA..MARCHEN SIGN CANDRABINDU
+ {0x11D00, 0x11D06, prAL, gcLo}, // [7] MASARAM GONDI LETTER A..MASARAM GONDI LETTER E
+ {0x11D08, 0x11D09, prAL, gcLo}, // [2] MASARAM GONDI LETTER AI..MASARAM GONDI LETTER O
+ {0x11D0B, 0x11D30, prAL, gcLo}, // [38] MASARAM GONDI LETTER AU..MASARAM GONDI LETTER TRA
+ {0x11D31, 0x11D36, prCM, gcMn}, // [6] MASARAM GONDI VOWEL SIGN AA..MASARAM GONDI VOWEL SIGN VOCALIC R
+ {0x11D3A, 0x11D3A, prCM, gcMn}, // MASARAM GONDI VOWEL SIGN E
+ {0x11D3C, 0x11D3D, prCM, gcMn}, // [2] MASARAM GONDI VOWEL SIGN AI..MASARAM GONDI VOWEL SIGN O
+ {0x11D3F, 0x11D45, prCM, gcMn}, // [7] MASARAM GONDI VOWEL SIGN AU..MASARAM GONDI VIRAMA
+ {0x11D46, 0x11D46, prAL, gcLo}, // MASARAM GONDI REPHA
+ {0x11D47, 0x11D47, prCM, gcMn}, // MASARAM GONDI RA-KARA
+ {0x11D50, 0x11D59, prNU, gcNd}, // [10] MASARAM GONDI DIGIT ZERO..MASARAM GONDI DIGIT NINE
+ {0x11D60, 0x11D65, prAL, gcLo}, // [6] GUNJALA GONDI LETTER A..GUNJALA GONDI LETTER UU
+ {0x11D67, 0x11D68, prAL, gcLo}, // [2] GUNJALA GONDI LETTER EE..GUNJALA GONDI LETTER AI
+ {0x11D6A, 0x11D89, prAL, gcLo}, // [32] GUNJALA GONDI LETTER OO..GUNJALA GONDI LETTER SA
+ {0x11D8A, 0x11D8E, prCM, gcMc}, // [5] GUNJALA GONDI VOWEL SIGN AA..GUNJALA GONDI VOWEL SIGN UU
+ {0x11D90, 0x11D91, prCM, gcMn}, // [2] GUNJALA GONDI VOWEL SIGN EE..GUNJALA GONDI VOWEL SIGN AI
+ {0x11D93, 0x11D94, prCM, gcMc}, // [2] GUNJALA GONDI VOWEL SIGN OO..GUNJALA GONDI VOWEL SIGN AU
+ {0x11D95, 0x11D95, prCM, gcMn}, // GUNJALA GONDI SIGN ANUSVARA
+ {0x11D96, 0x11D96, prCM, gcMc}, // GUNJALA GONDI SIGN VISARGA
+ {0x11D97, 0x11D97, prCM, gcMn}, // GUNJALA GONDI VIRAMA
+ {0x11D98, 0x11D98, prAL, gcLo}, // GUNJALA GONDI OM
+ {0x11DA0, 0x11DA9, prNU, gcNd}, // [10] GUNJALA GONDI DIGIT ZERO..GUNJALA GONDI DIGIT NINE
+ {0x11EE0, 0x11EF2, prAL, gcLo}, // [19] MAKASAR LETTER KA..MAKASAR ANGKA
+ {0x11EF3, 0x11EF4, prCM, gcMn}, // [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U
+ {0x11EF5, 0x11EF6, prCM, gcMc}, // [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O
+ {0x11EF7, 0x11EF8, prAL, gcPo}, // [2] MAKASAR PASSIMBANG..MAKASAR END OF SECTION
+ {0x11F00, 0x11F01, prCM, gcMn}, // [2] KAWI SIGN CANDRABINDU..KAWI SIGN ANUSVARA
+ {0x11F02, 0x11F02, prAL, gcLo}, // KAWI SIGN REPHA
+ {0x11F03, 0x11F03, prCM, gcMc}, // KAWI SIGN VISARGA
+ {0x11F04, 0x11F10, prAL, gcLo}, // [13] KAWI LETTER A..KAWI LETTER O
+ {0x11F12, 0x11F33, prAL, gcLo}, // [34] KAWI LETTER KA..KAWI LETTER JNYA
+ {0x11F34, 0x11F35, prCM, gcMc}, // [2] KAWI VOWEL SIGN AA..KAWI VOWEL SIGN ALTERNATE AA
+ {0x11F36, 0x11F3A, prCM, gcMn}, // [5] KAWI VOWEL SIGN I..KAWI VOWEL SIGN VOCALIC R
+ {0x11F3E, 0x11F3F, prCM, gcMc}, // [2] KAWI VOWEL SIGN E..KAWI VOWEL SIGN AI
+ {0x11F40, 0x11F40, prCM, gcMn}, // KAWI VOWEL SIGN EU
+ {0x11F41, 0x11F41, prCM, gcMc}, // KAWI SIGN KILLER
+ {0x11F42, 0x11F42, prCM, gcMn}, // KAWI CONJOINER
+ {0x11F43, 0x11F44, prBA, gcPo}, // [2] KAWI DANDA..KAWI DOUBLE DANDA
+ {0x11F45, 0x11F4F, prID, gcPo}, // [11] KAWI PUNCTUATION SECTION MARKER..KAWI PUNCTUATION CLOSING SPIRAL
+ {0x11F50, 0x11F59, prNU, gcNd}, // [10] KAWI DIGIT ZERO..KAWI DIGIT NINE
+ {0x11FB0, 0x11FB0, prAL, gcLo}, // LISU LETTER YHA
+ {0x11FC0, 0x11FD4, prAL, gcNo}, // [21] TAMIL FRACTION ONE THREE-HUNDRED-AND-TWENTIETH..TAMIL FRACTION DOWNSCALING FACTOR KIIZH
+ {0x11FD5, 0x11FDC, prAL, gcSo}, // [8] TAMIL SIGN NEL..TAMIL SIGN MUKKURUNI
+ {0x11FDD, 0x11FE0, prPO, gcSc}, // [4] TAMIL SIGN KAACU..TAMIL SIGN VARAAKAN
+ {0x11FE1, 0x11FF1, prAL, gcSo}, // [17] TAMIL SIGN PAARAM..TAMIL SIGN VAKAIYARAA
+ {0x11FFF, 0x11FFF, prBA, gcPo}, // TAMIL PUNCTUATION END OF TEXT
+ {0x12000, 0x12399, prAL, gcLo}, // [922] CUNEIFORM SIGN A..CUNEIFORM SIGN U U
+ {0x12400, 0x1246E, prAL, gcNl}, // [111] CUNEIFORM NUMERIC SIGN TWO ASH..CUNEIFORM NUMERIC SIGN NINE U VARIANT FORM
+ {0x12470, 0x12474, prBA, gcPo}, // [5] CUNEIFORM PUNCTUATION SIGN OLD ASSYRIAN WORD DIVIDER..CUNEIFORM PUNCTUATION SIGN DIAGONAL QUADCOLON
+ {0x12480, 0x12543, prAL, gcLo}, // [196] CUNEIFORM SIGN AB TIMES NUN TENU..CUNEIFORM SIGN ZU5 TIMES THREE DISH TENU
+ {0x12F90, 0x12FF0, prAL, gcLo}, // [97] CYPRO-MINOAN SIGN CM001..CYPRO-MINOAN SIGN CM114
+ {0x12FF1, 0x12FF2, prAL, gcPo}, // [2] CYPRO-MINOAN SIGN CM301..CYPRO-MINOAN SIGN CM302
+ {0x13000, 0x13257, prAL, gcLo}, // [600] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH O006
+ {0x13258, 0x1325A, prOP, gcLo}, // [3] EGYPTIAN HIEROGLYPH O006A..EGYPTIAN HIEROGLYPH O006C
+ {0x1325B, 0x1325D, prCL, gcLo}, // [3] EGYPTIAN HIEROGLYPH O006D..EGYPTIAN HIEROGLYPH O006F
+ {0x1325E, 0x13281, prAL, gcLo}, // [36] EGYPTIAN HIEROGLYPH O007..EGYPTIAN HIEROGLYPH O033
+ {0x13282, 0x13282, prCL, gcLo}, // EGYPTIAN HIEROGLYPH O033A
+ {0x13283, 0x13285, prAL, gcLo}, // [3] EGYPTIAN HIEROGLYPH O034..EGYPTIAN HIEROGLYPH O036
+ {0x13286, 0x13286, prOP, gcLo}, // EGYPTIAN HIEROGLYPH O036A
+ {0x13287, 0x13287, prCL, gcLo}, // EGYPTIAN HIEROGLYPH O036B
+ {0x13288, 0x13288, prOP, gcLo}, // EGYPTIAN HIEROGLYPH O036C
+ {0x13289, 0x13289, prCL, gcLo}, // EGYPTIAN HIEROGLYPH O036D
+ {0x1328A, 0x13378, prAL, gcLo}, // [239] EGYPTIAN HIEROGLYPH O037..EGYPTIAN HIEROGLYPH V011
+ {0x13379, 0x13379, prOP, gcLo}, // EGYPTIAN HIEROGLYPH V011A
+ {0x1337A, 0x1337B, prCL, gcLo}, // [2] EGYPTIAN HIEROGLYPH V011B..EGYPTIAN HIEROGLYPH V011C
+ {0x1337C, 0x1342F, prAL, gcLo}, // [180] EGYPTIAN HIEROGLYPH V012..EGYPTIAN HIEROGLYPH V011D
+ {0x13430, 0x13436, prGL, gcCf}, // [7] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH OVERLAY MIDDLE
+ {0x13437, 0x13437, prOP, gcCf}, // EGYPTIAN HIEROGLYPH BEGIN SEGMENT
+ {0x13438, 0x13438, prCL, gcCf}, // EGYPTIAN HIEROGLYPH END SEGMENT
+ {0x13439, 0x1343B, prGL, gcCf}, // [3] EGYPTIAN HIEROGLYPH INSERT AT MIDDLE..EGYPTIAN HIEROGLYPH INSERT AT BOTTOM
+ {0x1343C, 0x1343C, prOP, gcCf}, // EGYPTIAN HIEROGLYPH BEGIN ENCLOSURE
+ {0x1343D, 0x1343D, prCL, gcCf}, // EGYPTIAN HIEROGLYPH END ENCLOSURE
+ {0x1343E, 0x1343E, prOP, gcCf}, // EGYPTIAN HIEROGLYPH BEGIN WALLED ENCLOSURE
+ {0x1343F, 0x1343F, prCL, gcCf}, // EGYPTIAN HIEROGLYPH END WALLED ENCLOSURE
+ {0x13440, 0x13440, prCM, gcMn}, // EGYPTIAN HIEROGLYPH MIRROR HORIZONTALLY
+ {0x13441, 0x13446, prAL, gcLo}, // [6] EGYPTIAN HIEROGLYPH FULL BLANK..EGYPTIAN HIEROGLYPH WIDE LOST SIGN
+ {0x13447, 0x13455, prCM, gcMn}, // [15] EGYPTIAN HIEROGLYPH MODIFIER DAMAGED AT TOP START..EGYPTIAN HIEROGLYPH MODIFIER DAMAGED
+ {0x14400, 0x145CD, prAL, gcLo}, // [462] ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A409
+ {0x145CE, 0x145CE, prOP, gcLo}, // ANATOLIAN HIEROGLYPH A410 BEGIN LOGOGRAM MARK
+ {0x145CF, 0x145CF, prCL, gcLo}, // ANATOLIAN HIEROGLYPH A410A END LOGOGRAM MARK
+ {0x145D0, 0x14646, prAL, gcLo}, // [119] ANATOLIAN HIEROGLYPH A411..ANATOLIAN HIEROGLYPH A530
+ {0x16800, 0x16A38, prAL, gcLo}, // [569] BAMUM LETTER PHASE-A NGKUE MFON..BAMUM LETTER PHASE-F VUEQ
+ {0x16A40, 0x16A5E, prAL, gcLo}, // [31] MRO LETTER TA..MRO LETTER TEK
+ {0x16A60, 0x16A69, prNU, gcNd}, // [10] MRO DIGIT ZERO..MRO DIGIT NINE
+ {0x16A6E, 0x16A6F, prBA, gcPo}, // [2] MRO DANDA..MRO DOUBLE DANDA
+ {0x16A70, 0x16ABE, prAL, gcLo}, // [79] TANGSA LETTER OZ..TANGSA LETTER ZA
+ {0x16AC0, 0x16AC9, prNU, gcNd}, // [10] TANGSA DIGIT ZERO..TANGSA DIGIT NINE
+ {0x16AD0, 0x16AED, prAL, gcLo}, // [30] BASSA VAH LETTER ENNI..BASSA VAH LETTER I
+ {0x16AF0, 0x16AF4, prCM, gcMn}, // [5] BASSA VAH COMBINING HIGH TONE..BASSA VAH COMBINING HIGH-LOW TONE
+ {0x16AF5, 0x16AF5, prBA, gcPo}, // BASSA VAH FULL STOP
+ {0x16B00, 0x16B2F, prAL, gcLo}, // [48] PAHAWH HMONG VOWEL KEEB..PAHAWH HMONG CONSONANT CAU
+ {0x16B30, 0x16B36, prCM, gcMn}, // [7] PAHAWH HMONG MARK CIM TUB..PAHAWH HMONG MARK CIM TAUM
+ {0x16B37, 0x16B39, prBA, gcPo}, // [3] PAHAWH HMONG SIGN VOS THOM..PAHAWH HMONG SIGN CIM CHEEM
+ {0x16B3A, 0x16B3B, prAL, gcPo}, // [2] PAHAWH HMONG SIGN VOS THIAB..PAHAWH HMONG SIGN VOS FEEM
+ {0x16B3C, 0x16B3F, prAL, gcSo}, // [4] PAHAWH HMONG SIGN XYEEM NTXIV..PAHAWH HMONG SIGN XYEEM FAIB
+ {0x16B40, 0x16B43, prAL, gcLm}, // [4] PAHAWH HMONG SIGN VOS SEEV..PAHAWH HMONG SIGN IB YAM
+ {0x16B44, 0x16B44, prBA, gcPo}, // PAHAWH HMONG SIGN XAUS
+ {0x16B45, 0x16B45, prAL, gcSo}, // PAHAWH HMONG SIGN CIM TSOV ROG
+ {0x16B50, 0x16B59, prNU, gcNd}, // [10] PAHAWH HMONG DIGIT ZERO..PAHAWH HMONG DIGIT NINE
+ {0x16B5B, 0x16B61, prAL, gcNo}, // [7] PAHAWH HMONG NUMBER TENS..PAHAWH HMONG NUMBER TRILLIONS
+ {0x16B63, 0x16B77, prAL, gcLo}, // [21] PAHAWH HMONG SIGN VOS LUB..PAHAWH HMONG SIGN CIM NRES TOS
+ {0x16B7D, 0x16B8F, prAL, gcLo}, // [19] PAHAWH HMONG CLAN SIGN TSHEEJ..PAHAWH HMONG CLAN SIGN VWJ
+ {0x16E40, 0x16E7F, prAL, gcLC}, // [64] MEDEFAIDRIN CAPITAL LETTER M..MEDEFAIDRIN SMALL LETTER Y
+ {0x16E80, 0x16E96, prAL, gcNo}, // [23] MEDEFAIDRIN DIGIT ZERO..MEDEFAIDRIN DIGIT THREE ALTERNATE FORM
+ {0x16E97, 0x16E98, prBA, gcPo}, // [2] MEDEFAIDRIN COMMA..MEDEFAIDRIN FULL STOP
+ {0x16E99, 0x16E9A, prAL, gcPo}, // [2] MEDEFAIDRIN SYMBOL AIVA..MEDEFAIDRIN EXCLAMATION OH
+ {0x16F00, 0x16F4A, prAL, gcLo}, // [75] MIAO LETTER PA..MIAO LETTER RTE
+ {0x16F4F, 0x16F4F, prCM, gcMn}, // MIAO SIGN CONSONANT MODIFIER BAR
+ {0x16F50, 0x16F50, prAL, gcLo}, // MIAO LETTER NASALIZATION
+ {0x16F51, 0x16F87, prCM, gcMc}, // [55] MIAO SIGN ASPIRATION..MIAO VOWEL SIGN UI
+ {0x16F8F, 0x16F92, prCM, gcMn}, // [4] MIAO TONE RIGHT..MIAO TONE BELOW
+ {0x16F93, 0x16F9F, prAL, gcLm}, // [13] MIAO LETTER TONE-2..MIAO LETTER REFORMED TONE-8
+ {0x16FE0, 0x16FE1, prNS, gcLm}, // [2] TANGUT ITERATION MARK..NUSHU ITERATION MARK
+ {0x16FE2, 0x16FE2, prNS, gcPo}, // OLD CHINESE HOOK MARK
+ {0x16FE3, 0x16FE3, prNS, gcLm}, // OLD CHINESE ITERATION MARK
+ {0x16FE4, 0x16FE4, prGL, gcMn}, // KHITAN SMALL SCRIPT FILLER
+ {0x16FF0, 0x16FF1, prCM, gcMc}, // [2] VIETNAMESE ALTERNATE READING MARK CA..VIETNAMESE ALTERNATE READING MARK NHAY
+ {0x17000, 0x187F7, prID, gcLo}, // [6136] TANGUT IDEOGRAPH-17000..TANGUT IDEOGRAPH-187F7
+ {0x18800, 0x18AFF, prID, gcLo}, // [768] TANGUT COMPONENT-001..TANGUT COMPONENT-768
+ {0x18B00, 0x18CD5, prAL, gcLo}, // [470] KHITAN SMALL SCRIPT CHARACTER-18B00..KHITAN SMALL SCRIPT CHARACTER-18CD5
+ {0x18D00, 0x18D08, prID, gcLo}, // [9] TANGUT IDEOGRAPH-18D00..TANGUT IDEOGRAPH-18D08
+ {0x1AFF0, 0x1AFF3, prAL, gcLm}, // [4] KATAKANA LETTER MINNAN TONE-2..KATAKANA LETTER MINNAN TONE-5
+ {0x1AFF5, 0x1AFFB, prAL, gcLm}, // [7] KATAKANA LETTER MINNAN TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-5
+ {0x1AFFD, 0x1AFFE, prAL, gcLm}, // [2] KATAKANA LETTER MINNAN NASALIZED TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-8
+ {0x1B000, 0x1B0FF, prID, gcLo}, // [256] KATAKANA LETTER ARCHAIC E..HENTAIGANA LETTER RE-2
+ {0x1B100, 0x1B122, prID, gcLo}, // [35] HENTAIGANA LETTER RE-3..KATAKANA LETTER ARCHAIC WU
+ {0x1B132, 0x1B132, prCJ, gcLo}, // HIRAGANA LETTER SMALL KO
+ {0x1B150, 0x1B152, prCJ, gcLo}, // [3] HIRAGANA LETTER SMALL WI..HIRAGANA LETTER SMALL WO
+ {0x1B155, 0x1B155, prCJ, gcLo}, // KATAKANA LETTER SMALL KO
+ {0x1B164, 0x1B167, prCJ, gcLo}, // [4] KATAKANA LETTER SMALL WI..KATAKANA LETTER SMALL N
+ {0x1B170, 0x1B2FB, prID, gcLo}, // [396] NUSHU CHARACTER-1B170..NUSHU CHARACTER-1B2FB
+ {0x1BC00, 0x1BC6A, prAL, gcLo}, // [107] DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M
+ {0x1BC70, 0x1BC7C, prAL, gcLo}, // [13] DUPLOYAN AFFIX LEFT HORIZONTAL SECANT..DUPLOYAN AFFIX ATTACHED TANGENT HOOK
+ {0x1BC80, 0x1BC88, prAL, gcLo}, // [9] DUPLOYAN AFFIX HIGH ACUTE..DUPLOYAN AFFIX HIGH VERTICAL
+ {0x1BC90, 0x1BC99, prAL, gcLo}, // [10] DUPLOYAN AFFIX LOW ACUTE..DUPLOYAN AFFIX LOW ARROW
+ {0x1BC9C, 0x1BC9C, prAL, gcSo}, // DUPLOYAN SIGN O WITH CROSS
+ {0x1BC9D, 0x1BC9E, prCM, gcMn}, // [2] DUPLOYAN THICK LETTER SELECTOR..DUPLOYAN DOUBLE MARK
+ {0x1BC9F, 0x1BC9F, prBA, gcPo}, // DUPLOYAN PUNCTUATION CHINOOK FULL STOP
+ {0x1BCA0, 0x1BCA3, prCM, gcCf}, // [4] SHORTHAND FORMAT LETTER OVERLAP..SHORTHAND FORMAT UP STEP
+ {0x1CF00, 0x1CF2D, prCM, gcMn}, // [46] ZNAMENNY COMBINING MARK GORAZDO NIZKO S KRYZHEM ON LEFT..ZNAMENNY COMBINING MARK KRYZH ON LEFT
+ {0x1CF30, 0x1CF46, prCM, gcMn}, // [23] ZNAMENNY COMBINING TONAL RANGE MARK MRACHNO..ZNAMENNY PRIZNAK MODIFIER ROG
+ {0x1CF50, 0x1CFC3, prAL, gcSo}, // [116] ZNAMENNY NEUME KRYUK..ZNAMENNY NEUME PAUK
+ {0x1D000, 0x1D0F5, prAL, gcSo}, // [246] BYZANTINE MUSICAL SYMBOL PSILI..BYZANTINE MUSICAL SYMBOL GORGON NEO KATO
+ {0x1D100, 0x1D126, prAL, gcSo}, // [39] MUSICAL SYMBOL SINGLE BARLINE..MUSICAL SYMBOL DRUM CLEF-2
+ {0x1D129, 0x1D164, prAL, gcSo}, // [60] MUSICAL SYMBOL MULTIPLE MEASURE REST..MUSICAL SYMBOL ONE HUNDRED TWENTY-EIGHTH NOTE
+ {0x1D165, 0x1D166, prCM, gcMc}, // [2] MUSICAL SYMBOL COMBINING STEM..MUSICAL SYMBOL COMBINING SPRECHGESANG STEM
+ {0x1D167, 0x1D169, prCM, gcMn}, // [3] MUSICAL SYMBOL COMBINING TREMOLO-1..MUSICAL SYMBOL COMBINING TREMOLO-3
+ {0x1D16A, 0x1D16C, prAL, gcSo}, // [3] MUSICAL SYMBOL FINGERED TREMOLO-1..MUSICAL SYMBOL FINGERED TREMOLO-3
+ {0x1D16D, 0x1D172, prCM, gcMc}, // [6] MUSICAL SYMBOL COMBINING AUGMENTATION DOT..MUSICAL SYMBOL COMBINING FLAG-5
+ {0x1D173, 0x1D17A, prCM, gcCf}, // [8] MUSICAL SYMBOL BEGIN BEAM..MUSICAL SYMBOL END PHRASE
+ {0x1D17B, 0x1D182, prCM, gcMn}, // [8] MUSICAL SYMBOL COMBINING ACCENT..MUSICAL SYMBOL COMBINING LOURE
+ {0x1D183, 0x1D184, prAL, gcSo}, // [2] MUSICAL SYMBOL ARPEGGIATO UP..MUSICAL SYMBOL ARPEGGIATO DOWN
+ {0x1D185, 0x1D18B, prCM, gcMn}, // [7] MUSICAL SYMBOL COMBINING DOIT..MUSICAL SYMBOL COMBINING TRIPLE TONGUE
+ {0x1D18C, 0x1D1A9, prAL, gcSo}, // [30] MUSICAL SYMBOL RINFORZANDO..MUSICAL SYMBOL DEGREE SLASH
+ {0x1D1AA, 0x1D1AD, prCM, gcMn}, // [4] MUSICAL SYMBOL COMBINING DOWN BOW..MUSICAL SYMBOL COMBINING SNAP PIZZICATO
+ {0x1D1AE, 0x1D1EA, prAL, gcSo}, // [61] MUSICAL SYMBOL PEDAL MARK..MUSICAL SYMBOL KORON
+ {0x1D200, 0x1D241, prAL, gcSo}, // [66] GREEK VOCAL NOTATION SYMBOL-1..GREEK INSTRUMENTAL NOTATION SYMBOL-54
+ {0x1D242, 0x1D244, prCM, gcMn}, // [3] COMBINING GREEK MUSICAL TRISEME..COMBINING GREEK MUSICAL PENTASEME
+ {0x1D245, 0x1D245, prAL, gcSo}, // GREEK MUSICAL LEIMMA
+ {0x1D2C0, 0x1D2D3, prAL, gcNo}, // [20] KAKTOVIK NUMERAL ZERO..KAKTOVIK NUMERAL NINETEEN
+ {0x1D2E0, 0x1D2F3, prAL, gcNo}, // [20] MAYAN NUMERAL ZERO..MAYAN NUMERAL NINETEEN
+ {0x1D300, 0x1D356, prAL, gcSo}, // [87] MONOGRAM FOR EARTH..TETRAGRAM FOR FOSTERING
+ {0x1D360, 0x1D378, prAL, gcNo}, // [25] COUNTING ROD UNIT DIGIT ONE..TALLY MARK FIVE
+ {0x1D400, 0x1D454, prAL, gcLC}, // [85] MATHEMATICAL BOLD CAPITAL A..MATHEMATICAL ITALIC SMALL G
+ {0x1D456, 0x1D49C, prAL, gcLC}, // [71] MATHEMATICAL ITALIC SMALL I..MATHEMATICAL SCRIPT CAPITAL A
+ {0x1D49E, 0x1D49F, prAL, gcLu}, // [2] MATHEMATICAL SCRIPT CAPITAL C..MATHEMATICAL SCRIPT CAPITAL D
+ {0x1D4A2, 0x1D4A2, prAL, gcLu}, // MATHEMATICAL SCRIPT CAPITAL G
+ {0x1D4A5, 0x1D4A6, prAL, gcLu}, // [2] MATHEMATICAL SCRIPT CAPITAL J..MATHEMATICAL SCRIPT CAPITAL K
+ {0x1D4A9, 0x1D4AC, prAL, gcLu}, // [4] MATHEMATICAL SCRIPT CAPITAL N..MATHEMATICAL SCRIPT CAPITAL Q
+ {0x1D4AE, 0x1D4B9, prAL, gcLC}, // [12] MATHEMATICAL SCRIPT CAPITAL S..MATHEMATICAL SCRIPT SMALL D
+ {0x1D4BB, 0x1D4BB, prAL, gcLl}, // MATHEMATICAL SCRIPT SMALL F
+ {0x1D4BD, 0x1D4C3, prAL, gcLl}, // [7] MATHEMATICAL SCRIPT SMALL H..MATHEMATICAL SCRIPT SMALL N
+ {0x1D4C5, 0x1D505, prAL, gcLC}, // [65] MATHEMATICAL SCRIPT SMALL P..MATHEMATICAL FRAKTUR CAPITAL B
+ {0x1D507, 0x1D50A, prAL, gcLu}, // [4] MATHEMATICAL FRAKTUR CAPITAL D..MATHEMATICAL FRAKTUR CAPITAL G
+ {0x1D50D, 0x1D514, prAL, gcLu}, // [8] MATHEMATICAL FRAKTUR CAPITAL J..MATHEMATICAL FRAKTUR CAPITAL Q
+ {0x1D516, 0x1D51C, prAL, gcLu}, // [7] MATHEMATICAL FRAKTUR CAPITAL S..MATHEMATICAL FRAKTUR CAPITAL Y
+ {0x1D51E, 0x1D539, prAL, gcLC}, // [28] MATHEMATICAL FRAKTUR SMALL A..MATHEMATICAL DOUBLE-STRUCK CAPITAL B
+ {0x1D53B, 0x1D53E, prAL, gcLu}, // [4] MATHEMATICAL DOUBLE-STRUCK CAPITAL D..MATHEMATICAL DOUBLE-STRUCK CAPITAL G
+ {0x1D540, 0x1D544, prAL, gcLu}, // [5] MATHEMATICAL DOUBLE-STRUCK CAPITAL I..MATHEMATICAL DOUBLE-STRUCK CAPITAL M
+ {0x1D546, 0x1D546, prAL, gcLu}, // MATHEMATICAL DOUBLE-STRUCK CAPITAL O
+ {0x1D54A, 0x1D550, prAL, gcLu}, // [7] MATHEMATICAL DOUBLE-STRUCK CAPITAL S..MATHEMATICAL DOUBLE-STRUCK CAPITAL Y
+ {0x1D552, 0x1D6A5, prAL, gcLC}, // [340] MATHEMATICAL DOUBLE-STRUCK SMALL A..MATHEMATICAL ITALIC SMALL DOTLESS J
+ {0x1D6A8, 0x1D6C0, prAL, gcLu}, // [25] MATHEMATICAL BOLD CAPITAL ALPHA..MATHEMATICAL BOLD CAPITAL OMEGA
+ {0x1D6C1, 0x1D6C1, prAL, gcSm}, // MATHEMATICAL BOLD NABLA
+ {0x1D6C2, 0x1D6DA, prAL, gcLl}, // [25] MATHEMATICAL BOLD SMALL ALPHA..MATHEMATICAL BOLD SMALL OMEGA
+ {0x1D6DB, 0x1D6DB, prAL, gcSm}, // MATHEMATICAL BOLD PARTIAL DIFFERENTIAL
+ {0x1D6DC, 0x1D6FA, prAL, gcLC}, // [31] MATHEMATICAL BOLD EPSILON SYMBOL..MATHEMATICAL ITALIC CAPITAL OMEGA
+ {0x1D6FB, 0x1D6FB, prAL, gcSm}, // MATHEMATICAL ITALIC NABLA
+ {0x1D6FC, 0x1D714, prAL, gcLl}, // [25] MATHEMATICAL ITALIC SMALL ALPHA..MATHEMATICAL ITALIC SMALL OMEGA
+ {0x1D715, 0x1D715, prAL, gcSm}, // MATHEMATICAL ITALIC PARTIAL DIFFERENTIAL
+ {0x1D716, 0x1D734, prAL, gcLC}, // [31] MATHEMATICAL ITALIC EPSILON SYMBOL..MATHEMATICAL BOLD ITALIC CAPITAL OMEGA
+ {0x1D735, 0x1D735, prAL, gcSm}, // MATHEMATICAL BOLD ITALIC NABLA
+ {0x1D736, 0x1D74E, prAL, gcLl}, // [25] MATHEMATICAL BOLD ITALIC SMALL ALPHA..MATHEMATICAL BOLD ITALIC SMALL OMEGA
+ {0x1D74F, 0x1D74F, prAL, gcSm}, // MATHEMATICAL BOLD ITALIC PARTIAL DIFFERENTIAL
+ {0x1D750, 0x1D76E, prAL, gcLC}, // [31] MATHEMATICAL BOLD ITALIC EPSILON SYMBOL..MATHEMATICAL SANS-SERIF BOLD CAPITAL OMEGA
+ {0x1D76F, 0x1D76F, prAL, gcSm}, // MATHEMATICAL SANS-SERIF BOLD NABLA
+ {0x1D770, 0x1D788, prAL, gcLl}, // [25] MATHEMATICAL SANS-SERIF BOLD SMALL ALPHA..MATHEMATICAL SANS-SERIF BOLD SMALL OMEGA
+ {0x1D789, 0x1D789, prAL, gcSm}, // MATHEMATICAL SANS-SERIF BOLD PARTIAL DIFFERENTIAL
+ {0x1D78A, 0x1D7A8, prAL, gcLC}, // [31] MATHEMATICAL SANS-SERIF BOLD EPSILON SYMBOL..MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL OMEGA
+ {0x1D7A9, 0x1D7A9, prAL, gcSm}, // MATHEMATICAL SANS-SERIF BOLD ITALIC NABLA
+ {0x1D7AA, 0x1D7C2, prAL, gcLl}, // [25] MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ALPHA..MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL OMEGA
+ {0x1D7C3, 0x1D7C3, prAL, gcSm}, // MATHEMATICAL SANS-SERIF BOLD ITALIC PARTIAL DIFFERENTIAL
+ {0x1D7C4, 0x1D7CB, prAL, gcLC}, // [8] MATHEMATICAL SANS-SERIF BOLD ITALIC EPSILON SYMBOL..MATHEMATICAL BOLD SMALL DIGAMMA
+ {0x1D7CE, 0x1D7FF, prNU, gcNd}, // [50] MATHEMATICAL BOLD DIGIT ZERO..MATHEMATICAL MONOSPACE DIGIT NINE
+ {0x1D800, 0x1D9FF, prAL, gcSo}, // [512] SIGNWRITING HAND-FIST INDEX..SIGNWRITING HEAD
+ {0x1DA00, 0x1DA36, prCM, gcMn}, // [55] SIGNWRITING HEAD RIM..SIGNWRITING AIR SUCKING IN
+ {0x1DA37, 0x1DA3A, prAL, gcSo}, // [4] SIGNWRITING AIR BLOW SMALL ROTATIONS..SIGNWRITING BREATH EXHALE
+ {0x1DA3B, 0x1DA6C, prCM, gcMn}, // [50] SIGNWRITING MOUTH CLOSED NEUTRAL..SIGNWRITING EXCITEMENT
+ {0x1DA6D, 0x1DA74, prAL, gcSo}, // [8] SIGNWRITING SHOULDER HIP SPINE..SIGNWRITING TORSO-FLOORPLANE TWISTING
+ {0x1DA75, 0x1DA75, prCM, gcMn}, // SIGNWRITING UPPER BODY TILTING FROM HIP JOINTS
+ {0x1DA76, 0x1DA83, prAL, gcSo}, // [14] SIGNWRITING LIMB COMBINATION..SIGNWRITING LOCATION DEPTH
+ {0x1DA84, 0x1DA84, prCM, gcMn}, // SIGNWRITING LOCATION HEAD NECK
+ {0x1DA85, 0x1DA86, prAL, gcSo}, // [2] SIGNWRITING LOCATION TORSO..SIGNWRITING LOCATION LIMBS DIGITS
+ {0x1DA87, 0x1DA8A, prBA, gcPo}, // [4] SIGNWRITING COMMA..SIGNWRITING COLON
+ {0x1DA8B, 0x1DA8B, prAL, gcPo}, // SIGNWRITING PARENTHESIS
+ {0x1DA9B, 0x1DA9F, prCM, gcMn}, // [5] SIGNWRITING FILL MODIFIER-2..SIGNWRITING FILL MODIFIER-6
+ {0x1DAA1, 0x1DAAF, prCM, gcMn}, // [15] SIGNWRITING ROTATION MODIFIER-2..SIGNWRITING ROTATION MODIFIER-16
+ {0x1DF00, 0x1DF09, prAL, gcLl}, // [10] LATIN SMALL LETTER FENG DIGRAPH WITH TRILL..LATIN SMALL LETTER T WITH HOOK AND RETROFLEX HOOK
+ {0x1DF0A, 0x1DF0A, prAL, gcLo}, // LATIN LETTER RETROFLEX CLICK WITH RETROFLEX HOOK
+ {0x1DF0B, 0x1DF1E, prAL, gcLl}, // [20] LATIN SMALL LETTER ESH WITH DOUBLE BAR..LATIN SMALL LETTER S WITH CURL
+ {0x1DF25, 0x1DF2A, prAL, gcLl}, // [6] LATIN SMALL LETTER D WITH MID-HEIGHT LEFT HOOK..LATIN SMALL LETTER T WITH MID-HEIGHT LEFT HOOK
+ {0x1E000, 0x1E006, prCM, gcMn}, // [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE
+ {0x1E008, 0x1E018, prCM, gcMn}, // [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU
+ {0x1E01B, 0x1E021, prCM, gcMn}, // [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI
+ {0x1E023, 0x1E024, prCM, gcMn}, // [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS
+ {0x1E026, 0x1E02A, prCM, gcMn}, // [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA
+ {0x1E030, 0x1E06D, prAL, gcLm}, // [62] MODIFIER LETTER CYRILLIC SMALL A..MODIFIER LETTER CYRILLIC SMALL STRAIGHT U WITH STROKE
+ {0x1E08F, 0x1E08F, prCM, gcMn}, // COMBINING CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
+ {0x1E100, 0x1E12C, prAL, gcLo}, // [45] NYIAKENG PUACHUE HMONG LETTER MA..NYIAKENG PUACHUE HMONG LETTER W
+ {0x1E130, 0x1E136, prCM, gcMn}, // [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D
+ {0x1E137, 0x1E13D, prAL, gcLm}, // [7] NYIAKENG PUACHUE HMONG SIGN FOR PERSON..NYIAKENG PUACHUE HMONG SYLLABLE LENGTHENER
+ {0x1E140, 0x1E149, prNU, gcNd}, // [10] NYIAKENG PUACHUE HMONG DIGIT ZERO..NYIAKENG PUACHUE HMONG DIGIT NINE
+ {0x1E14E, 0x1E14E, prAL, gcLo}, // NYIAKENG PUACHUE HMONG LOGOGRAM NYAJ
+ {0x1E14F, 0x1E14F, prAL, gcSo}, // NYIAKENG PUACHUE HMONG CIRCLED CA
+ {0x1E290, 0x1E2AD, prAL, gcLo}, // [30] TOTO LETTER PA..TOTO LETTER A
+ {0x1E2AE, 0x1E2AE, prCM, gcMn}, // TOTO SIGN RISING TONE
+ {0x1E2C0, 0x1E2EB, prAL, gcLo}, // [44] WANCHO LETTER AA..WANCHO LETTER YIH
+ {0x1E2EC, 0x1E2EF, prCM, gcMn}, // [4] WANCHO TONE TUP..WANCHO TONE KOINI
+ {0x1E2F0, 0x1E2F9, prNU, gcNd}, // [10] WANCHO DIGIT ZERO..WANCHO DIGIT NINE
+ {0x1E2FF, 0x1E2FF, prPR, gcSc}, // WANCHO NGUN SIGN
+ {0x1E4D0, 0x1E4EA, prAL, gcLo}, // [27] NAG MUNDARI LETTER O..NAG MUNDARI LETTER ELL
+ {0x1E4EB, 0x1E4EB, prAL, gcLm}, // NAG MUNDARI SIGN OJOD
+ {0x1E4EC, 0x1E4EF, prCM, gcMn}, // [4] NAG MUNDARI SIGN MUHOR..NAG MUNDARI SIGN SUTUH
+ {0x1E4F0, 0x1E4F9, prNU, gcNd}, // [10] NAG MUNDARI DIGIT ZERO..NAG MUNDARI DIGIT NINE
+ {0x1E7E0, 0x1E7E6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE HHYA..ETHIOPIC SYLLABLE HHYO
+ {0x1E7E8, 0x1E7EB, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE GURAGE HHWA..ETHIOPIC SYLLABLE HHWE
+ {0x1E7ED, 0x1E7EE, prAL, gcLo}, // [2] ETHIOPIC SYLLABLE GURAGE MWI..ETHIOPIC SYLLABLE GURAGE MWEE
+ {0x1E7F0, 0x1E7FE, prAL, gcLo}, // [15] ETHIOPIC SYLLABLE GURAGE QWI..ETHIOPIC SYLLABLE GURAGE PWEE
+ {0x1E800, 0x1E8C4, prAL, gcLo}, // [197] MENDE KIKAKUI SYLLABLE M001 KI..MENDE KIKAKUI SYLLABLE M060 NYON
+ {0x1E8C7, 0x1E8CF, prAL, gcNo}, // [9] MENDE KIKAKUI DIGIT ONE..MENDE KIKAKUI DIGIT NINE
+ {0x1E8D0, 0x1E8D6, prCM, gcMn}, // [7] MENDE KIKAKUI COMBINING NUMBER TEENS..MENDE KIKAKUI COMBINING NUMBER MILLIONS
+ {0x1E900, 0x1E943, prAL, gcLC}, // [68] ADLAM CAPITAL LETTER ALIF..ADLAM SMALL LETTER SHA
+ {0x1E944, 0x1E94A, prCM, gcMn}, // [7] ADLAM ALIF LENGTHENER..ADLAM NUKTA
+ {0x1E94B, 0x1E94B, prAL, gcLm}, // ADLAM NASALIZATION MARK
+ {0x1E950, 0x1E959, prNU, gcNd}, // [10] ADLAM DIGIT ZERO..ADLAM DIGIT NINE
+ {0x1E95E, 0x1E95F, prOP, gcPo}, // [2] ADLAM INITIAL EXCLAMATION MARK..ADLAM INITIAL QUESTION MARK
+ {0x1EC71, 0x1ECAB, prAL, gcNo}, // [59] INDIC SIYAQ NUMBER ONE..INDIC SIYAQ NUMBER PREFIXED NINE
+ {0x1ECAC, 0x1ECAC, prPO, gcSo}, // INDIC SIYAQ PLACEHOLDER
+ {0x1ECAD, 0x1ECAF, prAL, gcNo}, // [3] INDIC SIYAQ FRACTION ONE QUARTER..INDIC SIYAQ FRACTION THREE QUARTERS
+ {0x1ECB0, 0x1ECB0, prPO, gcSc}, // INDIC SIYAQ RUPEE MARK
+ {0x1ECB1, 0x1ECB4, prAL, gcNo}, // [4] INDIC SIYAQ NUMBER ALTERNATE ONE..INDIC SIYAQ ALTERNATE LAKH MARK
+ {0x1ED01, 0x1ED2D, prAL, gcNo}, // [45] OTTOMAN SIYAQ NUMBER ONE..OTTOMAN SIYAQ NUMBER NINETY THOUSAND
+ {0x1ED2E, 0x1ED2E, prAL, gcSo}, // OTTOMAN SIYAQ MARRATAN
+ {0x1ED2F, 0x1ED3D, prAL, gcNo}, // [15] OTTOMAN SIYAQ ALTERNATE NUMBER TWO..OTTOMAN SIYAQ FRACTION ONE SIXTH
+ {0x1EE00, 0x1EE03, prAL, gcLo}, // [4] ARABIC MATHEMATICAL ALEF..ARABIC MATHEMATICAL DAL
+ {0x1EE05, 0x1EE1F, prAL, gcLo}, // [27] ARABIC MATHEMATICAL WAW..ARABIC MATHEMATICAL DOTLESS QAF
+ {0x1EE21, 0x1EE22, prAL, gcLo}, // [2] ARABIC MATHEMATICAL INITIAL BEH..ARABIC MATHEMATICAL INITIAL JEEM
+ {0x1EE24, 0x1EE24, prAL, gcLo}, // ARABIC MATHEMATICAL INITIAL HEH
+ {0x1EE27, 0x1EE27, prAL, gcLo}, // ARABIC MATHEMATICAL INITIAL HAH
+ {0x1EE29, 0x1EE32, prAL, gcLo}, // [10] ARABIC MATHEMATICAL INITIAL YEH..ARABIC MATHEMATICAL INITIAL QAF
+ {0x1EE34, 0x1EE37, prAL, gcLo}, // [4] ARABIC MATHEMATICAL INITIAL SHEEN..ARABIC MATHEMATICAL INITIAL KHAH
+ {0x1EE39, 0x1EE39, prAL, gcLo}, // ARABIC MATHEMATICAL INITIAL DAD
+ {0x1EE3B, 0x1EE3B, prAL, gcLo}, // ARABIC MATHEMATICAL INITIAL GHAIN
+ {0x1EE42, 0x1EE42, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED JEEM
+ {0x1EE47, 0x1EE47, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED HAH
+ {0x1EE49, 0x1EE49, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED YEH
+ {0x1EE4B, 0x1EE4B, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED LAM
+ {0x1EE4D, 0x1EE4F, prAL, gcLo}, // [3] ARABIC MATHEMATICAL TAILED NOON..ARABIC MATHEMATICAL TAILED AIN
+ {0x1EE51, 0x1EE52, prAL, gcLo}, // [2] ARABIC MATHEMATICAL TAILED SAD..ARABIC MATHEMATICAL TAILED QAF
+ {0x1EE54, 0x1EE54, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED SHEEN
+ {0x1EE57, 0x1EE57, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED KHAH
+ {0x1EE59, 0x1EE59, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED DAD
+ {0x1EE5B, 0x1EE5B, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED GHAIN
+ {0x1EE5D, 0x1EE5D, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED DOTLESS NOON
+ {0x1EE5F, 0x1EE5F, prAL, gcLo}, // ARABIC MATHEMATICAL TAILED DOTLESS QAF
+ {0x1EE61, 0x1EE62, prAL, gcLo}, // [2] ARABIC MATHEMATICAL STRETCHED BEH..ARABIC MATHEMATICAL STRETCHED JEEM
+ {0x1EE64, 0x1EE64, prAL, gcLo}, // ARABIC MATHEMATICAL STRETCHED HEH
+ {0x1EE67, 0x1EE6A, prAL, gcLo}, // [4] ARABIC MATHEMATICAL STRETCHED HAH..ARABIC MATHEMATICAL STRETCHED KAF
+ {0x1EE6C, 0x1EE72, prAL, gcLo}, // [7] ARABIC MATHEMATICAL STRETCHED MEEM..ARABIC MATHEMATICAL STRETCHED QAF
+ {0x1EE74, 0x1EE77, prAL, gcLo}, // [4] ARABIC MATHEMATICAL STRETCHED SHEEN..ARABIC MATHEMATICAL STRETCHED KHAH
+ {0x1EE79, 0x1EE7C, prAL, gcLo}, // [4] ARABIC MATHEMATICAL STRETCHED DAD..ARABIC MATHEMATICAL STRETCHED DOTLESS BEH
+ {0x1EE7E, 0x1EE7E, prAL, gcLo}, // ARABIC MATHEMATICAL STRETCHED DOTLESS FEH
+ {0x1EE80, 0x1EE89, prAL, gcLo}, // [10] ARABIC MATHEMATICAL LOOPED ALEF..ARABIC MATHEMATICAL LOOPED YEH
+ {0x1EE8B, 0x1EE9B, prAL, gcLo}, // [17] ARABIC MATHEMATICAL LOOPED LAM..ARABIC MATHEMATICAL LOOPED GHAIN
+ {0x1EEA1, 0x1EEA3, prAL, gcLo}, // [3] ARABIC MATHEMATICAL DOUBLE-STRUCK BEH..ARABIC MATHEMATICAL DOUBLE-STRUCK DAL
+ {0x1EEA5, 0x1EEA9, prAL, gcLo}, // [5] ARABIC MATHEMATICAL DOUBLE-STRUCK WAW..ARABIC MATHEMATICAL DOUBLE-STRUCK YEH
+ {0x1EEAB, 0x1EEBB, prAL, gcLo}, // [17] ARABIC MATHEMATICAL DOUBLE-STRUCK LAM..ARABIC MATHEMATICAL DOUBLE-STRUCK GHAIN
+ {0x1EEF0, 0x1EEF1, prAL, gcSm}, // [2] ARABIC MATHEMATICAL OPERATOR MEEM WITH HAH WITH TATWEEL..ARABIC MATHEMATICAL OPERATOR HAH WITH DAL
+ {0x1F000, 0x1F02B, prID, gcSo}, // [44] MAHJONG TILE EAST WIND..MAHJONG TILE BACK
+ {0x1F02C, 0x1F02F, prID, gcCn}, // [4] ..
+ {0x1F030, 0x1F093, prID, gcSo}, // [100] DOMINO TILE HORIZONTAL BACK..DOMINO TILE VERTICAL-06-06
+ {0x1F094, 0x1F09F, prID, gcCn}, // [12] ..
+ {0x1F0A0, 0x1F0AE, prID, gcSo}, // [15] PLAYING CARD BACK..PLAYING CARD KING OF SPADES
+ {0x1F0AF, 0x1F0B0, prID, gcCn}, // [2] ..
+ {0x1F0B1, 0x1F0BF, prID, gcSo}, // [15] PLAYING CARD ACE OF HEARTS..PLAYING CARD RED JOKER
+ {0x1F0C0, 0x1F0C0, prID, gcCn}, //
+ {0x1F0C1, 0x1F0CF, prID, gcSo}, // [15] PLAYING CARD ACE OF DIAMONDS..PLAYING CARD BLACK JOKER
+ {0x1F0D0, 0x1F0D0, prID, gcCn}, //
+ {0x1F0D1, 0x1F0F5, prID, gcSo}, // [37] PLAYING CARD ACE OF CLUBS..PLAYING CARD TRUMP-21
+ {0x1F0F6, 0x1F0FF, prID, gcCn}, // [10] ..
+ {0x1F100, 0x1F10C, prAI, gcNo}, // [13] DIGIT ZERO FULL STOP..DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT ZERO
+ {0x1F10D, 0x1F10F, prID, gcSo}, // [3] CIRCLED ZERO WITH SLASH..CIRCLED DOLLAR SIGN WITH OVERLAID BACKSLASH
+ {0x1F110, 0x1F12D, prAI, gcSo}, // [30] PARENTHESIZED LATIN CAPITAL LETTER A..CIRCLED CD
+ {0x1F12E, 0x1F12F, prAL, gcSo}, // [2] CIRCLED WZ..COPYLEFT SYMBOL
+ {0x1F130, 0x1F169, prAI, gcSo}, // [58] SQUARED LATIN CAPITAL LETTER A..NEGATIVE CIRCLED LATIN CAPITAL LETTER Z
+ {0x1F16A, 0x1F16C, prAL, gcSo}, // [3] RAISED MC SIGN..RAISED MR SIGN
+ {0x1F16D, 0x1F16F, prID, gcSo}, // [3] CIRCLED CC..CIRCLED HUMAN FIGURE
+ {0x1F170, 0x1F1AC, prAI, gcSo}, // [61] NEGATIVE SQUARED LATIN CAPITAL LETTER A..SQUARED VOD
+ {0x1F1AD, 0x1F1AD, prID, gcSo}, // MASK WORK SYMBOL
+ {0x1F1AE, 0x1F1E5, prID, gcCn}, // [56] ..
+ {0x1F1E6, 0x1F1FF, prRI, gcSo}, // [26] REGIONAL INDICATOR SYMBOL LETTER A..REGIONAL INDICATOR SYMBOL LETTER Z
+ {0x1F200, 0x1F202, prID, gcSo}, // [3] SQUARE HIRAGANA HOKA..SQUARED KATAKANA SA
+ {0x1F203, 0x1F20F, prID, gcCn}, // [13] ..
+ {0x1F210, 0x1F23B, prID, gcSo}, // [44] SQUARED CJK UNIFIED IDEOGRAPH-624B..SQUARED CJK UNIFIED IDEOGRAPH-914D
+ {0x1F23C, 0x1F23F, prID, gcCn}, // [4] ..
+ {0x1F240, 0x1F248, prID, gcSo}, // [9] TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-672C..TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-6557
+ {0x1F249, 0x1F24F, prID, gcCn}, // [7] ..
+ {0x1F250, 0x1F251, prID, gcSo}, // [2] CIRCLED IDEOGRAPH ADVANTAGE..CIRCLED IDEOGRAPH ACCEPT
+ {0x1F252, 0x1F25F, prID, gcCn}, // [14] ..
+ {0x1F260, 0x1F265, prID, gcSo}, // [6] ROUNDED SYMBOL FOR FU..ROUNDED SYMBOL FOR CAI
+ {0x1F266, 0x1F2FF, prID, gcCn}, // [154] ..
+ {0x1F300, 0x1F384, prID, gcSo}, // [133] CYCLONE..CHRISTMAS TREE
+ {0x1F385, 0x1F385, prEB, gcSo}, // FATHER CHRISTMAS
+ {0x1F386, 0x1F39B, prID, gcSo}, // [22] FIREWORKS..CONTROL KNOBS
+ {0x1F39C, 0x1F39D, prAL, gcSo}, // [2] BEAMED ASCENDING MUSICAL NOTES..BEAMED DESCENDING MUSICAL NOTES
+ {0x1F39E, 0x1F3B4, prID, gcSo}, // [23] FILM FRAMES..FLOWER PLAYING CARDS
+ {0x1F3B5, 0x1F3B6, prAL, gcSo}, // [2] MUSICAL NOTE..MULTIPLE MUSICAL NOTES
+ {0x1F3B7, 0x1F3BB, prID, gcSo}, // [5] SAXOPHONE..VIOLIN
+ {0x1F3BC, 0x1F3BC, prAL, gcSo}, // MUSICAL SCORE
+ {0x1F3BD, 0x1F3C1, prID, gcSo}, // [5] RUNNING SHIRT WITH SASH..CHEQUERED FLAG
+ {0x1F3C2, 0x1F3C4, prEB, gcSo}, // [3] SNOWBOARDER..SURFER
+ {0x1F3C5, 0x1F3C6, prID, gcSo}, // [2] SPORTS MEDAL..TROPHY
+ {0x1F3C7, 0x1F3C7, prEB, gcSo}, // HORSE RACING
+ {0x1F3C8, 0x1F3C9, prID, gcSo}, // [2] AMERICAN FOOTBALL..RUGBY FOOTBALL
+ {0x1F3CA, 0x1F3CC, prEB, gcSo}, // [3] SWIMMER..GOLFER
+ {0x1F3CD, 0x1F3FA, prID, gcSo}, // [46] RACING MOTORCYCLE..AMPHORA
+ {0x1F3FB, 0x1F3FF, prEM, gcSk}, // [5] EMOJI MODIFIER FITZPATRICK TYPE-1-2..EMOJI MODIFIER FITZPATRICK TYPE-6
+ {0x1F400, 0x1F441, prID, gcSo}, // [66] RAT..EYE
+ {0x1F442, 0x1F443, prEB, gcSo}, // [2] EAR..NOSE
+ {0x1F444, 0x1F445, prID, gcSo}, // [2] MOUTH..TONGUE
+ {0x1F446, 0x1F450, prEB, gcSo}, // [11] WHITE UP POINTING BACKHAND INDEX..OPEN HANDS SIGN
+ {0x1F451, 0x1F465, prID, gcSo}, // [21] CROWN..BUSTS IN SILHOUETTE
+ {0x1F466, 0x1F478, prEB, gcSo}, // [19] BOY..PRINCESS
+ {0x1F479, 0x1F47B, prID, gcSo}, // [3] JAPANESE OGRE..GHOST
+ {0x1F47C, 0x1F47C, prEB, gcSo}, // BABY ANGEL
+ {0x1F47D, 0x1F480, prID, gcSo}, // [4] EXTRATERRESTRIAL ALIEN..SKULL
+ {0x1F481, 0x1F483, prEB, gcSo}, // [3] INFORMATION DESK PERSON..DANCER
+ {0x1F484, 0x1F484, prID, gcSo}, // LIPSTICK
+ {0x1F485, 0x1F487, prEB, gcSo}, // [3] NAIL POLISH..HAIRCUT
+ {0x1F488, 0x1F48E, prID, gcSo}, // [7] BARBER POLE..GEM STONE
+ {0x1F48F, 0x1F48F, prEB, gcSo}, // KISS
+ {0x1F490, 0x1F490, prID, gcSo}, // BOUQUET
+ {0x1F491, 0x1F491, prEB, gcSo}, // COUPLE WITH HEART
+ {0x1F492, 0x1F49F, prID, gcSo}, // [14] WEDDING..HEART DECORATION
+ {0x1F4A0, 0x1F4A0, prAL, gcSo}, // DIAMOND SHAPE WITH A DOT INSIDE
+ {0x1F4A1, 0x1F4A1, prID, gcSo}, // ELECTRIC LIGHT BULB
+ {0x1F4A2, 0x1F4A2, prAL, gcSo}, // ANGER SYMBOL
+ {0x1F4A3, 0x1F4A3, prID, gcSo}, // BOMB
+ {0x1F4A4, 0x1F4A4, prAL, gcSo}, // SLEEPING SYMBOL
+ {0x1F4A5, 0x1F4A9, prID, gcSo}, // [5] COLLISION SYMBOL..PILE OF POO
+ {0x1F4AA, 0x1F4AA, prEB, gcSo}, // FLEXED BICEPS
+ {0x1F4AB, 0x1F4AE, prID, gcSo}, // [4] DIZZY SYMBOL..WHITE FLOWER
+ {0x1F4AF, 0x1F4AF, prAL, gcSo}, // HUNDRED POINTS SYMBOL
+ {0x1F4B0, 0x1F4B0, prID, gcSo}, // MONEY BAG
+ {0x1F4B1, 0x1F4B2, prAL, gcSo}, // [2] CURRENCY EXCHANGE..HEAVY DOLLAR SIGN
+ {0x1F4B3, 0x1F4FF, prID, gcSo}, // [77] CREDIT CARD..PRAYER BEADS
+ {0x1F500, 0x1F506, prAL, gcSo}, // [7] TWISTED RIGHTWARDS ARROWS..HIGH BRIGHTNESS SYMBOL
+ {0x1F507, 0x1F516, prID, gcSo}, // [16] SPEAKER WITH CANCELLATION STROKE..BOOKMARK
+ {0x1F517, 0x1F524, prAL, gcSo}, // [14] LINK SYMBOL..INPUT SYMBOL FOR LATIN LETTERS
+ {0x1F525, 0x1F531, prID, gcSo}, // [13] FIRE..TRIDENT EMBLEM
+ {0x1F532, 0x1F549, prAL, gcSo}, // [24] BLACK SQUARE BUTTON..OM SYMBOL
+ {0x1F54A, 0x1F573, prID, gcSo}, // [42] DOVE OF PEACE..HOLE
+ {0x1F574, 0x1F575, prEB, gcSo}, // [2] MAN IN BUSINESS SUIT LEVITATING..SLEUTH OR SPY
+ {0x1F576, 0x1F579, prID, gcSo}, // [4] DARK SUNGLASSES..JOYSTICK
+ {0x1F57A, 0x1F57A, prEB, gcSo}, // MAN DANCING
+ {0x1F57B, 0x1F58F, prID, gcSo}, // [21] LEFT HAND TELEPHONE RECEIVER..TURNED OK HAND SIGN
+ {0x1F590, 0x1F590, prEB, gcSo}, // RAISED HAND WITH FINGERS SPLAYED
+ {0x1F591, 0x1F594, prID, gcSo}, // [4] REVERSED RAISED HAND WITH FINGERS SPLAYED..REVERSED VICTORY HAND
+ {0x1F595, 0x1F596, prEB, gcSo}, // [2] REVERSED HAND WITH MIDDLE FINGER EXTENDED..RAISED HAND WITH PART BETWEEN MIDDLE AND RING FINGERS
+ {0x1F597, 0x1F5D3, prID, gcSo}, // [61] WHITE DOWN POINTING LEFT HAND INDEX..SPIRAL CALENDAR PAD
+ {0x1F5D4, 0x1F5DB, prAL, gcSo}, // [8] DESKTOP WINDOW..DECREASE FONT SIZE SYMBOL
+ {0x1F5DC, 0x1F5F3, prID, gcSo}, // [24] COMPRESSION..BALLOT BOX WITH BALLOT
+ {0x1F5F4, 0x1F5F9, prAL, gcSo}, // [6] BALLOT SCRIPT X..BALLOT BOX WITH BOLD CHECK
+ {0x1F5FA, 0x1F5FF, prID, gcSo}, // [6] WORLD MAP..MOYAI
+ {0x1F600, 0x1F644, prID, gcSo}, // [69] GRINNING FACE..FACE WITH ROLLING EYES
+ {0x1F645, 0x1F647, prEB, gcSo}, // [3] FACE WITH NO GOOD GESTURE..PERSON BOWING DEEPLY
+ {0x1F648, 0x1F64A, prID, gcSo}, // [3] SEE-NO-EVIL MONKEY..SPEAK-NO-EVIL MONKEY
+ {0x1F64B, 0x1F64F, prEB, gcSo}, // [5] HAPPY PERSON RAISING ONE HAND..PERSON WITH FOLDED HANDS
+ {0x1F650, 0x1F675, prAL, gcSo}, // [38] NORTH WEST POINTING LEAF..SWASH AMPERSAND ORNAMENT
+ {0x1F676, 0x1F678, prQU, gcSo}, // [3] SANS-SERIF HEAVY DOUBLE TURNED COMMA QUOTATION MARK ORNAMENT..SANS-SERIF HEAVY LOW DOUBLE COMMA QUOTATION MARK ORNAMENT
+ {0x1F679, 0x1F67B, prNS, gcSo}, // [3] HEAVY INTERROBANG ORNAMENT..HEAVY SANS-SERIF INTERROBANG ORNAMENT
+ {0x1F67C, 0x1F67F, prAL, gcSo}, // [4] VERY HEAVY SOLIDUS..REVERSE CHECKER BOARD
+ {0x1F680, 0x1F6A2, prID, gcSo}, // [35] ROCKET..SHIP
+ {0x1F6A3, 0x1F6A3, prEB, gcSo}, // ROWBOAT
+ {0x1F6A4, 0x1F6B3, prID, gcSo}, // [16] SPEEDBOAT..NO BICYCLES
+ {0x1F6B4, 0x1F6B6, prEB, gcSo}, // [3] BICYCLIST..PEDESTRIAN
+ {0x1F6B7, 0x1F6BF, prID, gcSo}, // [9] NO PEDESTRIANS..SHOWER
+ {0x1F6C0, 0x1F6C0, prEB, gcSo}, // BATH
+ {0x1F6C1, 0x1F6CB, prID, gcSo}, // [11] BATHTUB..COUCH AND LAMP
+ {0x1F6CC, 0x1F6CC, prEB, gcSo}, // SLEEPING ACCOMMODATION
+ {0x1F6CD, 0x1F6D7, prID, gcSo}, // [11] SHOPPING BAGS..ELEVATOR
+ {0x1F6D8, 0x1F6DB, prID, gcCn}, // [4] ..
+ {0x1F6DC, 0x1F6EC, prID, gcSo}, // [17] WIRELESS..AIRPLANE ARRIVING
+ {0x1F6ED, 0x1F6EF, prID, gcCn}, // [3] ..
+ {0x1F6F0, 0x1F6FC, prID, gcSo}, // [13] SATELLITE..ROLLER SKATE
+ {0x1F6FD, 0x1F6FF, prID, gcCn}, // [3] ..
+ {0x1F700, 0x1F773, prAL, gcSo}, // [116] ALCHEMICAL SYMBOL FOR QUINTESSENCE..ALCHEMICAL SYMBOL FOR HALF OUNCE
+ {0x1F774, 0x1F776, prID, gcSo}, // [3] LOT OF FORTUNE..LUNAR ECLIPSE
+ {0x1F777, 0x1F77A, prID, gcCn}, // [4] ..
+ {0x1F77B, 0x1F77F, prID, gcSo}, // [5] HAUMEA..ORCUS
+ {0x1F780, 0x1F7D4, prAL, gcSo}, // [85] BLACK LEFT-POINTING ISOSCELES RIGHT TRIANGLE..HEAVY TWELVE POINTED PINWHEEL STAR
+ {0x1F7D5, 0x1F7D9, prID, gcSo}, // [5] CIRCLED TRIANGLE..NINE POINTED WHITE STAR
+ {0x1F7DA, 0x1F7DF, prID, gcCn}, // [6] ..
+ {0x1F7E0, 0x1F7EB, prID, gcSo}, // [12] LARGE ORANGE CIRCLE..LARGE BROWN SQUARE
+ {0x1F7EC, 0x1F7EF, prID, gcCn}, // [4] ..
+ {0x1F7F0, 0x1F7F0, prID, gcSo}, // HEAVY EQUALS SIGN
+ {0x1F7F1, 0x1F7FF, prID, gcCn}, // [15] ..
+ {0x1F800, 0x1F80B, prAL, gcSo}, // [12] LEFTWARDS ARROW WITH SMALL TRIANGLE ARROWHEAD..DOWNWARDS ARROW WITH LARGE TRIANGLE ARROWHEAD
+ {0x1F80C, 0x1F80F, prID, gcCn}, // [4] ..
+ {0x1F810, 0x1F847, prAL, gcSo}, // [56] LEFTWARDS ARROW WITH SMALL EQUILATERAL ARROWHEAD..DOWNWARDS HEAVY ARROW
+ {0x1F848, 0x1F84F, prID, gcCn}, // [8] ..
+ {0x1F850, 0x1F859, prAL, gcSo}, // [10] LEFTWARDS SANS-SERIF ARROW..UP DOWN SANS-SERIF ARROW
+ {0x1F85A, 0x1F85F, prID, gcCn}, // [6] ..
+ {0x1F860, 0x1F887, prAL, gcSo}, // [40] WIDE-HEADED LEFTWARDS LIGHT BARB ARROW..WIDE-HEADED SOUTH WEST VERY HEAVY BARB ARROW
+ {0x1F888, 0x1F88F, prID, gcCn}, // [8] ..
+ {0x1F890, 0x1F8AD, prAL, gcSo}, // [30] LEFTWARDS TRIANGLE ARROWHEAD..WHITE ARROW SHAFT WIDTH TWO THIRDS
+ {0x1F8AE, 0x1F8AF, prID, gcCn}, // [2] ..
+ {0x1F8B0, 0x1F8B1, prID, gcSo}, // [2] ARROW POINTING UPWARDS THEN NORTH WEST..ARROW POINTING RIGHTWARDS THEN CURVING SOUTH WEST
+ {0x1F8B2, 0x1F8FF, prID, gcCn}, // [78] ..
+ {0x1F900, 0x1F90B, prAL, gcSo}, // [12] CIRCLED CROSS FORMEE WITH FOUR DOTS..DOWNWARD FACING NOTCHED HOOK WITH DOT
+ {0x1F90C, 0x1F90C, prEB, gcSo}, // PINCHED FINGERS
+ {0x1F90D, 0x1F90E, prID, gcSo}, // [2] WHITE HEART..BROWN HEART
+ {0x1F90F, 0x1F90F, prEB, gcSo}, // PINCHING HAND
+ {0x1F910, 0x1F917, prID, gcSo}, // [8] ZIPPER-MOUTH FACE..HUGGING FACE
+ {0x1F918, 0x1F91F, prEB, gcSo}, // [8] SIGN OF THE HORNS..I LOVE YOU HAND SIGN
+ {0x1F920, 0x1F925, prID, gcSo}, // [6] FACE WITH COWBOY HAT..LYING FACE
+ {0x1F926, 0x1F926, prEB, gcSo}, // FACE PALM
+ {0x1F927, 0x1F92F, prID, gcSo}, // [9] SNEEZING FACE..SHOCKED FACE WITH EXPLODING HEAD
+ {0x1F930, 0x1F939, prEB, gcSo}, // [10] PREGNANT WOMAN..JUGGLING
+ {0x1F93A, 0x1F93B, prID, gcSo}, // [2] FENCER..MODERN PENTATHLON
+ {0x1F93C, 0x1F93E, prEB, gcSo}, // [3] WRESTLERS..HANDBALL
+ {0x1F93F, 0x1F976, prID, gcSo}, // [56] DIVING MASK..FREEZING FACE
+ {0x1F977, 0x1F977, prEB, gcSo}, // NINJA
+ {0x1F978, 0x1F9B4, prID, gcSo}, // [61] DISGUISED FACE..BONE
+ {0x1F9B5, 0x1F9B6, prEB, gcSo}, // [2] LEG..FOOT
+ {0x1F9B7, 0x1F9B7, prID, gcSo}, // TOOTH
+ {0x1F9B8, 0x1F9B9, prEB, gcSo}, // [2] SUPERHERO..SUPERVILLAIN
+ {0x1F9BA, 0x1F9BA, prID, gcSo}, // SAFETY VEST
+ {0x1F9BB, 0x1F9BB, prEB, gcSo}, // EAR WITH HEARING AID
+ {0x1F9BC, 0x1F9CC, prID, gcSo}, // [17] MOTORIZED WHEELCHAIR..TROLL
+ {0x1F9CD, 0x1F9CF, prEB, gcSo}, // [3] STANDING PERSON..DEAF PERSON
+ {0x1F9D0, 0x1F9D0, prID, gcSo}, // FACE WITH MONOCLE
+ {0x1F9D1, 0x1F9DD, prEB, gcSo}, // [13] ADULT..ELF
+ {0x1F9DE, 0x1F9FF, prID, gcSo}, // [34] GENIE..NAZAR AMULET
+ {0x1FA00, 0x1FA53, prAL, gcSo}, // [84] NEUTRAL CHESS KING..BLACK CHESS KNIGHT-BISHOP
+ {0x1FA54, 0x1FA5F, prID, gcCn}, // [12] ..
+ {0x1FA60, 0x1FA6D, prID, gcSo}, // [14] XIANGQI RED GENERAL..XIANGQI BLACK SOLDIER
+ {0x1FA6E, 0x1FA6F, prID, gcCn}, // [2] ..
+ {0x1FA70, 0x1FA7C, prID, gcSo}, // [13] BALLET SHOES..CRUTCH
+ {0x1FA7D, 0x1FA7F, prID, gcCn}, // [3] ..
+ {0x1FA80, 0x1FA88, prID, gcSo}, // [9] YO-YO..FLUTE
+ {0x1FA89, 0x1FA8F, prID, gcCn}, // [7] ..
+ {0x1FA90, 0x1FABD, prID, gcSo}, // [46] RINGED PLANET..WING
+ {0x1FABE, 0x1FABE, prID, gcCn}, //
+ {0x1FABF, 0x1FAC2, prID, gcSo}, // [4] GOOSE..PEOPLE HUGGING
+ {0x1FAC3, 0x1FAC5, prEB, gcSo}, // [3] PREGNANT MAN..PERSON WITH CROWN
+ {0x1FAC6, 0x1FACD, prID, gcCn}, // [8] ..