diff --git a/tools/go.mod b/tools/go.mod index 51943f02..8dc7753d 100644 --- a/tools/go.mod +++ b/tools/go.mod @@ -2,7 +2,7 @@ module github.com/crc-org/vfkit/tools go 1.24.0 -require github.com/golangci/golangci-lint/v2 v2.5.0 +require github.com/golangci/golangci-lint/v2 v2.6.2 require ( 4d63.com/gocheckcompilerdirectives v1.3.0 // indirect @@ -11,7 +11,7 @@ require ( dev.gaijin.team/go/exhaustruct/v4 v4.0.0 // indirect dev.gaijin.team/go/golib v0.6.0 // indirect github.com/4meepo/tagalign v1.4.3 // indirect - github.com/Abirdcfly/dupword v0.1.6 // indirect + github.com/Abirdcfly/dupword v0.1.7 // indirect github.com/AdminBenni/iota-mixing v1.0.0 // indirect github.com/AlwxSin/noinlineerr v1.0.5 // indirect github.com/Antonboom/errname v1.1.1 // indirect @@ -19,7 +19,7 @@ require ( github.com/Antonboom/testifylint v1.6.4 // indirect github.com/BurntSushi/toml v1.5.0 // indirect github.com/Djarvur/go-err113 v0.1.1 // indirect - github.com/Masterminds/semver/v3 v3.3.1 // indirect + github.com/Masterminds/semver/v3 v3.4.0 // indirect github.com/MirrexOne/unqueryvet v1.2.1 // indirect github.com/OpenPeeDeeP/depguard/v2 v2.2.1 // indirect github.com/alecthomas/chroma/v2 v2.20.0 // indirect @@ -29,22 +29,22 @@ require ( github.com/alfatraining/structtag v1.0.0 // indirect github.com/alingse/asasalint v0.0.11 // indirect github.com/alingse/nilnesserr v0.2.0 // indirect - github.com/ashanbrown/forbidigo/v2 v2.1.0 // indirect - github.com/ashanbrown/makezero/v2 v2.0.1 // indirect + github.com/ashanbrown/forbidigo/v2 v2.3.0 // indirect + github.com/ashanbrown/makezero/v2 v2.1.0 // indirect github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bkielbasa/cyclop v1.2.3 // indirect github.com/blizzy78/varnamelen v0.8.0 // indirect github.com/bombsimon/wsl/v4 v4.7.0 // indirect - github.com/bombsimon/wsl/v5 v5.2.0 // indirect + github.com/bombsimon/wsl/v5 v5.3.0 // indirect github.com/breml/bidichk v0.3.3 // indirect github.com/breml/errchkjson v0.4.1 // indirect github.com/butuzov/ireturn v0.4.0 // indirect github.com/butuzov/mirror v1.3.0 // indirect - github.com/catenacyber/perfsprint v0.9.1 // indirect + github.com/catenacyber/perfsprint v0.10.0 // indirect github.com/ccojocar/zxcvbn-go v1.0.4 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect - github.com/charithe/durationcheck v0.0.10 // indirect + github.com/charithe/durationcheck v0.0.11 // indirect github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect github.com/charmbracelet/lipgloss v1.1.0 // indirect github.com/charmbracelet/x/ansi v0.8.0 // indirect @@ -63,8 +63,8 @@ require ( github.com/firefart/nonamedreturns v1.0.6 // indirect github.com/fsnotify/fsnotify v1.5.4 // indirect github.com/fzipp/gocyclo v0.6.0 // indirect - github.com/ghostiam/protogetter v0.3.16 // indirect - github.com/go-critic/go-critic v0.13.0 // indirect + github.com/ghostiam/protogetter v0.3.17 // indirect + github.com/go-critic/go-critic v0.14.2 // indirect github.com/go-toolsmith/astcast v1.1.0 // indirect github.com/go-toolsmith/astcopy v1.1.0 // indirect github.com/go-toolsmith/astequal v1.2.0 // indirect @@ -75,8 +75,8 @@ require ( github.com/go-viper/mapstructure/v2 v2.4.0 // indirect github.com/go-xmlfmt/xmlfmt v1.1.3 // indirect github.com/gobwas/glob v0.2.3 // indirect - github.com/godoc-lint/godoc-lint v0.10.0 // indirect - github.com/gofrs/flock v0.12.1 // indirect + github.com/godoc-lint/godoc-lint v0.10.1 // indirect + github.com/gofrs/flock v0.13.0 // 
indirect github.com/golang/protobuf v1.5.3 // indirect github.com/golangci/asciicheck v0.5.0 // indirect github.com/golangci/dupl v0.0.0-20250308024227-f665c8d69b32 // indirect @@ -84,7 +84,6 @@ require ( github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d // indirect github.com/golangci/golines v0.0.0-20250217134842-442fd0091d95 // indirect github.com/golangci/misspell v0.7.0 // indirect - github.com/golangci/nilerr v0.0.0-20250918000102-015671e622fe // indirect github.com/golangci/plugin-module-register v0.1.2 // indirect github.com/golangci/revgrep v0.8.0 // indirect github.com/golangci/swaggoswag v0.0.0-20250504205917-77f2aca3143e // indirect @@ -94,6 +93,7 @@ require ( github.com/gostaticanalysis/analysisutil v0.7.1 // indirect github.com/gostaticanalysis/comment v1.5.0 // indirect github.com/gostaticanalysis/forcetypeassert v0.2.0 // indirect + github.com/gostaticanalysis/nilerr v0.1.2 // indirect github.com/hashicorp/go-immutable-radix/v2 v2.1.0 // indirect github.com/hashicorp/go-version v1.7.0 // indirect github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect @@ -104,14 +104,14 @@ require ( github.com/jingyugao/rowserrcheck v1.1.1 // indirect github.com/jjti/go-spancheck v0.6.5 // indirect github.com/julz/importas v0.2.0 // indirect - github.com/karamaru-alpha/copyloopvar v1.2.1 // indirect + github.com/karamaru-alpha/copyloopvar v1.2.2 // indirect github.com/kisielk/errcheck v1.9.0 // indirect github.com/kkHAIKE/contextcheck v1.1.6 // indirect github.com/kulti/thelper v0.7.1 // indirect - github.com/kunwardeep/paralleltest v1.0.14 // indirect + github.com/kunwardeep/paralleltest v1.0.15 // indirect github.com/lasiar/canonicalheader v1.1.2 // indirect - github.com/ldez/exptostd v0.4.4 // indirect - github.com/ldez/gomoddirectives v0.7.0 // indirect + github.com/ldez/exptostd v0.4.5 // indirect + github.com/ldez/gomoddirectives v0.7.1 // indirect github.com/ldez/grignotin v0.10.1 // indirect github.com/ldez/tagliatelle v0.7.2 // indirect github.com/ldez/usetesting v0.5.0 // indirect @@ -121,8 +121,8 @@ require ( github.com/magiconair/properties v1.8.6 // indirect github.com/manuelarte/embeddedstructfieldcheck v0.4.0 // indirect github.com/manuelarte/funcorder v0.5.0 // indirect - github.com/maratori/testableexamples v1.0.0 // indirect - github.com/maratori/testpackage v1.1.1 // indirect + github.com/maratori/testableexamples v1.0.1 // indirect + github.com/maratori/testpackage v1.1.2 // indirect github.com/matoous/godox v1.1.0 // indirect github.com/mattn/go-colorable v0.1.14 // indirect github.com/mattn/go-isatty v0.0.20 // indirect @@ -136,7 +136,7 @@ require ( github.com/nakabonne/nestif v0.3.1 // indirect github.com/nishanths/exhaustive v0.12.0 // indirect github.com/nishanths/predeclared v0.2.2 // indirect - github.com/nunnatsa/ginkgolinter v0.21.0 // indirect + github.com/nunnatsa/ginkgolinter v0.21.2 // indirect github.com/pelletier/go-toml v1.9.5 // indirect github.com/pelletier/go-toml/v2 v2.2.4 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect @@ -145,8 +145,8 @@ require ( github.com/prometheus/client_model v0.2.0 // indirect github.com/prometheus/common v0.32.1 // indirect github.com/prometheus/procfs v0.7.3 // indirect - github.com/quasilyte/go-ruleguard v0.4.4 // indirect - github.com/quasilyte/go-ruleguard/dsl v0.3.22 // indirect + github.com/quasilyte/go-ruleguard v0.4.5 // indirect + github.com/quasilyte/go-ruleguard/dsl v0.3.23 // indirect github.com/quasilyte/gogrep v0.5.0 // indirect github.com/quasilyte/regex/syntax 
v0.0.0-20210819130434-b3f0c404a727 // indirect github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect @@ -159,7 +159,7 @@ require ( github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 // indirect github.com/sashamelentyev/interfacebloat v1.1.0 // indirect github.com/sashamelentyev/usestdlibvars v1.29.0 // indirect - github.com/securego/gosec/v2 v2.22.8 // indirect + github.com/securego/gosec/v2 v2.22.10 // indirect github.com/sirupsen/logrus v1.9.3 // indirect github.com/sivchari/containedctx v1.0.3 // indirect github.com/sonatard/noctx v0.4.0 // indirect @@ -192,22 +192,22 @@ require ( gitlab.com/bosi/decorder v0.4.2 // indirect go-simpler.org/musttag v0.14.0 // indirect go-simpler.org/sloglint v0.11.1 // indirect - go.augendre.info/arangolint v0.2.0 // indirect - go.augendre.info/fatcontext v0.8.1 // indirect + go.augendre.info/arangolint v0.3.1 // indirect + go.augendre.info/fatcontext v0.9.0 // indirect go.uber.org/automaxprocs v1.6.0 // indirect go.uber.org/multierr v1.10.0 // indirect go.uber.org/zap v1.27.0 // indirect - golang.org/x/exp/typeparams v0.0.0-20250911091902-df9299821621 // indirect - golang.org/x/mod v0.28.0 // indirect - golang.org/x/sync v0.17.0 // indirect - golang.org/x/sys v0.36.0 // indirect - golang.org/x/text v0.29.0 // indirect - golang.org/x/tools v0.37.0 // indirect - google.golang.org/protobuf v1.36.6 // indirect + golang.org/x/exp/typeparams v0.0.0-20251023183803-a4bb9ffd2546 // indirect + golang.org/x/mod v0.29.0 // indirect + golang.org/x/sync v0.18.0 // indirect + golang.org/x/sys v0.37.0 // indirect + golang.org/x/text v0.30.0 // indirect + golang.org/x/tools v0.38.0 // indirect + google.golang.org/protobuf v1.36.8 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect honnef.co/go/tools v0.6.1 // indirect - mvdan.cc/gofumpt v0.9.1 // indirect - mvdan.cc/unparam v0.0.0-20250301125049-0df0534333a4 // indirect + mvdan.cc/gofumpt v0.9.2 // indirect + mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15 // indirect ) diff --git a/tools/go.sum b/tools/go.sum index 137deee5..83d0191e 100644 --- a/tools/go.sum +++ b/tools/go.sum @@ -43,8 +43,8 @@ dev.gaijin.team/go/golib v0.6.0/go.mod h1:uY1mShx8Z/aNHWDyAkZTkX+uCi5PdX7KsG1eDQ dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/4meepo/tagalign v1.4.3 h1:Bnu7jGWwbfpAie2vyl63Zup5KuRv21olsPIha53BJr8= github.com/4meepo/tagalign v1.4.3/go.mod h1:00WwRjiuSbrRJnSVeGWPLp2epS5Q/l4UEy0apLLS37c= -github.com/Abirdcfly/dupword v0.1.6 h1:qeL6u0442RPRe3mcaLcbaCi2/Y/hOcdtw6DE9odjz9c= -github.com/Abirdcfly/dupword v0.1.6/go.mod h1:s+BFMuL/I4YSiFv29snqyjwzDp4b65W2Kvy+PKzZ6cw= +github.com/Abirdcfly/dupword v0.1.7 h1:2j8sInznrje4I0CMisSL6ipEBkeJUJAmK1/lfoNGWrQ= +github.com/Abirdcfly/dupword v0.1.7/go.mod h1:K0DkBeOebJ4VyOICFdppB23Q0YMOgVafM0zYW0n9lF4= github.com/AdminBenni/iota-mixing v1.0.0 h1:Os6lpjG2dp/AE5fYBPAA1zfa2qMdCAWwPMCgpwKq7wo= github.com/AdminBenni/iota-mixing v1.0.0/go.mod h1:i4+tpAaB+qMVIV9OK3m4/DAynOd5bQFaOu+2AhtBCNY= github.com/AlwxSin/noinlineerr v1.0.5 h1:RUjt63wk1AYWTXtVXbSqemlbVTb23JOSRiNsshj7TbY= @@ -61,8 +61,8 @@ github.com/BurntSushi/toml v1.5.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2 github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/Djarvur/go-err113 v0.1.1 h1:eHfopDqXRwAi+YmCUas75ZE0+hoBHJ2GQNLYRSxao4g= github.com/Djarvur/go-err113 v0.1.1/go.mod 
h1:IaWJdYFLg76t2ihfflPZnM1LIQszWOsFDh2hhhAVF6k= -github.com/Masterminds/semver/v3 v3.3.1 h1:QtNSWtVZ3nBfk8mAOu/B6v7FMJ+NHTIgUPi7rj+4nv4= -github.com/Masterminds/semver/v3 v3.3.1/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= +github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0= +github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM= github.com/MirrexOne/unqueryvet v1.2.1 h1:M+zdXMq84g+E1YOLa7g7ExN3dWfZQrdDSTCM7gC+m/A= github.com/MirrexOne/unqueryvet v1.2.1/go.mod h1:IWwCwMQlSWjAIteW0t+28Q5vouyktfujzYznSIWiuOg= github.com/OpenPeeDeeP/depguard/v2 v2.2.1 h1:vckeWVESWp6Qog7UZSARNqfu/cZqvki8zsuj3piCMx4= @@ -90,10 +90,10 @@ github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQ github.com/alingse/asasalint v0.0.11/go.mod h1:nCaoMhw7a9kSJObvQyVzNTPBDbNpdocqrSP7t/cW5+I= github.com/alingse/nilnesserr v0.2.0 h1:raLem5KG7EFVb4UIDAXgrv3N2JIaffeKNtcEXkEWd/w= github.com/alingse/nilnesserr v0.2.0/go.mod h1:1xJPrXonEtX7wyTq8Dytns5P2hNzoWymVUIaKm4HNFg= -github.com/ashanbrown/forbidigo/v2 v2.1.0 h1:NAxZrWqNUQiDz19FKScQ/xvwzmij6BiOw3S0+QUQ+Hs= -github.com/ashanbrown/forbidigo/v2 v2.1.0/go.mod h1:0zZfdNAuZIL7rSComLGthgc/9/n2FqspBOH90xlCHdA= -github.com/ashanbrown/makezero/v2 v2.0.1 h1:r8GtKetWOgoJ4sLyUx97UTwyt2dO7WkGFHizn/Lo8TY= -github.com/ashanbrown/makezero/v2 v2.0.1/go.mod h1:kKU4IMxmYW1M4fiEHMb2vc5SFoPzXvgbMR9gIp5pjSw= +github.com/ashanbrown/forbidigo/v2 v2.3.0 h1:OZZDOchCgsX5gvToVtEBoV2UWbFfI6RKQTir2UZzSxo= +github.com/ashanbrown/forbidigo/v2 v2.3.0/go.mod h1:5p6VmsG5/1xx3E785W9fouMxIOkvY2rRV9nMdWadd6c= +github.com/ashanbrown/makezero/v2 v2.1.0 h1:snuKYMbqosNokUKm+R6/+vOPs8yVAi46La7Ck6QYSaE= +github.com/ashanbrown/makezero/v2 v2.1.0/go.mod h1:aEGT/9q3S8DHeE57C88z2a6xydvgx8J5hgXIGWgo0MY= github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= @@ -106,8 +106,8 @@ github.com/blizzy78/varnamelen v0.8.0 h1:oqSblyuQvFsW1hbBHh1zfwrKe3kcSj0rnXkKzsQ github.com/blizzy78/varnamelen v0.8.0/go.mod h1:V9TzQZ4fLJ1DSrjVDfl89H7aMnTvKkApdHeyESmyR7k= github.com/bombsimon/wsl/v4 v4.7.0 h1:1Ilm9JBPRczjyUs6hvOPKvd7VL1Q++PL8M0SXBDf+jQ= github.com/bombsimon/wsl/v4 v4.7.0/go.mod h1:uV/+6BkffuzSAVYD+yGyld1AChO7/EuLrCF/8xTiapg= -github.com/bombsimon/wsl/v5 v5.2.0 h1:PyCCwd3Q7abGs3e34IW4jLYlBS+FbsU6iK+Tb3NnDp4= -github.com/bombsimon/wsl/v5 v5.2.0/go.mod h1:Gp8lD04z27wm3FANIUPZycXp+8huVsn0oxc+n4qfV9I= +github.com/bombsimon/wsl/v5 v5.3.0 h1:nZWREJFL6U3vgW/B1lfDOigl+tEF6qgs6dGGbFeR0UM= +github.com/bombsimon/wsl/v5 v5.3.0/go.mod h1:Gp8lD04z27wm3FANIUPZycXp+8huVsn0oxc+n4qfV9I= github.com/breml/bidichk v0.3.3 h1:WSM67ztRusf1sMoqH6/c4OBCUlRVTKq+CbSeo0R17sE= github.com/breml/bidichk v0.3.3/go.mod h1:ISbsut8OnjB367j5NseXEGGgO/th206dVa427kR8YTE= github.com/breml/errchkjson v0.4.1 h1:keFSS8D7A2T0haP9kzZTi7o26r7kE3vymjZNeNDRDwg= @@ -116,8 +116,8 @@ github.com/butuzov/ireturn v0.4.0 h1:+s76bF/PfeKEdbG8b54aCocxXmi0wvYdOVsWxVO7n8E github.com/butuzov/ireturn v0.4.0/go.mod h1:ghI0FrCmap8pDWZwfPisFD1vEc56VKH4NpQUxDHta70= github.com/butuzov/mirror v1.3.0 h1:HdWCXzmwlQHdVhwvsfBb2Au0r3HyINry3bDWLYXiKoc= github.com/butuzov/mirror v1.3.0/go.mod h1:AEij0Z8YMALaq4yQj9CPPVYOyJQyiexpQEQgihajRfI= -github.com/catenacyber/perfsprint v0.9.1 h1:5LlTp4RwTooQjJCvGEFV6XksZvWE7wCOUvjD2z0vls0= 
-github.com/catenacyber/perfsprint v0.9.1/go.mod h1:q//VWC2fWbcdSLEY1R3l8n0zQCDPdE4IjZwyY1HMunM= +github.com/catenacyber/perfsprint v0.10.0 h1:AZj1mYyxbxLRqmnYOeguZXEQwWOgQGm2wzLI5d7Hl/0= +github.com/catenacyber/perfsprint v0.10.0/go.mod h1:DJTGsi/Zufpuus6XPGJyKOTMELe347o6akPvWG9Zcsc= github.com/ccojocar/zxcvbn-go v1.0.4 h1:FWnCIRMXPj43ukfX000kvBZvV6raSxakYr1nzyNrUcc= github.com/ccojocar/zxcvbn-go v1.0.4/go.mod h1:3GxGX+rHmueTUMvm5ium7irpyjmm7ikxYFOSJB21Das= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= @@ -125,8 +125,8 @@ github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XL github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/charithe/durationcheck v0.0.10 h1:wgw73BiocdBDQPik+zcEoBG/ob8uyBHf2iyoHGPf5w4= -github.com/charithe/durationcheck v0.0.10/go.mod h1:bCWXb7gYRysD1CU3C+u4ceO49LoGOY1C1L6uouGNreQ= +github.com/charithe/durationcheck v0.0.11 h1:g1/EX1eIiKS57NTWsYtHDZ/APfeXKhye1DidBcABctk= +github.com/charithe/durationcheck v0.0.11/go.mod h1:x5iZaixRNl8ctbM+3B2RrPG5t856TxRyVQEnbIEM2X4= github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc h1:4pZI35227imm7yK2bGPcfpFEmuY1gc2YSTShr4iJBfs= github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc/go.mod h1:X4/0JoqgTIPSFcRA/P6INZzIuyqdFY5rm8tb41s9okk= github.com/charmbracelet/lipgloss v1.1.0 h1:vYXsiLHVkK7fp74RkV7b2kq9+zDLoEU4MZoFqR/noCY= @@ -178,10 +178,10 @@ github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwV github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= github.com/fzipp/gocyclo v0.6.0 h1:lsblElZG7d3ALtGMx9fmxeTKZaLLpU8mET09yN4BBLo= github.com/fzipp/gocyclo v0.6.0/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA= -github.com/ghostiam/protogetter v0.3.16 h1:UkrisuJBYLnZW6FcYUNBDJOqY3X22RtoYMlCsiNlFFA= -github.com/ghostiam/protogetter v0.3.16/go.mod h1:4SRRIv6PcjkIMpUkRUsP4TsUTqO/N3Fmvwivuc/sCHA= -github.com/go-critic/go-critic v0.13.0 h1:kJzM7wzltQasSUXtYyTl6UaPVySO6GkaR1thFnJ6afY= -github.com/go-critic/go-critic v0.13.0/go.mod h1:M/YeuJ3vOCQDnP2SU+ZhjgRzwzcBW87JqLpMJLrZDLI= +github.com/ghostiam/protogetter v0.3.17 h1:sjGPErP9o7i2Ym+z3LsQzBdLCNaqbYy2iJQPxGXg04Q= +github.com/ghostiam/protogetter v0.3.17/go.mod h1:AivIX1eKA/TcUmzZdzbl+Tb8tjIe8FcyG6JFyemQAH4= +github.com/go-critic/go-critic v0.14.2 h1:PMvP5f+LdR8p6B29npvChUXbD1vrNlKDf60NJtgMBOo= +github.com/go-critic/go-critic v0.14.2/go.mod h1:xwntfW6SYAd7h1OqDzmN6hBX/JxsEKl5up/Y2bsxgVQ= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -223,10 +223,10 @@ github.com/go-xmlfmt/xmlfmt v1.1.3 h1:t8Ey3Uy7jDSEisW2K3somuMKIpzktkWptA0iFCnRUW github.com/go-xmlfmt/xmlfmt v1.1.3/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= -github.com/godoc-lint/godoc-lint v0.10.0 
h1:OcyrziBi18sQSEpib6NesVHEJ/Xcng97NunePBA48g4= -github.com/godoc-lint/godoc-lint v0.10.0/go.mod h1:KleLcHu/CGSvkjUH2RvZyoK1MBC7pDQg4NxMYLcBBsw= -github.com/gofrs/flock v0.12.1 h1:MTLVXXHf8ekldpJk3AKicLij9MdwOWkZ+a/jHHZby9E= -github.com/gofrs/flock v0.12.1/go.mod h1:9zxTsyu5xtJ9DK+1tFZyibEV7y3uwDxPPfbxeeHCoD0= +github.com/godoc-lint/godoc-lint v0.10.1 h1:ZPUVzlDtJfA+P688JfPJPkI/SuzcBr/753yGIk5bOPA= +github.com/godoc-lint/godoc-lint v0.10.1/go.mod h1:KleLcHu/CGSvkjUH2RvZyoK1MBC7pDQg4NxMYLcBBsw= +github.com/gofrs/flock v0.13.0 h1:95JolYOvGMqeH31+FC7D2+uULf6mG61mEZ/A8dRYMzw= +github.com/gofrs/flock v0.13.0/go.mod h1:jxeyy9R1auM5S6JYDBhDt+E2TCo7DkratH4Pgi8P+Z0= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= @@ -265,14 +265,12 @@ github.com/golangci/go-printf-func-name v0.1.1 h1:hIYTFJqAGp1iwoIfsNTpoq1xZAarog github.com/golangci/go-printf-func-name v0.1.1/go.mod h1:Es64MpWEZbh0UBtTAICOZiB+miW53w/K9Or/4QogJss= github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d h1:viFft9sS/dxoYY0aiOTsLKO2aZQAPT4nlQCsimGcSGE= github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d/go.mod h1:ivJ9QDg0XucIkmwhzCDsqcnxxlDStoTl89jDMIoNxKY= -github.com/golangci/golangci-lint/v2 v2.5.0 h1:BDRg4ASm4J1y/DSRY6zwJ5tr5Yy8ZqbZ79XrCeFxaQo= -github.com/golangci/golangci-lint/v2 v2.5.0/go.mod h1:IJtWJBZkLbx7AVrIUzLd8Oi3ADtwaNpWbR3wthVWHcc= +github.com/golangci/golangci-lint/v2 v2.6.2 h1:jkMSVv36JmyTENcEertckvimvjPcD5qxNM7W7qhECvI= +github.com/golangci/golangci-lint/v2 v2.6.2/go.mod h1:fSIMDiBt9kzdpnvvV7GO6iWzyv5uaeZ+iPor+2uRczE= github.com/golangci/golines v0.0.0-20250217134842-442fd0091d95 h1:AkK+w9FZBXlU/xUmBtSJN1+tAI4FIvy5WtnUnY8e4p8= github.com/golangci/golines v0.0.0-20250217134842-442fd0091d95/go.mod h1:k9mmcyWKSTMcPPvQUCfRWWQ9VHJ1U9Dc0R7kaXAgtnQ= github.com/golangci/misspell v0.7.0 h1:4GOHr/T1lTW0hhR4tgaaV1WS/lJ+ncvYCoFKmqJsj0c= github.com/golangci/misspell v0.7.0/go.mod h1:WZyyI2P3hxPY2UVHs3cS8YcllAeyfquQcKfdeE9AFVg= -github.com/golangci/nilerr v0.0.0-20250918000102-015671e622fe h1:F1pK9tBy41i7eesBFkSNMldwtiAaWiU+3fT/24sTnNI= -github.com/golangci/nilerr v0.0.0-20250918000102-015671e622fe/go.mod h1:CtTxAluxD2ng9aIT9bPrVoMuISFWCD+SaxtvYtdWA2k= github.com/golangci/plugin-module-register v0.1.2 h1:e5WM6PO6NIAEcij3B053CohVp3HIYbzSuP53UAYgOpg= github.com/golangci/plugin-module-register v0.1.2/go.mod h1:1+QGTsKBvAIvPvoY/os+G5eoqxWn70HYDm2uvUyGuVw= github.com/golangci/revgrep v0.8.0 h1:EZBctwbVd0aMeRnNUsFogoyayvKHyxlV3CdUA46FX2s= @@ -306,8 +304,8 @@ github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20250607225305-033d6d78b36a h1://KbezygeMJZCSHH+HgUZiTeSoiuFspbMg1ge+eFj18= -github.com/google/pprof v0.0.0-20250607225305-033d6d78b36a/go.mod h1:5hDyRhoBCxViHszMt12TnOpEI4VVi+U8Gm9iphldiMA= +github.com/google/pprof v0.0.0-20250820193118-f64d9cf942d6 h1:EEHtgt9IwisQ2AZ4pIsMjahcegHh6rmhqxzIRQIyepY= +github.com/google/pprof 
v0.0.0-20250820193118-f64d9cf942d6/go.mod h1:I6V7YzU0XDpsHqbsyrghnFZLO1gwK6NPTNvmetQIk9U= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= @@ -320,6 +318,8 @@ github.com/gostaticanalysis/comment v1.5.0 h1:X82FLl+TswsUMpMh17srGRuKaaXprTaytm github.com/gostaticanalysis/comment v1.5.0/go.mod h1:V6eb3gpCv9GNVqb6amXzEUX3jXLVK/AdA+IrAMSqvEc= github.com/gostaticanalysis/forcetypeassert v0.2.0 h1:uSnWrrUEYDr86OCxWa4/Tp2jeYDlogZiZHzGkWFefTk= github.com/gostaticanalysis/forcetypeassert v0.2.0/go.mod h1:M5iPavzE9pPqWyeiVXSFghQjljW1+l/Uke3PXHS6ILY= +github.com/gostaticanalysis/nilerr v0.1.2 h1:S6nk8a9N8g062nsx63kUkF6AzbHGw7zzyHMcpu52xQU= +github.com/gostaticanalysis/nilerr v0.1.2/go.mod h1:A19UHhoY3y8ahoL7YKz6sdjDtduwTSI4CsymaC2htPA= github.com/gostaticanalysis/testutil v0.3.1-0.20210208050101-bfb5c8eec0e4/go.mod h1:D+FIZ+7OahH3ePw/izIEeH5I06eKs1IKI4Xr64/Am3M= github.com/gostaticanalysis/testutil v0.5.0 h1:Dq4wT1DdTwTGCQQv3rl3IvD5Ld0E6HiY+3Zh0sUGqw8= github.com/gostaticanalysis/testutil v0.5.0/go.mod h1:OLQSbuM6zw2EvCcXTz1lVq5unyoNft372msDY0nY5Hs= @@ -358,8 +358,8 @@ github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7V github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/julz/importas v0.2.0 h1:y+MJN/UdL63QbFJHws9BVC5RpA2iq0kpjrFajTGivjQ= github.com/julz/importas v0.2.0/go.mod h1:pThlt589EnCYtMnmhmRYY/qn9lCf/frPOK+WMx3xiJY= -github.com/karamaru-alpha/copyloopvar v1.2.1 h1:wmZaZYIjnJ0b5UoKDjUHrikcV0zuPyyxI4SVplLd2CI= -github.com/karamaru-alpha/copyloopvar v1.2.1/go.mod h1:nFmMlFNlClC2BPvNaHMdkirmTJxVCY0lhxBtlfOypMM= +github.com/karamaru-alpha/copyloopvar v1.2.2 h1:yfNQvP9YaGQR7VaWLYcfZUlRP2eo2vhExWKxD/fP6q0= +github.com/karamaru-alpha/copyloopvar v1.2.2/go.mod h1:oY4rGZqZ879JkJMtX3RRkcXRkmUvH0x35ykgaKgsgJY= github.com/kisielk/errcheck v1.9.0 h1:9xt1zI9EBfcYBvdU1nVrzMzzUPUtPKs9bVSIM3TAb3M= github.com/kisielk/errcheck v1.9.0/go.mod h1:kQxWMMVZgIkDq7U8xtG/n2juOjbLgZtedi0D+/VL/i8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= @@ -377,14 +377,14 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kulti/thelper v0.7.1 h1:fI8QITAoFVLx+y+vSyuLBP+rcVIB8jKooNSCT2EiI98= github.com/kulti/thelper v0.7.1/go.mod h1:NsMjfQEy6sd+9Kfw8kCP61W1I0nerGSYSFnGaxQkcbs= -github.com/kunwardeep/paralleltest v1.0.14 h1:wAkMoMeGX/kGfhQBPODT/BL8XhK23ol/nuQ3SwFaUw8= -github.com/kunwardeep/paralleltest v1.0.14/go.mod h1:di4moFqtfz3ToSKxhNjhOZL+696QtJGCFe132CbBLGk= +github.com/kunwardeep/paralleltest v1.0.15 h1:ZMk4Qt306tHIgKISHWFJAO1IDQJLc6uDyJMLyncOb6w= +github.com/kunwardeep/paralleltest v1.0.15/go.mod h1:di4moFqtfz3ToSKxhNjhOZL+696QtJGCFe132CbBLGk= github.com/lasiar/canonicalheader v1.1.2 h1:vZ5uqwvDbyJCnMhmFYimgMZnJMjwljN5VGY0VKbMXb4= github.com/lasiar/canonicalheader v1.1.2/go.mod h1:qJCeLFS0G/QlLQ506T+Fk/fWMa2VmBUiEI2cuMK4djI= -github.com/ldez/exptostd v0.4.4 h1:58AtQjnLcT/tI5W/1KU7xE/O7zW9RAWB6c/ScQAnfus= -github.com/ldez/exptostd v0.4.4/go.mod h1:QfdzPw6oHjFVdNV7ILoPu5sw3OZ3OG1JS0I5JN3J4Js= -github.com/ldez/gomoddirectives v0.7.0 h1:EOx8Dd56BZYSez11LVgdj025lKwlP0/E5OLSl9HDwsY= -github.com/ldez/gomoddirectives v0.7.0/go.mod 
h1:wR4v8MN9J8kcwvrkzrx6sC9xe9Cp68gWYCsda5xvyGc= +github.com/ldez/exptostd v0.4.5 h1:kv2ZGUVI6VwRfp/+bcQ6Nbx0ghFWcGIKInkG/oFn1aQ= +github.com/ldez/exptostd v0.4.5/go.mod h1:QRjHRMXJrCTIm9WxVNH6VW7oN7KrGSht69bIRwvdFsM= +github.com/ldez/gomoddirectives v0.7.1 h1:FaULkvUIG36hj6chpwa+FdCNGZBsD7/fO+p7CCsM6pE= +github.com/ldez/gomoddirectives v0.7.1/go.mod h1:auDNtakWJR1rC+YX7ar+HmveqXATBAyEK1KYpsIRW/8= github.com/ldez/grignotin v0.10.1 h1:keYi9rYsgbvqAZGI1liek5c+jv9UUjbvdj3Tbn5fn4o= github.com/ldez/grignotin v0.10.1/go.mod h1:UlDbXFCARrXbWGNGP3S5vsysNXAPhnSuBufpTEbwOas= github.com/ldez/tagliatelle v0.7.2 h1:KuOlL70/fu9paxuxbeqlicJnCspCRjH0x8FW+NfgYUk= @@ -403,10 +403,10 @@ github.com/manuelarte/embeddedstructfieldcheck v0.4.0 h1:3mAIyaGRtjK6EO9E73JlXLt github.com/manuelarte/embeddedstructfieldcheck v0.4.0/go.mod h1:z8dFSyXqp+fC6NLDSljRJeNQJJDWnY7RoWFzV3PC6UM= github.com/manuelarte/funcorder v0.5.0 h1:llMuHXXbg7tD0i/LNw8vGnkDTHFpTnWqKPI85Rknc+8= github.com/manuelarte/funcorder v0.5.0/go.mod h1:Yt3CiUQthSBMBxjShjdXMexmzpP8YGvGLjrxJNkO2hA= -github.com/maratori/testableexamples v1.0.0 h1:dU5alXRrD8WKSjOUnmJZuzdxWOEQ57+7s93SLMxb2vI= -github.com/maratori/testableexamples v1.0.0/go.mod h1:4rhjL1n20TUTT4vdh3RDqSizKLyXp7K2u6HgraZCGzE= -github.com/maratori/testpackage v1.1.1 h1:S58XVV5AD7HADMmD0fNnziNHqKvSdDuEKdPD1rNTU04= -github.com/maratori/testpackage v1.1.1/go.mod h1:s4gRK/ym6AMrqpOa/kEbQTV4Q4jb7WeLZzVhVVVOQMc= +github.com/maratori/testableexamples v1.0.1 h1:HfOQXs+XgfeRBJ+Wz0XfH+FHnoY9TVqL6Fcevpzy4q8= +github.com/maratori/testableexamples v1.0.1/go.mod h1:XE2F/nQs7B9N08JgyRmdGjYVGqxWwClLPCGSQhXQSrQ= +github.com/maratori/testpackage v1.1.2 h1:ffDSh+AgqluCLMXhM19f/cpvQAKygKAJXFl9aUjmbqs= +github.com/maratori/testpackage v1.1.2/go.mod h1:8F24GdVDFW5Ew43Et02jamrVMNXLUNaOynhDssITGfc= github.com/matoous/godox v1.1.0 h1:W5mqwbyWrwZv6OQ5Z1a/DHGMOvXYCBP3+Ht7KMoJhq4= github.com/matoous/godox v1.1.0/go.mod h1:jgE/3fUXiTurkdHOLT5WEkThTSuE7yxHv5iWPa80afs= github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= @@ -442,12 +442,12 @@ github.com/nishanths/exhaustive v0.12.0 h1:vIY9sALmw6T/yxiASewa4TQcFsVYZQQRUQJhK github.com/nishanths/exhaustive v0.12.0/go.mod h1:mEZ95wPIZW+x8kC4TgC+9YCUgiST7ecevsVDTgc2obs= github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm/w98Vk= github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c= -github.com/nunnatsa/ginkgolinter v0.21.0 h1:IYwuX+ajy3G1MezlMLB1BENRtFj16+Evyi4uki1NOOQ= -github.com/nunnatsa/ginkgolinter v0.21.0/go.mod h1:QlzY9UP9zaqu58FjYxhp9bnjuwXwG1bfW5rid9ChNMw= -github.com/onsi/ginkgo/v2 v2.23.4 h1:ktYTpKJAVZnDT4VjxSbiBenUjmlL/5QkBEocaWXiQus= -github.com/onsi/ginkgo/v2 v2.23.4/go.mod h1:Bt66ApGPBFzHyR+JO10Zbt0Gsp4uWxu5mIOTusL46e8= -github.com/onsi/gomega v1.38.0 h1:c/WX+w8SLAinvuKKQFh77WEucCnPk4j2OTUr7lt7BeY= -github.com/onsi/gomega v1.38.0/go.mod h1:OcXcwId0b9QsE7Y49u+BTrL4IdKOBOKnD6VQNTJEB6o= +github.com/nunnatsa/ginkgolinter v0.21.2 h1:khzWfm2/Br8ZemX8QM1pl72LwM+rMeW6VUbQ4rzh0Po= +github.com/nunnatsa/ginkgolinter v0.21.2/go.mod h1:GItSI5fw7mCGLPmkvGYrr1kEetZe7B593jcyOpyabsY= +github.com/onsi/ginkgo/v2 v2.26.0 h1:1J4Wut1IlYZNEAWIV3ALrT9NfiaGW2cDCJQSFQMs/gE= +github.com/onsi/ginkgo/v2 v2.26.0/go.mod h1:qhEywmzWTBUY88kfO0BRvX4py7scov9yR+Az2oavUzw= +github.com/onsi/gomega v1.38.2 h1:eZCjf2xjZAqe+LeWvKb5weQ+NcPwX84kqJ0cZNxok2A= +github.com/onsi/gomega v1.38.2/go.mod h1:W2MJcYxRGV63b418Ai34Ud0hEdTVXq9NW9+Sx6uXf3k= github.com/otiai10/copy v1.2.0/go.mod 
h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw= github.com/otiai10/copy v1.14.0 h1:dCI/t1iTdYGtkvCuBG2BgR6KZa83PTclw4U5n2wAllU= github.com/otiai10/copy v1.14.0/go.mod h1:ECfuL02W+/FkTWZWgQqXPWZgW9oeKCSQ5qVfSc4qc4w= @@ -490,10 +490,10 @@ github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4O github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU= github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/quasilyte/go-ruleguard v0.4.4 h1:53DncefIeLX3qEpjzlS1lyUmQoUEeOWPFWqaTJq9eAQ= -github.com/quasilyte/go-ruleguard v0.4.4/go.mod h1:Vl05zJ538vcEEwu16V/Hdu7IYZWyKSwIy4c88Ro1kRE= -github.com/quasilyte/go-ruleguard/dsl v0.3.22 h1:wd8zkOhSNr+I+8Qeciml08ivDt1pSXe60+5DqOpCjPE= -github.com/quasilyte/go-ruleguard/dsl v0.3.22/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= +github.com/quasilyte/go-ruleguard v0.4.5 h1:AGY0tiOT5hJX9BTdx/xBdoCubQUAE2grkqY2lSwvZcA= +github.com/quasilyte/go-ruleguard v0.4.5/go.mod h1:Vl05zJ538vcEEwu16V/Hdu7IYZWyKSwIy4c88Ro1kRE= +github.com/quasilyte/go-ruleguard/dsl v0.3.23 h1:lxjt5B6ZCiBeeNO8/oQsegE6fLeCzuMRoVWSkXC4uvY= +github.com/quasilyte/go-ruleguard/dsl v0.3.23/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= github.com/quasilyte/gogrep v0.5.0 h1:eTKODPXbI8ffJMN+W2aE0+oL0z/nh8/5eNdiO34SOAo= github.com/quasilyte/gogrep v0.5.0/go.mod h1:Cm9lpz9NZjEoL1tgZ2OgeUKPIxL1meE7eo60Z6Sk+Ng= github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 h1:TCg2WBOl980XxGFEZSS6KlBGIV0diGdySzxATTWoqaU= @@ -521,8 +521,8 @@ github.com/sashamelentyev/interfacebloat v1.1.0 h1:xdRdJp0irL086OyW1H/RTZTr1h/tM github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84dxiN1iBi9+G8zZIhPVoNjQ= github.com/sashamelentyev/usestdlibvars v1.29.0 h1:8J0MoRrw4/NAXtjQqTHrbW9NN+3iMf7Knkq057v4XOQ= github.com/sashamelentyev/usestdlibvars v1.29.0/go.mod h1:8PpnjHMk5VdeWlVb4wCdrB8PNbLqZ3wBZTZWkrpZZL8= -github.com/securego/gosec/v2 v2.22.8 h1:3NMpmfXO8wAVFZPNsd3EscOTa32Jyo6FLLlW53bexMI= -github.com/securego/gosec/v2 v2.22.8/go.mod h1:ZAw8K2ikuH9qDlfdV87JmNghnVfKB1XC7+TVzk6Utto= +github.com/securego/gosec/v2 v2.22.10 h1:ntbBqdWXnu46DUOXn+R2SvPo3PiJCDugTCgTW2g4tQg= +github.com/securego/gosec/v2 v2.22.10/go.mod h1:9UNjK3tLpv/w2b0+7r82byV43wCJDNtEDQMeS+H/g2w= github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ= github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= @@ -615,10 +615,10 @@ go-simpler.org/musttag v0.14.0 h1:XGySZATqQYSEV3/YTy+iX+aofbZZllJaqwFWs+RTtSo= go-simpler.org/musttag v0.14.0/go.mod h1:uP8EymctQjJ4Z1kUnjX0u2l60WfUdQxCwSNKzE1JEOE= go-simpler.org/sloglint v0.11.1 h1:xRbPepLT/MHPTCA6TS/wNfZrDzkGvCCqUv4Bdwc3H7s= go-simpler.org/sloglint v0.11.1/go.mod h1:2PowwiCOK8mjiF+0KGifVOT8ZsCNiFzvfyJeJOIt8MQ= -go.augendre.info/arangolint v0.2.0 h1:2NP/XudpPmfBhQKX4rMk+zDYIj//qbt4hfZmSSTcpj8= -go.augendre.info/arangolint v0.2.0/go.mod h1:Vx4KSJwu48tkE+8uxuf0cbBnAPgnt8O1KWiT7bljq7w= -go.augendre.info/fatcontext v0.8.1 h1:/T4+cCjpL9g71gJpcFAgVo/K5VFpqlN+NPU7QXxD5+A= -go.augendre.info/fatcontext v0.8.1/go.mod h1:r3Qz4ZOzex66wfyyj5VZ1xUcl81vzvHQ6/GWzzlMEwA= +go.augendre.info/arangolint v0.3.1 h1:n2E6p8f+zfXSFLa2e2WqFPp4bfvcuRdd50y6cT65pSo= +go.augendre.info/arangolint v0.3.1/go.mod 
h1:6ZKzEzIZuBQwoSvlKT+qpUfIbBfFCE5gbAoTg0/117g= +go.augendre.info/fatcontext v0.9.0 h1:Gt5jGD4Zcj8CDMVzjOJITlSb9cEch54hjRRlN3qDojE= +go.augendre.info/fatcontext v0.9.0/go.mod h1:L94brOAT1OOUNue6ph/2HnwxoNlds9aXDF2FcUntbNw= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -632,6 +632,8 @@ go.uber.org/multierr v1.10.0 h1:S0h4aNzvfcFsC3dRF1jLoaov7oRaKqRGC/pUEJ2yvPQ= go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= +go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc= +go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -655,8 +657,8 @@ golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWB golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY= golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= -golang.org/x/exp/typeparams v0.0.0-20250911091902-df9299821621 h1:Yl4H5w2RV7L/dvSHp2GerziT5K2CORgFINPaMFxWGWw= -golang.org/x/exp/typeparams v0.0.0-20250911091902-df9299821621/go.mod h1:4Mzdyp/6jzw9auFDJ3OMF5qksa7UvPnzKqTVGcb04ms= +golang.org/x/exp/typeparams v0.0.0-20251023183803-a4bb9ffd2546 h1:HDjDiATsGqvuqvkDvgJjD1IgPrVekcSXVVE21JwvzGE= +golang.org/x/exp/typeparams v0.0.0-20251023183803-a4bb9ffd2546/go.mod h1:4Mzdyp/6jzw9auFDJ3OMF5qksa7UvPnzKqTVGcb04ms= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -684,8 +686,8 @@ golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91 golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.28.0 h1:gQBtGhjxykdjY9YhZpSlZIsbnaE2+PgjfLWUQTnoZ1U= -golang.org/x/mod v0.28.0/go.mod h1:yfB/L0NOf/kmEbXjzCPOx1iK1fRutOydrCMsqRhEBxI= +golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA= +golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -724,8 +726,8 @@ golang.org/x/net v0.6.0/go.mod 
h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= -golang.org/x/net v0.44.0 h1:evd8IRDyfNBMBTTY5XRF1vaZlD+EmWx6x8PkhR04H/I= -golang.org/x/net v0.44.0/go.mod h1:ECOoLqd5U3Lhyeyo/QDCEVQ4sNgYsqvCZ722XogGieY= +golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4= +golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -747,8 +749,8 @@ golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= -golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= -golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -800,8 +802,8 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k= -golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= +golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= @@ -818,8 +820,8 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/text v0.29.0 h1:1neNs90w9YzJ9BocxfsQNHKuAT4pkghyXc4nhZ6sJvk= -golang.org/x/text v0.29.0/go.mod h1:7MhJOA9CD2qZyOKYazxdYMF85OwPdEr9jTtBpO7ydH4= +golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= +golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= golang.org/x/time 
v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -873,8 +875,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg= -golang.org/x/tools v0.37.0 h1:DVSRzp7FwePZW356yEAChSdNcQo6Nsp+fex1SUW09lE= -golang.org/x/tools v0.37.0/go.mod h1:MBN5QPQtLMHVdvsbtarmTNukZDdgwdwlO5qGacAzF0w= +golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= +golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= golang.org/x/tools/go/expect v0.1.1-deprecated h1:jpBZDwmgPhXsKZC6WhL20P4b/wmnpsEAGHaNy0n/rJM= golang.org/x/tools/go/expect v0.1.1-deprecated/go.mod h1:eihoPOH+FgIqa3FpoTwguz/bVUSGBlGQU67vpBeOrBY= golang.org/x/tools/go/packages/packagestest v0.1.1-deprecated h1:1h2MnaIAIXISqTFKdENegdpAgUXz6NrPEsbIeWaBRvM= @@ -958,8 +960,8 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= -google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= +google.golang.org/protobuf v1.36.8 h1:xHScyCOEuuwZEc6UtSOvPbAT4zRh0xcNRYekJwfqyMc= +google.golang.org/protobuf v1.36.8/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -988,10 +990,10 @@ honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9 honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.6.1 h1:R094WgE8K4JirYjBaOpz/AvTyUu/3wbmAoskKN/pxTI= honnef.co/go/tools v0.6.1/go.mod h1:3puzxxljPCe8RGJX7BIy1plGbxEOZni5mR2aXe3/uk4= -mvdan.cc/gofumpt v0.9.1 h1:p5YT2NfFWsYyTieYgwcQ8aKV3xRvFH4uuN/zB2gBbMQ= -mvdan.cc/gofumpt v0.9.1/go.mod h1:3xYtNemnKiXaTh6R4VtlqDATFwBbdXI8lJvH/4qk7mw= -mvdan.cc/unparam v0.0.0-20250301125049-0df0534333a4 h1:WjUu4yQoT5BHT1w8Zu56SP8367OuBV5jvo+4Ulppyf8= -mvdan.cc/unparam v0.0.0-20250301125049-0df0534333a4/go.mod h1:rthT7OuvRbaGcd5ginj6dA2oLE7YNlta9qhBNNdCaLE= +mvdan.cc/gofumpt v0.9.2 h1:zsEMWL8SVKGHNztrx6uZrXdp7AX8r421Vvp23sz7ik4= +mvdan.cc/gofumpt v0.9.2/go.mod h1:iB7Hn+ai8lPvofHd9ZFGVg2GOr8sBUw1QUWjNbmIL/s= +mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15 h1:ssMzja7PDPJV8FStj7hq9IKiuiKhgz9ErWw+m68e7DI= +mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15/go.mod h1:4M5MMXl2kW6fivUT6yRGpLLPNfuGtU2Z0cPvFquGDYU= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= 
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/tools/vendor/github.com/Abirdcfly/dupword/README.md b/tools/vendor/github.com/Abirdcfly/dupword/README.md index e6c5b919..a3db4c50 100644 --- a/tools/vendor/github.com/Abirdcfly/dupword/README.md +++ b/tools/vendor/github.com/Abirdcfly/dupword/README.md @@ -101,10 +101,14 @@ Flags: no effect (deprecated) -c int display offending line with this many lines of context (default -1) + -comments-only + check only comments, skip strings -cpuprofile string write CPU profile to this file -debug string debug flags, any subset of "fpstv" + -diff + with -fix, don't update the files, but print a unified diff -fix apply all suggested fixes -flags @@ -130,7 +134,7 @@ Flags: ### 5. my advice -use `--keyword=the,and,a` and `-fix` together. I think that specifying only commonly repeated prepositions can effectively avoid false positives. +use `--keyword=the,and,a` and `-fix` together. I think that specifying only commonly repeated prepositions can effectively avoid false positives. see [dupword#4](https://github.com/Abirdcfly/dupword/issues/4) for real code example. diff --git a/tools/vendor/github.com/Abirdcfly/dupword/dupword.go b/tools/vendor/github.com/Abirdcfly/dupword/dupword.go index c066f9a0..a3eaced8 100644 --- a/tools/vendor/github.com/Abirdcfly/dupword/dupword.go +++ b/tools/vendor/github.com/Abirdcfly/dupword/dupword.go @@ -98,20 +98,25 @@ func NewAnalyzer() *analysis.Analyzer { a.Flags.Init(Name, flag.ExitOnError) a.Flags.Var(&analyzer.keywords, "keyword", "keywords for detecting duplicate words") a.Flags.Var(&analyzer.ignoreWords, "ignore", "ignore words") + a.Flags.BoolVar(&analyzer.commentsOnly, "comments-only", false, "check only comments, skip strings") a.Flags.Var(version{}, "V", "print version and exit") return a } type analyzer struct { - keywords keywords - ignoreWords ignore + keywords keywords + ignoreWords ignore + commentsOnly bool } func (a *analyzer) run(pass *analysis.Pass) (interface{}, error) { for _, file := range pass.Files { a.fixDuplicateWordInComment(pass, file) } + if a.commentsOnly { + return nil, nil + } inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ (*ast.BasicLit)(nil), diff --git a/tools/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md b/tools/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md index f95a504f..fabe5e43 100644 --- a/tools/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md +++ b/tools/vendor/github.com/Masterminds/semver/v3/CHANGELOG.md @@ -1,5 +1,31 @@ # Changelog +## 3.4.0 (2025-06-27) + +### Added + +- #268: Added property to Constraints to include prereleases for Check and Validate + +### Changed + +- #263: Updated Go testing for 1.24, 1.23, and 1.22 +- #269: Updated the error message handling for message case and wrapping errors +- #266: Restore the ability to have leading 0's when parsing with NewVersion. + Opt-out of this by setting CoerceNewVersion to false. + +### Fixed + +- #257: Fixed the CodeQL link (thanks @dmitris) +- #262: Restored detailed errors when failed to parse with NewVersion. Opt-out + of this by setting DetailedNewVersionErrors to false for faster performance. +- #267: Handle pre-releases for an "and" group if one constraint includes them + +## 3.3.1 (2024-11-19) + +### Fixed + +- #253: Fix for allowing some version that were invalid + ## 3.3.0 (2024-08-27) ### Added @@ -137,7 +163,7 @@ functions. These are described in the added and changed sections below. 
- #78: Fix unchecked error in example code (thanks @ravron) - #70: Fix the handling of pre-releases and the 0.0.0 release edge case - #97: Fixed copyright file for proper display on GitHub -- #107: Fix handling prerelease when sorting alphanum and num +- #107: Fix handling prerelease when sorting alphanum and num - #109: Fixed where Validate sometimes returns wrong message on error ## 1.4.2 (2018-04-10) diff --git a/tools/vendor/github.com/Masterminds/semver/v3/README.md b/tools/vendor/github.com/Masterminds/semver/v3/README.md index ed569360..2f56c676 100644 --- a/tools/vendor/github.com/Masterminds/semver/v3/README.md +++ b/tools/vendor/github.com/Masterminds/semver/v3/README.md @@ -50,6 +50,18 @@ other versions, convert the version back into a string, and get the original string. Getting the original string is useful if the semantic version was coerced into a valid form. +There are package-level variables that affect how `NewVersion` handles parsing. + +- `CoerceNewVersion` is `true` by default. When set to `true` it coerces non-compliant + versions into SemVer. For example, allowing a leading 0 in a major, minor, or patch + part. This enables the use of CalVer in versions even when not compliant with SemVer. + When set to `false` less coercion work is done. +- `DetailedNewVersionErrors` provides more detailed errors. It only has an effect when + `CoerceNewVersion` is set to `false`. When `DetailedNewVersionErrors` is set to `true` + it can provide some more insight into why a version is invalid. Setting + `DetailedNewVersionErrors` to `false` improves performance but provides less + detailed error messages if a version fails to parse. + ## Sorting Semantic Versions A set of versions can be sorted using the `sort` package from the standard library. @@ -160,6 +172,10 @@ means `>=1.2.3-BETA` will return `1.2.3-alpha`. What you might expect from case sensitivity doesn't apply here. This is due to ASCII sort ordering which is what the spec specifies. +The `Constraints` instance returned from `semver.NewConstraint()` has a property +`IncludePrerelease` that, when set to true, causes prerelease versions to be considered +when `Check()` and `Validate()` are called. + ### Hyphen Range Comparisons There are multiple methods to handle ranges and the first is hyphens ranges. @@ -250,7 +266,7 @@ or [create a pull request](https://github.com/Masterminds/semver/pulls). Security is an important consideration for this project. The project currently uses the following tools to help discover security issues: -* [CodeQL](https://github.com/Masterminds/semver) +* [CodeQL](https://codeql.github.com) * [gosec](https://github.com/securego/gosec) * Daily Fuzz testing diff --git a/tools/vendor/github.com/Masterminds/semver/v3/constraints.go b/tools/vendor/github.com/Masterminds/semver/v3/constraints.go index 8461c7ed..8b7a10f8 100644 --- a/tools/vendor/github.com/Masterminds/semver/v3/constraints.go +++ b/tools/vendor/github.com/Masterminds/semver/v3/constraints.go @@ -12,6 +12,13 @@ import ( // checked against. type Constraints struct { constraints [][]*constraint + containsPre []bool + + // IncludePrerelease specifies if pre-releases should be included in + // the results. Note, if a constraint range has a prerelease then + // prereleases will be included for that AND group even if this is + // set to false.
+ IncludePrerelease bool } // NewConstraint returns a Constraints instance that a Version instance can @@ -22,11 +29,10 @@ func NewConstraint(c string) (*Constraints, error) { c = rewriteRange(c) ors := strings.Split(c, "||") - or := make([][]*constraint, len(ors)) + lenors := len(ors) + or := make([][]*constraint, lenors) + hasPre := make([]bool, lenors) for k, v := range ors { - - // TODO: Find a way to validate and fetch all the constraints in a simpler form - // Validate the segment if !validConstraintRegex.MatchString(v) { return nil, fmt.Errorf("improper constraint: %s", v) @@ -43,12 +49,22 @@ func NewConstraint(c string) (*Constraints, error) { return nil, err } + // If one of the constraints has a prerelease record this. + // This information is used when checking all in an "and" + // group to ensure they all check for prereleases. + if pc.con.pre != "" { + hasPre[k] = true + } + result[i] = pc } or[k] = result } - o := &Constraints{constraints: or} + o := &Constraints{ + constraints: or, + containsPre: hasPre, + } return o, nil } @@ -57,10 +73,10 @@ func (cs Constraints) Check(v *Version) bool { // TODO(mattfarina): For v4 of this library consolidate the Check and Validate // functions as the underlying functions make that possible now. // loop over the ORs and check the inner ANDs - for _, o := range cs.constraints { + for i, o := range cs.constraints { joy := true for _, c := range o { - if check, _ := c.check(v); !check { + if check, _ := c.check(v, (cs.IncludePrerelease || cs.containsPre[i])); !check { joy = false break } @@ -83,12 +99,12 @@ func (cs Constraints) Validate(v *Version) (bool, []error) { // Capture the prerelease message only once. When it happens the first time // this var is marked var prerelesase bool - for _, o := range cs.constraints { + for i, o := range cs.constraints { joy := true for _, c := range o { // Before running the check handle the case there the version is // a prerelease and the check is not searching for prereleases. - if c.con.pre == "" && v.pre != "" { + if !(cs.IncludePrerelease || cs.containsPre[i]) && v.pre != "" { if !prerelesase { em := fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v) e = append(e, em) @@ -98,7 +114,7 @@ func (cs Constraints) Validate(v *Version) (bool, []error) { } else { - if _, err := c.check(v); err != nil { + if _, err := c.check(v, (cs.IncludePrerelease || cs.containsPre[i])); err != nil { e = append(e, err) joy = false } @@ -227,8 +243,8 @@ type constraint struct { } // Check if a version meets the constraint -func (c *constraint) check(v *Version) (bool, error) { - return constraintOps[c.origfunc](v, c) +func (c *constraint) check(v *Version, includePre bool) (bool, error) { + return constraintOps[c.origfunc](v, c, includePre) } // String prints an individual constraint into a string @@ -236,7 +252,7 @@ func (c *constraint) string() string { return c.origfunc + c.orig } -type cfunc func(v *Version, c *constraint) (bool, error) +type cfunc func(v *Version, c *constraint, includePre bool) (bool, error) func parseConstraint(c string) (*constraint, error) { if len(c) > 0 { @@ -272,7 +288,7 @@ func parseConstraint(c string) (*constraint, error) { // The constraintRegex should catch any regex parsing errors. So, // we should never get here. 
- return nil, errors.New("constraint Parser Error") + return nil, errors.New("constraint parser error") } cs.con = con @@ -290,7 +306,7 @@ func parseConstraint(c string) (*constraint, error) { // The constraintRegex should catch any regex parsing errors. So, // we should never get here. - return nil, errors.New("constraint Parser Error") + return nil, errors.New("constraint parser error") } cs := &constraint{ @@ -305,16 +321,14 @@ func parseConstraint(c string) (*constraint, error) { } // Constraint functions -func constraintNotEqual(v *Version, c *constraint) (bool, error) { - if c.dirty { - - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { - return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v) - } +func constraintNotEqual(v *Version, c *constraint, includePre bool) (bool, error) { + // The existence of prereleases is checked at the group level and passed in. + // Exit early if the version has a prerelease but those are to be ignored. + if v.Prerelease() != "" && !includePre { + return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v) + } + if c.dirty { if c.con.Major() != v.Major() { return true, nil } @@ -345,12 +359,11 @@ func constraintNotEqual(v *Version, c *constraint) (bool, error) { return true, nil } -func constraintGreaterThan(v *Version, c *constraint) (bool, error) { +func constraintGreaterThan(v *Version, c *constraint, includePre bool) (bool, error) { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { + // The existence of prereleases is checked at the group level and passed in. + // Exit early if the version has a prerelease but those are to be ignored. + if v.Prerelease() != "" && !includePre { return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v) } @@ -391,11 +404,10 @@ func constraintGreaterThan(v *Version, c *constraint) (bool, error) { return false, fmt.Errorf("%s is less than or equal to %s", v, c.orig) } -func constraintLessThan(v *Version, c *constraint) (bool, error) { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { +func constraintLessThan(v *Version, c *constraint, includePre bool) (bool, error) { + // The existence of prereleases is checked at the group level and passed in. + // Exit early if the version has a prerelease but those are to be ignored. 
+ if v.Prerelease() != "" && !includePre { return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v) } @@ -406,12 +418,11 @@ func constraintLessThan(v *Version, c *constraint) (bool, error) { return false, fmt.Errorf("%s is greater than or equal to %s", v, c.orig) } -func constraintGreaterThanEqual(v *Version, c *constraint) (bool, error) { +func constraintGreaterThanEqual(v *Version, c *constraint, includePre bool) (bool, error) { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { + // The existence of prereleases is checked at the group level and passed in. + // Exit early if the version has a prerelease but those are to be ignored. + if v.Prerelease() != "" && !includePre { return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v) } @@ -422,11 +433,10 @@ func constraintGreaterThanEqual(v *Version, c *constraint) (bool, error) { return false, fmt.Errorf("%s is less than %s", v, c.orig) } -func constraintLessThanEqual(v *Version, c *constraint) (bool, error) { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { +func constraintLessThanEqual(v *Version, c *constraint, includePre bool) (bool, error) { + // The existence of prereleases is checked at the group level and passed in. + // Exit early if the version has a prerelease but those are to be ignored. + if v.Prerelease() != "" && !includePre { return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v) } @@ -455,11 +465,10 @@ func constraintLessThanEqual(v *Version, c *constraint) (bool, error) { // ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0, <1.3.0 // ~1.2.3, ~>1.2.3 --> >=1.2.3, <1.3.0 // ~1.2.0, ~>1.2.0 --> >=1.2.0, <1.3.0 -func constraintTilde(v *Version, c *constraint) (bool, error) { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { +func constraintTilde(v *Version, c *constraint, includePre bool) (bool, error) { + // The existence of prereleases is checked at the group level and passed in. + // Exit early if the version has a prerelease but those are to be ignored. + if v.Prerelease() != "" && !includePre { return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v) } @@ -487,16 +496,15 @@ func constraintTilde(v *Version, c *constraint) (bool, error) { // When there is a .x (dirty) status it automatically opts in to ~. Otherwise // it's a straight = -func constraintTildeOrEqual(v *Version, c *constraint) (bool, error) { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { +func constraintTildeOrEqual(v *Version, c *constraint, includePre bool) (bool, error) { + // The existence of prereleases is checked at the group level and passed in. + // Exit early if the version has a prerelease but those are to be ignored. 
+ if v.Prerelease() != "" && !includePre { return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v) } if c.dirty { - return constraintTilde(v, c) + return constraintTilde(v, c, includePre) } eq := v.Equal(c.con) @@ -516,11 +524,10 @@ func constraintTildeOrEqual(v *Version, c *constraint) (bool, error) { // ^0.0.3 --> >=0.0.3 <0.0.4 // ^0.0 --> >=0.0.0 <0.1.0 // ^0 --> >=0.0.0 <1.0.0 -func constraintCaret(v *Version, c *constraint) (bool, error) { - // If there is a pre-release on the version but the constraint isn't looking - // for them assume that pre-releases are not compatible. See issue 21 for - // more details. - if v.Prerelease() != "" && c.con.Prerelease() == "" { +func constraintCaret(v *Version, c *constraint, includePre bool) (bool, error) { + // The existence of prereleases is checked at the group level and passed in. + // Exit early if the version has a prerelease but those are to be ignored. + if v.Prerelease() != "" && !includePre { return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v) } diff --git a/tools/vendor/github.com/Masterminds/semver/v3/version.go b/tools/vendor/github.com/Masterminds/semver/v3/version.go index 304edc34..7a3ba738 100644 --- a/tools/vendor/github.com/Masterminds/semver/v3/version.go +++ b/tools/vendor/github.com/Masterminds/semver/v3/version.go @@ -14,28 +14,40 @@ import ( // The compiled version of the regex created at init() is cached here so it // only needs to be created once. var versionRegex *regexp.Regexp +var looseVersionRegex *regexp.Regexp + +// CoerceNewVersion sets if leading 0's are allowd in the version part. Leading 0's are +// not allowed in a valid semantic version. When set to true, NewVersion will coerce +// leading 0's into a valid version. +var CoerceNewVersion = true + +// DetailedNewVersionErrors specifies if detailed errors are returned from the NewVersion +// function. This is used when CoerceNewVersion is set to false. If set to false +// ErrInvalidSemVer is returned for an invalid version. This does not apply to +// StrictNewVersion. Setting this function to false returns errors more quickly. +var DetailedNewVersionErrors = true var ( // ErrInvalidSemVer is returned a version is found to be invalid when // being parsed. - ErrInvalidSemVer = errors.New("Invalid Semantic Version") + ErrInvalidSemVer = errors.New("invalid semantic version") // ErrEmptyString is returned when an empty string is passed in for parsing. - ErrEmptyString = errors.New("Version string empty") + ErrEmptyString = errors.New("version string empty") // ErrInvalidCharacters is returned when invalid characters are found as // part of a version - ErrInvalidCharacters = errors.New("Invalid characters in version") + ErrInvalidCharacters = errors.New("invalid characters in version") // ErrSegmentStartsZero is returned when a version segment starts with 0. // This is invalid in SemVer. 
- ErrSegmentStartsZero = errors.New("Version segment starts with 0") + ErrSegmentStartsZero = errors.New("version segment starts with 0") // ErrInvalidMetadata is returned when the metadata is an invalid format - ErrInvalidMetadata = errors.New("Invalid Metadata string") + ErrInvalidMetadata = errors.New("invalid metadata string") // ErrInvalidPrerelease is returned when the pre-release is an invalid format - ErrInvalidPrerelease = errors.New("Invalid Prerelease string") + ErrInvalidPrerelease = errors.New("invalid prerelease string") ) // semVerRegex is the regular expression used to parse a semantic version. @@ -45,6 +57,12 @@ const semVerRegex string = `v?(0|[1-9]\d*)(?:\.(0|[1-9]\d*))?(?:\.(0|[1-9]\d*))? `(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?` + `(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?` +// looseSemVerRegex is a regular expression that lets invalid semver expressions through +// with enough detail that certain errors can be checked for. +const looseSemVerRegex string = `v?([0-9]+)(\.[0-9]+)?(\.[0-9]+)?` + + `(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + + `(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + // Version represents a single semantic version. type Version struct { major, minor, patch uint64 @@ -55,6 +73,7 @@ type Version struct { func init() { versionRegex = regexp.MustCompile("^" + semVerRegex + "$") + looseVersionRegex = regexp.MustCompile("^" + looseSemVerRegex + "$") } const ( @@ -142,8 +161,27 @@ func StrictNewVersion(v string) (*Version, error) { // attempts to convert it to SemVer. If you want to validate it was a strict // semantic version at parse time see StrictNewVersion(). func NewVersion(v string) (*Version, error) { + if CoerceNewVersion { + return coerceNewVersion(v) + } m := versionRegex.FindStringSubmatch(v) if m == nil { + + // Disabling detailed errors is first so that it is in the fast path. + if !DetailedNewVersionErrors { + return nil, ErrInvalidSemVer + } + + // Check for specific errors with the semver string and return a more detailed + // error. + m = looseVersionRegex.FindStringSubmatch(v) + if m == nil { + return nil, ErrInvalidSemVer + } + err := validateVersion(m) + if err != nil { + return nil, err + } return nil, ErrInvalidSemVer } @@ -156,13 +194,13 @@ func NewVersion(v string) (*Version, error) { var err error sv.major, err = strconv.ParseUint(m[1], 10, 64) if err != nil { - return nil, fmt.Errorf("Error parsing version segment: %s", err) + return nil, fmt.Errorf("error parsing version segment: %w", err) } if m[2] != "" { sv.minor, err = strconv.ParseUint(m[2], 10, 64) if err != nil { - return nil, fmt.Errorf("Error parsing version segment: %s", err) + return nil, fmt.Errorf("error parsing version segment: %w", err) } } else { sv.minor = 0 @@ -171,7 +209,61 @@ func NewVersion(v string) (*Version, error) { if m[3] != "" { sv.patch, err = strconv.ParseUint(m[3], 10, 64) if err != nil { - return nil, fmt.Errorf("Error parsing version segment: %s", err) + return nil, fmt.Errorf("error parsing version segment: %w", err) + } + } else { + sv.patch = 0 + } + + // Perform some basic due diligence on the extra parts to ensure they are + // valid. 
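Note: the two new package-level switches introduced above change how `NewVersion` treats non-strict input. A small sketch of the difference, assuming the defaults shown in the diff; the error text is taken from `ErrSegmentStartsZero`:

```go
package main

import (
	"fmt"

	"github.com/Masterminds/semver/v3"
)

func main() {
	// Default (CoerceNewVersion = true): the loose regex accepts the leading
	// zero and the input is coerced into a valid version.
	v, err := semver.NewVersion("1.02.3")
	fmt.Println(v, err) // 1.2.3 <nil>

	// Opting out restores strict parsing; with DetailedNewVersionErrors the
	// loose regex is only used a second time to produce a better error.
	semver.CoerceNewVersion = false
	semver.DetailedNewVersionErrors = true
	_, err = semver.NewVersion("1.02.3")
	fmt.Println(err) // version segment starts with 0
}
```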
+ + if sv.pre != "" { + if err = validatePrerelease(sv.pre); err != nil { + return nil, err + } + } + + if sv.metadata != "" { + if err = validateMetadata(sv.metadata); err != nil { + return nil, err + } + } + + return sv, nil +} + +func coerceNewVersion(v string) (*Version, error) { + m := looseVersionRegex.FindStringSubmatch(v) + if m == nil { + return nil, ErrInvalidSemVer + } + + sv := &Version{ + metadata: m[8], + pre: m[5], + original: v, + } + + var err error + sv.major, err = strconv.ParseUint(m[1], 10, 64) + if err != nil { + return nil, fmt.Errorf("error parsing version segment: %w", err) + } + + if m[2] != "" { + sv.minor, err = strconv.ParseUint(strings.TrimPrefix(m[2], "."), 10, 64) + if err != nil { + return nil, fmt.Errorf("error parsing version segment: %w", err) + } + } else { + sv.minor = 0 + } + + if m[3] != "" { + sv.patch, err = strconv.ParseUint(strings.TrimPrefix(m[3], "."), 10, 64) + if err != nil { + return nil, fmt.Errorf("error parsing version segment: %w", err) } } else { sv.patch = 0 @@ -615,7 +707,7 @@ func validatePrerelease(p string) error { eparts := strings.Split(p, ".") for _, p := range eparts { if p == "" { - return ErrInvalidMetadata + return ErrInvalidPrerelease } else if containsOnly(p, num) { if len(p) > 1 && p[0] == '0' { return ErrSegmentStartsZero @@ -643,3 +735,54 @@ func validateMetadata(m string) error { } return nil } + +// validateVersion checks for common validation issues but may not catch all errors +func validateVersion(m []string) error { + var err error + var v string + if m[1] != "" { + if len(m[1]) > 1 && m[1][0] == '0' { + return ErrSegmentStartsZero + } + _, err = strconv.ParseUint(m[1], 10, 64) + if err != nil { + return fmt.Errorf("error parsing version segment: %w", err) + } + } + + if m[2] != "" { + v = strings.TrimPrefix(m[2], ".") + if len(v) > 1 && v[0] == '0' { + return ErrSegmentStartsZero + } + _, err = strconv.ParseUint(v, 10, 64) + if err != nil { + return fmt.Errorf("error parsing version segment: %w", err) + } + } + + if m[3] != "" { + v = strings.TrimPrefix(m[3], ".") + if len(v) > 1 && v[0] == '0' { + return ErrSegmentStartsZero + } + _, err = strconv.ParseUint(v, 10, 64) + if err != nil { + return fmt.Errorf("error parsing version segment: %w", err) + } + } + + if m[5] != "" { + if err = validatePrerelease(m[5]); err != nil { + return err + } + } + + if m[8] != "" { + if err = validateMetadata(m[8]); err != nil { + return err + } + } + + return nil +} diff --git a/tools/vendor/github.com/ashanbrown/forbidigo/v2/forbidigo/forbidigo.go b/tools/vendor/github.com/ashanbrown/forbidigo/v2/forbidigo/forbidigo.go index 6f4e78cc..913b45e9 100644 --- a/tools/vendor/github.com/ashanbrown/forbidigo/v2/forbidigo/forbidigo.go +++ b/tools/vendor/github.com/ashanbrown/forbidigo/v2/forbidigo/forbidigo.go @@ -374,6 +374,8 @@ func typeNameWithPackage(t types.Type) (typeName, packagePath string, ok bool) { } switch t := t.(type) { + case *types.Alias: + return typeNameWithPackage(t.Rhs()) case *types.Named: obj := t.Obj() pkg := obj.Pkg() diff --git a/tools/vendor/github.com/bombsimon/wsl/v5/analyzer.go b/tools/vendor/github.com/bombsimon/wsl/v5/analyzer.go index ffc5d0f6..38a84973 100644 --- a/tools/vendor/github.com/bombsimon/wsl/v5/analyzer.go +++ b/tools/vendor/github.com/bombsimon/wsl/v5/analyzer.go @@ -12,7 +12,7 @@ import ( "golang.org/x/tools/go/analysis" ) -const version = "wsl version v5.2.0" +const version = "wsl version v5.3.0" func NewAnalyzer(config *Configuration) *analysis.Analyzer { wa := &wslAnalyzer{config: config} 
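Note: the new `*types.Alias` case in forbidigo resolves an alias to its right-hand side before extracting the type and package name, so patterns written against the underlying named type can also apply to code that goes through an alias. A hypothetical example of such code (the alias name `Builder` is mine):

```go
package main

import (
	"fmt"
	"strings"
)

// Builder is an alias, not a defined type; t.Rhs() resolves it back to
// strings.Builder, which is the name forbidigo matches against.
type Builder = strings.Builder

func main() {
	var b Builder
	b.WriteString("hello")
	fmt.Println(b.String())
}
```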
diff --git a/tools/vendor/github.com/bombsimon/wsl/v5/wsl.go b/tools/vendor/github.com/bombsimon/wsl/v5/wsl.go index 49f06f9e..9bf62c2a 100644 --- a/tools/vendor/github.com/bombsimon/wsl/v5/wsl.go +++ b/tools/vendor/github.com/bombsimon/wsl/v5/wsl.go @@ -292,12 +292,12 @@ func (w *WSL) checkCuddlingMaxAllowed( } numStmtsAbove := w.numberOfStatementsAbove(cursor) - previousIdents := identsFromNode(previousNode, true) + previousIdents := w.identsFromNode(previousNode, true) // If we don't have any statements above, we only care about potential error // cuddling (for if statements) so check that. if numStmtsAbove == 0 { - w.checkError(numStmtsAbove, stmt, previousNode, previousIdents, cursor) + w.checkError(numStmtsAbove, stmt, previousNode, cursor) return } @@ -335,7 +335,7 @@ func (w *WSL) checkCuddlingMaxAllowed( // FEATURE(AllowWholeBlock): Allow identifier used anywhere in block // (including recursive blocks). if w.config.AllowWholeBlock { - allIdentsInBlock := identsFromNode(stmt, false) + allIdentsInBlock := w.identsFromNode(stmt, false) if checkIntersection(allIdentsInBlock) { return } @@ -343,13 +343,13 @@ func (w *WSL) checkCuddlingMaxAllowed( // FEATURE(AllowFirstInBlock): Allow identifiers used first in block. if !w.config.AllowWholeBlock && w.config.AllowFirstInBlock { - firstStmtIdents := identsFromNode(firstBlockStmt, true) + firstStmtIdents := w.identsFromNode(firstBlockStmt, true) if checkIntersection(firstStmtIdents) { return } } - currentIdents := identsFromNode(stmt, true) + currentIdents := w.identsFromNode(stmt, true) if checkIntersection(currentIdents) { return } @@ -412,7 +412,7 @@ func (w *WSL) checkCuddlingWithoutIntersection(stmt ast.Node, cursor *Cursor) { prevIsValidType := previousNode == nil || prevIsAssign || prevIsDecl || prevIsIncDec if _, ok := w.config.Checks[CheckAssignExpr]; !ok { - if _, ok := previousNode.(*ast.ExprStmt); ok && hasIntersection(stmt, previousNode) { + if _, ok := previousNode.(*ast.ExprStmt); ok && w.hasIntersection(stmt, previousNode) { prevIsValidType = prevIsValidType || ok } } @@ -506,7 +506,7 @@ func (w *WSL) checkAppend(stmt *ast.AssignStmt, cursor *Cursor) { return } - if !hasIntersection(appendNode, previousNode) { + if !w.hasIntersection(appendNode, previousNode) { w.addErrorNoIntersection(stmt.Pos(), CheckAppend) } } @@ -577,7 +577,7 @@ func (w *WSL) checkDefer(stmt *ast.DeferStmt, cursor *Cursor) { cursor.Previous() cursor.Previous() - if hasIntersection(cursor.Stmt(), stmt) { + if w.hasIntersection(cursor.Stmt(), stmt) { return 1, false } } @@ -593,7 +593,6 @@ func (w *WSL) checkError( stmtsAbove int, ifStmt ast.Node, previousNode ast.Node, - previousIdents []*ast.Ident, cursor *Cursor, ) { if _, ok := w.config.Checks[CheckErr]; !ok { @@ -612,6 +611,18 @@ func (w *WSL) checkError( return } + // If we actually have statements above we can't possibly need to remove any + // empty lines. + if stmtsAbove > 0 { + return + } + + // If the error checking has an init condition (e.g. if err := f();) we + // don't want to check cuddling since the error is now assigned on this row. + if stmt.Init != nil { + return + } + // The condition must be a binary expression (X OP Y) binaryExpr, ok := stmt.Cond.(*ast.BinaryExpr) if !ok { @@ -643,22 +654,26 @@ func (w *WSL) checkError( return } - // Previous node must be assign or decl where the error was actually - // defined. 
- _, isAssign := previousNode.(*ast.AssignStmt) - _, isDecl := previousNode.(*ast.DeclStmt) + previousIdents := []*ast.Ident{} - if !isAssign && !isDecl { - return + if assign, ok := previousNode.(*ast.AssignStmt); ok { + for _, lhs := range assign.Lhs { + previousIdents = append(previousIdents, w.identsFromNode(lhs, true)...) + } } - // If there are no statements above or the statement above doesn't contain - // any idents there can't be any error to cuddle. - if stmtsAbove > 0 || len(previousIdents) == 0 { - return + if decl, ok := previousNode.(*ast.DeclStmt); ok { + if genDecl, ok := decl.Decl.(*ast.GenDecl); ok { + for _, spec := range genDecl.Specs { + if vs, ok := spec.(*ast.ValueSpec); ok { + previousIdents = append(previousIdents, vs.Names...) + } + } + } } - // Ensure that the error was defined on the line above. + // Ensure that the error checked on this line was assigned or declared in + // the previous statement. if len(identIntersection([]*ast.Ident{xIdent}, previousIdents)) == 0 { return } @@ -780,7 +795,6 @@ func (w *WSL) checkIf(stmt *ast.IfStmt, cursor *Cursor, isElse bool) { w.numberOfStatementsAbove(cursor), stmt, previousNode, - identsFromNode(previousNode, true), cursor, ) } @@ -1141,7 +1155,7 @@ func (w *WSL) maybeCheckBlock( if check != CheckSwitch && check != CheckTypeSwitch && check != CheckSelect { blockList = blockStmt.List } else { - allowedIdents = identsFromCaseArms(node) + allowedIdents = w.identsFromCaseArms(node) } w.checkCuddlingBlock(node, blockList, allowedIdents, cursor, 1) @@ -1289,13 +1303,13 @@ func asGenDeclWithValueSpecs(n ast.Node) *ast.GenDecl { return genDecl } -func hasIntersection(a, b ast.Node) bool { - return len(nodeIdentIntersection(a, b)) > 0 +func (w *WSL) hasIntersection(a, b ast.Node) bool { + return len(w.nodeIdentIntersection(a, b)) > 0 } -func nodeIdentIntersection(a, b ast.Node) []*ast.Ident { - aI := identsFromNode(a, true) - bI := identsFromNode(b, true) +func (w *WSL) nodeIdentIntersection(a, b ast.Node) []*ast.Ident { + aI := w.identsFromNode(a, true) + bI := w.identsFromNode(b, true) return identIntersection(aI, bI) } @@ -1314,7 +1328,31 @@ func identIntersection(a, b []*ast.Ident) []*ast.Ident { return intersects } -func identsFromNode(node ast.Node, skipBlock bool) []*ast.Ident { +func isTypeOrPredeclConst(obj types.Object) bool { + switch o := obj.(type) { + case *types.TypeName: + // Covers predeclared types ("string", "int", ...) and user types. + return true + case *types.Const: + // true/false/iota are universe consts. 
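Note: `identsFromNode` is now type-aware, so type names, universe constants, `nil`, package qualifiers, and `_` no longer count as shared identifiers when wsl decides whether two statements may be cuddled. A hypothetical snippet showing identifiers the new filter skips (names are mine):

```go
package main

import "fmt"

func main() {
	b := []byte("x")
	s := string(b) // "string" resolves to a *types.TypeName and is skipped

	if s == "" || b == nil { // "nil" is *types.Nil and is skipped
		fmt.Println(s) // "fmt" is a *types.PkgName and is skipped
	}
}
```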
+ return o.Parent() == types.Universe + case *types.Nil: + return true + case *types.PkgName: + // Skip package qualifiers like "fmt" in fmt.Println + return true + default: + return false + } +} + +// identsFromNode returns all *ast.Ident in a node except: +// - type names (types.TypeName) +// - builtin constants from the universe (true, false, iota) +// - nil (*types.Nil) +// - package names (types.PkgName) +// - the blank identifier "_" +func (w *WSL) identsFromNode(node ast.Node, skipBlock bool) []*ast.Ident { var ( idents []*ast.Ident seen = map[string]struct{}{} @@ -1324,6 +1362,24 @@ func identsFromNode(node ast.Node, skipBlock bool) []*ast.Ident { return idents } + addIdent := func(ident *ast.Ident) { + if ident == nil { + return + } + + name := ident.Name + if name == "" || name == "_" { + return + } + + if _, ok := seen[name]; ok { + return + } + + idents = append(idents, ident) + seen[name] = struct{}{} + } + ast.Inspect(node, func(n ast.Node) bool { if skipBlock { if _, ok := n.(*ast.BlockStmt); ok { @@ -1331,27 +1387,45 @@ func identsFromNode(node ast.Node, skipBlock bool) []*ast.Ident { } } - if ident, ok := n.(*ast.Ident); ok { - if _, exists := seen[ident.Name]; !exists { - idents = append(idents, ident) - seen[ident.Name] = struct{}{} - } + ident, ok := n.(*ast.Ident) + if !ok { + return true + } + + // Prefer Uses over Defs; fall back to Defs if not a use site. + var typesObject types.Object + if obj := w.typeInfo.Uses[ident]; obj != nil { + typesObject = obj + } else if obj := w.typeInfo.Defs[ident]; obj != nil { + typesObject = obj } + // Unresolved (could be a build-tag or syntax artifact). Keep it. + if typesObject == nil { + addIdent(ident) + return true + } + + if isTypeOrPredeclConst(typesObject) { + return true + } + + addIdent(ident) + return true }) return idents } -func identsFromCaseArms(node ast.Node) []*ast.Ident { +func (w *WSL) identsFromCaseArms(node ast.Node) []*ast.Ident { var ( idents []*ast.Ident nodes []ast.Stmt seen = map[string]struct{}{} addUnseen = func(node ast.Node) { - for _, ident := range identsFromNode(node, true) { + for _, ident := range w.identsFromNode(node, true) { if _, ok := seen[ident.Name]; ok { continue } diff --git a/tools/vendor/github.com/catenacyber/perfsprint/analyzer/analyzer.go b/tools/vendor/github.com/catenacyber/perfsprint/analyzer/analyzer.go index c40e84d4..42eb0259 100644 --- a/tools/vendor/github.com/catenacyber/perfsprint/analyzer/analyzer.go +++ b/tools/vendor/github.com/catenacyber/perfsprint/analyzer/analyzer.go @@ -2,6 +2,7 @@ package analyzer import ( "bytes" + "fmt" "go/ast" "go/format" "go/token" @@ -33,13 +34,20 @@ type optionStr struct { strconcat bool } +type optionConcatLoop struct { + enabled bool + otherOps bool +} + type perfSprint struct { - intFormat optionInt - errFormat optionErr - strFormat optionStr + intFormat optionInt + errFormat optionErr + strFormat optionStr + concatLoop optionConcatLoop boolFormat bool hexFormat bool + fiximports bool } @@ -48,6 +56,7 @@ func newPerfSprint() *perfSprint { intFormat: optionInt{enabled: true, intConv: true}, errFormat: optionErr{enabled: true, errError: false, errorf: true}, strFormat: optionStr{enabled: true, sprintf1: true, strconcat: true}, + concatLoop: optionConcatLoop{enabled: true, otherOps: false}, boolFormat: true, hexFormat: true, fiximports: true, @@ -65,6 +74,8 @@ const ( checkerBoolFormat = "bool-format" // checkerHexFormat checks for hexadecimal formatting. checkerHexFormat = "hex-format" + // checkerConcatLoop checks for concatenation in loop. 
+ checkerConcatLoop = "concat-loop" // checkerFixImports fix needed imports from other fixes. checkerFixImports = "fiximports" ) @@ -87,6 +98,8 @@ func New() *analysis.Analyzer { r.Flags.BoolVar(&n.boolFormat, checkerBoolFormat, n.boolFormat, "enable/disable optimization of bool formatting") r.Flags.BoolVar(&n.hexFormat, checkerHexFormat, n.hexFormat, "enable/disable optimization of hex formatting") + r.Flags.BoolVar(&n.concatLoop.enabled, checkerConcatLoop, n.concatLoop.enabled, "enable/disable optimization of concat loop") + r.Flags.BoolVar(&n.concatLoop.otherOps, "loop-other-ops", n.concatLoop.otherOps, "optimization of concat loop even with other operations") r.Flags.BoolVar(&n.strFormat.enabled, checkerStringFormat, n.strFormat.enabled, "enable/disable optimization of string formatting") r.Flags.BoolVar(&n.strFormat.sprintf1, "sprintf1", n.strFormat.sprintf1, "optimizes fmt.Sprintf with only one argument") r.Flags.BoolVar(&n.strFormat.strconcat, "strconcat", n.strFormat.strconcat, "optimizes into strings concatenation") @@ -108,6 +121,338 @@ func isConcatable(verb string) bool { return (hasPrefix || hasSuffix) && !(hasPrefix && hasSuffix) } +func isStringAdd(st *ast.AssignStmt, idname string) ast.Expr { + // right is one + if len(st.Rhs) == 1 { + // right is addition + add, ok := st.Rhs[0].(*ast.BinaryExpr) + if ok && add.Op == token.ADD { + // right is addition to same ident name + x, ok := add.X.(*ast.Ident) + if ok && x.Name == idname { + return add.Y + } + } + } + return nil +} + +func (n *perfSprint) reportConcatLoop(pass *analysis.Pass, neededPackages map[string]map[string]struct{}, node ast.Node, adds map[string][]*ast.AssignStmt) *analysis.Diagnostic { + fname := pass.Fset.File(node.Pos()).Name() + if _, ok := neededPackages[fname]; !ok { + neededPackages[fname] = make(map[string]struct{}) + } + // note that we will need strings package + neededPackages[fname]["strings"] = struct{}{} + + // sort for reproducibility + keys := make([]string, 0, len(adds)) + for k := range adds { + keys = append(keys, k) + } + sort.Strings(keys) + + // use line number to define a unique variable name for the strings Builder + loopStartLine := pass.Fset.Position(node.Pos()).Line + + // If the loop does more with the string than concatenations + // add a TODO/FIXME comment that the fix is likely incomplete/incorrect + addTODO := "" + ast.Inspect(node, func(n ast.Node) bool { + if len(addTODO) > 0 { + // already found one, stop recursing + return false + } + switch x := n.(type) { + case *ast.AssignStmt: + // skip if this is one string concatenation that we are fixing + if (x.Tok == token.ASSIGN || x.Tok == token.ADD_ASSIGN) && len(x.Lhs) == 1 { + id, ok := x.Lhs[0].(*ast.Ident) + if ok { + _, ok = adds[id.Name] + if ok { + return false + } + } + } + case *ast.Ident: + _, ok := adds[x.Name] + if ok { + // The variable name is used in some place else + addTODO = x.Name + return false + } + } + return true + }) + + prefix := "" + suffix := "" + if len(addTODO) > 0 { + if !n.concatLoop.otherOps { + return nil + } + prefix = fmt.Sprintf("// FIXME check usages of string identifier %s (and mayber others) in loop\n", addTODO) + } + // The fix contains 3 parts + // before the loop: declare the strings Builders + // during the loop: replace concatenation with Builder.WriteString + // after the loop: use the Builder.String to append to the pre-existing string + for _, k := range keys { + // lol + prefix += fmt.Sprintf("var %sSb%d strings.Builder\n", k, loopStartLine) + suffix += fmt.Sprintf("\n%s += 
%sSb%d.String()", k, k, loopStartLine) + } + te := []analysis.TextEdit{ + { + Pos: node.Pos(), + End: node.Pos(), + NewText: []byte(prefix), + }, + } + for _, k := range keys { + v := adds[k] + for _, st := range v { + // s += "x" -> use "x" + added := st.Rhs[0] + if st.Tok == token.ASSIGN { + // s = s + "x" -> use just "x", not `s + "x"` + added = isStringAdd(st, k) + } + te = append(te, analysis.TextEdit{ + Pos: st.Pos(), + End: added.Pos(), + NewText: []byte(fmt.Sprintf("%sSb%d.WriteString(", k, loopStartLine)), + }) + te = append(te, analysis.TextEdit{ + Pos: added.End(), + End: added.End(), + NewText: []byte(")"), + }) + } + } + te = append(te, analysis.TextEdit{ + Pos: node.End(), + End: node.End(), + NewText: []byte(suffix), + }) + + return newAnalysisDiagnostic( + checkerConcatLoop, + adds[keys[0]][0], + "string concatenation in a loop", + []analysis.SuggestedFix{ + { + Message: "Use a strings.Builder", + TextEdits: te, + }, + }, + ) +} + +func (n *perfSprint) runConcatLoop(pass *analysis.Pass, neededPackages map[string]map[string]struct{}) { + insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + // 2 different kinds of loops in go + nodeFilter := []ast.Node{ + (*ast.RangeStmt)(nil), + (*ast.ForStmt)(nil), + } + insp.Preorder(nodeFilter, func(node ast.Node) { + // set of variable names declared insied the loop + declInLoop := make(map[string]bool) + var bl []ast.Stmt + // just take the list of instruction of the loop + switch ra := node.(type) { + case *ast.RangeStmt: + bl = ra.Body.List + case *ast.ForStmt: + bl = ra.Body.List + } + // set of results : mapping a variable name to a list of statements like `s +=` + // one loop may be bad for multiple string variables, + // each being concatenated in multiple statements + adds := make(map[string][]*ast.AssignStmt) + for bs := 0; bs < len(bl); bs++ { + switch st := bl[bs].(type) { + case *ast.IfStmt: + // explore breadth first, but go inside the if/else blocks + if st.Body != nil { + bl = append(bl, st.Body.List...) + } + el, ok := st.Else.(*ast.BlockStmt) + if ok && el != nil { + bl = append(bl, el.List...) + } + case *ast.DeclStmt: + // identifiers defined within loop do not count + de, ok := st.Decl.(*ast.GenDecl) + if !ok { + break + } + if len(de.Specs) != 1 { + break + } + // is it possible to have len(de.Specs) > 1 for ValueSpec ? 
+ vs, ok := de.Specs[0].(*ast.ValueSpec) + if !ok { + break + } + for n := range vs.Names { + declInLoop[vs.Names[n].Name] = true + } + case *ast.AssignStmt: + for n := range st.Lhs { + id, ok := st.Lhs[n].(*ast.Ident) + if !ok { + break + } + switch st.Tok { + case token.DEFINE: + declInLoop[id.Name] = true + case token.ASSIGN, token.ADD_ASSIGN: + if n > 0 { + // do not search bugs for multi-assign + break + } + _, local := declInLoop[id.Name] + if local { + break + } + ti, ok := pass.TypesInfo.Types[id] + if !ok || ti.Type.String() != "string" { + break + } + if st.Tok == token.ASSIGN { + if isStringAdd(st, id.Name) == nil { + break + } + } + // found a bad string concat in the loop + adds[id.Name] = append(adds[id.Name], st) + } + } + } + } + if len(adds) > 0 { + d := n.reportConcatLoop(pass, neededPackages, node, adds) + if d != nil { + pass.Report(*d) + } + } + }) +} + +func (n *perfSprint) fixImports(pass *analysis.Pass, neededPackages map[string]map[string]struct{}, removedFmtUsages map[string]int) { + if !n.fiximports { + return + } + for _, pkg := range pass.Pkg.Imports() { + if pkg.Path() == "fmt" { + insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ + (*ast.SelectorExpr)(nil), + } + insp.Preorder(nodeFilter, func(node ast.Node) { + selec := node.(*ast.SelectorExpr) + selecok, ok := selec.X.(*ast.Ident) + if ok { + pkgname, ok := pass.TypesInfo.ObjectOf(selecok).(*types.PkgName) + if ok && pkgname.Name() == pkg.Name() { + fname := pass.Fset.File(pkgname.Pos()).Name() + removedFmtUsages[fname]-- + } + } + }) + } else if pkg.Path() == "errors" || pkg.Path() == "strconv" || pkg.Path() == "encoding/hex" || pkg.Path() == "strings" { + insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ + (*ast.ImportSpec)(nil), + } + insp.Preorder(nodeFilter, func(node ast.Node) { + gd := node.(*ast.ImportSpec) + if gd.Path.Value == strconv.Quote(pkg.Path()) { + fname := pass.Fset.File(gd.Pos()).Name() + if _, ok := neededPackages[fname]; ok { + delete(neededPackages[fname], pkg.Path()) + } + } + }) + } + } + insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ + (*ast.File)(nil), + } + insp.Preorder(nodeFilter, func(node ast.Node) { + gd := node.(*ast.File) + fname := pass.Fset.File(gd.Pos()).Name() + removed, hasFmt := removedFmtUsages[fname] + if (!hasFmt || removed < 0) && len(neededPackages[fname]) == 0 { + return + } + fix := "" + var ar analysis.Range + ar = gd.Decls[0] + start := gd.Decls[0].Pos() + end := gd.Decls[0].Pos() + if len(gd.Imports) == 0 { + fix += "import (\n" + } else { + id := gd.Decls[0].(*ast.GenDecl) + start = id.Specs[0].Pos() + end = id.Specs[0].Pos() + if removedFmtUsages[fname] >= 0 { + for sp := range id.Specs { + is := id.Specs[sp].(*ast.ImportSpec) + if is.Path.Value == strconv.Quote("fmt") { + ar = is + start = is.Pos() + end = is.End() + break + } + } + } + } + keys := make([]string, 0, len(neededPackages[fname])) + for k := range neededPackages[fname] { + keys = append(keys, k) + } + sort.Strings(keys) + for _, k := range keys { + already := false + knames := strings.Split(k, "/") + kname := knames[len(knames)-1] + for i := range gd.Imports { // quadratic + if (gd.Imports[i].Name != nil && gd.Imports[i].Name.Name == kname) || (gd.Imports[i].Name == nil && strings.HasSuffix(gd.Imports[i].Path.Value, "/"+kname+`"`)) { + already = true + } + } + if already { + fix = fix + "\t\"" + k + "\" //TODO FIXME\n" + } else { + fix = fix + "\t\"" + k + "\"\n" + } 
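Note: the new `concat-loop` check reports string `+=` concatenation inside loops and suggests a `strings.Builder` rewrite: a builder declared before the loop, `WriteString` calls inside it, and the result appended afterwards. A rough before/after sketch of that suggestion; the real fix derives the builder name from the loop's start line (e.g. `sSb42`), which is simplified here:

```go
package main

import (
	"fmt"
	"strings"
)

// joinSlow is the pattern the check reports: a pre-existing string grown
// with += inside the loop.
func joinSlow(parts []string) string {
	var s string
	for _, p := range parts {
		s += p
	}
	return s
}

// joinFast mirrors the suggested fix.
func joinFast(parts []string) string {
	var s string

	var sb strings.Builder
	for _, p := range parts {
		sb.WriteString(p)
	}
	s += sb.String()

	return s
}

func main() {
	parts := []string{"a", "b", "c"}
	fmt.Println(joinSlow(parts) == joinFast(parts)) // true
}
```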
+ } + if len(gd.Imports) == 0 { + fix += ")\n" + } + pass.Report(*newAnalysisDiagnostic( + checkerFixImports, + ar, + "Fix imports", + []analysis.SuggestedFix{ + { + Message: "Fix imports", + TextEdits: []analysis.TextEdit{{ + Pos: start, + End: end, + NewText: []byte(fix), + }}, + }, + })) + }) +} + func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) { if !n.intFormat.enabled { n.intFormat.intConv = false @@ -121,6 +466,11 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) { n.strFormat.strconcat = false } + neededPackages := make(map[string]map[string]struct{}) + if n.concatLoop.enabled { + n.runConcatLoop(pass, neededPackages) + } + removedFmtUsages := make(map[string]int) var fmtSprintObj, fmtSprintfObj, fmtErrorfObj types.Object for _, pkg := range pass.Pkg.Imports() { if pkg.Path() == "fmt" { @@ -130,10 +480,11 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) { } } if fmtSprintfObj == nil && fmtSprintObj == nil && fmtErrorfObj == nil { + if len(neededPackages) > 0 { + n.fixImports(pass, neededPackages, removedFmtUsages) + } return nil, nil } - removedFmtUsages := make(map[string]int) - neededPackages := make(map[string]map[string]struct{}) insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ @@ -530,79 +881,8 @@ func (n *perfSprint) run(pass *analysis.Pass) (interface{}, error) { } }) - if len(removedFmtUsages) > 0 && n.fiximports { - for _, pkg := range pass.Pkg.Imports() { - if pkg.Path() == "fmt" { - insp = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter = []ast.Node{ - (*ast.SelectorExpr)(nil), - } - insp.Preorder(nodeFilter, func(node ast.Node) { - selec := node.(*ast.SelectorExpr) - selecok, ok := selec.X.(*ast.Ident) - if ok { - pkgname, ok := pass.TypesInfo.ObjectOf(selecok).(*types.PkgName) - if ok && pkgname.Name() == pkg.Name() { - fname := pass.Fset.File(pkgname.Pos()).Name() - removedFmtUsages[fname]-- - } - } - }) - } else if pkg.Path() == "errors" || pkg.Path() == "strconv" || pkg.Path() == "encoding/hex" { - insp = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter = []ast.Node{ - (*ast.ImportSpec)(nil), - } - insp.Preorder(nodeFilter, func(node ast.Node) { - gd := node.(*ast.ImportSpec) - if gd.Path.Value == strconv.Quote(pkg.Path()) { - fname := pass.Fset.File(gd.Pos()).Name() - if _, ok := neededPackages[fname]; ok { - delete(neededPackages[fname], pkg.Path()) - } - } - }) - } - } - insp = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - nodeFilter = []ast.Node{ - (*ast.ImportSpec)(nil), - } - insp.Preorder(nodeFilter, func(node ast.Node) { - gd := node.(*ast.ImportSpec) - if gd.Path.Value == `"fmt"` { - fix := "" - fname := pass.Fset.File(gd.Pos()).Name() - if removedFmtUsages[fname] < 0 { - fix += `"fmt"` - if len(neededPackages[fname]) == 0 { - return - } - } - keys := make([]string, 0, len(neededPackages[fname])) - for k := range neededPackages[fname] { - keys = append(keys, k) - } - sort.Strings(keys) - for _, k := range keys { - fix = fix + "\n\t\"" + k + `"` - } - pass.Report(*newAnalysisDiagnostic( - checkerFixImports, - gd, - "Fix imports", - []analysis.SuggestedFix{ - { - Message: "Fix imports", - TextEdits: []analysis.TextEdit{{ - Pos: gd.Pos(), - End: gd.End(), - NewText: []byte(fix), - }}, - }, - })) - } - }) + if len(removedFmtUsages) > 0 || len(neededPackages) > 0 { + n.fixImports(pass, neededPackages, removedFmtUsages) } return nil, nil diff --git a/tools/vendor/github.com/charithe/durationcheck/README.md 
b/tools/vendor/github.com/charithe/durationcheck/README.md index 6f4279bd..4aa9558f 100644 --- a/tools/vendor/github.com/charithe/durationcheck/README.md +++ b/tools/vendor/github.com/charithe/durationcheck/README.md @@ -33,7 +33,7 @@ See the [test cases](testdata/src/a/a.go) for more examples of the types of erro Installation ------------- -Requires Go 1.11 or above. +Requires Go 1.14 or above. ``` go get -u github.com/charithe/durationcheck/cmd/durationcheck diff --git a/tools/vendor/github.com/charithe/durationcheck/durationcheck.go b/tools/vendor/github.com/charithe/durationcheck/durationcheck.go index c47b3a76..5fcb98a8 100644 --- a/tools/vendor/github.com/charithe/durationcheck/durationcheck.go +++ b/tools/vendor/github.com/charithe/durationcheck/durationcheck.go @@ -32,6 +32,7 @@ func run(pass *analysis.Pass) (interface{}, error) { nodeTypes := []ast.Node{ (*ast.BinaryExpr)(nil), + (*ast.AssignStmt)(nil), } inspect.Preorder(nodeTypes, check(pass)) @@ -52,25 +53,45 @@ func hasImport(pkg *types.Package, importPath string) bool { // check contains the logic for checking that time.Duration is used correctly in the code being analysed func check(pass *analysis.Pass) func(ast.Node) { return func(node ast.Node) { - expr := node.(*ast.BinaryExpr) - // we are only interested in multiplication - if expr.Op != token.MUL { - return + switch expr := node.(type) { + case *ast.BinaryExpr: + checkBinaryExpr(pass, expr) + case *ast.AssignStmt: + if expr.Tok != token.MUL_ASSIGN { + return + } + // '*=' assignment requires single-valued expressions + if len(expr.Lhs) != 1 || len(expr.Rhs) != 1 { + return + } + checkBinaryExpr(pass, &ast.BinaryExpr{ + X: expr.Lhs[0], + OpPos: expr.TokPos, + Op: expr.Tok, + Y: expr.Rhs[0], + }) } + } +} - // get the types of the two operands - x, xOK := pass.TypesInfo.Types[expr.X] - y, yOK := pass.TypesInfo.Types[expr.Y] +func checkBinaryExpr(pass *analysis.Pass, expr *ast.BinaryExpr) { + // we are only interested in multiplication + if expr.Op != token.MUL && expr.Op != token.MUL_ASSIGN { + return + } - if !xOK || !yOK { - return - } + // get the types of the two operands + x, xOK := pass.TypesInfo.Types[expr.X] + y, yOK := pass.TypesInfo.Types[expr.Y] - if isDuration(x.Type) && isDuration(y.Type) { - // check that both sides are acceptable expressions - if isUnacceptableExpr(pass, expr.X) && isUnacceptableExpr(pass, expr.Y) { - pass.Reportf(expr.Pos(), "Multiplication of durations: `%s`", formatNode(expr)) - } + if !xOK || !yOK { + return + } + + if isDuration(x.Type) && isDuration(y.Type) { + // check that both sides are acceptable expressions + if isUnacceptableExpr(pass, expr.X) && isUnacceptableExpr(pass, expr.Y) { + pass.Reportf(expr.Pos(), "Multiplication of durations: `%s`", formatNode(expr)) } } } diff --git a/tools/vendor/github.com/ghostiam/protogetter/flake.lock b/tools/vendor/github.com/ghostiam/protogetter/flake.lock index 46930d25..664ccdaa 100644 --- a/tools/vendor/github.com/ghostiam/protogetter/flake.lock +++ b/tools/vendor/github.com/ghostiam/protogetter/flake.lock @@ -20,11 +20,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1741513245, - "narHash": "sha256-7rTAMNTY1xoBwz0h7ZMtEcd8LELk9R5TzBPoHuhNSCk=", + "lastModified": 1759381078, + "narHash": "sha256-gTrEEp5gEspIcCOx9PD8kMaF1iEmfBcTbO0Jag2QhQs=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "e3e32b642a31e6714ec1b712de8c91a3352ce7e1", + "rev": "7df7ff7d8e00218376575f0acdcc5d66741351ee", "type": "github" }, "original": { diff --git a/tools/vendor/github.com/ghostiam/protogetter/flake.nix 
b/tools/vendor/github.com/ghostiam/protogetter/flake.nix index 3e2a6ce6..4bbe4850 100644 --- a/tools/vendor/github.com/ghostiam/protogetter/flake.nix +++ b/tools/vendor/github.com/ghostiam/protogetter/flake.nix @@ -16,6 +16,7 @@ system: let pkgs = import nixpkgs { inherit system; }; + go = pkgs.go_1_24; buildInputs = with pkgs; [ go coreutils @@ -35,7 +36,7 @@ export FLAKE_ROOT="$(nix flake metadata | grep 'Resolved URL' | awk '{print $3}' | sed 's/^path://' | sed 's/^git+file:\/\///')" export HISTFILE="$FLAKE_ROOT/.nix_bash_history" - export GOROOT="${pkgs.go}/share/go" + export GOROOT="${go}/share/go" ''; in { @@ -57,7 +58,7 @@ cd "$FLAKE_ROOT" echo "Replace GOPATH" - xmlstarlet ed -L -u '//project/component[@name="GOROOT"]/@url' -v 'file://${pkgs.go}/share/go' .idea/workspace.xml + xmlstarlet ed -L -u '//project/component[@name="GOROOT"]/@url' -v 'file://${go}/share/go' .idea/workspace.xml exit 0 '' diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go index 9be45ccc..22a6267f 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go @@ -87,7 +87,8 @@ func (c *badCondChecker) checkExpr(expr ast.Expr) { func (c *badCondChecker) equalToBoth(lhs, rhs *ast.BinaryExpr) bool { return lhs.Op == token.EQL && rhs.Op == token.EQL && - astequal.Expr(lhs.X, rhs.X) + astequal.Expr(lhs.X, rhs.X) && + typep.SideEffectFree(c.ctx.TypesInfo, lhs.Y) && typep.SideEffectFree(c.ctx.TypesInfo, rhs.Y) } func (c *badCondChecker) lessAndGreater(lhs, rhs *ast.BinaryExpr) bool { diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go index 6c684505..8f5cf97f 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go @@ -148,7 +148,7 @@ func (c *badRegexpChecker) walk(e syntax.Expr) { case syntax.OpCaret: if !c.isGoodAnchor(e) { - c.warn("dangling or redundant ^, maybe \\^ is intended?") + c.warnf("dangling or redundant ^, maybe \\^ is intended?") } default: @@ -176,11 +176,11 @@ func (c *badRegexpChecker) updateFlagState(state *regexpFlagState, e syntax.Expr if clearing { if !state[ch] { - c.warn("clearing unset flag %c in %s", ch, e.Value) + c.warnf("clearing unset flag %c in %s", ch, e.Value) } } else { if state[ch] { - c.warn("redundant flag %c in %s", ch, e.Value) + c.warnf("redundant flag %c in %s", ch, e.Value) } } state[ch] = !clearing @@ -198,7 +198,7 @@ func (c *badRegexpChecker) checkNestedQuantifier(e syntax.Expr) { switch x.Op { case syntax.OpPlus, syntax.OpStar: - c.warn("repeated greedy quantifier in %s", e.Value) + c.warnf("repeated greedy quantifier in %s", e.Value) } } @@ -208,7 +208,7 @@ func (c *badRegexpChecker) checkAltDups(alt syntax.Expr) { set := make(map[string]struct{}, len(alt.Args)) for _, a := range alt.Args { if _, ok := set[a.Value]; ok { - c.warn("`%s` is duplicated in %s", a.Value, alt.Value) + c.warnf("`%s` is duplicated in %s", a.Value, alt.Value) } set[a.Value] = struct{}{} } @@ -232,7 +232,7 @@ func (c *badRegexpChecker) checkAltAnchor(alt syntax.Expr) { } } if matched { - c.warn("^ applied only to `%s` in %s", first.Value[len(`^`):], alt.Value) + c.warnf("^ applied only to `%s` in %s", first.Value[len(`^`):], alt.Value) } } @@ -247,7 +247,7 @@ func (c 
*badRegexpChecker) checkAltAnchor(alt syntax.Expr) { } } if matched { - c.warn("$ applied only to `%s` in %s", last.Value[:len(last.Value)-len(`$`)], alt.Value) + c.warnf("$ applied only to `%s` in %s", last.Value[:len(last.Value)-len(`$`)], alt.Value) } } } @@ -429,7 +429,7 @@ func (c *badRegexpChecker) isGoodAnchor(e syntax.Expr) bool { return false } -func (c *badRegexpChecker) warn(format string, args ...interface{}) { +func (c *badRegexpChecker) warnf(format string, args ...interface{}) { c.ctx.Warn(c.cause, format, args...) } diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/checkers.go b/tools/vendor/github.com/go-critic/go-critic/checkers/checkers.go index 5797dafd..751c7150 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/checkers.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/checkers.go @@ -1,4 +1,4 @@ -// Package checkers is a gocritic linter main checkers collection. +// Package checkers is a go-critic linter main checkers collection. package checkers import ( diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go index c61d773d..bb41d478 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go @@ -8,6 +8,8 @@ import ( "github.com/go-critic/go-critic/linter" ) +const deprecatedPrefix = "Deprecated: " + func init() { var info linter.CheckerInfo info.Name = "deprecatedComment" @@ -91,20 +93,25 @@ func (c *deprecatedCommentChecker) VisitDocComment(doc *ast.CommentGroup) { // // TODO(quasilyte): there are also multi-line deprecation comments. + // prev stores the previous line after it was trimmed. + // It's used to check whether the deprecation prefix is at the beginning of a new paragraph. + var prev string + for _, comment := range doc.List { if strings.HasPrefix(comment.Text, "/*") { // TODO(quasilyte): handle multi-line doc comments. continue } - l := comment.Text[len("//"):] - if len(l) < len("Deprecated: ") { + rawLine := strings.TrimPrefix(comment.Text, "//") + l := strings.TrimSpace(rawLine) + if len(rawLine) < len(deprecatedPrefix) { + prev = l continue } - l = strings.TrimSpace(l) // Check whether someone messed up with a prefix casing. 
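Note: with the `prev` tracking added above, go-critic's deprecatedComment checker now warns when a `Deprecated:` notice does not start its own paragraph. A hypothetical doc-comment pair showing the flagged and accepted forms (function names are mine):

```go
package sample

// Old does something useful. The notice on the next line is flagged because
// the preceding comment line is not blank:
// Deprecated: use New instead.
func Old() {}

// New does something useful.
//
// Deprecated: use Newer instead. This form passes: the notice opens a
// dedicated paragraph.
func New() {}

func Newer() {}
```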
upcase := strings.ToUpper(l) - if strings.HasPrefix(upcase, "DEPRECATED: ") && !strings.HasPrefix(l, "Deprecated: ") { + if strings.HasPrefix(upcase, "DEPRECATED: ") && !strings.HasPrefix(l, deprecatedPrefix) { c.warnCasing(comment, l) return } @@ -134,6 +141,12 @@ func (c *deprecatedCommentChecker) VisitDocComment(doc *ast.CommentGroup) { return } } + + if strings.HasPrefix(l, deprecatedPrefix) && prev != "" { + c.warnParagraph(comment) + return + } + prev = l } } @@ -154,3 +167,7 @@ func (c *deprecatedCommentChecker) warnTypo(cause ast.Node, line string) { word := strings.Split(line, ":")[0] c.ctx.Warn(cause, "typo in `%s`; should be `Deprecated`", word) } + +func (c *deprecatedCommentChecker) warnParagraph(cause ast.Node) { + c.ctx.Warn(cause, "`Deprecated: ` notices should be in a dedicated paragraph, separated from the rest") +} diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/dupOption_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/dupOption_checker.go new file mode 100644 index 00000000..5e7eeda1 --- /dev/null +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/dupOption_checker.go @@ -0,0 +1,118 @@ +package checkers + +import ( + "go/ast" + "go/token" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astfmt" +) + +func init() { + var info linter.CheckerInfo + info.Name = "dupOption" + info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} + info.Summary = "Detects duplicated option function arguments in variadic function calls" + info.Before = `doSomething(name, + withWidth(w), + withHeight(h), + withWidth(w), +)` + info.After = `doSomething(name, + withWidth(w), + withHeight(h), +)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + c := &dupOptionChecker{ctx: ctx} + return astwalk.WalkerForExpr(c), nil + }) +} + +type dupOptionChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *dupOptionChecker) VisitExpr(expr ast.Expr) { + call := astcast.ToCallExpr(expr) + variadicArgs, argType := c.getVariadicArgs(call) + if len(variadicArgs) == 0 { + return + } + + if !c.isOptionType(argType) { + return + } + + dupArgs := c.findDupArgs(variadicArgs) + for _, arg := range dupArgs { + c.warn(arg) + } +} + +func (c *dupOptionChecker) getVariadicArgs(call *ast.CallExpr) ([]ast.Expr, types.Type) { + if len(call.Args) == 0 { + return nil, nil + } + + // skip for someFunc(a, b ...) 
+ if call.Ellipsis != token.NoPos { + return nil, nil + } + + funType := c.ctx.TypeOf(call.Fun) + sign, ok := funType.(*types.Signature) + if !ok || !sign.Variadic() { + return nil, nil + } + + last := sign.Params().Len() - 1 + sliceType, ok := sign.Params().At(last).Type().(*types.Slice) + if !ok { + return nil, nil + } + + if last > len(call.Args) { + return nil, nil + } + + argType := sliceType.Elem() + return call.Args[last:], argType +} + +func (c *dupOptionChecker) isOptionType(typeInfo types.Type) bool { + typeInfo = typeInfo.Underlying() + + sign, ok := typeInfo.(*types.Signature) + if !ok { + return false + } + + if sign.Params().Len() == 0 { + return false + } + + return true +} + +func (c *dupOptionChecker) findDupArgs(args []ast.Expr) []ast.Expr { + codeMap := make(map[string]bool) + dupArgs := make([]ast.Expr, 0) + for _, arg := range args { + code := astfmt.Sprint(arg) + if codeMap[code] { + dupArgs = append(dupArgs, arg) + continue + } + codeMap[code] = true + } + return dupArgs +} + +func (c *dupOptionChecker) warn(arg ast.Node) { + c.ctx.Warn(arg, "function argument `%s` is duplicated", arg) +} diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go index 9889f48e..19b20e42 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go @@ -45,6 +45,12 @@ func (c *exitAfterDeferChecker) VisitFuncDecl(fn *ast.FuncDecl) { var deferStmt *ast.DeferStmt pre := func(cur *astutil.Cursor) bool { + // If we found a defer statement in the function post traversal. + // and are looking at the Else branch during a pre traversal, stop seeking as it could be false positive. + if deferStmt != nil && cur.Name() == "Else" { + return false + } + // Don't recurse into local anonymous functions. 
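Note: the new `dupOption` checker flags textually identical arguments passed to a variadic parameter whose element type is a function, i.e. the usual functional-options pattern. A hypothetical call it would report (type and option names are mine):

```go
package main

import "fmt"

type config struct{ width, height int }

// Option is a functional option; dupOption only considers variadic
// parameters whose element type is a function type like this one.
type Option func(*config)

func withWidth(w int) Option  { return func(c *config) { c.width = w } }
func withHeight(h int) Option { return func(c *config) { c.height = h } }

func newThing(opts ...Option) *config {
	c := &config{}
	for _, o := range opts {
		o(c)
	}
	return c
}

func main() {
	// The second withWidth(10) is textually identical to the first, which is
	// exactly what findDupArgs above detects.
	c := newThing(withWidth(10), withHeight(20), withWidth(10))
	fmt.Println(c.width, c.height)
}
```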
return !astp.IsFuncLit(cur.Node()) } diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go b/tools/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go index ace418d4..66414585 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go @@ -2521,6 +2521,61 @@ var PrecompiledRules = &ir.File{ }, }, }, + { + Line: 799, + Name: "zeroByteRepeat", + MatcherName: "m", + DocTags: []string{"performance"}, + DocSummary: "Detects bytes.Repeat with 0 value", + DocBefore: "bytes.Repeat([]byte{0}, x)", + DocAfter: "make([]byte, x)", + Rules: []ir.Rule{ + { + Line: 800, + SyntaxPatterns: []ir.PatternString{{Line: 800, Value: "bytes.Repeat([]byte{0}, $x)"}}, + ReportTemplate: "avoid bytes.Repeat([]byte{0}, $x); consider using make([]byte, $x) instead", + SuggestTemplate: "make([]byte, $x)", + }, + { + Line: 805, + SyntaxPatterns: []ir.PatternString{{Line: 805, Value: "bytes.Repeat([]byte{$x}, $n)"}}, + ReportTemplate: "avoid bytes.Repeat with a const 0; use make([]byte, $n) instead", + SuggestTemplate: "make([]byte, $n)", + WhereExpr: ir.FilterExpr{ + Line: 806, + Op: ir.FilterAndOp, + Src: "m[\"x\"].Const && m[\"x\"].Value.Int() == 0", + Args: []ir.FilterExpr{ + { + Line: 806, + Op: ir.FilterVarConstOp, + Src: "m[\"x\"].Const", + Value: "x", + }, + { + Line: 806, + Op: ir.FilterEqOp, + Src: "m[\"x\"].Value.Int() == 0", + Args: []ir.FilterExpr{ + { + Line: 806, + Op: ir.FilterVarValueIntOp, + Src: "m[\"x\"].Value.Int()", + Value: "x", + }, + { + Line: 806, + Op: ir.FilterIntOp, + Src: "0", + Value: int64(0), + }, + }, + }, + }, + }, + }, + }, + }, }, } diff --git a/tools/vendor/github.com/go-critic/go-critic/linter/linter.go b/tools/vendor/github.com/go-critic/go-critic/linter/linter.go index d4bc1753..c751d94c 100644 --- a/tools/vendor/github.com/go-critic/go-critic/linter/linter.go +++ b/tools/vendor/github.com/go-critic/go-critic/linter/linter.go @@ -249,8 +249,8 @@ func NewContext(fset *token.FileSet, sizes types.Sizes) *Context { // It's permitted to have "go" prefix (e.g. "go1.5"). // // Empty string (the default) means that we make no -// Go version assumptions and (like gocritic does) behave -// like all features are available. To make gocritic +// Go version assumptions and (like go-critic does) behave +// like all features are available. To make go-critic // more conservative, the upper Go version level should be adjusted. func (c *Context) SetGoVersion(version string) { v, err := ParseGoVersion(version) diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/analysis/analyzer.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/analysis/analyzer.go index f0c04d42..05239005 100644 --- a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/analysis/analyzer.go +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/analysis/analyzer.go @@ -1,3 +1,4 @@ +// Package analysis provides the main analyzer implementation. package analysis import ( diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/deprecated/deprecated.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/deprecated/deprecated.go index 37fbeb1f..08915f6c 100644 --- a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/deprecated/deprecated.go +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/deprecated/deprecated.go @@ -1,3 +1,5 @@ +// Package deprecated provides a checker for correct usage of deprecation +// markers. 
package deprecated import ( @@ -9,10 +11,9 @@ import ( "github.com/godoc-lint/godoc-lint/pkg/util" ) -// DeprecatedRule is the corresponding rule name. -const DeprecatedRule = model.DeprecatedRule +const deprecatedRule = model.DeprecatedRule -var ruleSet = model.RuleSet{}.Add(DeprecatedRule) +var ruleSet = model.RuleSet{}.Add(deprecatedRule) // DeprecatedChecker checks correct usage of "Deprecated:" markers. type DeprecatedChecker struct{} @@ -31,7 +32,7 @@ func (r *DeprecatedChecker) GetCoveredRules() model.RuleSet { func (r *DeprecatedChecker) Apply(actx *model.AnalysisContext) error { docs := make(map[*model.CommentGroup]struct{}, 10*len(actx.InspectorResult.Files)) - for _, ir := range util.AnalysisApplicableFiles(actx, false, model.RuleSet{}.Add(DeprecatedRule)) { + for _, ir := range util.AnalysisApplicableFiles(actx, false, model.RuleSet{}.Add(deprecatedRule)) { if ir.PackageDoc != nil { docs[ir.PackageDoc] = struct{}{} } @@ -58,13 +59,14 @@ func (r *DeprecatedChecker) Apply(actx *model.AnalysisContext) error { return nil } -// probableDeprecationRE finds probable deprecation markers, including the correct usage. +// probableDeprecationRE finds probable deprecation markers, including the +// correct usage. var probableDeprecationRE = regexp.MustCompile(`(?i)^deprecated:.?`) const correctDeprecationMarker = "Deprecated: " func checkDeprecations(actx *model.AnalysisContext, doc *model.CommentGroup) { - if doc.DisabledRules.All || doc.DisabledRules.Rules.Has(DeprecatedRule) { + if doc.DisabledRules.All || doc.DisabledRules.Rules.Has(deprecatedRule) { return } diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/max_len/max_len.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/max_len/max_len.go index dbed1db7..1d647c49 100644 --- a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/max_len/max_len.go +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/max_len/max_len.go @@ -1,18 +1,19 @@ +// Package max_len provides a checker for maximum line length of godocs. package max_len import ( "fmt" gdc "go/doc/comment" "strings" + "unicode/utf8" "github.com/godoc-lint/godoc-lint/pkg/model" "github.com/godoc-lint/godoc-lint/pkg/util" ) -// MaxLenRule is the corresponding rule name. -const MaxLenRule = model.MaxLenRule +const maxLenRule = model.MaxLenRule -var ruleSet = model.RuleSet{}.Add(MaxLenRule) +var ruleSet = model.RuleSet{}.Add(maxLenRule) // MaxLenChecker checks maximum line length of godocs. 
type MaxLenChecker struct{} @@ -34,7 +35,7 @@ func (r *MaxLenChecker) Apply(actx *model.AnalysisContext) error { docs := make(map[*model.CommentGroup]struct{}, 10*len(actx.InspectorResult.Files)) - for _, ir := range util.AnalysisApplicableFiles(actx, includeTests, model.RuleSet{}.Add(MaxLenRule)) { + for _, ir := range util.AnalysisApplicableFiles(actx, includeTests, model.RuleSet{}.Add(maxLenRule)) { if ir.PackageDoc != nil { docs[ir.PackageDoc] = struct{}{} } @@ -57,7 +58,7 @@ func (r *MaxLenChecker) Apply(actx *model.AnalysisContext) error { } func checkMaxLen(actx *model.AnalysisContext, doc *model.CommentGroup, maxLen int) { - if doc.DisabledRules.All || doc.DisabledRules.Rules.Has(MaxLenRule) { + if doc.DisabledRules.All || doc.DisabledRules.Rules.Has(maxLenRule) { return } @@ -78,13 +79,14 @@ func checkMaxLen(actx *model.AnalysisContext, doc *model.CommentGroup, maxLen in Content: nonCodeBlocks, } text := string((&gdc.Printer{}).Comment(strippedCodeAndLinks)) - lines := strings.Split(removeCarriageReturn(text), "\n") + linesIter := strings.SplitSeq(removeCarriageReturn(text), "\n") - for _, l := range lines { - if len(l) <= maxLen { + for l := range linesIter { + lineLen := utf8.RuneCountInString(l) + if lineLen <= maxLen { continue } - actx.Pass.ReportRangef(&doc.CG, "godoc line is too long (%d > %d)", len(l), maxLen) + actx.Pass.ReportRangef(&doc.CG, "godoc line is too long (%d > %d)", lineLen, maxLen) break } } diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/no_unused_link/no_unused_link.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/no_unused_link/no_unused_link.go index 1fe56173..8910204b 100644 --- a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/no_unused_link/no_unused_link.go +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/no_unused_link/no_unused_link.go @@ -1,3 +1,4 @@ +// Package no_unused_link provides a checker for unused links in godocs. package no_unused_link import ( @@ -5,10 +6,9 @@ import ( "github.com/godoc-lint/godoc-lint/pkg/util" ) -// NoUnusedLinkRule is the corresponding rule name. -const NoUnusedLinkRule = model.NoUnusedLinkRule +const noUnusedLinkRule = model.NoUnusedLinkRule -var ruleSet = model.RuleSet{}.Add(NoUnusedLinkRule) +var ruleSet = model.RuleSet{}.Add(noUnusedLinkRule) // NoUnusedLinkChecker checks for unused links. 
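Note: the `max_len` change above measures godoc lines in runes rather than bytes, so multi-byte characters no longer push a line over the limit prematurely. A quick illustration of the difference:

```go
package main

import (
	"fmt"
	"unicode/utf8"
)

func main() {
	line := "// Grüße aus Köln, a godoc line with multi-byte characters"
	fmt.Println(len(line))                    // byte length (larger)
	fmt.Println(utf8.RuneCountInString(line)) // rune count, what max_len now uses
}
```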
type NoUnusedLinkChecker struct{} @@ -29,7 +29,7 @@ func (r *NoUnusedLinkChecker) Apply(actx *model.AnalysisContext) error { docs := make(map[*model.CommentGroup]struct{}, 10*len(actx.InspectorResult.Files)) - for _, ir := range util.AnalysisApplicableFiles(actx, includeTests, model.RuleSet{}.Add(NoUnusedLinkRule)) { + for _, ir := range util.AnalysisApplicableFiles(actx, includeTests, model.RuleSet{}.Add(noUnusedLinkRule)) { if ir.PackageDoc != nil { docs[ir.PackageDoc] = struct{}{} } @@ -52,7 +52,7 @@ func (r *NoUnusedLinkChecker) Apply(actx *model.AnalysisContext) error { } func checkNoUnusedLink(actx *model.AnalysisContext, doc *model.CommentGroup) { - if doc.DisabledRules.All || doc.DisabledRules.Rules.Has(NoUnusedLinkRule) { + if doc.DisabledRules.All || doc.DisabledRules.Rules.Has(noUnusedLinkRule) { return } diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/pkg_doc/pkg_doc.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/pkg_doc/pkg_doc.go index 960cf32c..199b1f54 100644 --- a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/pkg_doc/pkg_doc.go +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/pkg_doc/pkg_doc.go @@ -1,3 +1,4 @@ +// Package pkg_doc provides a checker for package godocs. package pkg_doc import ( @@ -10,15 +11,15 @@ import ( ) const ( - PkgDocRule = model.PkgDocRule - SinglePkgDocRule = model.SinglePkgDocRule - RequirePkgDocRule = model.RequirePkgDocRule + pkgDocRule = model.PkgDocRule + singlePkgDocRule = model.SinglePkgDocRule + requirePkgDocRule = model.RequirePkgDocRule ) var ruleSet = model.RuleSet{}.Add( - PkgDocRule, - SinglePkgDocRule, - RequirePkgDocRule, + pkgDocRule, + singlePkgDocRule, + requirePkgDocRule, ) // PkgDocChecker checks package godocs. @@ -46,18 +47,18 @@ const commandPkgName = "main" const commandTestPkgName = "main_test" func checkPkgDocRule(actx *model.AnalysisContext) { - if !actx.Config.IsAnyRuleApplicable(model.RuleSet{}.Add(PkgDocRule)) { + if !actx.Config.IsAnyRuleApplicable(model.RuleSet{}.Add(pkgDocRule)) { return } includeTests := actx.Config.GetRuleOptions().PkgDocIncludeTests - for f, ir := range util.AnalysisApplicableFiles(actx, includeTests, model.RuleSet{}.Add(PkgDocRule)) { + for f, ir := range util.AnalysisApplicableFiles(actx, includeTests, model.RuleSet{}.Add(pkgDocRule)) { if ir.PackageDoc == nil { continue } - if ir.PackageDoc.DisabledRules.All || ir.PackageDoc.DisabledRules.Rules.Has(PkgDocRule) { + if ir.PackageDoc.DisabledRules.All || ir.PackageDoc.DisabledRules.Rules.Has(pkgDocRule) { continue } @@ -124,7 +125,7 @@ func checkPkgDocPrefix(text string, packageName string) (string, bool) { } func checkSinglePkgDocRule(actx *model.AnalysisContext) { - if !actx.Config.IsAnyRuleApplicable(model.RuleSet{}.Add(SinglePkgDocRule)) { + if !actx.Config.IsAnyRuleApplicable(model.RuleSet{}.Add(singlePkgDocRule)) { return } @@ -132,12 +133,12 @@ func checkSinglePkgDocRule(actx *model.AnalysisContext) { documentedPkgs := make(map[string][]*ast.File, 2) - for f, ir := range util.AnalysisApplicableFiles(actx, includeTests, model.RuleSet{}.Add(SinglePkgDocRule)) { + for f, ir := range util.AnalysisApplicableFiles(actx, includeTests, model.RuleSet{}.Add(singlePkgDocRule)) { if ir.PackageDoc == nil || ir.PackageDoc.Text == "" { continue } - if ir.PackageDoc.DisabledRules.All || ir.PackageDoc.DisabledRules.Rules.Has(SinglePkgDocRule) { + if ir.PackageDoc.DisabledRules.All || ir.PackageDoc.DisabledRules.Rules.Has(singlePkgDocRule) { continue } @@ -160,7 +161,7 @@ func 
checkSinglePkgDocRule(actx *model.AnalysisContext) { } func checkRequirePkgDocRule(actx *model.AnalysisContext) { - if !actx.Config.IsAnyRuleApplicable(model.RuleSet{}.Add(RequirePkgDocRule)) { + if !actx.Config.IsAnyRuleApplicable(model.RuleSet{}.Add(requirePkgDocRule)) { return } @@ -168,7 +169,7 @@ func checkRequirePkgDocRule(actx *model.AnalysisContext) { pkgFiles := make(map[string][]*ast.File, 2) - for f := range util.AnalysisApplicableFiles(actx, includeTests, model.RuleSet{}.Add(RequirePkgDocRule)) { + for f := range util.AnalysisApplicableFiles(actx, includeTests, model.RuleSet{}.Add(requirePkgDocRule)) { pkg := f.Name.Name if _, ok := pkgFiles[pkg]; !ok { pkgFiles[pkg] = make([]*ast.File, 0, len(actx.Pass.Files)) @@ -185,7 +186,7 @@ func checkRequirePkgDocRule(actx *model.AnalysisContext) { continue } - if ir.PackageDoc.DisabledRules.All || ir.PackageDoc.DisabledRules.Rules.Has(RequirePkgDocRule) { + if ir.PackageDoc.DisabledRules.All || ir.PackageDoc.DisabledRules.Rules.Has(requirePkgDocRule) { continue } diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/registry.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/registry.go index 98173344..8452fa02 100644 --- a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/registry.go +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/registry.go @@ -1,3 +1,4 @@ +// Package check provides a registry of checkers. package check import ( diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/require_doc/require_doc.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/require_doc/require_doc.go index 99dcc9c1..9b0ba1af 100644 --- a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/require_doc/require_doc.go +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/require_doc/require_doc.go @@ -1,3 +1,4 @@ +// Package require_doc provides a checker that requires symbols to have godocs. package require_doc import ( @@ -9,10 +10,9 @@ import ( "github.com/godoc-lint/godoc-lint/pkg/util" ) -// RequireDocRule is the corresponding rule name. -const RequireDocRule = model.RequireDocRule +const requireDocRule = model.RequireDocRule -var ruleSet = model.RuleSet{}.Add(RequireDocRule) +var ruleSet = model.RuleSet{}.Add(requireDocRule) // RequireDocChecker checks required godocs. type RequireDocChecker struct{} @@ -37,14 +37,14 @@ func (r *RequireDocChecker) Apply(actx *model.AnalysisContext) error { return nil } - for _, ir := range util.AnalysisApplicableFiles(actx, includeTests, model.RuleSet{}.Add(RequireDocRule)) { + for _, ir := range util.AnalysisApplicableFiles(actx, includeTests, model.RuleSet{}.Add(requireDocRule)) { for _, decl := range ir.SymbolDecl { isExported := ast.IsExported(decl.Name) if isExported && !requirePublic || !isExported && !requirePrivate { continue } - if decl.Doc != nil && (decl.Doc.DisabledRules.All || decl.Doc.DisabledRules.Rules.Has(RequireDocRule)) { + if decl.Doc != nil && (decl.Doc.DisabledRules.All || decl.Doc.DisabledRules.Rules.Has(requireDocRule)) { continue } diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/shared/deprecated.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/shared/deprecated.go index 8365a212..8ebed0e2 100644 --- a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/shared/deprecated.go +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/shared/deprecated.go @@ -1,3 +1,4 @@ +// Package shared provides shared utilities for checkers. 
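The require_doc hunk above gates reporting on ast.IsExported combined with the exported/unexported options. For reference, a tiny sketch of how go/ast classifies identifiers; the names here are chosen only for illustration:

package main

import (
	"fmt"
	"go/ast"
)

func main() {
	// ast.IsExported reports whether a name starts with an upper-case letter.
	for _, name := range []string{"Checker", "checker", "_internal"} {
		fmt.Printf("%-10s exported=%v\n", name, ast.IsExported(name))
	}
}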
package shared import ( @@ -5,6 +6,8 @@ import ( "strings" ) +// HasDeprecatedParagraph reports whether the given comment blocks contain a +// paragraph starting with deprecation marker. func HasDeprecatedParagraph(blocks []comment.Block) bool { for _, block := range blocks { par, ok := block.(*comment.Paragraph) diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/start_with_name/start_with_name.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/start_with_name/start_with_name.go index da658316..af83b99a 100644 --- a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/start_with_name/start_with_name.go +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/check/start_with_name/start_with_name.go @@ -1,3 +1,5 @@ +// Package start_with_name provides a checker for godocs starting with the +// symbol name. package start_with_name import ( @@ -10,10 +12,9 @@ import ( "github.com/godoc-lint/godoc-lint/pkg/util" ) -// StartWithNameRule is the corresponding rule name. -const StartWithNameRule = model.StartWithNameRule +const startWithNameRule = model.StartWithNameRule -var ruleSet = model.RuleSet{}.Add(StartWithNameRule) +var ruleSet = model.RuleSet{}.Add(startWithNameRule) // StartWithNameChecker checks starter of godocs. type StartWithNameChecker struct{} @@ -30,14 +31,14 @@ func (r *StartWithNameChecker) GetCoveredRules() model.RuleSet { // Apply implements the corresponding interface method. func (r *StartWithNameChecker) Apply(actx *model.AnalysisContext) error { - if !actx.Config.IsAnyRuleApplicable(model.RuleSet{}.Add(StartWithNameRule)) { + if !actx.Config.IsAnyRuleApplicable(model.RuleSet{}.Add(startWithNameRule)) { return nil } includeTests := actx.Config.GetRuleOptions().StartWithNameIncludeTests includePrivate := actx.Config.GetRuleOptions().StartWithNameIncludeUnexported - for _, ir := range util.AnalysisApplicableFiles(actx, includeTests, model.RuleSet{}.Add(StartWithNameRule)) { + for _, ir := range util.AnalysisApplicableFiles(actx, includeTests, model.RuleSet{}.Add(startWithNameRule)) { for _, decl := range ir.SymbolDecl { isExported := ast.IsExported(decl.Name) if !isExported && !includePrivate { @@ -52,7 +53,7 @@ func (r *StartWithNameChecker) Apply(actx *model.AnalysisContext) error { continue } - if decl.Doc.DisabledRules.All || decl.Doc.DisabledRules.Rules.Has(StartWithNameRule) { + if decl.Doc.DisabledRules.All || decl.Doc.DisabledRules.Rules.Has(startWithNameRule) { continue } diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/compose/compose.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/compose/compose.go index d1ceee04..7ec04e34 100644 --- a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/compose/compose.go +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/compose/compose.go @@ -1,3 +1,4 @@ +// Package compose provides composition of the linter components. package compose import ( @@ -10,6 +11,7 @@ import ( "github.com/godoc-lint/godoc-lint/pkg/model" ) +// Composition holds the composed components of the linter. type Composition struct { Registry model.Registry ConfigBuilder model.ConfigBuilder @@ -17,6 +19,7 @@ type Composition struct { Analyzer model.Analyzer } +// CompositionConfig holds the configuration for composing the linter. type CompositionConfig struct { BaseDir string ExitFunc func(int, error) @@ -28,6 +31,7 @@ type CompositionConfig struct { BaseDirPlainConfig *config.PlainConfig } +// Compose composes the linter components based on the given configuration. 
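HasDeprecatedParagraph, which gains a doc comment in the hunk above, inspects parsed godoc blocks for a paragraph that starts with the deprecation marker. A rough standalone stand-in for that kind of check, using the standard go/doc/comment parser rather than the linter's own model types, and assuming the conventional "Deprecated:" prefix:

package main

import (
	"fmt"
	"go/doc/comment"
	"strings"
)

// hasDeprecated reports whether any paragraph of the parsed doc comment
// starts with the conventional "Deprecated:" prefix.
func hasDeprecated(doc string) bool {
	parsed := (&comment.Parser{}).Parse(doc)
	for _, block := range parsed.Content {
		par, ok := block.(*comment.Paragraph)
		if !ok || len(par.Text) == 0 {
			continue
		}
		if plain, ok := par.Text[0].(comment.Plain); ok &&
			strings.HasPrefix(string(plain), "Deprecated:") {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(hasDeprecated("Foo does things.\n\nDeprecated: use Bar instead.\n")) // true
}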
func Compose(c CompositionConfig) *Composition { if c.BaseDir == "" { // It's a best effort to use the current working directory if not set. diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/config/builder.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/config/builder.go index d7175463..aa25cf18 100644 --- a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/config/builder.go +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/config/builder.go @@ -32,6 +32,9 @@ func NewConfigBuilder(baseDir string) *ConfigBuilder { } } +// WithBaseDirPlainConfig sets the plain config for the base directory. This is +// meant to be used for integrating with umbrella linters (e.g. golangci-lint) +// where the root config comes from a different source/format. func (cb *ConfigBuilder) WithBaseDirPlainConfig(baseDirPlainConfig *PlainConfig) *ConfigBuilder { cb.baseDirPlainConfig = baseDirPlainConfig return cb diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/config/doc.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/config/doc.go new file mode 100644 index 00000000..3a015d98 --- /dev/null +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/config/doc.go @@ -0,0 +1,2 @@ +// Package config provides configuration building and management. +package config diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/config/plain.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/config/plain.go index f5a9dfdd..4cb75d8d 100644 --- a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/config/plain.go +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/config/plain.go @@ -22,6 +22,8 @@ type PlainConfig struct { Options *PlainRuleOptions `yaml:"options" mapstructure:"options"` } +// PlainRuleOptions represents the plain rule options as users would provide via +// a config file (e.g., a YAML file). type PlainRuleOptions struct { MaxLenLength *uint `option:"max-len/length" yaml:"max-len/length" mapstructure:"max-len/length"` MaxLenIncludeTests *bool `option:"max-len/include-tests" yaml:"max-len/include-tests" mapstructure:"max-len/include-tests"` @@ -73,6 +75,7 @@ func transferOptions(target *model.RuleOptions, source *PlainRuleOptions) { } } +// Validate validates the plain configuration. func (pcfg *PlainConfig) Validate() error { var errs []error diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/inspect/inspector.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/inspect/inspector.go index 44e162a6..7ea535ee 100644 --- a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/inspect/inspector.go +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/inspect/inspector.go @@ -1,3 +1,4 @@ +// Package inspect provides the pre-run inspection analyzer. package inspect import ( diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/model/config.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/model/config.go index 6d7ca88f..78fa62a2 100644 --- a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/model/config.go +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/model/config.go @@ -15,16 +15,22 @@ type ConfigBuilder interface { GetConfig(cwd string) (Config, error) } +// DefaultSet defines the default set of rules to enable. type DefaultSet string const ( - DefaultSetAll DefaultSet = "all" - DefaultSetNone DefaultSet = "none" + // DefaultSetAll enables all rules. + DefaultSetAll DefaultSet = "all" + // DefaultSetNone disables all rules. + DefaultSetNone DefaultSet = "none" + // DefaultSetBasic enables a basic set of rules. 
DefaultSetBasic DefaultSet = "basic" + // DefaultDefaultSet is the default set of rules to enable. DefaultDefaultSet = DefaultSetBasic ) +// DefaultSetToRules maps default sets to the corresponding rule sets. var DefaultSetToRules = map[DefaultSet]RuleSet{ DefaultSetAll: AllRules, DefaultSetNone: {}, @@ -38,6 +44,7 @@ var DefaultSetToRules = map[DefaultSet]RuleSet{ }(), } +// DefaultSetValues holds the valid values for DefaultSet. var DefaultSetValues = func() []DefaultSet { values := slices.Collect(maps.Keys(DefaultSetToRules)) slices.Sort(values) diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/model/doc.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/model/doc.go new file mode 100644 index 00000000..b4afa8d4 --- /dev/null +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/model/doc.go @@ -0,0 +1,2 @@ +// Package model provides data models for the linter. +package model diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/model/inspector.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/model/inspector.go index 4fbfe3c9..3166e17f 100644 --- a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/model/inspector.go +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/model/inspector.go @@ -19,6 +19,7 @@ type InspectorResult struct { Files map[*ast.File]*FileInspection } +// FileInspection represents the inspection result for a single file. type FileInspection struct { // DisabledRules contains information about rules disabled at top level. DisabledRules InspectorResultDisableRules @@ -43,11 +44,16 @@ type InspectorResultDisableRules struct { type SymbolDeclKind string const ( - SymbolDeclKindBad SymbolDeclKind = "bad" - SymbolDeclKindFunc SymbolDeclKind = "func" + // SymbolDeclKindBad represents an unknown declaration kind. + SymbolDeclKindBad SymbolDeclKind = "bad" + // SymbolDeclKindFunc represents a function declaration kind. + SymbolDeclKindFunc SymbolDeclKind = "func" + // SymbolDeclKindConst represents a const declaration kind. SymbolDeclKindConst SymbolDeclKind = "const" - SymbolDeclKindType SymbolDeclKind = "type" - SymbolDeclKindVar SymbolDeclKind = "var" + // SymbolDeclKindType represents a type declaration kind. + SymbolDeclKindType SymbolDeclKind = "type" + // SymbolDeclKindVar represents a var declaration kind. + SymbolDeclKindVar SymbolDeclKind = "var" ) // SymbolDecl represents a top level declaration. diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/model/rule.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/model/rule.go index 654e0035..c53fcf61 100644 --- a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/model/rule.go +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/model/rule.go @@ -4,14 +4,22 @@ package model type Rule string const ( - PkgDocRule Rule = "pkg-doc" - SinglePkgDocRule Rule = "single-pkg-doc" + // PkgDocRule represents the "pkg-doc" rule. + PkgDocRule Rule = "pkg-doc" + // SinglePkgDocRule represents the "single-pkg-doc" rule. + SinglePkgDocRule Rule = "single-pkg-doc" + // RequirePkgDocRule represents the "require-pkg-doc" rule. RequirePkgDocRule Rule = "require-pkg-doc" + // StartWithNameRule represents the "start-with-name" rule. StartWithNameRule Rule = "start-with-name" - RequireDocRule Rule = "require-doc" - DeprecatedRule Rule = "deprecated" - MaxLenRule Rule = "max-len" - NoUnusedLinkRule Rule = "no-unused-link" + // RequireDocRule represents the "require-doc" rule. + RequireDocRule Rule = "require-doc" + // DeprecatedRule represents the "deprecated" rule. 
+ DeprecatedRule Rule = "deprecated" + // MaxLenRule represents the "max-len" rule. + MaxLenRule Rule = "max-len" + // NoUnusedLinkRule represents the "no-unused-link" rule. + NoUnusedLinkRule Rule = "no-unused-link" ) // AllRules is the set of all supported rules. diff --git a/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/util/doc.go b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/util/doc.go new file mode 100644 index 00000000..1c40b746 --- /dev/null +++ b/tools/vendor/github.com/godoc-lint/godoc-lint/pkg/util/doc.go @@ -0,0 +1,2 @@ +// Package util provides utility functions for the linter. +package util diff --git a/tools/vendor/github.com/gofrs/flock/.golangci.yml b/tools/vendor/github.com/gofrs/flock/.golangci.yml index 3ad88a38..bc837b26 100644 --- a/tools/vendor/github.com/gofrs/flock/.golangci.yml +++ b/tools/vendor/github.com/gofrs/flock/.golangci.yml @@ -1,5 +1,12 @@ -run: - timeout: 10m +version: "2" + +formatters: + enable: + - gofumpt + - goimports + settings: + gofumpt: + extra-rules: true linters: enable: @@ -18,9 +25,7 @@ linters: - gocritic - godot - godox - - gofumpt - goheader - - goimports - gomoddirectives - goprintffuncname - gosec @@ -31,84 +36,81 @@ linters: - misspell - nolintlint - revive - - stylecheck - - tenv + - staticcheck - testifylint - thelper - unconvert - unparam - usestdlibvars - whitespace - -linters-settings: - misspell: - locale: US - godox: - keywords: - - FIXME - goheader: - template: |- - Copyright 2015 Tim Heckman. All rights reserved. - Copyright 2018-{{ YEAR }} The Gofrs. All rights reserved. - Use of this source code is governed by the BSD 3-Clause - license that can be found in the LICENSE file. - gofumpt: - extra-rules: true - gocritic: - enabled-tags: - - diagnostic - - style - - performance - disabled-checks: - - paramTypeCombine # already handle by gofumpt.extra-rules - - whyNoLint # already handle by nonolint - - unnamedResult - - hugeParam - - sloppyReassign - - rangeValCopy - - octalLiteral - - ptrToRefParam - - appendAssign - - ruleguard - - httpNoBody - - exposedSyncMutex - - revive: - rules: - - name: struct-tag - - name: blank-imports - - name: context-as-argument - - name: context-keys-type - - name: dot-imports - - name: error-return - - name: error-strings - - name: error-naming - - name: exported - - name: if-return - - name: increment-decrement - - name: var-naming - - name: var-declaration - - name: package-comments - - name: range - - name: receiver-naming - - name: time-naming - - name: unexported-return - - name: indent-error-flow - - name: errorf - - name: empty-block - - name: superfluous-else - - name: unused-parameter - - name: unreachable-code - - name: redefines-builtin-id + - wsl_v5 + settings: + gocritic: + disabled-checks: + - paramTypeCombine # already handle by gofumpt.extra-rules + - whyNoLint # already handle by nonolint + - unnamedResult + - hugeParam + - sloppyReassign + - rangeValCopy + - octalLiteral + - ptrToRefParam + - appendAssign + - ruleguard + - httpNoBody + - exposedSyncMutex + enabled-tags: + - diagnostic + - style + - performance + godox: + keywords: + - FIXME + goheader: + template: |- + Copyright 2015 Tim Heckman. All rights reserved. + Copyright 2018-{{ YEAR }} The Gofrs. All rights reserved. + Use of this source code is governed by the BSD 3-Clause + license that can be found in the LICENSE file. 
+ gosec: + excludes: + - G115 + misspell: + locale: US + revive: + rules: + - name: struct-tag + - name: blank-imports + - name: context-as-argument + - name: context-keys-type + - name: dot-imports + - name: error-return + - name: error-strings + - name: error-naming + - name: exported + - name: if-return + - name: increment-decrement + - name: var-naming + - name: var-declaration + - name: package-comments + - name: range + - name: receiver-naming + - name: time-naming + - name: unexported-return + - name: indent-error-flow + - name: errorf + - name: empty-block + - name: superfluous-else + - name: unused-parameter + - name: unreachable-code + - name: redefines-builtin-id + exclusions: + presets: + - comments + - common-false-positives + - std-error-handling issues: - exclude-use-default: true max-issues-per-linter: 0 max-same-issues: 0 -output: - show-stats: true - sort-results: true - sort-order: - - linter - - file diff --git a/tools/vendor/github.com/gofrs/flock/LICENSE b/tools/vendor/github.com/gofrs/flock/LICENSE index 7de525bf..c785e5e4 100644 --- a/tools/vendor/github.com/gofrs/flock/LICENSE +++ b/tools/vendor/github.com/gofrs/flock/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2018-2024, The Gofrs +Copyright (c) 2018-2025, The Gofrs Copyright (c) 2015-2020, Tim Heckman All rights reserved. diff --git a/tools/vendor/github.com/gofrs/flock/flock.go b/tools/vendor/github.com/gofrs/flock/flock.go index ff942b22..4cb0746a 100644 --- a/tools/vendor/github.com/gofrs/flock/flock.go +++ b/tools/vendor/github.com/gofrs/flock/flock.go @@ -1,5 +1,5 @@ // Copyright 2015 Tim Heckman. All rights reserved. -// Copyright 2018-2024 The Gofrs. All rights reserved. +// Copyright 2018-2025 The Gofrs. All rights reserved. // Use of this source code is governed by the BSD 3-Clause // license that can be found in the LICENSE file. @@ -62,6 +62,7 @@ type Flock struct { func New(path string, opts ...Option) *Flock { // create it if it doesn't exist, and open the file read-only. flags := os.O_CREATE + switch runtime.GOOS { case "aix", "solaris", "illumos": // AIX cannot preform write-lock (i.e. exclusive) on a read-only file. @@ -124,6 +125,22 @@ func (f *Flock) RLocked() bool { return f.r } +// Stat returns the FileInfo structure describing the lock file. +// If the lock file does not exist or cannot be accessed, an error is returned. +// +// This can be used to check the modification time of the lock file, +// which is useful for detecting stale locks. +func (f *Flock) Stat() (fs.FileInfo, error) { + f.m.RLock() + defer f.m.RUnlock() + + if f.fh != nil { + return f.fh.Stat() + } + + return os.Stat(f.path) +} + func (f *Flock) String() string { return f.path } @@ -158,7 +175,6 @@ func tryCtx(ctx context.Context, fn func() (bool, error), retryDelay time.Durati case <-ctx.Done(): return false, ctx.Err() case <-time.After(retryDelay): - // try again } } } diff --git a/tools/vendor/github.com/gofrs/flock/flock_others.go b/tools/vendor/github.com/gofrs/flock/flock_others.go index 18b14f1b..92d0f7e9 100644 --- a/tools/vendor/github.com/gofrs/flock/flock_others.go +++ b/tools/vendor/github.com/gofrs/flock/flock_others.go @@ -1,3 +1,8 @@ +// Copyright 2015 Tim Heckman. All rights reserved. +// Copyright 2018-2025 The Gofrs. All rights reserved. +// Use of this source code is governed by the BSD 3-Clause +// license that can be found in the LICENSE file. 
+ //go:build (!unix && !windows) || plan9 package flock diff --git a/tools/vendor/github.com/gofrs/flock/flock_unix.go b/tools/vendor/github.com/gofrs/flock/flock_unix.go index cf8919c7..77de7a88 100644 --- a/tools/vendor/github.com/gofrs/flock/flock_unix.go +++ b/tools/vendor/github.com/gofrs/flock/flock_unix.go @@ -1,5 +1,5 @@ // Copyright 2015 Tim Heckman. All rights reserved. -// Copyright 2018-2024 The Gofrs. All rights reserved. +// Copyright 2018-2025 The Gofrs. All rights reserved. // Use of this source code is governed by the BSD 3-Clause // license that can be found in the LICENSE file. @@ -155,6 +155,7 @@ func (f *Flock) try(locked *bool, flag int) (bool, error) { } var retried bool + retry: err := unix.Flock(int(f.fh.Fd()), flag|unix.LOCK_NB) diff --git a/tools/vendor/github.com/gofrs/flock/flock_unix_fcntl.go b/tools/vendor/github.com/gofrs/flock/flock_unix_fcntl.go index ea007b47..05c2f88c 100644 --- a/tools/vendor/github.com/gofrs/flock/flock_unix_fcntl.go +++ b/tools/vendor/github.com/gofrs/flock/flock_unix_fcntl.go @@ -1,5 +1,5 @@ // Copyright 2015 Tim Heckman. All rights reserved. -// Copyright 2018-2024 The Gofrs. All rights reserved. +// Copyright 2018-2025 The Gofrs. All rights reserved. // Use of this source code is governed by the BSD 3-Clause // license that can be found in the LICENSE file. diff --git a/tools/vendor/github.com/gofrs/flock/flock_windows.go b/tools/vendor/github.com/gofrs/flock/flock_windows.go index dfd31e15..aa144f15 100644 --- a/tools/vendor/github.com/gofrs/flock/flock_windows.go +++ b/tools/vendor/github.com/gofrs/flock/flock_windows.go @@ -1,5 +1,5 @@ // Copyright 2015 Tim Heckman. All rights reserved. -// Copyright 2018-2024 The Gofrs. All rights reserved. +// Copyright 2018-2025 The Gofrs. All rights reserved. // Use of this source code is governed by the BSD 3-Clause // license that can be found in the LICENSE file. @@ -23,6 +23,8 @@ const winLockfileSharedLock = 0x00000000 // ErrorLockViolation is the error code returned from the Windows syscall when a lock would block, // and you ask to fail immediately. +// +//nolint:errname // It should be renamed to `ErrLockViolation`. const ErrorLockViolation windows.Errno = 0x21 // 33 // Lock is a blocking call to try and take an exclusive file lock. 
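The flock.go hunk above adds a Stat accessor alongside the existing Lock/TryLock API, with the stated aim of letting callers inspect the lock file's modification time to detect stale locks. A small sketch of how that might be used; the lock path and the one-hour threshold are arbitrary choices for illustration, not part of the library:

package main

import (
	"fmt"
	"time"

	"github.com/gofrs/flock"
)

func main() {
	lock := flock.New("/tmp/example.lock") // illustrative path

	locked, err := lock.TryLock()
	if err != nil {
		panic(err)
	}
	if !locked {
		// Another process holds the lock; use the new Stat method to judge staleness.
		if fi, statErr := lock.Stat(); statErr == nil && time.Since(fi.ModTime()) > time.Hour {
			fmt.Println("lock looks stale, last modified:", fi.ModTime())
		}
		return
	}
	defer lock.Unlock()

	fmt.Println("lock acquired")
}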
diff --git a/tools/vendor/github.com/golangci/golangci-lint/v2/internal/cache/cache.go b/tools/vendor/github.com/golangci/golangci-lint/v2/internal/cache/cache.go index 627993d2..138a3614 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/v2/internal/cache/cache.go +++ b/tools/vendor/github.com/golangci/golangci-lint/v2/internal/cache/cache.go @@ -166,7 +166,7 @@ func (c *Cache) computePkgHash(pkg *packages.Package) (hashResults, error) { fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) - for _, f := range pkg.CompiledGoFiles { + for _, f := range slices.Concat(pkg.CompiledGoFiles, pkg.IgnoredFiles) { h, fErr := c.fileHash(f) if fErr != nil { return nil, fmt.Errorf("failed to calculate file %s hash: %w", f, fErr) diff --git a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/commands/flagsets.go b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/commands/flagsets.go index cc4b0043..2b61217c 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/commands/flagsets.go +++ b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/commands/flagsets.go @@ -138,5 +138,5 @@ func setupIssuesFlagSet(v *viper.Viper, fs *pflag.FlagSet) { internal.AddFlagAndBind(v, fs, fs.Bool, "whole-files", "issues.whole-files", false, color.GreenString("Show issues in any part of update files (requires new-from-rev or new-from-patch)")) internal.AddFlagAndBind(v, fs, fs.Bool, "fix", "issues.fix", false, - color.GreenString("Fix found issues (if it's supported by the linter)")) + color.GreenString("Apply the fixes detected by the linters and formatters (if it's supported by the linter)")) } diff --git a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/commands/fmt.go b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/commands/fmt.go index 3292af3e..ab1aef45 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/commands/fmt.go +++ b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/commands/fmt.go @@ -113,14 +113,11 @@ func (c *fmtCommand) preRunE(_ *cobra.Command, _ []string) error { } func (c *fmtCommand) execute(_ *cobra.Command, args []string) error { - paths, err := cleanArgs(args) - if err != nil { - return fmt.Errorf("failed to clean arguments: %w", err) - } + paths := cleanArgs(args) c.log.Infof("Formatting Go files...") - err = c.runner.Run(paths) + err := c.runner.Run(paths) if err != nil { return fmt.Errorf("failed to process files: %w", err) } @@ -134,25 +131,15 @@ func (c *fmtCommand) persistentPostRun(_ *cobra.Command, _ []string) { } } -func cleanArgs(args []string) ([]string, error) { +func cleanArgs(args []string) []string { if len(args) == 0 { - abs, err := filepath.Abs(".") - if err != nil { - return nil, err - } - - return []string{abs}, nil + return []string{"."} } var expanded []string for _, arg := range args { - abs, err := filepath.Abs(strings.ReplaceAll(arg, "...", "")) - if err != nil { - return nil, err - } - - expanded = append(expanded, abs) + expanded = append(expanded, filepath.Clean(strings.ReplaceAll(arg, "...", ""))) } - return expanded, nil + return expanded } diff --git a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/commands/internal/builder.go b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/commands/internal/builder.go index 5bb9b472..bfd242f1 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/commands/internal/builder.go +++ b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/commands/internal/builder.go @@ -257,7 +257,7 @@ func (b Builder) createVersion(orig string) (string, error) { 
continue } - dh, err := dirhash.HashDir(plugin.Path, "", dirhash.DefaultHash) + dh, err := hashDir(plugin.Path, "", dirhash.DefaultHash) if err != nil { return "", fmt.Errorf("hash plugin directory: %w", err) } diff --git a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/commands/internal/dirhash.go b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/commands/internal/dirhash.go new file mode 100644 index 00000000..16ea6a85 --- /dev/null +++ b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/commands/internal/dirhash.go @@ -0,0 +1,93 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package internal + +import ( + "fmt" + "io" + "os" + "path/filepath" + "strings" + + "golang.org/x/mod/sumdb/dirhash" +) + +// Slightly modified copy of [dirhash.HashDir]. +// https://github.com/golang/mod/blob/v0.28.0/sumdb/dirhash/hash.go#L67-L79 +func hashDir(dir, prefix string, hash dirhash.Hash) (string, error) { + files, err := dirFiles(dir, prefix) + if err != nil { + return "", err + } + + osOpen := func(name string) (io.ReadCloser, error) { + return os.Open(filepath.Join(dir, strings.TrimPrefix(name, prefix))) + } + + return hash(files, osOpen) +} + +// Modified copy of [dirhash.DirFiles]. +// https://github.com/golang/mod/blob/v0.28.0/sumdb/dirhash/hash.go#L81-L109 +// And adapted to globally follows the rules from https://github.com/golang/mod/blob/v0.28.0/zip/zip.go +func dirFiles(dir, prefix string) ([]string, error) { + var files []string + + dir = filepath.Clean(dir) + + err := filepath.Walk(dir, func(file string, info os.FileInfo, err error) error { + if err != nil { + return err + } + + if info.IsDir() { + if dir == file { + // Don't skip the top-level directory. + return nil + } + + switch info.Name() { + // Skip vendor and node directories. + case "vendor", "node_modules": + return filepath.SkipDir + + // Skip VCS directories. + case ".bzr", ".git", ".hg", ".svn": + return filepath.SkipDir + } + + // Skip submodules (directories containing go.mod files). + if goModInfo, err := os.Lstat(filepath.Join(dir, "go.mod")); err == nil && !goModInfo.IsDir() { + return filepath.SkipDir + } + + return nil + } + + if file == dir { + return fmt.Errorf("%s is not a directory", dir) + } + + if !info.Mode().IsRegular() { + return nil + } + + rel := file + + if dir != "." 
{ + rel = file[len(dir)+1:] + } + + f := filepath.Join(prefix, rel) + + files = append(files, filepath.ToSlash(f)) + + return nil + }) + if err != nil { + return nil, err + } + return files, nil +} diff --git a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/config/linters_settings.go b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/config/linters_settings.go index a33a70bf..ba60dd2c 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/config/linters_settings.go +++ b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/config/linters_settings.go @@ -131,6 +131,7 @@ var defaultLintersSettings = LintersSettings{ StrConcat: true, BoolFormat: true, HexFormat: true, + ConcatLoop: true, }, Prealloc: PreallocSettings{ Simple: true, @@ -269,6 +270,7 @@ type LintersSettings struct { Makezero MakezeroSettings `mapstructure:"makezero"` Misspell MisspellSettings `mapstructure:"misspell"` Mnd MndSettings `mapstructure:"mnd"` + Modernize ModernizeSettings `mapstructure:"modernize"` MustTag MustTagSettings `mapstructure:"musttag"` Nakedret NakedretSettings `mapstructure:"nakedret"` Nestif NestifSettings `mapstructure:"nestif"` @@ -384,8 +386,9 @@ type DuplSettings struct { } type DupWordSettings struct { - Keywords []string `mapstructure:"keywords"` - Ignore []string `mapstructure:"ignore"` + Keywords []string `mapstructure:"keywords"` + Ignore []string `mapstructure:"ignore"` + CommentsOnly bool `mapstructure:"comments-only"` } type EmbeddedStructFieldCheckSettings struct { @@ -758,6 +761,10 @@ type MndSettings struct { IgnoredFunctions []string `mapstructure:"ignored-functions"` } +type ModernizeSettings struct { + Disable []string `mapstructure:"disable"` +} + type NoLintLintSettings struct { RequireExplanation bool `mapstructure:"require-explanation"` RequireSpecific bool `mapstructure:"require-specific"` @@ -789,6 +796,9 @@ type PerfSprintSettings struct { BoolFormat bool `mapstructure:"bool-format"` HexFormat bool `mapstructure:"hex-format"` + + ConcatLoop bool `mapstructure:"concat-loop"` + LoopOtherOps bool `mapstructure:"loop-other-ops"` } type PreallocSettings struct { diff --git a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/goanalysis/runner_loadingpackage.go b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/goanalysis/runner_loadingpackage.go index aefcf69f..217803bb 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/goanalysis/runner_loadingpackage.go +++ b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/goanalysis/runner_loadingpackage.go @@ -63,15 +63,13 @@ func (lp *loadingPackage) analyzeRecursive(ctx context.Context, cancel context.C } func (lp *loadingPackage) analyze(ctx context.Context, cancel context.CancelFunc, loadMode LoadMode, loadSem chan struct{}) { - loadSem <- struct{}{} - defer func() { - <-loadSem - }() - select { case <-ctx.Done(): return - default: + case loadSem <- struct{}{}: + defer func() { + <-loadSem + }() } // Save memory on unused more fields. 
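The runner_loadingpackage.go hunk above replaces an unconditional semaphore send with a select against ctx.Done(), so a cancelled run no longer has to wait for a load slot before noticing the cancellation. A self-contained sketch of the same pattern under an invented timeout:

package main

import (
	"context"
	"fmt"
	"time"
)

// acquire takes a slot from sem, or gives up as soon as ctx is cancelled,
// mirroring the select-based acquisition in the hunk above.
func acquire(ctx context.Context, sem chan struct{}) bool {
	select {
	case <-ctx.Done():
		return false
	case sem <- struct{}{}:
		return true
	}
}

func main() {
	sem := make(chan struct{}, 1)
	sem <- struct{}{} // the only slot is already taken

	ctx, cancel := context.WithTimeout(context.Background(), 50*time.Millisecond)
	defer cancel()

	fmt.Println("acquired:", acquire(ctx, sem)) // prints "acquired: false"
}

On release the holder drains the slot with <-sem, which the patched code does in the deferred function once the slot has actually been taken.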
diff --git a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/goformat/runner.go b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/goformat/runner.go index f8762615..650fb8f5 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/goformat/runner.go +++ b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/goformat/runner.go @@ -223,8 +223,14 @@ func NewRunnerOptions(cfg *config.Config, diff, diffColored, stdin bool) (Runner return RunnerOptions{}, fmt.Errorf("get base path: %w", err) } + // Required to be consistent with `RunnerOptions.MatchAnyPattern`. + absBasePath, err := filepath.Abs(basePath) + if err != nil { + return RunnerOptions{}, err + } + opts := RunnerOptions{ - basePath: basePath, + basePath: absBasePath, generated: cfg.Formatters.Exclusions.Generated, diff: diff || diffColored, colors: diffColored, @@ -251,7 +257,12 @@ func (o RunnerOptions) MatchAnyPattern(path string) (bool, error) { return false, nil } - rel, err := filepath.Rel(o.basePath, path) + abs, err := filepath.Abs(path) + if err != nil { + return false, err + } + + rel, err := filepath.Rel(o.basePath, abs) if err != nil { return false, err } @@ -272,7 +283,7 @@ func skipDir(name string) bool { return true default: - return strings.HasPrefix(name, ".") + return strings.HasPrefix(name, ".") && name != "." } } diff --git a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/dupword/dupword.go b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/dupword/dupword.go index 7b989bc2..0308534e 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/dupword/dupword.go +++ b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/dupword/dupword.go @@ -14,8 +14,9 @@ func New(settings *config.DupWordSettings) *goanalysis.Linter { if settings != nil { cfg = map[string]any{ - "keyword": strings.Join(settings.Keywords, ","), - "ignore": strings.Join(settings.Ignore, ","), + "keyword": strings.Join(settings.Keywords, ","), + "ignore": strings.Join(settings.Ignore, ","), + "comments-only": settings.CommentsOnly, } } diff --git a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/govet/govet.go b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/govet/govet.go index e4de5b0f..7755e4ec 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/govet/govet.go +++ b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/govet/govet.go @@ -109,7 +109,7 @@ var ( waitgroup.Analyzer, } - // https://github.com/golang/go/blob/go1.23.0/src/cmd/vet/main.go#L55-L87 + // https://github.com/golang/go/blob/go1.25.2/src/cmd/vet/main.go#L57-L91 defaultAnalyzers = []*analysis.Analyzer{ appends.Analyzer, asmdecl.Analyzer, @@ -124,6 +124,7 @@ var ( directive.Analyzer, errorsas.Analyzer, framepointer.Analyzer, + hostport.Analyzer, httpresponse.Analyzer, ifaceassert.Analyzer, loopclosure.Analyzer, @@ -144,6 +145,7 @@ var ( unreachable.Analyzer, unsafeptr.Analyzer, unusedresult.Analyzer, + waitgroup.Analyzer, } ) diff --git a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/modernize/modernize.go b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/modernize/modernize.go new file mode 100644 index 00000000..97825c07 --- /dev/null +++ b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/modernize/modernize.go @@ -0,0 +1,34 @@ +package modernize + +import ( + "slices" + + "github.com/golangci/golangci-lint/v2/pkg/config" + "github.com/golangci/golangci-lint/v2/pkg/goanalysis" + + 
"golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/modernize" +) + +func New(settings *config.ModernizeSettings) *goanalysis.Linter { + var analyzers []*analysis.Analyzer + + if settings == nil { + analyzers = modernize.Suite + } else { + for _, analyzer := range modernize.Suite { + if slices.Contains(settings.Disable, analyzer.Name) { + continue + } + + analyzers = append(analyzers, analyzer) + } + } + + return goanalysis.NewLinter( + "modernize", + "A suite of analyzers that suggest simplifications to Go code, using modern language and library features.", + analyzers, + nil). + WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/nilerr/nilerr.go b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/nilerr/nilerr.go index f49592aa..fa5dcdc2 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/nilerr/nilerr.go +++ b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/nilerr/nilerr.go @@ -1,7 +1,7 @@ package nilerr import ( - "github.com/golangci/nilerr" + "github.com/gostaticanalysis/nilerr" "github.com/golangci/golangci-lint/v2/pkg/goanalysis" ) diff --git a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/perfsprint/perfsprint.go b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/perfsprint/perfsprint.go index 9684c27c..e06b5c2a 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/perfsprint/perfsprint.go +++ b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/golinters/perfsprint/perfsprint.go @@ -28,6 +28,9 @@ func New(settings *config.PerfSprintSettings) *goanalysis.Linter { cfg["bool-format"] = settings.BoolFormat cfg["hex-format"] = settings.HexFormat + + cfg["concat-loop"] = settings.ConcatLoop + cfg["loop-other-ops"] = settings.LoopOtherOps } return goanalysis. diff --git a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/lint/lintersdb/builder_linter.go b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/lint/lintersdb/builder_linter.go index 4250679e..6e9cef1d 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/lint/lintersdb/builder_linter.go +++ b/tools/vendor/github.com/golangci/golangci-lint/v2/pkg/lint/lintersdb/builder_linter.go @@ -73,6 +73,7 @@ import ( "github.com/golangci/golangci-lint/v2/pkg/golinters/mirror" "github.com/golangci/golangci-lint/v2/pkg/golinters/misspell" "github.com/golangci/golangci-lint/v2/pkg/golinters/mnd" + "github.com/golangci/golangci-lint/v2/pkg/golinters/modernize" "github.com/golangci/golangci-lint/v2/pkg/golinters/musttag" "github.com/golangci/golangci-lint/v2/pkg/golinters/nakedret" "github.com/golangci/golangci-lint/v2/pkg/golinters/nestif" @@ -382,6 +383,11 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) { WithSince("v1.22.0"). WithURL("https://github.com/tommy-muehle/go-mnd"), + linter.NewConfig(modernize.New(&cfg.Linters.Settings.Modernize)). + WithSince("v2.6.0"). + WithLoadForGoAnalysis(). + WithURL("https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize"), + linter.NewConfig(gomoddirectives.New(&cfg.Linters.Settings.GoModDirectives)). WithSince("v1.39.0"). WithURL("https://github.com/ldez/gomoddirectives"), @@ -449,7 +455,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) { linter.NewConfig(iotamixing.New(&cfg.Linters.Settings.IotaMixing)). WithSince("v2.5.0"). 
- WithURL("github.com/AdminBenni/iota-mixing"), + WithURL("https://github.com/AdminBenni/iota-mixing"), linter.NewConfig(ireturn.New(&cfg.Linters.Settings.Ireturn)). WithSince("v1.43.0"). diff --git a/tools/vendor/github.com/golangci/nilerr/.gitignore b/tools/vendor/github.com/golangci/nilerr/.gitignore deleted file mode 100644 index cca36a5f..00000000 --- a/tools/vendor/github.com/golangci/nilerr/.gitignore +++ /dev/null @@ -1,30 +0,0 @@ -# Test binary, built with `go test -c` -*.test - -# Output of the go coverage tool -*.out -coverage.html - -# IDE files -.idea/ -.vscode/ -*.swp -*.swo -*~ - -# OS files -.DS_Store -.DS_Store? -._* -.Spotlight-V100 -.Trashes -ehthumbs.db -Thumbs.db - -# Temporary files -*.tmp -*.log - -# Binaries for programs and plugins -*.exe -/nilerr diff --git a/tools/vendor/github.com/golangci/nilerr/Makefile b/tools/vendor/github.com/golangci/nilerr/Makefile deleted file mode 100644 index 7ca9bd22..00000000 --- a/tools/vendor/github.com/golangci/nilerr/Makefile +++ /dev/null @@ -1,12 +0,0 @@ -.PHONY: clean test build - -default: clean test build - -clean: - rm -rf dist/ cover.out - -test: clean - go test -v -cover ./... - -build: - go build -ldflags "-s -w" -trimpath ./cmd/nilerr/ diff --git a/tools/vendor/github.com/golangci/nilerr/LICENSE b/tools/vendor/github.com/gostaticanalysis/nilerr/LICENSE similarity index 100% rename from tools/vendor/github.com/golangci/nilerr/LICENSE rename to tools/vendor/github.com/gostaticanalysis/nilerr/LICENSE diff --git a/tools/vendor/github.com/golangci/nilerr/README.md b/tools/vendor/github.com/gostaticanalysis/nilerr/README.md similarity index 100% rename from tools/vendor/github.com/golangci/nilerr/README.md rename to tools/vendor/github.com/gostaticanalysis/nilerr/README.md diff --git a/tools/vendor/github.com/golangci/nilerr/nilerr.go b/tools/vendor/github.com/gostaticanalysis/nilerr/nilerr.go similarity index 95% rename from tools/vendor/github.com/golangci/nilerr/nilerr.go rename to tools/vendor/github.com/gostaticanalysis/nilerr/nilerr.go index 240de6de..4615e6d1 100644 --- a/tools/vendor/github.com/golangci/nilerr/nilerr.go +++ b/tools/vendor/github.com/gostaticanalysis/nilerr/nilerr.go @@ -25,7 +25,7 @@ var Analyzer = &analysis.Analyzer{ const Doc = "nilerr checks returning nil when err is not nil" -func run(pass *analysis.Pass) (any, error) { +func run(pass *analysis.Pass) (interface{}, error) { funcs := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs cmaps := pass.ResultOf[commentmap.Analyzer].(comment.Maps) @@ -67,6 +67,8 @@ func run(pass *analysis.Pass) (any, error) { return nil, nil } +// getValueLineNumbers returns the line numbers. +// `seen` is used to avoid infinite loop. func getValueLineNumbers(pass *analysis.Pass, v ssa.Value, seen map[string]struct{}) []int { if phi, ok := v.(*ssa.Phi); ok { result := make([]int, 0, len(phi.Edges)) @@ -74,6 +76,7 @@ func getValueLineNumbers(pass *analysis.Pass, v ssa.Value, seen map[string]struc for _, edge := range phi.Edges { if _, ok := seen[edge.Name()]; ok { if edge.Pos() == token.NoPos { + // Skip elements without a position. 
continue } @@ -194,8 +197,10 @@ func isReturnError(b *ssa.BasicBlock, errVal ssa.Value) *ssa.Return { return nil } - if slices.Contains(ret.Results, errVal) { - return ret + for _, v := range ret.Results { + if v == errVal { + return ret + } } return nil diff --git a/tools/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go b/tools/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go index aebbe70e..6f59e840 100644 --- a/tools/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go +++ b/tools/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go @@ -13,7 +13,8 @@ import ( const Doc = `check that tests use t.Parallel() method It also checks that the t.Parallel is used if multiple tests cases are run as part of single test. As part of ensuring parallel tests works as expected it checks for reinitializing of the range value -over the test cases.(https://tinyurl.com/y6555cy6)` +over the test cases.(https://tinyurl.com/y6555cy6) +With the -checkcleanup flag, it also checks that defer is not used with t.Parallel (use t.Cleanup instead).` func NewAnalyzer() *analysis.Analyzer { return newParallelAnalyzer().analyzer @@ -27,6 +28,7 @@ type parallelAnalyzer struct { ignoreMissing bool ignoreMissingSubtests bool ignoreLoopVar bool + checkCleanup bool } func newParallelAnalyzer() *parallelAnalyzer { @@ -36,6 +38,7 @@ func newParallelAnalyzer() *parallelAnalyzer { flags.BoolVar(&a.ignoreMissing, "i", false, "ignore missing calls to t.Parallel") flags.BoolVar(&a.ignoreMissingSubtests, "ignoremissingsubtests", false, "ignore missing calls to t.Parallel in subtests") flags.BoolVar(&a.ignoreLoopVar, "ignoreloopVar", false, "ignore loop variable detection") + flags.BoolVar(&a.checkCleanup, "checkcleanup", false, "check that defer is not used with t.Parallel (use t.Cleanup instead)") a.analyzer = &analysis.Analyzer{ Name: "paralleltest", @@ -51,11 +54,13 @@ type testFunctionAnalysis struct { funcCantParallelMethod, rangeStatementOverTestCasesExists, rangeStatementHasParallelMethod, - rangeStatementCantParallelMethod bool + rangeStatementCantParallelMethod, + funcHasDeferStatement bool loopVariableUsedInRun *string numberOfTestRun int positionOfTestRunNode []ast.Node rangeNode ast.Node + deferStatements []ast.Node } type testRunAnalysis struct { @@ -84,6 +89,7 @@ func (a *parallelAnalyzer) analyzeTestRun(pass *analysis.Pass, n ast.Node, testV return true }) } else if ident, ok := callExpr.Args[1].(*ast.Ident); ok { + // Case 2: Direct function identifier: t.Run("name", myFunc) foundFunc := false for _, file := range pass.Files { for _, decl := range file.Decls { @@ -104,6 +110,9 @@ func (a *parallelAnalyzer) analyzeTestRun(pass *analysis.Pass, n ast.Node, testV if !foundFunc { analysis.hasParallel = false } + } else if builderCall, ok := callExpr.Args[1].(*ast.CallExpr); ok { + // Case 3: Function call that returns a function: t.Run("name", builder()) + analysis.hasParallel = a.checkBuilderFunctionForParallel(pass, builderCall) } } @@ -126,6 +135,12 @@ func (a *parallelAnalyzer) analyzeTestFunction(pass *analysis.Pass, funcDecl *as for _, l := range funcDecl.Body.List { switch v := l.(type) { + case *ast.DeferStmt: + if a.checkCleanup { + analysis.funcHasDeferStatement = true + analysis.deferStatements = append(analysis.deferStatements, v) + } + case *ast.ExprStmt: ast.Inspect(v, func(n ast.Node) bool { if !analysis.funcHasParallelMethod { @@ -211,6 +226,89 @@ func (a *parallelAnalyzer) analyzeTestFunction(pass 
*analysis.Pass, funcDecl *as } } } + + if a.checkCleanup && analysis.funcHasParallelMethod && analysis.funcHasDeferStatement { + for _, deferStmt := range analysis.deferStatements { + pass.Reportf(deferStmt.Pos(), "Function %s uses defer with t.Parallel, use t.Cleanup instead to ensure cleanup runs after parallel subtests complete", funcDecl.Name.Name) + } + } +} + +// checkBuilderFunctionForParallel analyzes a function call that returns a test function +// to see if the returned function contains t.Parallel() +func (a *parallelAnalyzer) checkBuilderFunctionForParallel(pass *analysis.Pass, builderCall *ast.CallExpr) bool { + // Get the name of the builder function being called + var builderFuncName string + switch fun := builderCall.Fun.(type) { + case *ast.Ident: + builderFuncName = fun.Name + case *ast.SelectorExpr: + // Handle method calls like obj.Builder() + builderFuncName = fun.Sel.Name + default: + return false + } + + if builderFuncName == "" { + return false + } + + // Find the builder function declaration + for _, file := range pass.Files { + for _, decl := range file.Decls { + funcDecl, ok := decl.(*ast.FuncDecl) + if !ok || funcDecl.Name.Name != builderFuncName { + continue + } + + // Found the builder function, analyze it and return immediately + hasParallel := false + ast.Inspect(funcDecl, func(n ast.Node) bool { + // Look for return statements + returnStmt, ok := n.(*ast.ReturnStmt) + if !ok || len(returnStmt.Results) == 0 { + return true + } + + // Check if the return value is a function literal + for _, result := range returnStmt.Results { + if funcLit, ok := result.(*ast.FuncLit); ok { + // Get the parameter name from the returned function + var paramName string + if funcLit.Type != nil && funcLit.Type.Params != nil && len(funcLit.Type.Params.List) > 0 { + param := funcLit.Type.Params.List[0] + if len(param.Names) > 0 { + paramName = param.Names[0].Name + } + } + + // Inspect the returned function for t.Parallel() + if paramName != "" { + ast.Inspect(funcLit, func(p ast.Node) bool { + if methodParallelIsCalledInTestFunction(p, paramName) { + hasParallel = true + return false + } + return true + }) + + // Exit inspection immediately if we found t.Parallel() + if hasParallel { + return false + } + } + } + } + // Continue to next return statement if t.Parallel() not found yet + return true + }) + + // Return immediately after processing the matching function + return hasParallel + } + } + + return false } func (a *parallelAnalyzer) run(pass *analysis.Pass) (interface{}, error) { diff --git a/tools/vendor/github.com/ldez/exptostd/.golangci.yml b/tools/vendor/github.com/ldez/exptostd/.golangci.yml index e45135c6..2675c286 100644 --- a/tools/vendor/github.com/ldez/exptostd/.golangci.yml +++ b/tools/vendor/github.com/ldez/exptostd/.golangci.yml @@ -27,6 +27,7 @@ linters: - testpackage - tparallel - varnamelen + - wsl # deprecated settings: depguard: rules: diff --git a/tools/vendor/github.com/ldez/exptostd/exptostd.go b/tools/vendor/github.com/ldez/exptostd/exptostd.go index 7205816b..aa5dc5ad 100644 --- a/tools/vendor/github.com/ldez/exptostd/exptostd.go +++ b/tools/vendor/github.com/ldez/exptostd/exptostd.go @@ -267,46 +267,54 @@ func (a *analyzer) detectPackageUsage(pass *analysis.Pass, } func (a *analyzer) detectConstraintsUsage(pass *analysis.Pass, expr ast.Expr, result *Result, goVersion int) { - selExpr, ok := expr.(*ast.SelectorExpr) - if !ok { - return - } - - ident, ok := selExpr.X.(*ast.Ident) - if !ok { - return - } + switch selExpr := expr.(type) { + case 
*ast.SelectorExpr: + ident, ok := selExpr.X.(*ast.Ident) + if !ok { + return + } - if !isPackageUsed(pass, ident, pkgExpConstraints) { - return - } + if !isPackageUsed(pass, ident, pkgExpConstraints) { + return + } - rp, ok := a.constraintsPkgReplacements[selExpr.Sel.Name] - if !ok { - result.shouldKeepImport = true - return - } + rp, ok := a.constraintsPkgReplacements[selExpr.Sel.Name] + if !ok { + result.shouldKeepImport = true + return + } - if !a.skipGoVersionDetection && rp.MinGo > goVersion { - result.shouldKeepImport = true - return - } + if !a.skipGoVersionDetection && rp.MinGo > goVersion { + result.shouldKeepImport = true + return + } - diagnostic := analysis.Diagnostic{ - Pos: selExpr.Pos(), - Message: fmt.Sprintf("%s.%s can be replaced by %s", pkgExpConstraints, selExpr.Sel.Name, rp.Text), - } + diagnostic := analysis.Diagnostic{ + Pos: selExpr.Pos(), + Message: fmt.Sprintf("%s.%s can be replaced by %s", pkgExpConstraints, selExpr.Sel.Name, rp.Text), + } - if rp.Suggested != nil { - fix, err := rp.Suggested(selExpr) - if err != nil { - diagnostic.Message = fmt.Sprintf("Suggested fix error: %v", err) - } else { - diagnostic.SuggestedFixes = append(diagnostic.SuggestedFixes, fix) + if rp.Suggested != nil { + fix, err := rp.Suggested(selExpr) + if err != nil { + diagnostic.Message = fmt.Sprintf("Suggested fix error: %v", err) + } else { + diagnostic.SuggestedFixes = append(diagnostic.SuggestedFixes, fix) + } } - } - pass.Report(diagnostic) + pass.Report(diagnostic) + + case *ast.BinaryExpr: + a.detectConstraintsUsage(pass, selExpr.X, result, goVersion) + a.detectConstraintsUsage(pass, selExpr.Y, result, goVersion) + + case *ast.UnaryExpr: + a.detectConstraintsUsage(pass, selExpr.X, result, goVersion) + + default: + return + } } func (a *analyzer) suggestReplaceImport(pass *analysis.Pass, imports map[string]*ast.ImportSpec, shouldKeep bool, importPath, stdPackage string) { @@ -320,7 +328,7 @@ func (a *analyzer) suggestReplaceImport(pass *analysis.Pass, imports map[string] pass.Report(analysis.Diagnostic{ Pos: imp.Pos(), End: imp.End(), - Message: fmt.Sprintf("Import statement '%s' can be replaced by '%s'", src, stdPackage), + Message: fmt.Sprintf("Import statement '%s' may be replaced by '%s'", src, stdPackage), SuggestedFixes: []analysis.SuggestedFix{{ TextEdits: []analysis.TextEdit{{ Pos: imp.Path.Pos(), diff --git a/tools/vendor/github.com/ldez/gomoddirectives/.golangci.yml b/tools/vendor/github.com/ldez/gomoddirectives/.golangci.yml index 880eb12c..8eb11ff0 100644 --- a/tools/vendor/github.com/ldez/gomoddirectives/.golangci.yml +++ b/tools/vendor/github.com/ldez/gomoddirectives/.golangci.yml @@ -28,6 +28,7 @@ linters: - tparallel - varnamelen - wrapcheck + - wsl # deprecated settings: depguard: diff --git a/tools/vendor/github.com/ldez/gomoddirectives/readme.md b/tools/vendor/github.com/ldez/gomoddirectives/readme.md index 355472f6..054af462 100644 --- a/tools/vendor/github.com/ldez/gomoddirectives/readme.md +++ b/tools/vendor/github.com/ldez/gomoddirectives/readme.md @@ -35,9 +35,9 @@ linters: # Default: false exclude-forbidden: true - # Forbid the use of the `ignore` directives. + # Forbid the use of the `ignore` directives (go >= 1.25). # Default: false - exclude-ignore: true + ignore-forbidden: true # Forbid the use of the `toolchain` directive. 
# Default: false diff --git a/tools/vendor/github.com/maratori/testableexamples/pkg/testableexamples/testableexamples.go b/tools/vendor/github.com/maratori/testableexamples/pkg/testableexamples/testableexamples.go index 26d22c70..91a8509d 100644 --- a/tools/vendor/github.com/maratori/testableexamples/pkg/testableexamples/testableexamples.go +++ b/tools/vendor/github.com/maratori/testableexamples/pkg/testableexamples/testableexamples.go @@ -13,7 +13,7 @@ func NewAnalyzer() *analysis.Analyzer { return &analysis.Analyzer{ Name: "testableexamples", Doc: "linter checks if examples are testable (have an expected output)", - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { testFiles := make([]*ast.File, 0, len(pass.Files)) for _, file := range pass.Files { fileName := pass.Fset.File(file.Pos()).Name() diff --git a/tools/vendor/github.com/maratori/testpackage/pkg/testpackage/testpackage.go b/tools/vendor/github.com/maratori/testpackage/pkg/testpackage/testpackage.go index 2e057297..6eebabc2 100644 --- a/tools/vendor/github.com/maratori/testpackage/pkg/testpackage/testpackage.go +++ b/tools/vendor/github.com/maratori/testpackage/pkg/testpackage/testpackage.go @@ -2,11 +2,11 @@ package testpackage import ( "flag" + "go/ast" "regexp" + "slices" "strings" - "go/ast" - "golang.org/x/tools/go/analysis" ) @@ -25,10 +25,8 @@ const ( func processTestFile(pass *analysis.Pass, f *ast.File, allowedPackages []string) { packageName := f.Name.Name - for _, p := range allowedPackages { - if p == packageName { - return - } + if slices.Contains(allowedPackages, packageName) { + return } if !strings.HasSuffix(packageName, "_test") { @@ -51,7 +49,7 @@ func NewAnalyzer() *analysis.Analyzer { Name: "testpackage", Doc: "linter that makes you use a separate _test package", Flags: fs, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { allowedPackages := strings.Split(allowPackagesStr, ",") skipFile, err := regexp.Compile(skipFileRegexp) if err != nil { diff --git a/tools/vendor/github.com/nunnatsa/ginkgolinter/Makefile b/tools/vendor/github.com/nunnatsa/ginkgolinter/Makefile index d34bb341..647ca168 100644 --- a/tools/vendor/github.com/nunnatsa/ginkgolinter/Makefile +++ b/tools/vendor/github.com/nunnatsa/ginkgolinter/Makefile @@ -13,12 +13,15 @@ unit-test: build-for-windows: GOOS=windows GOARCH=amd64 go build $(BUILD_ARGS) -o bin/ginkgolinter-amd64.exe ./cmd/ginkgolinter + GOOS=windows GOARCH=arm64 go build $(BUILD_ARGS) -o bin/ginkgolinter-arm64.exe ./cmd/ginkgolinter build-for-mac: GOOS=darwin GOARCH=amd64 go build $(BUILD_ARGS) -o bin/ginkgolinter-amd64-darwin ./cmd/ginkgolinter + GOOS=darwin GOARCH=arm64 go build $(BUILD_ARGS) -o bin/ginkgolinter-arm64-darwin ./cmd/ginkgolinter build-for-linux: GOOS=linux GOARCH=amd64 go build $(BUILD_ARGS) -o bin/ginkgolinter-amd64-linux ./cmd/ginkgolinter + GOOS=linux GOARCH=arm64 go build $(BUILD_ARGS) -o bin/ginkgolinter-arm64-linux ./cmd/ginkgolinter GOOS=linux GOARCH=386 go build $(BUILD_ARGS) -o bin/ginkgolinter-386-linux ./cmd/ginkgolinter build-all: build build-for-linux build-for-mac build-for-windows diff --git a/tools/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/caprule.go b/tools/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/caprule.go index 246ecbce..aafa93f4 100644 --- a/tools/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/caprule.go +++ b/tools/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/caprule.go @@ -113,20 +113,17 @@ func (r 
*CapRule) fixComparison(gexp *expression.GomegaExpression) bool { return true } -func (r *CapRule) handleBeNumerically(gexp *expression.GomegaExpression, matcher *matcher.BeNumericallyMatcher) bool { - op := matcher.GetOp() - val := matcher.GetValue() - isValZero := val.String() == "0" - isValOne := val.String() == "1" +func (r *CapRule) handleBeNumerically(gexp *expression.GomegaExpression, mtchr *matcher.BeNumericallyMatcher) bool { + op := mtchr.GetOp() - if (op == token.GTR && isValZero) || (op == token.GEQ && isValOne) { - gexp.ReverseAssertionFuncLogic() - gexp.SetMatcherCapZero() - } else if op == token.EQL { - gexp.SetMatcherCap(matcher.GetValueExpr()) + if op == token.EQL { + gexp.SetMatcherCap(mtchr.GetValueExpr()) } else if op == token.NEQ { gexp.ReverseAssertionFuncLogic() - gexp.SetMatcherCap(matcher.GetValueExpr()) + gexp.SetMatcherCap(mtchr.GetValueExpr()) + } else if gexp.MatcherTypeIs(matcher.GreaterThanZero) { + gexp.ReverseAssertionFuncLogic() + gexp.SetMatcherCapZero() } else { return false } diff --git a/tools/vendor/github.com/quasilyte/go-ruleguard/dsl/dsl.go b/tools/vendor/github.com/quasilyte/go-ruleguard/dsl/dsl.go index d3c73bdd..f46f721a 100644 --- a/tools/vendor/github.com/quasilyte/go-ruleguard/dsl/dsl.go +++ b/tools/vendor/github.com/quasilyte/go-ruleguard/dsl/dsl.go @@ -16,6 +16,10 @@ type Matcher map[string]Var // `a/b/foo.Bar` type during the pattern execution. func (m Matcher) Import(pkgPath string) {} +// ImportAs is like Import, but can handle "/v2" packages +// and package name conflicts (e.g. "x/path" vs "y/path"). +func (m Matcher) ImportAs(pkgPath, localName string) {} + // Match specifies a set of patterns that match a rule being defined. // Pattern matching succeeds if at least 1 pattern matches. // @@ -106,7 +110,7 @@ type Var struct { Const bool // ConstSlice reports whether expr matched by var is a slice literal - // consisting of contant elements. + // consisting of constant elements. 
// // We need a separate Const-like predicate here because Go doesn't // treat slices of const elements as constants, so including diff --git a/tools/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go b/tools/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go index cc40506a..d970d772 100644 --- a/tools/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go +++ b/tools/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go @@ -7,6 +7,7 @@ import ( "go/token" "go/types" "path" + "regexp" "strconv" "strings" @@ -41,10 +42,11 @@ func ConvertFile(ctx *Context, f *ast.File) (result *ir.File, err error) { }() conv := &converter{ - types: ctx.Types, - pkg: ctx.Pkg, - fset: ctx.Fset, - src: ctx.Src, + types: ctx.Types, + pkg: ctx.Pkg, + fset: ctx.Fset, + src: ctx.Src, + versionPathRe: regexp.MustCompile(`^v[0-9]+$`), } result = conv.ConvertFile(f) return result, nil @@ -66,6 +68,8 @@ type converter struct { fset *token.FileSet src []byte + versionPathRe *regexp.Regexp + group *ir.RuleGroup groupFuncs []localMacroFunc @@ -224,6 +228,11 @@ func (conv *converter) convertRuleGroup(decl *ast.FuncDecl) *ir.RuleGroup { panic(conv.errorf(call, "Import() should be used before any rules definitions")) } conv.doMatcherImport(call) + case "ImportAs": + if seenRules { + panic(conv.errorf(call, "ImportAs() should be used before any rules definitions")) + } + conv.doMatcherImportAs(call) default: seenRules = true conv.convertRuleExpr(call) @@ -375,7 +384,24 @@ func (conv *converter) localDefine(assign *ast.AssignStmt) { func (conv *converter) doMatcherImport(call *ast.CallExpr) { pkgPath := conv.parseStringArg(call.Args[0]) + + // Try to be at least somewhat module-aware. + // If the last path part is "/v%d", we might want to take + // the previous path part as a package name. pkgName := path.Base(pkgPath) + if conv.versionPathRe.MatchString(pkgName) { + pkgName = path.Base(path.Dir(pkgPath)) + } + + conv.group.Imports = append(conv.group.Imports, ir.PackageImport{ + Path: pkgPath, + Name: pkgName, + }) +} + +func (conv *converter) doMatcherImportAs(call *ast.CallExpr) { + pkgPath := conv.parseStringArg(call.Args[0]) + pkgName := conv.parseStringArg(call.Args[1]) conv.group.Imports = append(conv.group.Imports, ir.PackageImport{ Path: pkgPath, Name: pkgName, diff --git a/tools/vendor/github.com/securego/gosec/v2/Makefile b/tools/vendor/github.com/securego/gosec/v2/Makefile index b23ba9d6..d9880bbf 100644 --- a/tools/vendor/github.com/securego/gosec/v2/Makefile +++ b/tools/vendor/github.com/securego/gosec/v2/Makefile @@ -14,7 +14,6 @@ GO := GO111MODULE=on go GOPATH ?= $(shell $(GO) env GOPATH) GOBIN ?= $(GOPATH)/bin GOSEC ?= $(GOBIN)/gosec -GINKGO ?= $(GOBIN)/ginkgo GO_MINOR_VERSION = $(shell $(GO) version | cut -c 14- | cut -d' ' -f1 | cut -d'.' 
-f2) GOVULN_MIN_VERSION = 17 GO_VERSION = 1.25 @@ -26,18 +25,13 @@ LDFLAGS = -ldflags "\ default: $(MAKE) build -install-test-deps: - go install github.com/onsi/ginkgo/v2/ginkgo@latest - go install golang.org/x/crypto/...@latest - go install github.com/lib/pq/...@latest - install-govulncheck: @if [ $(GO_MINOR_VERSION) -gt $(GOVULN_MIN_VERSION) ]; then \ go install golang.org/x/vuln/cmd/govulncheck@latest; \ fi -test: install-test-deps build-race fmt vet sec govulncheck - $(GINKGO) -v --fail-fast +test: build-race fmt vet sec govulncheck + go run github.com/onsi/ginkgo/v2/ginkgo -- --ginkgo.v --ginkgo.fail-fast fmt: @echo "FORMATTING" @@ -62,7 +56,7 @@ govulncheck: install-govulncheck govulncheck ./...; \ fi -test-coverage: install-test-deps +test-coverage: go test -race -v -count=1 -coverprofile=coverage.out ./... build: diff --git a/tools/vendor/github.com/securego/gosec/v2/action.yml b/tools/vendor/github.com/securego/gosec/v2/action.yml index 85e6cda2..b19d17ee 100644 --- a/tools/vendor/github.com/securego/gosec/v2/action.yml +++ b/tools/vendor/github.com/securego/gosec/v2/action.yml @@ -1,19 +1,19 @@ -name: 'Gosec Security Checker' -description: 'Runs the gosec security checker' -author: '@ccojocar' +name: "Gosec Security Checker" +description: "Runs the gosec security checker" +author: "@ccojocar" inputs: args: - description: 'Arguments for gosec' + description: "Arguments for gosec" required: true - default: '-h' + default: "-h" runs: - using: 'docker' - image: 'docker://securego/gosec:2.22.7' - args: - - ${{ inputs.args }} + using: "docker" + image: "docker://securego/gosec:2.22.9" + args: + - ${{ inputs.args }} branding: - icon: 'shield' - color: 'blue' + icon: "shield" + color: "blue" diff --git a/tools/vendor/github.com/securego/gosec/v2/analyzer.go b/tools/vendor/github.com/securego/gosec/v2/analyzer.go index 34ac82b5..371904b6 100644 --- a/tools/vendor/github.com/securego/gosec/v2/analyzer.go +++ b/tools/vendor/github.com/securego/gosec/v2/analyzer.go @@ -28,6 +28,7 @@ import ( "path/filepath" "reflect" "regexp" + "runtime/debug" "strconv" "strings" "sync" @@ -413,7 +414,17 @@ func (gosec *Analyzer) CheckRules(pkg *packages.Package) { func (gosec *Analyzer) CheckAnalyzers(pkg *packages.Package) { ssaResult, err := gosec.buildSSA(pkg) if err != nil || ssaResult == nil { - gosec.logger.Printf("Error building the SSA representation of the package %q: %s", pkg.Name, err) + errMessage := "Error building the SSA representation of the package " + pkg.Name + ": " + if err != nil { + errMessage += err.Error() + } + if ssaResult == nil { + if err != nil { + errMessage += ", " + } + errMessage += "no ssa result" + } + gosec.logger.Print(errMessage) return } @@ -485,7 +496,10 @@ func (gosec *Analyzer) generatedFiles(pkg *packages.Package) map[string]bool { func (gosec *Analyzer) buildSSA(pkg *packages.Package) (interface{}, error) { defer func() { if r := recover(); r != nil { - gosec.logger.Printf("Panic when running SSA analyser on package: %s", pkg.Name) + gosec.logger.Printf( + "Panic when running SSA analyzer on package: %s. 
Panic: %v\nStack trace:\n%s", + pkg.Name, r, debug.Stack(), + ) } }() ssaPass := &analysis.Pass{ diff --git a/tools/vendor/github.com/securego/gosec/v2/analyzers/slice_bounds.go b/tools/vendor/github.com/securego/gosec/v2/analyzers/slice_bounds.go index 968102f2..2347af07 100644 --- a/tools/vendor/github.com/securego/gosec/v2/analyzers/slice_bounds.go +++ b/tools/vendor/github.com/securego/gosec/v2/analyzers/slice_bounds.go @@ -81,29 +81,64 @@ func runSliceBounds(pass *analysis.Pass) (interface{}, error) { for _, s := range violations { switch s := s.(type) { case *ssa.Slice: - issue := newIssue( + issues[s] = newIssue( pass.Analyzer.Name, "slice bounds out of range", pass.Fset, s.Pos(), issue.Low, issue.High) - issues[s] = issue case *ssa.IndexAddr: - issue := newIssue( + issues[s] = newIssue( pass.Analyzer.Name, "slice index out of range", pass.Fset, s.Pos(), issue.Low, issue.High) - issues[s] = issue } } } } } } + case *ssa.IndexAddr: + switch indexInstr := instr.X.(type) { + case *ssa.Const: + if indexInstr.Type().String()[:2] == "[]" { + if indexInstr.Value == nil { + issues[instr] = newIssue( + pass.Analyzer.Name, + "slice index out of range", + pass.Fset, + instr.Pos(), + issue.Low, + issue.High) + + break + } + } + case *ssa.Alloc: + if instr.Pos() > 0 { + typeStr := indexInstr.Type().String() + arrayLen, err := extractArrayAllocValue(typeStr) // preallocated array + if err != nil { + break + } + + _, err = extractIntValueIndexAddr(instr, arrayLen) + if err != nil { + break + } + issues[instr] = newIssue( + pass.Analyzer.Name, + "slice index out of range", + pass.Fset, + instr.Pos(), + issue.Low, + issue.High) + } + } } } } @@ -143,7 +178,7 @@ func runSliceBounds(pass *analysis.Pass) (interface{}, error) { if err != nil { break } - if isSliceIndexInsideBounds(0, value, indexValue) { + if isSliceIndexInsideBounds(value, indexValue) { delete(issues, instr) } } @@ -161,8 +196,8 @@ func runSliceBounds(pass *analysis.Pass) (interface{}, error) { } foundIssues := []*issue.Issue{} - for _, issue := range issues { - foundIssues = append(foundIssues, issue) + for _, v := range issues { + foundIssues = append(foundIssues, v) } if len(foundIssues) > 0 { return foundIssues, nil @@ -192,7 +227,11 @@ func trackSliceBounds(depth int, sliceCap int, slice ssa.Node, violations *[]ssa } case *ssa.IndexAddr: indexValue, err := extractIntValue(refinstr.Index.String()) - if err == nil && !isSliceIndexInsideBounds(0, sliceCap, indexValue) { + if err == nil && !isSliceIndexInsideBounds(sliceCap, indexValue) { + *violations = append(*violations, refinstr) + } + indexValue, err = extractIntValueIndexAddr(refinstr, sliceCap) + if err == nil && !isSliceIndexInsideBounds(sliceCap, indexValue) { *violations = append(*violations, refinstr) } case *ssa.Call: @@ -217,6 +256,32 @@ func trackSliceBounds(depth int, sliceCap int, slice ssa.Node, violations *[]ssa } } +func extractIntValueIndexAddr(refinstr *ssa.IndexAddr, sliceCap int) (int, error) { + var indexIncr, sliceIncr int + + for _, block := range refinstr.Block().Preds { + for _, instr := range block.Instrs { + switch instr := instr.(type) { + case *ssa.BinOp: + _, index, err := extractBinOpBound(instr) + if err != nil { + return 0, err + } + switch instr.Op { + case token.LSS: + indexIncr-- + } + + if !isSliceIndexInsideBounds(sliceCap+sliceIncr, index+indexIncr) { + return index, nil + } + } + } + } + + return 0, errors.New("no found") +} + func checkAllSlicesBounds(depth int, sliceCap int, slice *ssa.Slice, violations *[]ssa.Instruction, ifs 
map[ssa.If]*ssa.BinOp) { if depth == maxDepth { return @@ -303,9 +368,14 @@ func invBound(bound bound) bound { } } +var errExtractBinOp = fmt.Errorf("unable to extract constant from binop") + func extractBinOpBound(binop *ssa.BinOp) (bound, int, error) { if binop.X != nil { if x, ok := binop.X.(*ssa.Const); ok { + if x.Value == nil { + return lowerUnbounded, 0, errExtractBinOp + } value, err := strconv.Atoi(x.Value.String()) if err != nil { return lowerUnbounded, value, err @@ -324,6 +394,9 @@ func extractBinOpBound(binop *ssa.BinOp) (bound, int, error) { } if binop.Y != nil { if y, ok := binop.Y.(*ssa.Const); ok { + if y.Value == nil { + return lowerUnbounded, 0, errExtractBinOp + } value, err := strconv.Atoi(y.Value.String()) if err != nil { return lowerUnbounded, value, err @@ -340,11 +413,11 @@ func extractBinOpBound(binop *ssa.BinOp) (bound, int, error) { } } } - return lowerUnbounded, 0, fmt.Errorf("unable to extract constant from binop") + return lowerUnbounded, 0, errExtractBinOp } -func isSliceIndexInsideBounds(l, h int, index int) bool { - return (l <= index && index < h) +func isSliceIndexInsideBounds(h int, index int) bool { + return (0 <= index && index < h) } func isSliceInsideBounds(l, h int, cl, ch int) bool { @@ -370,6 +443,10 @@ func extractSliceBounds(slice *ssa.Slice) (int, int) { } func extractIntValue(value string) (int, error) { + if i, err := extractIntValuePhi(value); err == nil { + return i, nil + } + parts := strings.Split(value, ":") if len(parts) != 2 { return 0, fmt.Errorf("invalid value: %s", value) @@ -381,7 +458,7 @@ func extractIntValue(value string) (int, error) { } func extractSliceCapFromAlloc(instr string) (int, error) { - re := regexp.MustCompile(`new \[(\d+)\]*`) + re := regexp.MustCompile(`new \[(\d+)\].*`) var sliceCap int matches := re.FindAllStringSubmatch(instr, -1) if matches == nil { @@ -397,3 +474,39 @@ func extractSliceCapFromAlloc(instr string) (int, error) { return 0, errors.New("no slice cap found") } + +func extractIntValuePhi(value string) (int, error) { + re := regexp.MustCompile(`phi \[.+: (\d+):.+, .*\].*`) + var sliceCap int + matches := re.FindAllStringSubmatch(value, -1) + if matches == nil { + return sliceCap, fmt.Errorf("invalid value: %s", value) + } + + if len(matches) > 0 { + m := matches[0] + if len(m) > 1 { + return strconv.Atoi(m[1]) + } + } + + return 0, fmt.Errorf("invalid value: %s", value) +} + +func extractArrayAllocValue(value string) (int, error) { + re := regexp.MustCompile(`.*\[(\d+)\].*`) + var sliceCap int + matches := re.FindAllStringSubmatch(value, -1) + if matches == nil { + return sliceCap, fmt.Errorf("invalid value: %s", value) + } + + if len(matches) > 0 { + m := matches[0] + if len(m) > 1 { + return strconv.Atoi(m[1]) + } + } + + return 0, fmt.Errorf("invalid value: %s", value) +} diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/readfile.go b/tools/vendor/github.com/securego/gosec/v2/rules/readfile.go index da6b9c96..aa0fbb51 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/readfile.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/readfile.go @@ -25,9 +25,12 @@ import ( type readfile struct { issue.MetaData gosec.CallList - pathJoin gosec.CallList - clean gosec.CallList + pathJoin gosec.CallList + clean gosec.CallList + // cleanedVar maps the declaration node of an identifier to the Clean() call node cleanedVar map[any]ast.Node + // joinedVar maps the declaration node of an identifier to the Join() call node + joinedVar map[any]ast.Node } // ID returns the identifier for 
this rule @@ -61,6 +64,7 @@ func (r *readfile) isJoinFunc(n ast.Node, c *gosec.Context) bool { // isFilepathClean checks if there is a filepath.Clean for given variable func (r *readfile) isFilepathClean(n *ast.Ident, c *gosec.Context) bool { + // quick lookup: was this var's declaration recorded as a Clean() call? if _, ok := r.cleanedVar[n.Obj.Decl]; ok { return true } @@ -90,35 +94,170 @@ func (r *readfile) trackFilepathClean(n ast.Node) { } } -// Match inspects AST nodes to determine if the match the methods `os.Open` or `ioutil.ReadFile` +// trackJoinAssignStmt tracks assignments where RHS is a Join(...) call and LHS is an identifier +func (r *readfile) trackJoinAssignStmt(node *ast.AssignStmt, c *gosec.Context) { + if len(node.Rhs) == 0 { + return + } + if call, ok := node.Rhs[0].(*ast.CallExpr); ok { + if r.pathJoin.ContainsPkgCallExpr(call, c, false) != nil { + // LHS must be an identifier (simple case) + if len(node.Lhs) > 0 { + if ident, ok := node.Lhs[0].(*ast.Ident); ok && ident.Obj != nil { + r.joinedVar[ident.Obj.Decl] = call + } + } + } + } +} + +// osRootSuggestion returns an Autofix suggesting the use of os.Root where supported +// to constrain file access under a fixed directory and mitigate traversal risks. +func (r *readfile) osRootSuggestion() string { + major, minor, _ := gosec.GoVersion() + if major == 1 && minor >= 24 { + return "Consider using os.Root to scope file access under a fixed root (Go >=1.24). Prefer root.Open/root.Stat over os.Open/os.Stat to prevent directory traversal." + } + return "" +} + +// isSafeJoin checks if path is baseDir + filepath.Clean(fn) joined. +// improvements over earlier naive version: +// - allow baseDir as a BasicLit or as an identifier that resolves to a string constant +// - accept Clean(...) being either a CallExpr or an identifier previously recorded as Clean result +func (r *readfile) isSafeJoin(call *ast.CallExpr, c *gosec.Context) bool { + join := r.pathJoin.ContainsPkgCallExpr(call, c, false) + if join == nil { + return false + } + + // We expect join.Args to include a baseDir-like arg and a cleaned path arg. + var foundBaseDir bool + var foundCleanArg bool + + for _, arg := range join.Args { + switch a := arg.(type) { + case *ast.BasicLit: + // literal string or similar — treat as possible baseDir + foundBaseDir = true + case *ast.Ident: + // If ident is resolvable to a constant string (TryResolve true), treat as baseDir. + // Or if ident refers to a variable that was itself assigned from a constant BasicLit, + // it's considered safe as baseDir. + if gosec.TryResolve(a, c) { + foundBaseDir = true + } else { + // It might be a cleaned variable: e.g. cleanPath := filepath.Clean(fn) + if r.isFilepathClean(a, c) { + foundCleanArg = true + } + } + case *ast.CallExpr: + // If an argument is a Clean() call directly, mark clean arg found. + if r.clean.ContainsPkgCallExpr(a, c, false) != nil { + foundCleanArg = true + } + default: + // ignore other types + } + } + + return foundBaseDir && foundCleanArg +} + func (r *readfile) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { + // Track filepath.Clean usages so identifiers assigned from Clean() are known. if node := r.clean.ContainsPkgCallExpr(n, c, false); node != nil { r.trackFilepathClean(n) return nil, nil - } else if node := r.ContainsPkgCallExpr(n, c, false); node != nil { - for _, arg := range node.Args { - // handles path joining functions in Arg - // eg. 
os.Open(filepath.Join("/tmp/", file)) - if callExpr, ok := arg.(*ast.CallExpr); ok { - if r.isJoinFunc(callExpr, c) { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil + } + + // Track Join assignments if we see an AssignStmt whose RHS is a Join call. + if assign, ok := n.(*ast.AssignStmt); ok { + // track join result assigned to a variable, e.g., fullPath := filepath.Join(baseDir, cleanPath) + r.trackJoinAssignStmt(assign, c) + // also track Clean assignment if present on RHS + if len(assign.Rhs) > 0 { + if call, ok := assign.Rhs[0].(*ast.CallExpr); ok { + if r.clean.ContainsPkgCallExpr(call, c, false) != nil { + r.trackFilepathClean(call) } } - // handles binary string concatenation eg. ioutil.Readfile("/tmp/" + file + "/blob") - if binExp, ok := arg.(*ast.BinaryExpr); ok { - // resolve all found identities from the BinaryExpr - if _, ok := gosec.FindVarIdentities(binExp, c); ok { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil + } + // continue, don't return here — other checks may apply + } + + // Now check for file-reading calls (os.Open, os.OpenFile, ioutil.ReadFile etc.) + if node := r.ContainsPkgCallExpr(n, c, false); node != nil { + if len(node.Args) == 0 { + return nil, nil + } + arg := node.Args[0] + + // If argument is a call expression, check for Join/Clean patterns. + if callExpr, ok := arg.(*ast.CallExpr); ok { + // If this call matches a safe Join(baseDir, Clean(...)) pattern, treat as safe. + if r.isSafeJoin(callExpr, c) { + // safe pattern detected; do not raise an issue + return nil, nil + } + // If the argument is a Join call but not safe per above, flag it (as before) + if r.isJoinFunc(callExpr, c) { + iss := c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence) + if s := r.osRootSuggestion(); s != "" { + iss.Autofix = s + } + return iss, nil + } + } + + // If arg is an identifier that was assigned from a Join(...) call, check that recorded Join call. + if ident, ok := arg.(*ast.Ident); ok { + if ident.Obj != nil { + if joinCall, ok := r.joinedVar[ident.Obj.Decl]; ok { + // If the identifier itself was later cleaned, treat as safe regardless of original Join args + if r.isFilepathClean(ident, c) { + return nil, nil + } + // joinCall is a *ast.CallExpr; check if that join is a safe join + if jc, ok := joinCall.(*ast.CallExpr); ok { + if r.isSafeJoin(jc, c) { + return nil, nil + } + // join exists but is not safe: flag it + iss := c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence) + if s := r.osRootSuggestion(); s != "" { + iss.Autofix = s + } + return iss, nil + } } } + } - if ident, ok := arg.(*ast.Ident); ok { - obj := c.Info.ObjectOf(ident) - if _, ok := obj.(*types.Var); ok && - !gosec.TryResolve(ident, c) && - !r.isFilepathClean(ident, c) { - return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil + // handles binary string concatenation eg. 
ioutil.Readfile("/tmp/" + file + "/blob") + if binExp, ok := arg.(*ast.BinaryExpr); ok { + // resolve all found identities from the BinaryExpr + if _, ok := gosec.FindVarIdentities(binExp, c); ok { + iss := c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence) + if s := r.osRootSuggestion(); s != "" { + iss.Autofix = s + } + return iss, nil + } + } + + // if it's a plain identifier, and not resolved and not cleaned, flag it + if ident, ok := arg.(*ast.Ident); ok { + obj := c.Info.ObjectOf(ident) + if _, ok := obj.(*types.Var); ok && + !gosec.TryResolve(ident, c) && + !r.isFilepathClean(ident, c) { + iss := c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence) + if s := r.osRootSuggestion(); s != "" { + iss.Autofix = s } + return iss, nil } } } @@ -138,6 +277,7 @@ func NewReadFile(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { Confidence: issue.High, }, cleanedVar: map[any]ast.Node{}, + joinedVar: map[any]ast.Node{}, } rule.pathJoin.Add("path/filepath", "Join") rule.pathJoin.Add("path", "Join") @@ -149,5 +289,5 @@ func NewReadFile(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { rule.Add("os", "Open") rule.Add("os", "OpenFile") rule.Add("os", "Create") - return rule, []ast.Node{(*ast.CallExpr)(nil)} + return rule, []ast.Node{(*ast.CallExpr)(nil), (*ast.AssignStmt)(nil)} } diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/sql.go b/tools/vendor/github.com/securego/gosec/v2/rules/sql.go index 42ace6c1..622c2fe2 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/sql.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/sql.go @@ -191,6 +191,11 @@ func (s *sqlStrConcat) checkQuery(call *ast.CallExpr, ctx *gosec.Context) (*issu if injection := s.findInjectionInBranch(ctx, decl.Rhs); injection != nil { return ctx.NewIssue(injection, s.ID(), s.What, s.Severity, s.Confidence), nil } + case *ast.ValueSpec: + // handle: var query string = "SELECT ...'" + user + if injection := s.findInjectionInBranch(ctx, decl.Values); injection != nil { + return ctx.NewIssue(injection, s.ID(), s.What, s.Severity, s.Confidence), nil + } } } diff --git a/tools/vendor/go.augendre.info/arangolint/pkg/analyzer/analyzer.go b/tools/vendor/go.augendre.info/arangolint/pkg/analyzer/analyzer.go index 75152960..53f19e2e 100644 --- a/tools/vendor/go.augendre.info/arangolint/pkg/analyzer/analyzer.go +++ b/tools/vendor/go.augendre.info/arangolint/pkg/analyzer/analyzer.go @@ -1,10 +1,20 @@ // Package analyzer contains tools for analyzing arangodb usage. -// It focuses on github.com/arangodb/go-driver/v2. +// +// Scope and limits of the analysis: +// - Intra-procedural only: we do not follow calls across function boundaries. +// - Flow/block sensitive within the current function: we scan statements that +// occur before a call site in the nearest block and its ancestor blocks. +// - Conservative by design: when options come from an unknown factory/helper +// call, we assume AllowImplicit is set to prevent false positives. +// +// The analyzer focuses on github.com/arangodb/go-driver/v2. 
package analyzer import ( "errors" "go/ast" + "go/token" + "go/types" "strings" "golang.org/x/tools/go/analysis" @@ -12,6 +22,17 @@ import ( "golang.org/x/tools/go/ast/inspector" ) +const ( + allowImplicitFieldName = "AllowImplicit" + msgMissingAllowImplicit = "missing AllowImplicit option" + methodBeginTransaction = "BeginTransaction" + expectedBeginTxnArgs = 3 + arangoDatabaseTypeSuffix = "github.com/arangodb/go-driver/v2/arangodb.Database" + arangoPackageSuffix = "github.com/arangodb/go-driver/v2/arangodb" +) + +var errInvalidAnalysis = errors.New("invalid analysis") + // NewAnalyzer returns an arangolint analyzer. func NewAnalyzer() *analysis.Analyzer { return &analysis.Analyzer{ @@ -22,117 +43,776 @@ func NewAnalyzer() *analysis.Analyzer { } } -var ( - errUnknown = errors.New("unknown node type") - errInvalidAnalysis = errors.New("invalid analysis") -) - -const missingAllowImplicitOptionMsg = "missing AllowImplicit option" - func run(pass *analysis.Pass) (interface{}, error) { inspctr, typeValid := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !typeValid { return nil, errInvalidAnalysis } - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), + // Visit only call expressions and get the traversal stack from the inspector. + nodeFilter := []ast.Node{(*ast.CallExpr)(nil)} + inspctr.WithStack(nodeFilter, func(node ast.Node, push bool, stack []ast.Node) (proceed bool) { + if !push { + return true + } + + // node is guaranteed to be *ast.CallExpr due to the filter above. + call := node.(*ast.CallExpr) //nolint:forcetypeassert + handleBeginTransactionCall(call, pass, stack) + + return true + }) + + return nil, nil //nolint:nilnil +} + +// handleBeginTransactionCall validates BeginTransaction(...) call sites. +// Analysis is intra-procedural and flow/block-sensitive: it scans statements +// that appear before the call within the nearest and ancestor blocks. +// For third-argument values produced by unknown factory/helper calls, the +// analyzer remains conservative (assumes AllowImplicit) to avoid +// false positives that could annoy users. +func handleBeginTransactionCall(call *ast.CallExpr, pass *analysis.Pass, stack []ast.Node) { + if !isBeginTransaction(call, pass) { + return } - inspctr.Preorder(nodeFilter, func(node ast.Node) { - call, isCall := node.(*ast.CallExpr) - if !isCall { - return - } + diag := analysis.Diagnostic{ + Pos: call.Args[2].Pos(), + Message: msgMissingAllowImplicit, + } + + // Normalize the 3rd argument by unwrapping parentheses + arg := unwrapParens(call.Args[2]) + + if shouldReportMissingAllowImplicit(arg, pass, stack, call.Pos()) { + pass.Report(diag) + } +} - if !isBeginTransaction(call, pass) { - return +// shouldReportMissingAllowImplicit returns true when the provided 3rd argument +// expression should trigger the "missing AllowImplicit" diagnostic, and false +// when the argument is known to have AllowImplicit set (or when we must stay +// conservative and avoid reporting). 
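To make the arangolint behavior above concrete, here is a hedged sketch of the call shapes it distinguishes. Database, BeginTransaction, BeginTransactionOptions and AllowImplicit come from the diff itself; the TransactionCollections parameter type, the cols value and the newTxnOptions helper are assumptions used only for illustration:

package example

import (
	"context"

	"github.com/arangodb/go-driver/v2/arangodb"
)

// newTxnOptions is a hypothetical factory, standing in for any helper whose
// result the analyzer cannot see into.
func newTxnOptions() *arangodb.BeginTransactionOptions {
	return &arangodb.BeginTransactionOptions{}
}

func beginExamples(ctx context.Context, db arangodb.Database, cols arangodb.TransactionCollections) {
	// Reported: nil can never carry AllowImplicit.
	db.BeginTransaction(ctx, cols, nil)

	// Not reported: AllowImplicit is set in the composite literal.
	db.BeginTransaction(ctx, cols, &arangodb.BeginTransactionOptions{AllowImplicit: true})

	// Not reported: options come from an opaque helper call, so the analyzer
	// stays conservative to avoid false positives.
	db.BeginTransaction(ctx, cols, newTxnOptions())
}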
+func shouldReportMissingAllowImplicit( + arg ast.Expr, + pass *analysis.Pass, + stack []ast.Node, + callPos token.Pos, +) bool { + switch optsExpr := arg.(type) { + case *ast.Ident: + // direct identifier or nil + if isNilIdent(optsExpr) { + return true } - diag := analysis.Diagnostic{ - Pos: call.Pos(), - Message: missingAllowImplicitOptionMsg, + return !hasAllowImplicitForIdent(optsExpr, pass, stack, callPos) + + case *ast.UnaryExpr: + // &CompositeLit or &ident or &index + if has, ok := compositeAllowsImplicit(optsExpr); ok { + return !has + } + // not a composite literal, try &ident + if id, ok := optsExpr.X.(*ast.Ident); ok { + return !hasAllowImplicitForIdent(id, pass, stack, callPos) } + // not &ident, try &index (e.g., &arr[i]) + if idx, ok := optsExpr.X.(*ast.IndexExpr); ok { + return !hasAllowImplicitForIndex(idx, pass, stack, callPos) + } + // Unknown &shape: stay conservative (do not report) + return false - switch typedArg := call.Args[2].(type) { - case *ast.Ident: - if typedArg.Name == "nil" { - pass.Report(diag) + case *ast.SelectorExpr: + // s.opts (or nested) passed as options + return !hasAllowImplicitForSelector(optsExpr, pass, stack, callPos) - return - } - case *ast.UnaryExpr: - elts, err := getElts(typedArg.X) - if err != nil { - return - } + case *ast.IndexExpr: + // opts passed as an indexed element, e.g., arr[i] + return !hasAllowImplicitForIndex(optsExpr, pass, stack, callPos) - if !eltsHasAllowImplicit(elts) { - pass.Report(analysis.Diagnostic{ - Pos: call.Pos(), - Message: missingAllowImplicitOptionMsg, - }) - } + case *ast.CallExpr: + // Typed conversion like (*arangodb.BeginTransactionOptions)(nil) + if isTypeConversionToTxnOptionsPtrNil(optsExpr, pass) { + return true + } + // For other calls (factory/helpers), we stay conservative to avoid false positives. + return false + } - return + // Default: unknown expression shapes — stay conservative and do not report. + return false +} + +func unwrapParens(arg ast.Expr) ast.Expr { + for { + switch pe := arg.(type) { + case *ast.ParenExpr: + arg = pe.X + default: + return arg } - }) + } +} - return nil, nil //nolint:nilnil +// isNilIdent reports whether e is an identifier named "nil". +func isNilIdent(e ast.Expr) bool { + id, ok := e.(*ast.Ident) + + return ok && id.Name == "nil" +} + +// isAllowImplicitSelector reports whether s selects the AllowImplicit field. +func isAllowImplicitSelector(s *ast.SelectorExpr) bool { + return s != nil && s.Sel != nil && s.Sel.Name == allowImplicitFieldName } +// isBeginTransaction reports whether call is a call to arangodb.Database.BeginTransaction. +// It prefers selection-based detection via TypesInfo.Selections to support wrappers or +// types that embed arangodb.Database. If selection info is unavailable, it falls back +// to checking the receiver type's string suffix for .../arangodb.Database to handle +// aliases or named types that preserve the type name. 
func isBeginTransaction(call *ast.CallExpr, pass *analysis.Pass) bool { selExpr, isSelector := call.Fun.(*ast.SelectorExpr) if !isSelector { return false } + if selExpr.Sel == nil || selExpr.Sel.Name != methodBeginTransaction { + return false + } + + // Validate expected args count with extracted constant for clarity + if len(call.Args) != expectedBeginTxnArgs { + return false + } + xType := pass.TypesInfo.TypeOf(selExpr.X) if xType == nil { return false } - const arangoStruct = "github.com/arangodb/go-driver/v2/arangodb.Database" + // Try to find the arangodb package in the current package imports and get the Database type. + var dbType types.Type + + for _, imp := range pass.Pkg.Imports() { + if strings.HasSuffix(imp.Path(), arangoPackageSuffix) { + if obj := imp.Scope().Lookup("Database"); obj != nil { + if tn, ok := obj.(*types.TypeName); ok { + dbType = tn.Type() + } + } + + break + } + } + + if dbType != nil { + // If the receiver's type is assignable to arangodb.Database, it's a valid BeginTransaction call. + if types.AssignableTo(xType, dbType) { + return true + } + } + + // Last resort: direct receiver type match or alias that preserves the type name suffix + return strings.HasSuffix(xType.String(), arangoDatabaseTypeSuffix) +} + +// hasAllowImplicitForSelector checks if a selector expression (e.g., s.opts) +// has had its AllowImplicit field set prior to the call position within +// the nearest or any ancestor block. This is a conservative intra-procedural check. +func hasAllowImplicitForSelector( + sel *ast.SelectorExpr, + pass *analysis.Pass, + stack []ast.Node, + callPos token.Pos, +) bool { + // Special case: selector rooted at an index expression, e.g., arr[i].opts. + // In this case we must match both the base array/slice object and the specific index. + if innerIdx, ok := sel.X.(*ast.IndexExpr); ok { + blocks := ancestorBlocks(stack) + + return scanPriorStatements(blocks, callPos, func(stmt ast.Stmt) bool { + return setsAllowImplicitForIndex(stmt, innerIdx, pass) + }) + } + + // Try to resolve the root identifier normally (handles ident, parens, star, chained selectors) + root := rootIdent(sel) - if !strings.HasSuffix(xType.String(), arangoStruct) || - selExpr.Sel.Name != "BeginTransaction" { + // Fallback: selector rooted indirectly via slice/index (e.g., arr[1:].opts) + if root == nil { + if innerIdx, ok := sel.X.(*ast.IndexExpr); ok { + root = rootIdent(innerIdx.X) + } + } + + if root == nil { return false } - const expectedArgsCount = 3 + rootObj := pass.TypesInfo.ObjectOf(root) + if rootObj == nil { + return false + } - return len(call.Args) == expectedArgsCount + blocks := ancestorBlocks(stack) + + return scanPriorStatements(blocks, callPos, func(stmt ast.Stmt) bool { + return setsAllowImplicitForObjectInAssign(stmt, rootObj, pass) + }) } -func getElts(node ast.Node) ([]ast.Expr, error) { - switch typedNode := node.(type) { - case *ast.CompositeLit: - return typedNode.Elts, nil - default: - return nil, errUnknown +// setsAllowImplicitForObjectInAssign reports true if the statement assigns to +// X.AllowImplicit and the root identifier of X matches the provided object. 
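hasAllowImplicitForSelector covers options reached through a field. Continuing the sketch above (same assumed package, imports and parameter types; the svc wrapper type is hypothetical):

type svc struct {
	opts *arangodb.BeginTransactionOptions
}

func selectorExample(ctx context.Context, db arangodb.Database, cols arangodb.TransactionCollections) {
	s := svc{opts: &arangodb.BeginTransactionOptions{}}
	s.opts.AllowImplicit = true // assignment rooted at the same object (s), found by the prior-statement scan
	db.BeginTransaction(ctx, cols, s.opts) // not reported
}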
+func setsAllowImplicitForObjectInAssign(stmt ast.Stmt, obj types.Object, pass *analysis.Pass) bool { + assign, ok := stmt.(*ast.AssignStmt) + if !ok { + return false } + + for _, lhs := range assign.Lhs { + sel, ok := lhs.(*ast.SelectorExpr) + if !ok { + continue + } + + if !isAllowImplicitSelector(sel) { + continue + } + + // Try standard root resolution first (ident, parens, star, chained selectors) + if r := rootIdent(sel.X); r != nil { + if pass.TypesInfo.ObjectOf(r) == obj { + return true + } + + // Not the same object; proceed to next LHS + continue + } + + // Handle index expression roots like arr[i].AllowImplicit + if idx, ok := sel.X.(*ast.IndexExpr); ok { + if root := rootIdent(idx.X); root != nil && pass.TypesInfo.ObjectOf(root) == obj { + return true + } + } + // Handle nested selector rooted by an index expression, e.g., arr[i].opts.AllowImplicit + if innerSel, ok := sel.X.(*ast.SelectorExpr); ok { + if idx, ok := innerSel.X.(*ast.IndexExpr); ok { + if root := rootIdent(idx.X); root != nil && pass.TypesInfo.ObjectOf(root) == obj { + return true + } + } + } + } + + return false } -func eltsHasAllowImplicit(elts []ast.Expr) bool { - for _, elt := range elts { - if eltIsAllowImplicit(elt) { - return true +// hasAllowImplicitForIdent checks whether the given identifier (variable or pointer to options) +// has the AllowImplicit option explicitly set before the call position within the nearest or any ancestor block. +func hasAllowImplicitForIdent( + id *ast.Ident, + pass *analysis.Pass, + stack []ast.Node, + callPos token.Pos, +) bool { + obj := pass.TypesInfo.ObjectOf(id) + if obj == nil { + return false + } + + blocks := ancestorBlocks(stack) + // Walk from the nearest block outward and scan statements before the call position + if scanPriorStatements(blocks, callPos, func(stmt ast.Stmt) bool { + return stmtSetsAllowImplicitForObj(stmt, obj, pass) + }) { + return true + } + + // If not found in local/ancestor blocks, also check for package-level (global) + // variable declarations that initialize AllowImplicit. + if hasAllowImplicitForPackageVar(pass, obj) { + return true + } + + return false +} + +// ancestorBlocks returns the list of enclosing blocks for the current node, from +// nearest to outermost. This supports intra-procedural, flow-sensitive scans of +// statements that occur before the call site. +func ancestorBlocks(stack []ast.Node) []*ast.BlockStmt { + var blks []*ast.BlockStmt + for i := len(stack) - 1; i >= 0; i-- { + if blk, ok := stack[i].(*ast.BlockStmt); ok { + blks = append(blks, blk) + } + } + + return blks +} + +// scanPriorStatements iterates statements in the provided blocks in lexical order, +// visiting only statements that appear before the provided 'until' position. It stops +// early and returns true when visit returns true. 
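scanPriorStatements and the initialization helpers are easier to read against source they accept or reject. Continuing the same sketch:

func flowExamples(ctx context.Context, db arangodb.Database, cols arangodb.TransactionCollections) {
	var a arangodb.BeginTransactionOptions
	db.BeginTransaction(ctx, cols, &a) // reported: nothing sets AllowImplicit before this point

	var b arangodb.BeginTransactionOptions
	b.AllowImplicit = true             // prior assignment found by the ancestor-block scan
	db.BeginTransaction(ctx, cols, &b) // not reported

	c := arangodb.BeginTransactionOptions{AllowImplicit: true} // literal init, see initHasAllowImplicitForObj
	db.BeginTransaction(ctx, cols, &c)                         // not reported
}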
+func scanPriorStatements(blocks []*ast.BlockStmt, until token.Pos, visit func(ast.Stmt) bool) bool { + for _, blk := range blocks { + for _, stmt := range blk.List { + if stmt == nil { + continue + } + + if stmt.Pos() >= until { + break + } + + if visit(stmt) { + return true + } } } return false } -func eltIsAllowImplicit(expr ast.Expr) bool { - switch typedNode := expr.(type) { - case *ast.KeyValueExpr: - ident, ok := typedNode.Key.(*ast.Ident) +func initHasAllowImplicitForObj( + assign *ast.AssignStmt, + obj types.Object, + pass *analysis.Pass, +) bool { + // find the RHS corresponding to our obj + for lhsIndex, lhs := range assign.Lhs { + id, ok := lhs.(*ast.Ident) if !ok { - return false + continue + } + + if pass.TypesInfo.ObjectOf(id) != obj { + continue } - return ident.Name == "AllowImplicit" + var rhsValue ast.Expr + + switch { + case len(assign.Rhs) == len(assign.Lhs): + rhsValue = assign.Rhs[lhsIndex] + case len(assign.Rhs) == 1: + rhsValue = assign.Rhs[0] + default: + continue + } + + // Check for AllowImplicit in either &CompositeLit or CompositeLit via helper + if has, ok := compositeAllowsImplicit(rhsValue); ok { + return has + } + } + + return false +} + +func declInitHasAllowImplicitForObj(stmt ast.Stmt, obj types.Object, pass *analysis.Pass) bool { + declStmt, isDeclStmt := stmt.(*ast.DeclStmt) + if !isDeclStmt { + return false + } + + genDecl, isGenDecl := declStmt.Decl.(*ast.GenDecl) + if !isGenDecl || genDecl.Tok != token.VAR { + return false + } + + for _, spec := range genDecl.Specs { + valueSpec, isValueSpec := spec.(*ast.ValueSpec) + if !isValueSpec { + continue + } + + if valueSpecHasAllowImplicitForObj(valueSpec, obj, pass) { + return true + } + } + + return false +} + +func valueSpecHasAllowImplicitForObj( + valueSpec *ast.ValueSpec, + obj types.Object, + pass *analysis.Pass, +) bool { + // find the index corresponding to our obj + targetIndex := -1 + + for i, name := range valueSpec.Names { + if pass.TypesInfo.ObjectOf(name) == obj { + targetIndex = i + + break + } + } + + if targetIndex == -1 { + return false + } + + // pick the value expression for this name + var rhsValue ast.Expr + + switch { + case targetIndex < len(valueSpec.Values): + rhsValue = valueSpec.Values[targetIndex] + case len(valueSpec.Values) == 1: + rhsValue = valueSpec.Values[0] default: return false } + + // Check for AllowImplicit in either &CompositeLit or CompositeLit via helper + if has, ok := compositeAllowsImplicit(rhsValue); ok { + return has + } + + return false +} + +func stmtSetsAllowImplicitForObj(stmt ast.Stmt, obj types.Object, pass *analysis.Pass) bool { + // Direct assignment like opts.AllowImplicit = true + if setsAllowImplicitForObjectInAssign(stmt, obj, pass) { + return true + } + + // Variable initialization via assignment (short var or regular assignment) + if assign, ok := stmt.(*ast.AssignStmt); ok { + if initHasAllowImplicitForObj(assign, obj, pass) { + return true + } + } + + // Variable declaration with initialization + if declInitHasAllowImplicitForObj(stmt, obj, pass) { + return true + } + + // Control-flow constructs that may contain relevant prior mutations/initializations + switch stmtNode := stmt.(type) { + case *ast.IfStmt: + if handleIfAllowImplicit(stmtNode, obj, pass) { + return true + } + case *ast.ForStmt: + if handleForAllowImplicit(stmtNode, obj, pass) { + return true + } + case *ast.RangeStmt: + if handleRangeAllowImplicit(stmtNode, obj, pass) { + return true + } + case *ast.SwitchStmt: + if handleSwitchAllowImplicit(stmtNode, obj, pass) { + return 
true + } + } + + return false +} + +// rootIdent returns the underlying identifier by peeling parens, stars, +// selectors, index, and slice expressions. It is intended for cases where we +// must resolve the base collection identifier behind arr[i], arr[1:], etc. +func rootIdent(expr ast.Expr) *ast.Ident { + for { + switch typedExpr := expr.(type) { + case *ast.Ident: + return typedExpr + case *ast.ParenExpr: + expr = typedExpr.X + case *ast.StarExpr: + expr = typedExpr.X + case *ast.SelectorExpr: + expr = typedExpr.X + case *ast.IndexExpr: + expr = typedExpr.X + case *ast.SliceExpr: + expr = typedExpr.X + default: + return nil + } + } +} + +// isTypeConversionToTxnOptionsPtrNil reports whether call is a type conversion to a +// pointer type with a single nil argument, e.g. (*arangodb.BeginTransactionOptions)(nil). +// This recognizes explicit nil options passed via a cast. +func isTypeConversionToTxnOptionsPtrNil(call *ast.CallExpr, pass *analysis.Pass) bool { + // single arg must be a nil identifier + if len(call.Args) != 1 { + return false + } + + if !isNilIdent(call.Args[0]) { + return false + } + // Check the target type is a pointer type + if t := pass.TypesInfo.TypeOf(call.Fun); t != nil { + if _, ok := t.(*types.Pointer); ok { + return true + } + } + // Fallback to a syntactic check + fun := call.Fun + for { + if p, ok := fun.(*ast.ParenExpr); ok { + fun = p.X + + continue + } + + break + } + + _, ok := fun.(*ast.StarExpr) + + return ok +} + +// hasAllowImplicitForPackageVar scans all files for top-level var declarations +// of the given object and returns true if its initialization sets AllowImplicit. +func hasAllowImplicitForPackageVar(pass *analysis.Pass, obj types.Object) bool { + // Only variables can be relevant here, but the object identity check below + // will safely no-op for others. + for _, f := range pass.Files { + for _, decl := range f.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok || genDecl.Tok != token.VAR { + continue + } + + for _, spec := range genDecl.Specs { + valueSpec, ok := spec.(*ast.ValueSpec) + if !ok { + continue + } + + if valueSpecHasAllowImplicitForObj(valueSpec, obj, pass) { + return true + } + } + } + } + + return false +} + +// compositeAllowsImplicit reports whether expr is a composite literal (or address-of one) +// that contains a KeyValueExpr with key ident named allowImplicitFieldName ("AllowImplicit"). +// It returns (has, ok) where ok indicates the expression was a recognized composite literal shape. +func compositeAllowsImplicit(expr ast.Expr) (bool, bool) { + expr = unwrapParens(expr) + + // handle address-of &CompositeLit + if ue, ok := expr.(*ast.UnaryExpr); ok { + expr = unwrapParens(ue.X) + } + + // handle CompositeLit + if cl, ok := expr.(*ast.CompositeLit); ok { + for _, elt := range cl.Elts { + if kv, ok := elt.(*ast.KeyValueExpr); ok { + if ident, ok := kv.Key.(*ast.Ident); ok && ident.Name == allowImplicitFieldName { + return true, true + } + } + } + + return false, true + } + + return false, false +} + +// handleIfAllowImplicit scans the if statement's body and else branches for assignments or initializations +// that set AllowImplicit for the given object. Behavior mirrors the inline logic previously in +// stmtSetsAllowImplicitForObj; extracted for readability only. 
+func handleIfAllowImplicit(stmtNode *ast.IfStmt, obj types.Object, pass *analysis.Pass) bool { + // Recurse into body statements + for _, st := range stmtNode.Body.List { + if stmtSetsAllowImplicitForObj(st, obj, pass) { + return true + } + } + // Else can be another IfStmt (else-if) or a BlockStmt + switch elseNode := stmtNode.Else.(type) { + case *ast.BlockStmt: + for _, st := range elseNode.List { + if stmtSetsAllowImplicitForObj(st, obj, pass) { + return true + } + } + case *ast.IfStmt: + if stmtSetsAllowImplicitForObj(elseNode, obj, pass) { + return true + } + } + + return false +} + +// handleForAllowImplicit scans a for statement's init and body for relevant initializations/assignments. +func handleForAllowImplicit(stmtNode *ast.ForStmt, obj types.Object, pass *analysis.Pass) bool { + // e.g., for i := 0; i < n; i++ { opts.AllowImplicit = true } + if assign, ok := stmtNode.Init.(*ast.AssignStmt); ok { + if initHasAllowImplicitForObj(assign, obj, pass) { + return true + } + } + + for _, st := range stmtNode.Body.List { + if stmtSetsAllowImplicitForObj(st, obj, pass) { + return true + } + } + + return false +} + +// handleSwitchAllowImplicit scans a switch statement's init and case bodies. +func handleSwitchAllowImplicit( + stmtNode *ast.SwitchStmt, + obj types.Object, + pass *analysis.Pass, +) bool { + if assign, ok := stmtNode.Init.(*ast.AssignStmt); ok { + if initHasAllowImplicitForObj(assign, obj, pass) { + return true + } + } + + for _, cc := range stmtNode.Body.List { + if clause, ok := cc.(*ast.CaseClause); ok { + for _, st := range clause.Body { + if stmtSetsAllowImplicitForObj(st, obj, pass) { + return true + } + } + } + } + + return false +} + +// handleRangeAllowImplicit scans a range statement's body for assignments or initializations +// that set AllowImplicit for the given object. Mirrors ForStmt handling semantics. +func handleRangeAllowImplicit(stmtNode *ast.RangeStmt, obj types.Object, pass *analysis.Pass) bool { + if stmtNode == nil || stmtNode.Body == nil { + return false + } + + for _, st := range stmtNode.Body.List { + if stmtSetsAllowImplicitForObj(st, obj, pass) { + return true + } + } + + return false +} + +// hasAllowImplicitForIndex checks if an index expression (e.g., arr[i] or arr[i]. via nested selectors) +// refers to an array/slice element whose AllowImplicit field was set prior to the call position within +// the nearest or any ancestor block. We require both the same base identifier and the same index (when resolvable). +func hasAllowImplicitForIndex( + idx *ast.IndexExpr, + pass *analysis.Pass, + stack []ast.Node, + callPos token.Pos, +) bool { + if idx == nil { + return false + } + + blocks := ancestorBlocks(stack) + + return scanPriorStatements(blocks, callPos, func(stmt ast.Stmt) bool { + return setsAllowImplicitForIndex(stmt, idx, pass) + }) +} + +// sameIndex reports whether two index expressions refer to the same constant index. +// It only returns true for simple integer literals with the same value. For other +// shapes it returns false (unknown), keeping analysis conservative. +func sameIndex(a, exprB ast.Expr) bool { + a = unwrapParens(a) + exprB = unwrapParens(exprB) + + litA, oka := a.(*ast.BasicLit) + + litB, okb := exprB.(*ast.BasicLit) + if !oka || !okb { + return false + } + + if litA.Kind != token.INT || litB.Kind != token.INT { + return false + } + + return litA.Value == litB.Value +} + +// sameIndexBase reports whether two index expressions share the same base identifier +// (array/slice variable) and the same constant index. 
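The index-aware helpers (hasAllowImplicitForIndex, sameIndex, sameIndexBase) and the control-flow handlers target per-element options and conditional initialization. Continuing the same sketch:

func indexAndBranchExamples(ctx context.Context, db arangodb.Database, cols arangodb.TransactionCollections, implicit bool) {
	opts := make([]arangodb.BeginTransactionOptions, 2)

	opts[0].AllowImplicit = true
	db.BeginTransaction(ctx, cols, &opts[0]) // not reported: same base slice and same constant index

	db.BeginTransaction(ctx, cols, &opts[1]) // reported: element 1 is never touched

	var o arangodb.BeginTransactionOptions
	if implicit {
		o.AllowImplicit = true // found inside the if body by handleIfAllowImplicit
	}
	db.BeginTransaction(ctx, cols, &o) // not reported: a conditional set counts
}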
+func sameIndexBase(idxA, idxB *ast.IndexExpr, pass *analysis.Pass) bool { + baseA := rootIdent(idxA.X) + baseB := rootIdent(idxB.X) + + if baseA == nil || baseB == nil { + return false + } + + if pass.TypesInfo.ObjectOf(baseA) != pass.TypesInfo.ObjectOf(baseB) { + return false + } + + return sameIndex(idxA.Index, idxB.Index) +} + +// setsAllowImplicitForIndex reports true if stmt assigns to AllowImplicit for the +// specific element referenced by target (matching both base and index). +func setsAllowImplicitForIndex(stmt ast.Stmt, target *ast.IndexExpr, pass *analysis.Pass) bool { + assign, ok := stmt.(*ast.AssignStmt) + if !ok { + return false + } + + for _, lhs := range assign.Lhs { + sel, ok := lhs.(*ast.SelectorExpr) + if !ok { + continue + } + + if !isAllowImplicitSelector(sel) { + continue + } + + // Direct element field assignment: arr[i].AllowImplicit = ... + if idx, ok := sel.X.(*ast.IndexExpr); ok { + if sameIndexBase(idx, target, pass) { + return true + } + } + + // Nested field after element selection: arr[i].opts.AllowImplicit = ... + if innerSel, ok := sel.X.(*ast.SelectorExpr); ok { + if idx, ok := innerSel.X.(*ast.IndexExpr); ok { + if sameIndexBase(idx, target, pass) { + return true + } + } + } + } + + return false } diff --git a/tools/vendor/go.augendre.info/fatcontext/pkg/analyzer/analyzer.go b/tools/vendor/go.augendre.info/fatcontext/pkg/analyzer/analyzer.go index d37b4cc2..88c75bdb 100644 --- a/tools/vendor/go.augendre.info/fatcontext/pkg/analyzer/analyzer.go +++ b/tools/vendor/go.augendre.info/fatcontext/pkg/analyzer/analyzer.go @@ -9,7 +9,6 @@ import ( "go/ast" "go/printer" "go/token" - "slices" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" @@ -205,7 +204,7 @@ func findNestedContext(pass *analysis.Pass, node ast.Node, stmts []ast.Stmt) *as varName := getVarName(pass, assignStmt) // If the assignment is to a known empty context, mark this variable as reset - if isContextFunction(assignStmt.Rhs[0], "Background", "TODO") { + if isEmptyContext(assignStmt.Rhs[0]) { if varName != "" { resetContexts[varName] = true } @@ -280,7 +279,7 @@ func render(fset *token.FileSet, x interface{}) ([]byte, error) { return buf.Bytes(), nil } -func isContextFunction(exp ast.Expr, fnName ...string) bool { +func isEmptyContext(exp ast.Expr) bool { call, typeValid := exp.(*ast.CallExpr) if !typeValid { return false @@ -296,7 +295,54 @@ func isContextFunction(exp ast.Expr, fnName ...string) bool { return false } - return ident.Name == "context" && slices.Contains(fnName, selector.Sel.Name) + isContextPackage := ident.Name == "context" + isSafeFunc := selector.Sel.Name == "Background" || selector.Sel.Name == "TODO" + + // context.Background and context.TODO are safe + if isContextPackage && isSafeFunc { + return true + } + + // Looking for a call to testing.T.Context or testing.B.Context or testing.TB.Context. + // Checking if the called function is Context, to avoid unnecessary work. + if selector.Sel.Name != "Context" { + return false + } + + if ident.Obj == nil || ident.Obj.Decl == nil { + return false + } + + decl, typeValid := ident.Obj.Decl.(*ast.Field) + if !typeValid { + return false + } + + // Unpack the StarExpr if necessary (as in *testing.T vs. 
testing.TB) + // This makes it a StarExpr ---^ + var declType any = decl.Type + + if star, typeValid := decl.Type.(*ast.StarExpr); typeValid { + declType = star.X + } + + declSelector, typeValid := declType.(*ast.SelectorExpr) + if !typeValid { + return false + } + + declIdent, typeValid := declSelector.X.(*ast.Ident) + if !typeValid { + return false + } + + isTestingPackage := declIdent.Name == "testing" + isValidType := declSelector.Sel.Name == "T" || + declSelector.Sel.Name == "B" || + declSelector.Sel.Name == "TB" + isSafeFunc = selector.Sel.Name == "Context" + + return isTestingPackage && isValidType && isSafeFunc } func isWithinLoop(exp ast.Expr, node ast.Node, pass *analysis.Pass) bool { diff --git a/tools/vendor/golang.org/x/exp/typeparams/common.go b/tools/vendor/golang.org/x/exp/typeparams/common.go index 7f867cf8..820ca7bd 100644 --- a/tools/vendor/golang.org/x/exp/typeparams/common.go +++ b/tools/vendor/golang.org/x/exp/typeparams/common.go @@ -116,7 +116,7 @@ func OriginMethod(fn *types.Func) *types.Func { // implements the following rule for uninstantiated generic types: // // If V and T are generic named types, then V is considered assignable to T if, -// for every possible instantation of V[A_1, ..., A_N], the instantiation +// for every possible instantiation of V[A_1, ..., A_N], the instantiation // T[A_1, ..., A_N] is valid and V[A_1, ..., A_N] implements T[A_1, ..., A_N]. // // If T has structural constraints, they must be satisfied by V. diff --git a/tools/vendor/golang.org/x/sync/errgroup/errgroup.go b/tools/vendor/golang.org/x/sync/errgroup/errgroup.go index 1d8cffae..2f45dbc8 100644 --- a/tools/vendor/golang.org/x/sync/errgroup/errgroup.go +++ b/tools/vendor/golang.org/x/sync/errgroup/errgroup.go @@ -3,7 +3,7 @@ // license that can be found in the LICENSE file. // Package errgroup provides synchronization, error propagation, and Context -// cancelation for groups of goroutines working on subtasks of a common task. +// cancellation for groups of goroutines working on subtasks of a common task. // // [errgroup.Group] is related to [sync.WaitGroup] but adds handling of tasks // returning errors. diff --git a/tools/vendor/golang.org/x/sys/unix/affinity_linux.go b/tools/vendor/golang.org/x/sys/unix/affinity_linux.go index 3c7a6d6e..3ea47038 100644 --- a/tools/vendor/golang.org/x/sys/unix/affinity_linux.go +++ b/tools/vendor/golang.org/x/sys/unix/affinity_linux.go @@ -41,6 +41,15 @@ func (s *CPUSet) Zero() { clear(s[:]) } +// Fill adds all possible CPU bits to the set s. On Linux, [SchedSetaffinity] +// will silently ignore any invalid CPU bits in [CPUSet] so this is an +// efficient way of resetting the CPU affinity of a process. +func (s *CPUSet) Fill() { + for i := range s { + s[i] = ^cpuMask(0) + } +} + func cpuBitsIndex(cpu int) int { return cpu / _NCPUBITS } diff --git a/tools/vendor/golang.org/x/sys/unix/fdset.go b/tools/vendor/golang.org/x/sys/unix/fdset.go index 9e83d18c..62ed1264 100644 --- a/tools/vendor/golang.org/x/sys/unix/fdset.go +++ b/tools/vendor/golang.org/x/sys/unix/fdset.go @@ -23,7 +23,5 @@ func (fds *FdSet) IsSet(fd int) bool { // Zero clears the set fds. 
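The new CPUSet.Fill method above is documented as a cheap way to reset a process's CPU affinity, since Linux silently ignores bits for CPUs that do not exist. A minimal sketch of that use, Linux-only:

//go:build linux

package example

import "golang.org/x/sys/unix"

// resetAffinity lets the calling thread (pid 0) run on any available CPU by
// setting every possible bit in the mask; invalid bits are ignored by the kernel.
func resetAffinity() error {
	var set unix.CPUSet
	set.Fill()
	return unix.SchedSetaffinity(0, &set)
}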
func (fds *FdSet) Zero() { - for i := range fds.Bits { - fds.Bits[i] = 0 - } + clear(fds.Bits[:]) } diff --git a/tools/vendor/golang.org/x/sys/unix/ifreq_linux.go b/tools/vendor/golang.org/x/sys/unix/ifreq_linux.go index 848840ae..309f5a2b 100644 --- a/tools/vendor/golang.org/x/sys/unix/ifreq_linux.go +++ b/tools/vendor/golang.org/x/sys/unix/ifreq_linux.go @@ -111,9 +111,7 @@ func (ifr *Ifreq) SetUint32(v uint32) { // clear zeroes the ifreq's union field to prevent trailing garbage data from // being sent to the kernel if an ifreq is reused. func (ifr *Ifreq) clear() { - for i := range ifr.raw.Ifru { - ifr.raw.Ifru[i] = 0 - } + clear(ifr.raw.Ifru[:]) } // TODO(mdlayher): export as IfreqData? For now we can provide helpers such as diff --git a/tools/vendor/golang.org/x/sys/unix/mkall.sh b/tools/vendor/golang.org/x/sys/unix/mkall.sh index e6f31d37..d0ed6119 100644 --- a/tools/vendor/golang.org/x/sys/unix/mkall.sh +++ b/tools/vendor/golang.org/x/sys/unix/mkall.sh @@ -49,6 +49,7 @@ esac if [[ "$GOOS" = "linux" ]]; then # Use the Docker-based build system # Files generated through docker (use $cmd so you can Ctl-C the build or run) + set -e $cmd docker build --tag generate:$GOOS $GOOS $cmd docker run --interactive --tty --volume $(cd -- "$(dirname -- "$0")/.." && pwd):/build generate:$GOOS exit diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_linux.go b/tools/vendor/golang.org/x/sys/unix/syscall_linux.go index 4958a657..9439af96 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_linux.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_linux.go @@ -801,9 +801,7 @@ func (sa *SockaddrPPPoE) sockaddr() (unsafe.Pointer, _Socklen, error) { // one. The kernel expects SID to be in network byte order. binary.BigEndian.PutUint16(sa.raw[6:8], sa.SID) copy(sa.raw[8:14], sa.Remote) - for i := 14; i < 14+IFNAMSIZ; i++ { - sa.raw[i] = 0 - } + clear(sa.raw[14 : 14+IFNAMSIZ]) copy(sa.raw[14:], sa.Dev) return unsafe.Pointer(&sa.raw), SizeofSockaddrPPPoX, nil } diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_netbsd.go b/tools/vendor/golang.org/x/sys/unix/syscall_netbsd.go index 88162099..34a46769 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_netbsd.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_netbsd.go @@ -248,6 +248,23 @@ func Statvfs(path string, buf *Statvfs_t) (err error) { return Statvfs1(path, buf, ST_WAIT) } +func Getvfsstat(buf []Statvfs_t, flags int) (n int, err error) { + var ( + _p0 unsafe.Pointer + bufsize uintptr + ) + if len(buf) > 0 { + _p0 = unsafe.Pointer(&buf[0]) + bufsize = unsafe.Sizeof(Statvfs_t{}) * uintptr(len(buf)) + } + r0, _, e1 := Syscall(SYS_GETVFSSTAT, uintptr(_p0), bufsize, uintptr(flags)) + n = int(r0) + if e1 != 0 { + err = e1 + } + return +} + /* * Exposed directly */ diff --git a/tools/vendor/golang.org/x/sys/windows/syscall_windows.go b/tools/vendor/golang.org/x/sys/windows/syscall_windows.go index 640f6b15..bd513373 100644 --- a/tools/vendor/golang.org/x/sys/windows/syscall_windows.go +++ b/tools/vendor/golang.org/x/sys/windows/syscall_windows.go @@ -321,6 +321,8 @@ func NewCallbackCDecl(fn interface{}) uintptr { //sys SetConsoleOutputCP(cp uint32) (err error) = kernel32.SetConsoleOutputCP //sys WriteConsole(console Handle, buf *uint16, towrite uint32, written *uint32, reserved *byte) (err error) = kernel32.WriteConsoleW //sys ReadConsole(console Handle, buf *uint16, toread uint32, read *uint32, inputControl *byte) (err error) = kernel32.ReadConsoleW +//sys GetNumberOfConsoleInputEvents(console Handle, numevents *uint32) (err error) = 
kernel32.GetNumberOfConsoleInputEvents +//sys FlushConsoleInputBuffer(console Handle) (err error) = kernel32.FlushConsoleInputBuffer //sys resizePseudoConsole(pconsole Handle, size uint32) (hr error) = kernel32.ResizePseudoConsole //sys CreateToolhelp32Snapshot(flags uint32, processId uint32) (handle Handle, err error) [failretval==InvalidHandle] = kernel32.CreateToolhelp32Snapshot //sys Module32First(snapshot Handle, moduleEntry *ModuleEntry32) (err error) = kernel32.Module32FirstW diff --git a/tools/vendor/golang.org/x/sys/windows/types_windows.go b/tools/vendor/golang.org/x/sys/windows/types_windows.go index 993a2297..358be3c7 100644 --- a/tools/vendor/golang.org/x/sys/windows/types_windows.go +++ b/tools/vendor/golang.org/x/sys/windows/types_windows.go @@ -65,6 +65,22 @@ var signals = [...]string{ 15: "terminated", } +// File flags for [os.OpenFile]. The O_ prefix is used to indicate +// that these flags are specific to the OpenFile function. +const ( + O_FILE_FLAG_OPEN_NO_RECALL = FILE_FLAG_OPEN_NO_RECALL + O_FILE_FLAG_OPEN_REPARSE_POINT = FILE_FLAG_OPEN_REPARSE_POINT + O_FILE_FLAG_SESSION_AWARE = FILE_FLAG_SESSION_AWARE + O_FILE_FLAG_POSIX_SEMANTICS = FILE_FLAG_POSIX_SEMANTICS + O_FILE_FLAG_BACKUP_SEMANTICS = FILE_FLAG_BACKUP_SEMANTICS + O_FILE_FLAG_DELETE_ON_CLOSE = FILE_FLAG_DELETE_ON_CLOSE + O_FILE_FLAG_SEQUENTIAL_SCAN = FILE_FLAG_SEQUENTIAL_SCAN + O_FILE_FLAG_RANDOM_ACCESS = FILE_FLAG_RANDOM_ACCESS + O_FILE_FLAG_NO_BUFFERING = FILE_FLAG_NO_BUFFERING + O_FILE_FLAG_OVERLAPPED = FILE_FLAG_OVERLAPPED + O_FILE_FLAG_WRITE_THROUGH = FILE_FLAG_WRITE_THROUGH +) + const ( FILE_READ_DATA = 0x00000001 FILE_READ_ATTRIBUTES = 0x00000080 diff --git a/tools/vendor/golang.org/x/sys/windows/zsyscall_windows.go b/tools/vendor/golang.org/x/sys/windows/zsyscall_windows.go index 641a5f4b..426151a0 100644 --- a/tools/vendor/golang.org/x/sys/windows/zsyscall_windows.go +++ b/tools/vendor/golang.org/x/sys/windows/zsyscall_windows.go @@ -238,6 +238,7 @@ var ( procFindResourceW = modkernel32.NewProc("FindResourceW") procFindVolumeClose = modkernel32.NewProc("FindVolumeClose") procFindVolumeMountPointClose = modkernel32.NewProc("FindVolumeMountPointClose") + procFlushConsoleInputBuffer = modkernel32.NewProc("FlushConsoleInputBuffer") procFlushFileBuffers = modkernel32.NewProc("FlushFileBuffers") procFlushViewOfFile = modkernel32.NewProc("FlushViewOfFile") procFormatMessageW = modkernel32.NewProc("FormatMessageW") @@ -284,6 +285,7 @@ var ( procGetNamedPipeHandleStateW = modkernel32.NewProc("GetNamedPipeHandleStateW") procGetNamedPipeInfo = modkernel32.NewProc("GetNamedPipeInfo") procGetNamedPipeServerProcessId = modkernel32.NewProc("GetNamedPipeServerProcessId") + procGetNumberOfConsoleInputEvents = modkernel32.NewProc("GetNumberOfConsoleInputEvents") procGetOverlappedResult = modkernel32.NewProc("GetOverlappedResult") procGetPriorityClass = modkernel32.NewProc("GetPriorityClass") procGetProcAddress = modkernel32.NewProc("GetProcAddress") @@ -2111,6 +2113,14 @@ func FindVolumeMountPointClose(findVolumeMountPoint Handle) (err error) { return } +func FlushConsoleInputBuffer(console Handle) (err error) { + r1, _, e1 := syscall.SyscallN(procFlushConsoleInputBuffer.Addr(), uintptr(console)) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + func FlushFileBuffers(handle Handle) (err error) { r1, _, e1 := syscall.SyscallN(procFlushFileBuffers.Addr(), uintptr(handle)) if r1 == 0 { @@ -2481,6 +2491,14 @@ func GetNamedPipeServerProcessId(pipe Handle, serverProcessID *uint32) (err erro return } +func 
GetNumberOfConsoleInputEvents(console Handle, numevents *uint32) (err error) { + r1, _, e1 := syscall.SyscallN(procGetNumberOfConsoleInputEvents.Addr(), uintptr(console), uintptr(unsafe.Pointer(numevents))) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + func GetOverlappedResult(handle Handle, overlapped *Overlapped, done *uint32, wait bool) (err error) { var _p0 uint32 if wait { diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go index 1aa7afb9..efbf05d5 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go @@ -237,7 +237,7 @@ Files: // so accumulate them all and then prefer the one that // matches build.Default.GOARCH. var archCandidates []*asmArch - for _, fld := range strings.Fields(m[1]) { + for fld := range strings.FieldsSeq(m[1]) { for _, a := range arches { if a.name == fld { archCandidates = append(archCandidates, a) diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go index 1914bb47..dfe68d9b 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go @@ -19,7 +19,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil" ) //go:embed doc.go @@ -66,8 +66,8 @@ func run(pass *analysis.Pass) (any, error) { !isMapIndex(pass.TypesInfo, lhs) && reflect.TypeOf(lhs) == reflect.TypeOf(rhs) { // short-circuit the heavy-weight gofmt check - le = analysisinternal.Format(pass.Fset, lhs) - re := analysisinternal.Format(pass.Fset, rhs) + le = astutil.Format(pass.Fset, lhs) + re := astutil.Format(pass.Fset, rhs) if le == re { isSelfAssign = true } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go index 82d5439c..ddd875b2 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go @@ -14,7 +14,8 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -30,7 +31,7 @@ var Analyzer = &analysis.Analyzer{ } func run(pass *analysis.Pass) (any, error) { - if !analysisinternal.Imports(pass.Pkg, "sync/atomic") { + if !typesinternal.Imports(pass.Pkg, "sync/atomic") { return nil, nil // doesn't directly import sync/atomic } @@ -54,7 +55,7 @@ func run(pass *analysis.Pass) (any, error) { continue } obj := typeutil.Callee(pass.TypesInfo, call) - if analysisinternal.IsFunctionNamed(obj, "sync/atomic", "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr") { + if typesinternal.IsFunctionNamed(obj, "sync/atomic", "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr") { checkAtomicAddAssignment(pass, n.Lhs[i], call) } } @@ -72,7 +73,7 @@ func checkAtomicAddAssignment(pass *analysis.Pass, left ast.Expr, call *ast.Call arg := call.Args[0] broken := false - gofmt := func(e 
ast.Expr) string { return analysisinternal.Format(pass.Fset, e) } + gofmt := func(e ast.Expr) string { return astutil.Format(pass.Fset, e) } if uarg, ok := arg.(*ast.UnaryExpr); ok && uarg.Op == token.AND { broken = gofmt(left) == gofmt(uarg.X) diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/atomicalign/atomicalign.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/atomicalign/atomicalign.go index 2508b41f..84699dd0 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/atomicalign/atomicalign.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/atomicalign/atomicalign.go @@ -18,7 +18,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/typesinternal" ) const Doc = "check for non-64-bits-aligned arguments to sync/atomic functions" @@ -35,7 +35,7 @@ func run(pass *analysis.Pass) (any, error) { if 8*pass.TypesSizes.Sizeof(types.Typ[types.Uintptr]) == 64 { return nil, nil // 64-bit platform } - if !analysisinternal.Imports(pass.Pkg, "sync/atomic") { + if !typesinternal.Imports(pass.Pkg, "sync/atomic") { return nil, nil // doesn't directly import sync/atomic } @@ -54,7 +54,7 @@ func run(pass *analysis.Pass) (any, error) { inspect.Preorder(nodeFilter, func(node ast.Node) { call := node.(*ast.CallExpr) obj := typeutil.Callee(pass.TypesInfo, call) - if analysisinternal.IsFunctionNamed(obj, "sync/atomic", funcNames...) { + if typesinternal.IsFunctionNamed(obj, "sync/atomic", funcNames...) { // For all the listed functions, the expression to check is always the first function argument. check64BitAlignment(pass, obj.Name(), call.Args[0]) } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go index e1cf9f9b..3c2a82dc 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go @@ -15,7 +15,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil" ) const Doc = "check for common mistakes involving boolean operators" @@ -104,7 +104,7 @@ func (op boolOp) commutativeSets(info *types.Info, e *ast.BinaryExpr, seen map[* func (op boolOp) checkRedundant(pass *analysis.Pass, exprs []ast.Expr) { seen := make(map[string]bool) for _, e := range exprs { - efmt := analysisinternal.Format(pass.Fset, e) + efmt := astutil.Format(pass.Fset, e) if seen[efmt] { pass.ReportRangef(e, "redundant %s: %s %s %s", op.name, efmt, op.tok, efmt) } else { @@ -150,8 +150,8 @@ func (op boolOp) checkSuspect(pass *analysis.Pass, exprs []ast.Expr) { } // e is of the form 'x != c' or 'x == c'. - xfmt := analysisinternal.Format(pass.Fset, x) - efmt := analysisinternal.Format(pass.Fset, e) + xfmt := astutil.Format(pass.Fset, x) + efmt := astutil.Format(pass.Fset, e) if prev, found := seen[xfmt]; found { // checkRedundant handles the case in which efmt == prev. 
if efmt != prev { diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go index 6c7a0df5..91aac676 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go @@ -15,6 +15,8 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/versions" ) const Doc = "check //go:build and // +build directives" @@ -55,7 +57,6 @@ func runBuildTag(pass *analysis.Pass) (any, error) { func checkGoFile(pass *analysis.Pass, f *ast.File) { var check checker check.init(pass) - defer check.finish() for _, group := range f.Comments { // A +build comment is ignored after or adjoining the package declaration. @@ -77,6 +78,27 @@ func checkGoFile(pass *analysis.Pass, f *ast.File) { check.comment(c.Slash, c.Text) } } + + check.finish() + + // For Go 1.18+ files, offer a fix to remove the +build lines + // if they passed all consistency checks. + if check.crossCheck && !versions.Before(pass.TypesInfo.FileVersions[f], "go1.18") { + for _, rng := range check.plusBuildRanges { + check.pass.Report(analysis.Diagnostic{ + Pos: rng.Pos(), + End: rng.End(), + Message: "+build line is no longer needed", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Remove obsolete +build line", + TextEdits: []analysis.TextEdit{{ + Pos: rng.Pos(), + End: rng.End(), + }}, + }}, + }) + } + } } func checkOtherFile(pass *analysis.Pass, filename string) error { @@ -96,15 +118,15 @@ func checkOtherFile(pass *analysis.Pass, filename string) error { } type checker struct { - pass *analysis.Pass - plusBuildOK bool // "+build" lines still OK - goBuildOK bool // "go:build" lines still OK - crossCheck bool // cross-check go:build and +build lines when done reading file - inStar bool // currently in a /* */ comment - goBuildPos token.Pos // position of first go:build line found - plusBuildPos token.Pos // position of first "+build" line found - goBuild constraint.Expr // go:build constraint found - plusBuild constraint.Expr // AND of +build constraints found + pass *analysis.Pass + plusBuildOK bool // "+build" lines still OK + goBuildOK bool // "go:build" lines still OK + crossCheck bool // cross-check go:build and +build lines when done reading file + inStar bool // currently in a /* */ comment + goBuildPos token.Pos // position of first go:build line found + plusBuildRanges []analysis.Range // range of each "+build" line found + goBuild constraint.Expr // go:build constraint found + plusBuild constraint.Expr // AND of +build constraints found } func (check *checker) init(pass *analysis.Pass) { @@ -272,6 +294,8 @@ func (check *checker) goBuildLine(pos token.Pos, line string) { } func (check *checker) plusBuildLine(pos token.Pos, line string) { + plusBuildRange := analysisinternal.Range(pos, pos+token.Pos(len(line))) + line = strings.TrimSpace(line) if !constraint.IsPlusBuild(line) { // Comment with +build but not at beginning. 
@@ -286,9 +310,7 @@ func (check *checker) plusBuildLine(pos token.Pos, line string) { check.crossCheck = false } - if check.plusBuildPos == token.NoPos { - check.plusBuildPos = pos - } + check.plusBuildRanges = append(check.plusBuildRanges, plusBuildRange) // testing hack: stop at // ERROR if i := strings.Index(line, " // ERROR "); i >= 0 { @@ -298,7 +320,7 @@ func (check *checker) plusBuildLine(pos token.Pos, line string) { fields := strings.Fields(line[len("//"):]) // IsPlusBuildConstraint check above implies fields[0] == "+build" for _, arg := range fields[1:] { - for _, elem := range strings.Split(arg, ",") { + for elem := range strings.SplitSeq(arg, ",") { if strings.HasPrefix(elem, "!!") { check.pass.Reportf(pos, "invalid double negative in build constraint: %s", arg) check.crossCheck = false @@ -336,19 +358,19 @@ func (check *checker) plusBuildLine(pos token.Pos, line string) { } func (check *checker) finish() { - if !check.crossCheck || check.plusBuildPos == token.NoPos || check.goBuildPos == token.NoPos { + if !check.crossCheck || len(check.plusBuildRanges) == 0 || check.goBuildPos == token.NoPos { return } // Have both //go:build and // +build, // with no errors found (crossCheck still true). // Check they match. - var want constraint.Expr lines, err := constraint.PlusBuildLines(check.goBuild) if err != nil { check.pass.Reportf(check.goBuildPos, "%v", err) return } + var want constraint.Expr for _, line := range lines { y, err := constraint.Parse(line) if err != nil { @@ -363,7 +385,8 @@ func (check *checker) finish() { } } if want.String() != check.plusBuild.String() { - check.pass.Reportf(check.plusBuildPos, "+build lines do not match //go:build condition") + check.pass.ReportRangef(check.plusBuildRanges[0], "+build lines do not match //go:build condition") + check.crossCheck = false // don't offer fix to remove +build return } } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go index d9189b5b..bf1202b9 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go @@ -18,7 +18,7 @@ import ( "strconv" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/typesinternal" ) const debug = false @@ -41,7 +41,7 @@ var Analyzer = &analysis.Analyzer{ } func run(pass *analysis.Pass) (any, error) { - if !analysisinternal.Imports(pass.Pkg, "runtime/cgo") { + if !typesinternal.Imports(pass.Pkg, "runtime/cgo") { return nil, nil // doesn't use cgo } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go index a4e455d9..4190cc59 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go @@ -16,8 +16,9 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal" "golang.org/x/tools/internal/versions" ) @@ -86,7 +87,7 @@ func checkCopyLocksAssign(pass *analysis.Pass, assign *ast.AssignStmt, goversion lhs := assign.Lhs for i, x := range assign.Rhs { if path := lockPathRhs(pass, x); path != nil { 
- pass.ReportRangef(x, "assignment copies lock value to %v: %v", analysisinternal.Format(pass.Fset, assign.Lhs[i]), path) + pass.ReportRangef(x, "assignment copies lock value to %v: %v", astutil.Format(pass.Fset, assign.Lhs[i]), path) lhs = nil // An lhs has been reported. We prefer the assignment warning and do not report twice. } } @@ -100,7 +101,7 @@ func checkCopyLocksAssign(pass *analysis.Pass, assign *ast.AssignStmt, goversion if id, ok := l.(*ast.Ident); ok && id.Name != "_" { if obj := pass.TypesInfo.Defs[id]; obj != nil && obj.Type() != nil { if path := lockPath(pass.Pkg, obj.Type(), nil); path != nil { - pass.ReportRangef(l, "for loop iteration copies lock value to %v: %v", analysisinternal.Format(pass.Fset, l), path) + pass.ReportRangef(l, "for loop iteration copies lock value to %v: %v", astutil.Format(pass.Fset, l), path) } } } @@ -132,7 +133,7 @@ func checkCopyLocksCompositeLit(pass *analysis.Pass, cl *ast.CompositeLit) { x = node.Value } if path := lockPathRhs(pass, x); path != nil { - pass.ReportRangef(x, "literal copies lock value from %v: %v", analysisinternal.Format(pass.Fset, x), path) + pass.ReportRangef(x, "literal copies lock value from %v: %v", astutil.Format(pass.Fset, x), path) } } } @@ -157,13 +158,16 @@ func checkCopyLocksCallExpr(pass *analysis.Pass, ce *ast.CallExpr) { } if fun, ok := pass.TypesInfo.Uses[id].(*types.Builtin); ok { switch fun.Name() { - case "new", "len", "cap", "Sizeof", "Offsetof", "Alignof": + case "len", "cap", "Sizeof", "Offsetof", "Alignof": + // The argument of this operation is used only + // for its type (e.g. len(array)), or the operation + // does not copy a lock (e.g. len(slice)). return } } for _, x := range ce.Args { if path := lockPathRhs(pass, x); path != nil { - pass.ReportRangef(x, "call of %s copies lock value: %v", analysisinternal.Format(pass.Fset, ce.Fun), path) + pass.ReportRangef(x, "call of %s copies lock value: %v", astutil.Format(pass.Fset, ce.Fun), path) } } } @@ -230,7 +234,7 @@ func checkCopyLocksRangeVar(pass *analysis.Pass, rtok token.Token, e ast.Expr) { return } if path := lockPath(pass.Pkg, typ, nil); path != nil { - pass.Reportf(e.Pos(), "range var %s copies lock: %v", analysisinternal.Format(pass.Fset, e), path) + pass.Reportf(e.Pos(), "range var %s copies lock: %v", astutil.Format(pass.Fset, e), path) } } @@ -350,7 +354,7 @@ func lockPath(tpkg *types.Package, typ types.Type, seen map[types.Type]bool) typ // In go1.10, sync.noCopy did not implement Locker. // (The Unlock method was added only in CL 121876.) // TODO(adonovan): remove workaround when we drop go1.10. 
- if analysisinternal.IsTypeNamed(typ, "sync", "noCopy") { + if typesinternal.IsTypeNamed(typ, "sync", "noCopy") { return []string{typ.String()} } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go index d15e3bc5..5e3d1a35 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go @@ -14,7 +14,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/typesinternal" ) const Doc = `check for calls of reflect.DeepEqual on error values @@ -35,7 +35,7 @@ var Analyzer = &analysis.Analyzer{ } func run(pass *analysis.Pass) (any, error) { - if !analysisinternal.Imports(pass.Pkg, "reflect") { + if !typesinternal.Imports(pass.Pkg, "reflect") { return nil, nil // doesn't directly import reflect } @@ -47,7 +47,7 @@ func run(pass *analysis.Pass) (any, error) { inspect.Preorder(nodeFilter, func(n ast.Node) { call := n.(*ast.CallExpr) obj := typeutil.Callee(pass.TypesInfo, call) - if analysisinternal.IsFunctionNamed(obj, "reflect", "DeepEqual") && hasError(pass, call.Args[0]) && hasError(pass, call.Args[1]) { + if typesinternal.IsFunctionNamed(obj, "reflect", "DeepEqual") && hasError(pass, call.Args[0]) && hasError(pass, call.Args[1]) { pass.ReportRangef(call, "avoid using reflect.DeepEqual with errors") } }) diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/defers/defers.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/defers/defers.go index e11957f2..bf62d327 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/defers/defers.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/defers/defers.go @@ -13,7 +13,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -29,14 +29,14 @@ var Analyzer = &analysis.Analyzer{ } func run(pass *analysis.Pass) (any, error) { - if !analysisinternal.Imports(pass.Pkg, "time") { + if !typesinternal.Imports(pass.Pkg, "time") { return nil, nil } checkDeferCall := func(node ast.Node) bool { switch v := node.(type) { case *ast.CallExpr: - if analysisinternal.IsFunctionNamed(typeutil.Callee(pass.TypesInfo, v), "time", "Since") { + if typesinternal.IsFunctionNamed(typeutil.Callee(pass.TypesInfo, v), "time", "Since") { pass.Reportf(v.Pos(), "call to time.Since is not deferred") } case *ast.FuncLit: diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go index b8d29d01..b3df9992 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go @@ -12,22 +12,20 @@ import ( "go/types" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + 
"golang.org/x/tools/internal/typesinternal/typeindex" ) const Doc = `report passing non-pointer or non-error values to errors.As -The errorsas analysis reports calls to errors.As where the type +The errorsas analyzer reports calls to errors.As where the type of the second argument is not a pointer to a type implementing error.` var Analyzer = &analysis.Analyzer{ Name: "errorsas", Doc: Doc, URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/errorsas", - Requires: []*analysis.Analyzer{inspect.Analyzer}, + Requires: []*analysis.Analyzer{typeindexanalyzer.Analyzer}, Run: run, } @@ -39,38 +37,31 @@ func run(pass *analysis.Pass) (any, error) { return nil, nil } - if !analysisinternal.Imports(pass.Pkg, "errors") { - return nil, nil // doesn't directly import errors - } - - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + var ( + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + ) - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - inspect.Preorder(nodeFilter, func(n ast.Node) { - call := n.(*ast.CallExpr) - obj := typeutil.Callee(pass.TypesInfo, call) - if !analysisinternal.IsFunctionNamed(obj, "errors", "As") { - return - } + for curCall := range index.Calls(index.Object("errors", "As")) { + call := curCall.Node().(*ast.CallExpr) if len(call.Args) < 2 { - return // not enough arguments, e.g. called with return values of another function + continue // spread call: errors.As(pair()) } - if err := checkAsTarget(pass, call.Args[1]); err != nil { + + // Check for incorrect arguments. + if err := checkAsTarget(info, call.Args[1]); err != nil { pass.ReportRangef(call, "%v", err) + continue } - }) + } return nil, nil } -var errorType = types.Universe.Lookup("error").Type() - // checkAsTarget reports an error if the second argument to errors.As is invalid. -func checkAsTarget(pass *analysis.Pass, e ast.Expr) error { - t := pass.TypesInfo.Types[e].Type - if it, ok := t.Underlying().(*types.Interface); ok && it.NumMethods() == 0 { - // A target of interface{} is always allowed, since it often indicates +func checkAsTarget(info *types.Info, e ast.Expr) error { + t := info.Types[e].Type + if types.Identical(t.Underlying(), anyType) { + // A target of any is always allowed, since it often indicates // a value forwarded from another source. 
return nil } @@ -78,12 +69,16 @@ func checkAsTarget(pass *analysis.Pass, e ast.Expr) error { if !ok { return errors.New("second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type") } - if pt.Elem() == errorType { + if types.Identical(pt.Elem(), errorType) { return errors.New("second argument to errors.As should not be *error") } - _, ok = pt.Elem().Underlying().(*types.Interface) - if ok || types.Implements(pt.Elem(), errorType.Underlying().(*types.Interface)) { - return nil + if !types.IsInterface(pt.Elem()) && !types.AssignableTo(pt.Elem(), errorType) { + return errors.New("second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type") } - return errors.New("second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type") + return nil } + +var ( + anyType = types.Universe.Lookup("any").Type() + errorType = types.Universe.Lookup("error").Type() +) diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/hostport/hostport.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/hostport/hostport.go index e808b1aa..07f15496 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/hostport/hostport.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/hostport/hostport.go @@ -10,7 +10,9 @@ import ( "fmt" "go/ast" "go/constant" + "go/token" "go/types" + "strconv" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" @@ -57,13 +59,16 @@ func run(pass *analysis.Pass) (any, error) { } // checkAddr reports a diagnostic (and returns true) if e - // is a call of the form fmt.Sprintf("%d:%d", ...). + // is a call of the form fmt.Sprintf("%s:%d", ...). // The diagnostic includes a fix. // // dialCall is non-nil if the Dial call is non-local // but within the same file. checkAddr := func(e ast.Expr, dialCall *ast.CallExpr) { - if call, ok := e.(*ast.CallExpr); ok && typeutil.Callee(info, call) == fmtSprintf { + if call, ok := e.(*ast.CallExpr); ok && + len(call.Args) == 3 && + typeutil.Callee(info, call) == fmtSprintf { + // Examine format string. formatArg := call.Args[0] if tv := info.Types[formatArg]; tv.Value != nil { @@ -99,21 +104,41 @@ func run(pass *analysis.Pass) (any, error) { // Turn numeric port into a string. if numericPort { - // port => fmt.Sprintf("%d", port) - // 123 => "123" port := call.Args[2] - newPort := fmt.Sprintf(`fmt.Sprintf("%%d", %s)`, port) - if port := info.Types[port].Value; port != nil { - if i, ok := constant.Int64Val(port); ok { - newPort = fmt.Sprintf(`"%d"`, i) // numeric constant + + // Is port an integer literal? + // + // (Don't allow arbitrary constants k otherwise the + // transformation k => fmt.Sprintf("%d", "123") + // loses the symbolic connection to k.) 
+ var kPort int64 = -1 + if lit, ok := port.(*ast.BasicLit); ok && lit.Kind == token.INT { + if v, err := strconv.ParseInt(lit.Value, 0, 64); err == nil { + kPort = v } } - - edits = append(edits, analysis.TextEdit{ - Pos: port.Pos(), - End: port.End(), - NewText: []byte(newPort), - }) + if kPort >= 0 { + // literal: 0x7B => "123" + edits = append(edits, analysis.TextEdit{ + Pos: port.Pos(), + End: port.End(), + NewText: fmt.Appendf(nil, `"%d"`, kPort), // (decimal) + }) + } else { + // non-literal: port => fmt.Sprintf("%d", port) + edits = append(edits, []analysis.TextEdit{ + { + Pos: port.Pos(), + End: port.Pos(), + NewText: []byte(`fmt.Sprintf("%d", `), + }, + { + Pos: port.End(), + End: port.End(), + NewText: []byte(`)`), + }, + }...) + } } // Refer to Dial call, if not adjacent. diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/httpmux/httpmux.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/httpmux/httpmux.go index 655b78fd..a4f00e2c 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/httpmux/httpmux.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/httpmux/httpmux.go @@ -17,7 +17,6 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/typesinternal" ) @@ -46,7 +45,7 @@ func run(pass *analysis.Pass) (any, error) { return nil, nil } } - if !analysisinternal.Imports(pass.Pkg, "net/http") { + if !typesinternal.Imports(pass.Pkg, "net/http") { return nil, nil } // Look for calls to ServeMux.Handle or ServeMux.HandleFunc. @@ -79,7 +78,7 @@ func isServeMuxRegisterCall(pass *analysis.Pass, call *ast.CallExpr) bool { if fn == nil { return false } - if analysisinternal.IsFunctionNamed(fn, "net/http", "Handle", "HandleFunc") { + if typesinternal.IsFunctionNamed(fn, "net/http", "Handle", "HandleFunc") { return true } if !isMethodNamed(fn, "net/http", "Handle", "HandleFunc") { @@ -87,7 +86,7 @@ func isServeMuxRegisterCall(pass *analysis.Pass, call *ast.CallExpr) bool { } recv := fn.Type().(*types.Signature).Recv() // isMethodNamed() -> non-nil isPtr, named := typesinternal.ReceiverNamed(recv) - return isPtr && analysisinternal.IsTypeNamed(named, "net/http", "ServeMux") + return isPtr && typesinternal.IsTypeNamed(named, "net/http", "ServeMux") } // isMethodNamed reports when a function f is a method, diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go index e9acd965..37ecb652 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go @@ -13,7 +13,6 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/typesinternal" ) @@ -46,7 +45,7 @@ func run(pass *analysis.Pass) (any, error) { // Fast path: if the package doesn't import net/http, // skip the traversal. - if !analysisinternal.Imports(pass.Pkg, "net/http") { + if !typesinternal.Imports(pass.Pkg, "net/http") { return nil, nil } @@ -118,7 +117,7 @@ func isHTTPFuncOrMethodOnClient(info *types.Info, expr *ast.CallExpr) bool { return false // the function called does not return two values. 
} isPtr, named := typesinternal.ReceiverNamed(res.At(0)) - if !isPtr || named == nil || !analysisinternal.IsTypeNamed(named, "net/http", "Response") { + if !isPtr || named == nil || !typesinternal.IsTypeNamed(named, "net/http", "Response") { return false // the first return type is not *http.Response. } @@ -133,11 +132,11 @@ func isHTTPFuncOrMethodOnClient(info *types.Info, expr *ast.CallExpr) bool { return ok && id.Name == "http" // function in net/http package. } - if analysisinternal.IsTypeNamed(typ, "net/http", "Client") { + if typesinternal.IsTypeNamed(typ, "net/http", "Client") { return true // method on http.Client. } ptr, ok := types.Unalias(typ).(*types.Pointer) - return ok && analysisinternal.IsTypeNamed(ptr.Elem(), "net/http", "Client") // method on *http.Client. + return ok && typesinternal.IsTypeNamed(ptr.Elem(), "net/http", "Client") // method on *http.Client. } // restOfBlock, given a traversal stack, finds the innermost containing diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go index 2580a0ac..8432e963 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go @@ -14,7 +14,6 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/typesinternal" "golang.org/x/tools/internal/versions" ) @@ -369,5 +368,5 @@ func isMethodCall(info *types.Info, expr ast.Expr, pkgPath, typeName, method str // Check that the receiver is a . or // *.. _, named := typesinternal.ReceiverNamed(recv) - return analysisinternal.IsTypeNamed(named, pkgPath, typeName) + return typesinternal.IsTypeNamed(named, pkgPath, typeName) } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go index c0746789..cc0bf0fd 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go @@ -16,8 +16,8 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/cfg" - "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -50,7 +50,7 @@ var contextPackage = "context" // checkLostCancel analyzes a single named or literal function. func run(pass *analysis.Pass) (any, error) { // Fast path: bypass check if file doesn't use context.WithCancel. - if !analysisinternal.Imports(pass.Pkg, contextPackage) { + if !typesinternal.Imports(pass.Pkg, contextPackage) { return nil, nil } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/any.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/any.go new file mode 100644 index 00000000..05999f8f --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/any.go @@ -0,0 +1,61 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package modernize + +import ( + "go/ast" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" +) + +var AnyAnalyzer = &analysis.Analyzer{ + Name: "any", + Doc: analysisinternal.MustExtractDoc(doc, "any"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + }, + Run: runAny, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#any", +} + +// The any pass replaces interface{} with go1.18's 'any'. +func runAny(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.18") { + for curIface := range curFile.Preorder((*ast.InterfaceType)(nil)) { + iface := curIface.Node().(*ast.InterfaceType) + + if iface.Methods.NumFields() == 0 { + // Check that 'any' is not shadowed. + if lookup(pass.TypesInfo, curIface, "any") == builtinAny { + pass.Report(analysis.Diagnostic{ + Pos: iface.Pos(), + End: iface.End(), + Message: "interface{} can be replaced by any", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Replace interface{} by any", + TextEdits: []analysis.TextEdit{ + { + Pos: iface.Pos(), + End: iface.End(), + NewText: []byte("any"), + }, + }, + }}, + }) + } + } + } + } + return nil, nil +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/bloop.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/bloop.go new file mode 100644 index 00000000..eb1ac170 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/bloop.go @@ -0,0 +1,250 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/moreiters" + "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +var BLoopAnalyzer = &analysis.Analyzer{ + Name: "bloop", + Doc: analysisinternal.MustExtractDoc(doc, "bloop"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + typeindexanalyzer.Analyzer, + }, + Run: bloop, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#bloop", +} + +// bloop updates benchmarks that use "for range b.N", replacing it +// with go1.24's b.Loop() and eliminating any preceding +// b.{Start,Stop,Reset}Timer calls. 
+// +// Variants: +// +// for i := 0; i < b.N; i++ {} => for b.Loop() {} +// for range b.N {} +func bloop(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + if !typesinternal.Imports(pass.Pkg, "testing") { + return nil, nil + } + + var ( + inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + ) + + // edits computes the text edits for a matched for/range loop + // at the specified cursor. b is the *testing.B value, and + // (start, end) is the portion using b.N to delete. + edits := func(curLoop inspector.Cursor, b ast.Expr, start, end token.Pos) (edits []analysis.TextEdit) { + curFn, _ := enclosingFunc(curLoop) + // Within the same function, delete all calls to + // b.{Start,Stop,Timer} that precede the loop. + filter := []ast.Node{(*ast.ExprStmt)(nil), (*ast.FuncLit)(nil)} + curFn.Inspect(filter, func(cur inspector.Cursor) (descend bool) { + node := cur.Node() + if is[*ast.FuncLit](node) { + return false // don't descend into FuncLits (e.g. sub-benchmarks) + } + stmt := node.(*ast.ExprStmt) + if stmt.Pos() > start { + return false // not preceding: stop + } + if call, ok := stmt.X.(*ast.CallExpr); ok { + obj := typeutil.Callee(info, call) + if typesinternal.IsMethodNamed(obj, "testing", "B", "StopTimer", "StartTimer", "ResetTimer") { + // Delete call statement. + // TODO(adonovan): delete following newline, or + // up to start of next stmt? (May delete a comment.) + edits = append(edits, analysis.TextEdit{ + Pos: stmt.Pos(), + End: stmt.End(), + }) + } + } + return true + }) + + // Replace ...b.N... with b.Loop(). + return append(edits, analysis.TextEdit{ + Pos: start, + End: end, + NewText: fmt.Appendf(nil, "%s.Loop()", astutil.Format(pass.Fset, b)), + }) + } + + // Find all for/range statements. + loops := []ast.Node{ + (*ast.ForStmt)(nil), + (*ast.RangeStmt)(nil), + } + for curFile := range filesUsing(inspect, info, "go1.24") { + for curLoop := range curFile.Preorder(loops...) { + switch n := curLoop.Node().(type) { + case *ast.ForStmt: + // for _; i < b.N; _ {} + if cmp, ok := n.Cond.(*ast.BinaryExpr); ok && cmp.Op == token.LSS { + if sel, ok := cmp.Y.(*ast.SelectorExpr); ok && + sel.Sel.Name == "N" && + typesinternal.IsPointerToNamed(info.TypeOf(sel.X), "testing", "B") && usesBenchmarkNOnce(curLoop, info) { + + delStart, delEnd := n.Cond.Pos(), n.Cond.End() + + // Eliminate variable i if no longer needed: + // for i := 0; i < b.N; i++ { + // ...no references to i... + // } + body, _ := curLoop.LastChild() + if v := isIncrementLoop(info, n); v != nil && + !uses(index, body, v) { + delStart, delEnd = n.Init.Pos(), n.Post.End() + } + + pass.Report(analysis.Diagnostic{ + // Highlight "i < b.N". + Pos: n.Cond.Pos(), + End: n.Cond.End(), + Message: "b.N can be modernized using b.Loop()", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Replace b.N with b.Loop()", + TextEdits: edits(curLoop, sel.X, delStart, delEnd), + }}, + }) + } + } + + case *ast.RangeStmt: + // for range b.N {} -> for b.Loop() {} + // + // TODO(adonovan): handle "for i := range b.N". + if sel, ok := n.X.(*ast.SelectorExpr); ok && + n.Key == nil && + n.Value == nil && + sel.Sel.Name == "N" && + typesinternal.IsPointerToNamed(info.TypeOf(sel.X), "testing", "B") && usesBenchmarkNOnce(curLoop, info) { + + pass.Report(analysis.Diagnostic{ + // Highlight "range b.N". 
+ Pos: n.Range, + End: n.X.End(), + Message: "b.N can be modernized using b.Loop()", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Replace b.N with b.Loop()", + TextEdits: edits(curLoop, sel.X, n.Range, n.X.End()), + }}, + }) + } + } + } + } + return nil, nil +} + +// uses reports whether the subtree cur contains a use of obj. +func uses(index *typeindex.Index, cur inspector.Cursor, obj types.Object) bool { + for use := range index.Uses(obj) { + if cur.Contains(use) { + return true + } + } + return false +} + +// enclosingFunc returns the cursor for the innermost Func{Decl,Lit} +// that encloses c, if any. +func enclosingFunc(c inspector.Cursor) (inspector.Cursor, bool) { + return moreiters.First(c.Enclosing((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil))) +} + +// usesBenchmarkNOnce reports whether a b.N loop should be modernized to b.Loop(). +// Only modernize loops that are: +// 1. Directly in a benchmark function (not in nested functions) +// - b.Loop() must be called in the same goroutine as the benchmark function +// - Function literals are often used with goroutines (go func(){...}) +// +// 2. The only b.N loop in that benchmark function +// - b.Loop() can only be called once per benchmark execution +// - Multiple calls result in "B.Loop called with timer stopped" error +func usesBenchmarkNOnce(c inspector.Cursor, info *types.Info) bool { + // Find the enclosing benchmark function + curFunc, ok := enclosingFunc(c) + if !ok { + return false + } + + // Check if this is actually a benchmark function + fdecl, ok := curFunc.Node().(*ast.FuncDecl) + if !ok { + return false // not in a function; or, inside a FuncLit + } + if !isBenchmarkFunc(fdecl) { + return false + } + + // Count b.N references in this benchmark function + bnRefCount := 0 + filter := []ast.Node{(*ast.SelectorExpr)(nil), (*ast.FuncLit)(nil)} + curFunc.Inspect(filter, func(cur inspector.Cursor) bool { + switch n := cur.Node().(type) { + case *ast.FuncLit: + return false // don't descend into nested function literals + case *ast.SelectorExpr: + if n.Sel.Name == "N" && typesinternal.IsPointerToNamed(info.TypeOf(n.X), "testing", "B") { + bnRefCount++ + } + } + return true + }) + + // Only modernize if there's exactly one b.N reference + return bnRefCount == 1 +} + +// isBenchmarkFunc reports whether f is a benchmark function. +func isBenchmarkFunc(f *ast.FuncDecl) bool { + return f.Recv == nil && + f.Name != nil && + f.Name.IsExported() && + strings.HasPrefix(f.Name.Name, "Benchmark") && + f.Type.Params != nil && + len(f.Type.Params.List) == 1 +} + +// isIncrementLoop reports whether loop has the form "for i := 0; ...; i++ { ... }", +// and if so, it returns the symbol for the index variable. +func isIncrementLoop(info *types.Info, loop *ast.ForStmt) *types.Var { + if assign, ok := loop.Init.(*ast.AssignStmt); ok && + assign.Tok == token.DEFINE && + len(assign.Rhs) == 1 && + isZeroIntLiteral(info, assign.Rhs[0]) && + is[*ast.IncDecStmt](loop.Post) && + loop.Post.(*ast.IncDecStmt).Tok == token.INC && + astutil.EqualSyntax(loop.Post.(*ast.IncDecStmt).X, assign.Lhs[0]) { + return info.Defs[assign.Lhs[0].(*ast.Ident)].(*types.Var) + } + return nil +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/doc.go new file mode 100644 index 00000000..5d8f93c1 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/doc.go @@ -0,0 +1,439 @@ +// Copyright 2024 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package modernize provides a suite of analyzers that suggest +simplifications to Go code, using modern language and library +features. + +Each diagnostic provides a fix. Our intent is that these fixes may +be safely applied en masse without changing the behavior of your +program. In some cases the suggested fixes are imperfect and may +lead to (for example) unused imports or unused local variables, +causing build breakage. However, these problems are generally +trivial to fix. We regard any modernizer whose fix changes program +behavior to have a serious bug and will endeavor to fix it. + +To apply all modernization fixes en masse, you can use the +following command: + + $ go run golang.org/x/tools/go/analysis/passes/modernize/cmd/modernize@latest -fix ./... + +(Do not use "go get -tool" to add gopls as a dependency of your +module; gopls commands must be built from their release branch.) + +If the tool warns of conflicting fixes, you may need to run it more +than once until it has applied all fixes cleanly. This command is +not an officially supported interface and may change in the future. + +Changes produced by this tool should be reviewed as usual before +being merged. In some cases, a loop may be replaced by a simple +function call, causing comments within the loop to be discarded. +Human judgment may be required to avoid losing comments of value. + +The modernize suite contains many analyzers. Diagnostics from some, +such as "any" (which replaces "interface{}" with "any" where it +is safe to do so), are particularly numerous. It may ease the burden of +code review to apply fixes in two steps, the first consisting only of +fixes from the "any" analyzer, the second consisting of all +other analyzers. This can be achieved using flags, as in this example: + + $ modernize -any=true -fix ./... + $ modernize -any=false -fix ./... + +# Analyzer appendclipped + +appendclipped: simplify append chains using slices.Concat + +The appendclipped analyzer suggests replacing chains of append calls with a +single call to slices.Concat, which was added in Go 1.21. For example, +append(append(s, s1...), s2...) would be simplified to slices.Concat(s, s1, s2). + +In the simple case of appending to a newly allocated slice, such as +append([]T(nil), s...), the analyzer suggests the more concise slices.Clone(s). +For byte slices, it will prefer bytes.Clone if the "bytes" package is +already imported. + +This fix is only applied when the base of the append tower is a +"clipped" slice, meaning its length and capacity are equal (e.g. +x[:0:0] or []T{}). This is to avoid changing program behavior by +eliminating intended side effects on the base slice's underlying +array. + +This analyzer is currently disabled by default as the +transformation does not preserve the nilness of the base slice in +all cases; see https://go.dev/issue/73557. + +# Analyzer bloop + +bloop: replace for-range over b.N with b.Loop + +The bloop analyzer suggests replacing benchmark loops of the form +`for i := 0; i < b.N; i++` or `for range b.N` with the more modern +`for b.Loop()`, which was added in Go 1.24. + +This change makes benchmark code more readable and also removes the need for +manual timer control, so any preceding calls to b.StartTimer, b.StopTimer, +or b.ResetTimer within the same function will also be removed. 
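An illustrative sketch, not part of the vendored diff: the benchmark shape before and after the bloop rewrite described above, using a hypothetical parse helper (b.Loop requires Go 1.24). The caveat paragraph that follows still applies.

    package bench

    import "testing"

    // parse is a hypothetical stand-in for the code under benchmark.
    func parse(s string) int { return len(s) }

    // Before: the three-clause form that bloop rewrites; the manual
    // ResetTimer call preceding the loop is deleted by the fix.
    func BenchmarkParseOld(b *testing.B) {
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            parse("hello, world")
        }
    }

    // After: the Go 1.24 form produced by the suggested fix.
    func BenchmarkParseNew(b *testing.B) {
        for b.Loop() {
            parse("hello, world")
        }
    }
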
+ +Caveats: The b.Loop() method is designed to prevent the compiler from +optimizing away the benchmark loop, which can occasionally result in +slower execution due to increased allocations in some specific cases. + +# Analyzer any + +any: replace interface{} with any + +The any analyzer suggests replacing uses of the empty interface type, +`interface{}`, with the `any` alias, which was introduced in Go 1.18. +This is a purely stylistic change that makes code more readable. + +# Analyzer errorsastype + +errorsastype: replace errors.As with errors.AsType[T] + +This analyzer suggests fixes to simplify uses of [errors.As] of +this form: + + var myerr *MyErr + if errors.As(err, &myerr) { + handle(myerr) + } + +by using the less error-prone generic [errors.AsType] function, +introduced in Go 1.26: + + if myerr, ok := errors.AsType[*MyErr](err); ok { + handle(myerr) + } + +The fix is only offered if the var declaration has the form shown and +there are no uses of myerr outside the if statement. + +# Analyzer fmtappendf + +fmtappendf: replace []byte(fmt.Sprintf) with fmt.Appendf + +The fmtappendf analyzer suggests replacing `[]byte(fmt.Sprintf(...))` with +`fmt.Appendf(nil, ...)`. This avoids the intermediate allocation of a string +by Sprintf, making the code more efficient. The suggestion also applies to +fmt.Sprint and fmt.Sprintln. + +# Analyzer forvar + +forvar: remove redundant re-declaration of loop variables + +The forvar analyzer removes unnecessary shadowing of loop variables. +Before Go 1.22, it was common to write `for _, x := range s { x := x ... }` +to create a fresh variable for each iteration. Go 1.22 changed the semantics +of `for` loops, making this pattern redundant. This analyzer removes the +unnecessary `x := x` statement. + +This fix only applies to `range` loops. + +# Analyzer mapsloop + +mapsloop: replace explicit loops over maps with calls to maps package + +The mapsloop analyzer replaces loops of the form + + for k, v := range x { m[k] = v } + +with a single call to a function from the `maps` package, added in Go 1.23. +Depending on the context, this could be `maps.Copy`, `maps.Insert`, +`maps.Clone`, or `maps.Collect`. + +The transformation to `maps.Clone` is applied conservatively, as it +preserves the nilness of the source map, which may be a subtle change in +behavior if the original code did not handle a nil map in the same way. + +# Analyzer minmax + +minmax: replace if/else statements with calls to min or max + +The minmax analyzer simplifies conditional assignments by suggesting the use +of the built-in `min` and `max` functions, introduced in Go 1.21. For example, + + if a < b { x = a } else { x = b } + +is replaced by + + x = min(a, b). + +This analyzer avoids making suggestions for floating-point types, +as the behavior of `min` and `max` with NaN values can differ from +the original if/else statement. 
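A minimal sketch, not part of the vendored diff, of the rewrite minmax suggests; smallerOld and smallerNew are hypothetical names, and the float/NaN caveat above is why the analyzer restricts itself to non-floating-point operands.

    package main

    import "fmt"

    // Before: the conditional assignment that minmax flags.
    func smallerOld(a, b int) int {
        var x int
        if a < b {
            x = a
        } else {
            x = b
        }
        return x
    }

    // After: the equivalent call to the Go 1.21 built-in.
    func smallerNew(a, b int) int {
        return min(a, b)
    }

    func main() {
        fmt.Println(smallerOld(3, 5), smallerNew(3, 5)) // prints: 3 3
    }
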
+ +# Analyzer newexpr + +newexpr: simplify code by using go1.26's new(expr) + +This analyzer finds declarations of functions of this form: + + func varOf(x int) *int { return &x } + +and suggests a fix to turn them into inlinable wrappers around +go1.26's built-in new(expr) function: + + func varOf(x int) *int { return new(x) } + +In addition, this analyzer suggests a fix for each call +to one of the functions before it is transformed, so that + + use(varOf(123)) + +is replaced by: + + use(new(123)) + +(Wrapper functions such as varOf are common when working with Go +serialization packages such as for JSON or protobuf, where pointers +are often used to express optionality.) + +# Analyzer omitzero + +omitzero: suggest replacing omitempty with omitzero for struct fields + +The omitzero analyzer identifies uses of the `omitempty` JSON struct tag on +fields that are themselves structs. The `omitempty` tag has no effect on +struct-typed fields. The analyzer offers two suggestions: either remove the +tag, or replace it with `omitzero` (added in Go 1.24), which correctly +omits the field if the struct value is zero. + +Replacing `omitempty` with `omitzero` is a change in behavior. The +original code would always encode the struct field, whereas the +modified code will omit it if it is a zero-value. + +# Analyzer rangeint + +rangeint: replace 3-clause for loops with for-range over integers + +The rangeint analyzer suggests replacing traditional for loops such +as + + for i := 0; i < n; i++ { ... } + +with the more idiomatic Go 1.22 style: + + for i := range n { ... } + +This transformation is applied only if (a) the loop variable is not +modified within the loop body and (b) the loop's limit expression +is not modified within the loop, as `for range` evaluates its +operand only once. + +# Analyzer reflecttypefor + +reflecttypefor: replace reflect.TypeOf(x) with TypeFor[T]() + +This analyzer suggests fixes to replace uses of reflect.TypeOf(x) with +reflect.TypeFor, introduced in go1.22, when the desired runtime type +is known at compile time, for example: + + reflect.TypeOf(uint32(0)) -> reflect.TypeFor[uint32]() + reflect.TypeOf((*ast.File)(nil)) -> reflect.TypeFor[*ast.File]() + +It also offers a fix to simplify the construction below, which uses +reflect.TypeOf to return the runtime type for an interface type, + + reflect.TypeOf((*io.Reader)(nil)).Elem() + +to: + + reflect.TypeFor[io.Reader]() + +No fix is offered in cases when the runtime type is dynamic, such as: + + var r io.Reader = ... + reflect.TypeOf(r) + +or when the operand has potential side effects. + +# Analyzer slicescontains + +slicescontains: replace loops with slices.Contains or slices.ContainsFunc + +The slicescontains analyzer simplifies loops that check for the existence of +an element in a slice. It replaces them with calls to `slices.Contains` or +`slices.ContainsFunc`, which were added in Go 1.21. + +If the expression for the target element has side effects, this +transformation will cause those effects to occur only once, not +once per tested slice element. + +# Analyzer slicesdelete + +slicesdelete: replace append-based slice deletion with slices.Delete + +The slicesdelete analyzer suggests replacing the idiom + + s = append(s[:i], s[j:]...) + +with the more explicit + + s = slices.Delete(s, i, j) + +introduced in Go 1.21. + +This analyzer is disabled by default. 
The `slices.Delete` function +zeros the elements between the new length and the old length of the +slice to prevent memory leaks, which is a subtle difference in +behavior compared to the append-based idiom; see https://go.dev/issue/73686. + +# Analyzer slicessort + +slicessort: replace sort.Slice with slices.Sort for basic types + +The slicessort analyzer simplifies sorting slices of basic ordered +types. It replaces + + sort.Slice(s, func(i, j int) bool { return s[i] < s[j] }) + +with the simpler `slices.Sort(s)`, which was added in Go 1.21. + +# Analyzer stditerators + +stditerators: use iterators instead of Len/At-style APIs + +This analyzer suggests a fix to replace each loop of the form: + + for i := 0; i < x.Len(); i++ { + use(x.At(i)) + } + +or its "for elem := range x.Len()" equivalent by a range loop over an +iterator offered by the same data type: + + for elem := range x.All() { + use(x.At(i) + } + +where x is one of various well-known types in the standard library. + +# Analyzer stringscutprefix + +stringscutprefix: replace HasPrefix/TrimPrefix with CutPrefix + +The stringscutprefix analyzer simplifies a common pattern where code first +checks for a prefix with `strings.HasPrefix` and then removes it with +`strings.TrimPrefix`. It replaces this two-step process with a single call +to `strings.CutPrefix`, introduced in Go 1.20. The analyzer also handles +the equivalent functions in the `bytes` package. + +For example, this input: + + if strings.HasPrefix(s, prefix) { + use(strings.TrimPrefix(s, prefix)) + } + +is fixed to: + + if after, ok := strings.CutPrefix(s, prefix); ok { + use(after) + } + +The analyzer also offers fixes to use CutSuffix in a similar way. +This input: + + if strings.HasSuffix(s, suffix) { + use(strings.TrimSuffix(s, suffix)) + } + +is fixed to: + + if before, ok := strings.CutSuffix(s, suffix); ok { + use(before) + } + +# Analyzer stringsseq + +stringsseq: replace ranging over Split/Fields with SplitSeq/FieldsSeq + +The stringsseq analyzer improves the efficiency of iterating over substrings. +It replaces + + for range strings.Split(...) + +with the more efficient + + for range strings.SplitSeq(...) + +which was added in Go 1.24 and avoids allocating a slice for the +substrings. The analyzer also handles strings.Fields and the +equivalent functions in the bytes package. + +# Analyzer stringsbuilder + +stringsbuilder: replace += with strings.Builder + +This analyzer replaces repeated string += string concatenation +operations with calls to Go 1.10's strings.Builder. + +For example: + + var s = "[" + for x := range seq { + s += x + s += "." + } + s += "]" + use(s) + +is replaced by: + + var s strings.Builder + s.WriteString("[") + for x := range seq { + s.WriteString(x) + s.WriteString(".") + } + s.WriteString("]") + use(s.String()) + +This avoids quadratic memory allocation and improves performance. + +The analyzer requires that all references to s except the final one +are += operations. To avoid warning about trivial cases, at least one +must appear within a loop. The variable s must be a local +variable, not a global or parameter. + +The sole use of the finished string must be the last reference to the +variable s. (It may appear within an intervening loop or function literal, +since even s.String() is called repeatedly, it does not allocate memory.) + +# Analyzer testingcontext + +testingcontext: replace context.WithCancel with t.Context in tests + +The testingcontext analyzer simplifies context management in tests. 
It +replaces the manual creation of a cancellable context, + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + +with a single call to t.Context(), which was added in Go 1.24. + +This change is only suggested if the `cancel` function is not used +for any other purpose. + +# Analyzer waitgroup + +waitgroup: replace wg.Add(1)/go/wg.Done() with wg.Go + +The waitgroup analyzer simplifies goroutine management with `sync.WaitGroup`. +It replaces the common pattern + + wg.Add(1) + go func() { + defer wg.Done() + ... + }() + +with a single call to + + wg.Go(func(){ ... }) + +which was added in Go 1.25. +*/ +package modernize diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/errorsastype.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/errorsastype.go new file mode 100644 index 00000000..b6387ad8 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/errorsastype.go @@ -0,0 +1,243 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "go/ast" + "go/token" + "go/types" + + "fmt" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ast/edge" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/goplsexport" + "golang.org/x/tools/internal/refactor" + "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +var errorsastypeAnalyzer = &analysis.Analyzer{ + Name: "errorsastype", + Doc: analysisinternal.MustExtractDoc(doc, "errorsastype"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#errorsastype", + Requires: []*analysis.Analyzer{generated.Analyzer, typeindexanalyzer.Analyzer}, + Run: errorsastype, +} + +func init() { + // Export to gopls until this is a published modernizer. + goplsexport.ErrorsAsTypeModernizer = errorsastypeAnalyzer +} + +// errorsastype offers a fix to replace error.As with the newer +// errors.AsType[T] following this pattern: +// +// var myerr *MyErr +// if errors.As(err, &myerr) { ... } +// +// => +// +// if myerr, ok := errors.AsType[*MyErr](err); ok { ... } +// +// (In principle several of these can then be chained using if/else, +// but we don't attempt that.) +// +// We offer the fix only within an if statement, but not within a +// switch case such as: +// +// var myerr *MyErr +// switch { +// case errors.As(err, &myerr): +// } +// +// because the transformation in that case would be ungainly. +// +// Note that the cmd/vet suite includes the "errorsas" analyzer, which +// detects actual mistakes in the use of errors.As. This logic does +// not belong in errorsas because the problems it fixes are merely +// stylistic. +// +// TODO(adonovan): support more cases: +// +// - Negative cases +// var myerr E +// if !errors.As(err, &myerr) { ... } +// => +// myerr, ok := errors.AsType[E](err) +// if !ok { ... } +// +// - if myerr := new(E); errors.As(err, myerr); { ... } +// +// - if errors.As(err, myerr) && othercond { ... 
} +func errorsastype(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + var ( + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + ) + + for curCall := range index.Calls(index.Object("errors", "As")) { + call := curCall.Node().(*ast.CallExpr) + if len(call.Args) < 2 { + continue // spread call: errors.As(pair()) + } + + v, curDeclStmt := canUseErrorsAsType(info, index, curCall) + if v == nil { + continue + } + + file := astutil.EnclosingFile(curDeclStmt) + if !fileUses(info, file, "go1.26") { + continue // errors.AsType is too new + } + + // Locate identifier "As" in errors.As. + var asIdent *ast.Ident + switch n := ast.Unparen(call.Fun).(type) { + case *ast.Ident: + asIdent = n // "errors" was dot-imported + case *ast.SelectorExpr: + asIdent = n.Sel + default: + panic("no Ident for errors.As") + } + + // Format the type as valid Go syntax. + // TODO(adonovan): fix: FileQualifier needs to respect + // visibility at the current point, and either fail + // or edit the imports as needed. + // TODO(adonovan): fix: TypeString is not a sound way + // to print types as Go syntax as it does not respect + // symbol visibility, etc. We need something loosely + // integrated with FileQualifier that accumulates + // import edits, and may fail (e.g. for unexported + // type or field names from other packages). + // See https://go.dev/issues/75604. + qual := typesinternal.FileQualifier(file, pass.Pkg) + errtype := types.TypeString(v.Type(), qual) + + // Choose a name for the "ok" variable. + okName := "ok" + if okVar := lookup(info, curCall, "ok"); okVar != nil { + // The name 'ok' is already declared, but + // don't choose a fresh name unless okVar + // is also used within the if-statement. + curIf := curCall.Parent() + for curUse := range index.Uses(okVar) { + if curIf.Contains(curUse) { + scope := info.Scopes[curIf.Node().(*ast.IfStmt)] + okName = refactor.FreshName(scope, v.Pos(), "ok") + break + } + } + } + + pass.Report(analysis.Diagnostic{ + Pos: call.Fun.Pos(), + End: call.Fun.End(), + Message: fmt.Sprintf("errors.As can be simplified using AsType[%s]", errtype), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Replace errors.As with AsType[%s]", errtype), + TextEdits: append( + // delete "var myerr *MyErr" + refactor.DeleteStmt(pass.Fset.File(call.Fun.Pos()), curDeclStmt), + // if errors.As (err, &myerr) { ... } + // ------------- -------------- -------- ---- + // if myerr, ok := errors.AsType[*MyErr](err ); ok { ... } + analysis.TextEdit{ + // insert "myerr, ok := " + Pos: call.Pos(), + End: call.Pos(), + NewText: fmt.Appendf(nil, "%s, %s := ", v.Name(), okName), + }, + analysis.TextEdit{ + // replace As with AsType[T] + Pos: asIdent.Pos(), + End: asIdent.End(), + NewText: fmt.Appendf(nil, "AsType[%s]", errtype), + }, + analysis.TextEdit{ + // delete ", &myerr" + Pos: call.Args[0].End(), + End: call.Args[1].End(), + }, + analysis.TextEdit{ + // insert "; ok" + Pos: call.End(), + End: call.End(), + NewText: fmt.Appendf(nil, "; %s", okName), + }, + ), + }}, + }) + } + return nil, nil +} + +// canUseErrorsAsType reports whether curCall is a call to +// errors.As beneath an if statement, preceded by a +// declaration of the typed error var. The var must not be +// used outside the if statement. 
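+//
+// For example (an illustrative caller, not code from this package), the
+// following shape satisfies those conditions:
+//
+//	var perr *fs.PathError
+//	if errors.As(err, &perr) {
+//		use(perr)
+//	}
+//
+// whereas a variable that is also read after the if statement, or one
+// declared with an initializer, is rejected.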
+func canUseErrorsAsType(info *types.Info, index *typeindex.Index, curCall inspector.Cursor) (_ *types.Var, _ inspector.Cursor) { + if !astutil.IsChildOf(curCall, edge.IfStmt_Cond) { + return // not beneath if statement + } + var ( + curIfStmt = curCall.Parent() + ifStmt = curIfStmt.Node().(*ast.IfStmt) + ) + if ifStmt.Init != nil { + return // if statement already has an init part + } + unary, ok := curCall.Node().(*ast.CallExpr).Args[1].(*ast.UnaryExpr) + if !ok || unary.Op != token.AND { + return // 2nd arg is not &var + } + id, ok := unary.X.(*ast.Ident) + if !ok { + return // not a simple ident (local var) + } + v := info.Uses[id].(*types.Var) + curDef, ok := index.Def(v) + if !ok { + return // var is not local (e.g. dot-imported) + } + // Have: if errors.As(err, &v) { ... } + + // Reject if v is used outside (before or after) the + // IfStmt, since that will become its new scope. + for curUse := range index.Uses(v) { + if !curIfStmt.Contains(curUse) { + return // v used before/after if statement + } + } + if !astutil.IsChildOf(curDef, edge.ValueSpec_Names) { + return // v not declared by "var v T" + } + var ( + curSpec = curDef.Parent() // ValueSpec + curDecl = curSpec.Parent() // GenDecl + spec = curSpec.Node().(*ast.ValueSpec) + ) + if len(spec.Names) != 1 || len(spec.Values) != 0 || + len(curDecl.Node().(*ast.GenDecl).Specs) != 1 { + return // not a simple "var v T" decl + } + + // Have: + // var v *MyErr + // ... + // if errors.As(err, &v) { ... } + // with no uses of v outside the IfStmt. + return v, curDecl.Parent() // DeclStmt +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/fmtappendf.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/fmtappendf.go new file mode 100644 index 00000000..f2e53605 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/fmtappendf.go @@ -0,0 +1,115 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "fmt" + "go/ast" + "go/types" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/edge" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +var FmtAppendfAnalyzer = &analysis.Analyzer{ + Name: "fmtappendf", + Doc: analysisinternal.MustExtractDoc(doc, "fmtappendf"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + typeindexanalyzer.Analyzer, + }, + Run: fmtappendf, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#fmtappendf", +} + +// The fmtappend function replaces []byte(fmt.Sprintf(...)) by +// fmt.Appendf(nil, ...), and similarly for Sprint, Sprintln. +func fmtappendf(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + index := pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + for _, fn := range []types.Object{ + index.Object("fmt", "Sprintf"), + index.Object("fmt", "Sprintln"), + index.Object("fmt", "Sprint"), + } { + for curCall := range index.Calls(fn) { + call := curCall.Node().(*ast.CallExpr) + if ek, idx := curCall.ParentEdge(); ek == edge.CallExpr_Args && idx == 0 { + // Is parent a T(fmt.SprintX(...)) conversion? 
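+				// Illustrative sketch of the rewrite this pass performs,
+				// assuming the conversion type is []byte:
+				//
+				//	[]byte(fmt.Sprintf("%d-%s", i, s))
+				//
+				// becomes
+				//
+				//	fmt.Appendf(nil, "%d-%s", i, s)
+				//
+				// avoiding the intermediate string allocation.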
+ conv := curCall.Parent().Node().(*ast.CallExpr) + tv := pass.TypesInfo.Types[conv.Fun] + if tv.IsType() && types.Identical(tv.Type, byteSliceType) && + fileUses(pass.TypesInfo, astutil.EnclosingFile(curCall), "go1.19") { + // Have: []byte(fmt.SprintX(...)) + + // Find "Sprint" identifier. + var id *ast.Ident + switch e := ast.Unparen(call.Fun).(type) { + case *ast.SelectorExpr: + id = e.Sel // "fmt.Sprint" + case *ast.Ident: + id = e // "Sprint" after `import . "fmt"` + } + + old, new := fn.Name(), strings.Replace(fn.Name(), "Sprint", "Append", 1) + edits := []analysis.TextEdit{ + { + // delete "[]byte(" + Pos: conv.Pos(), + End: conv.Lparen + 1, + }, + { + // remove ")" + Pos: conv.Rparen, + End: conv.Rparen + 1, + }, + { + Pos: id.Pos(), + End: id.End(), + NewText: []byte(new), + }, + { + Pos: call.Lparen + 1, + NewText: []byte("nil, "), + }, + } + if len(conv.Args) == 1 { + arg := conv.Args[0] + // Determine if we have T(fmt.SprintX(...)). If so, delete the non-args + // that come before the right parenthesis. Leaving an + // extra comma here produces invalid code. (See + // golang/go#74709) + if arg.End() < conv.Rparen { + edits = append(edits, analysis.TextEdit{ + Pos: arg.End(), + End: conv.Rparen, + }) + } + } + pass.Report(analysis.Diagnostic{ + Pos: conv.Pos(), + End: conv.End(), + Message: fmt.Sprintf("Replace []byte(fmt.%s...) with fmt.%s", old, new), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Replace []byte(fmt.%s...) with fmt.%s", old, new), + TextEdits: edits, + }}, + }) + } + } + } + } + return nil, nil +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/forvar.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/forvar.go new file mode 100644 index 00000000..76e3a8a7 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/forvar.go @@ -0,0 +1,94 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/refactor" +) + +var ForVarAnalyzer = &analysis.Analyzer{ + Name: "forvar", + Doc: analysisinternal.MustExtractDoc(doc, "forvar"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + }, + Run: forvar, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#forvar", +} + +// forvar offers to fix unnecessary copying of a for variable +// +// for _, x := range foo { +// x := x // offer to remove this superfluous assignment +// } +// +// Prerequisites: +// First statement in a range loop has to be := +// where the two idents are the same, +// and the ident is defined (:=) as a variable in the for statement. 
+// (Note that this 'fix' does not work for three clause loops +// because the Go specification says "The variable used by each subsequent iteration +// is declared implicitly before executing the post statement and initialized to the +// value of the previous iteration's variable at that moment.") +func forvar(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.22") { + for curLoop := range curFile.Preorder((*ast.RangeStmt)(nil)) { + loop := curLoop.Node().(*ast.RangeStmt) + if loop.Tok != token.DEFINE { + continue + } + isLoopVarRedecl := func(assign *ast.AssignStmt) bool { + for i, lhs := range assign.Lhs { + if !(astutil.EqualSyntax(lhs, assign.Rhs[i]) && + (astutil.EqualSyntax(lhs, loop.Key) || astutil.EqualSyntax(lhs, loop.Value))) { + return false + } + } + return true + } + // Have: for k, v := range x { stmts } + // + // Delete the prefix of stmts that are + // of the form k := k; v := v; k, v := k, v; v, k := v, k. + for _, stmt := range loop.Body.List { + if assign, ok := stmt.(*ast.AssignStmt); ok && + assign.Tok == token.DEFINE && + len(assign.Lhs) == len(assign.Rhs) && + isLoopVarRedecl(assign) { + + curStmt, _ := curLoop.FindNode(stmt) + edits := refactor.DeleteStmt(pass.Fset.File(stmt.Pos()), curStmt) + if len(edits) > 0 { + pass.Report(analysis.Diagnostic{ + Pos: stmt.Pos(), + End: stmt.End(), + Message: "copying variable is unneeded", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Remove unneeded redeclaration", + TextEdits: edits, + }}, + }) + } + } else { + break // stop at first other statement + } + } + } + } + return nil, nil +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/maps.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/maps.go new file mode 100644 index 00000000..3072cf6f --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/maps.go @@ -0,0 +1,280 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +// This file defines modernizers that use the "maps" package. + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/refactor" + "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal" +) + +var MapsLoopAnalyzer = &analysis.Analyzer{ + Name: "mapsloop", + Doc: analysisinternal.MustExtractDoc(doc, "mapsloop"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + }, + Run: mapsloop, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#mapsloop", +} + +// The mapsloop pass offers to simplify a loop of map insertions: +// +// for k, v := range x { +// m[k] = v +// } +// +// by a call to go1.23's maps package. 
There are four variants, the +// product of two axes: whether the source x is a map or an iter.Seq2, +// and whether the destination m is a newly created map: +// +// maps.Copy(m, x) (x is map) +// maps.Insert(m, x) (x is iter.Seq2) +// m = maps.Clone(x) (x is a non-nil map, m is a new map) +// m = maps.Collect(x) (x is iter.Seq2, m is a new map) +// +// A map is newly created if the preceding statement has one of these +// forms, where M is a map type: +// +// m = make(M) +// m = M{} +func mapsloop(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + // Skip the analyzer in packages where its + // fixes would create an import cycle. + if within(pass, "maps", "bytes", "runtime") { + return nil, nil + } + + info := pass.TypesInfo + + // check is called for each statement of this form: + // for k, v := range x { m[k] = v } + check := func(file *ast.File, curRange inspector.Cursor, assign *ast.AssignStmt, m, x ast.Expr) { + + // Is x a map or iter.Seq2? + tx := types.Unalias(info.TypeOf(x)) + var xmap bool + switch typeparams.CoreType(tx).(type) { + case *types.Map: + xmap = true + + case *types.Signature: + k, v, ok := assignableToIterSeq2(tx) + if !ok { + return // a named isomer of Seq2 + } + xmap = false + + // Record in tx the unnamed map[K]V type + // derived from the yield function. + // This is the type of maps.Collect(x). + tx = types.NewMap(k, v) + + default: + return // e.g. slice, channel (or no core type!) + } + + // Is the preceding statement of the form + // m = make(M) or M{} + // and can we replace its RHS with slices.{Clone,Collect}? + // + // Beware: if x may be nil, we cannot use Clone as it preserves nilness. + var mrhs ast.Expr // make(M) or M{}, or nil + if curPrev, ok := curRange.PrevSibling(); ok { + if assign, ok := curPrev.Node().(*ast.AssignStmt); ok && + len(assign.Lhs) == 1 && + len(assign.Rhs) == 1 && + astutil.EqualSyntax(assign.Lhs[0], m) { + + // Have: m = rhs; for k, v := range x { m[k] = v } + var newMap bool + rhs := assign.Rhs[0] + switch rhs := ast.Unparen(rhs).(type) { + case *ast.CallExpr: + if id, ok := ast.Unparen(rhs.Fun).(*ast.Ident); ok && + info.Uses[id] == builtinMake { + // Have: m = make(...) + newMap = true + } + case *ast.CompositeLit: + if len(rhs.Elts) == 0 { + // Have m = M{} + newMap = true + } + } + + // Take care not to change type of m's RHS expression. + if newMap { + trhs := info.TypeOf(rhs) + + // Inv: tx is the type of maps.F(x) + // - maps.Clone(x) has the same type as x. + // - maps.Collect(x) returns an unnamed map type. + + if assign.Tok == token.DEFINE { + // DEFINE (:=): we must not + // change the type of RHS. + if types.Identical(tx, trhs) { + mrhs = rhs + } + } else { + // ASSIGN (=): the types of LHS + // and RHS may differ in namedness. + if types.AssignableTo(tx, trhs) { + mrhs = rhs + } + } + + // Temporarily disable the transformation to the + // (nil-preserving) maps.Clone until we can prove + // that x is non-nil. This is rarely possible, + // and may require control flow analysis + // (e.g. a dominating "if len(x)" check). + // See #71844. + if xmap { + mrhs = nil + } + } + } + } + + // Choose function. + var funcName string + if mrhs != nil { + funcName = cond(xmap, "Clone", "Collect") + } else { + funcName = cond(xmap, "Copy", "Insert") + } + + // Report diagnostic, and suggest fix. 
+ rng := curRange.Node() + prefix, importEdits := refactor.AddImport(info, file, "maps", "maps", funcName, rng.Pos()) + var ( + newText []byte + start, end token.Pos + ) + if mrhs != nil { + // Replace assignment and loop with expression. + // + // m = make(...) + // for k, v := range x { /* comments */ m[k] = v } + // + // -> + // + // /* comments */ + // m = maps.Copy(x) + curPrev, _ := curRange.PrevSibling() + start, end = curPrev.Node().Pos(), rng.End() + newText = fmt.Appendf(nil, "%s%s = %s%s(%s)", + allComments(file, start, end), + astutil.Format(pass.Fset, m), + prefix, + funcName, + astutil.Format(pass.Fset, x)) + } else { + // Replace loop with call statement. + // + // for k, v := range x { /* comments */ m[k] = v } + // + // -> + // + // /* comments */ + // maps.Copy(m, x) + start, end = rng.Pos(), rng.End() + newText = fmt.Appendf(nil, "%s%s%s(%s, %s)", + allComments(file, start, end), + prefix, + funcName, + astutil.Format(pass.Fset, m), + astutil.Format(pass.Fset, x)) + } + pass.Report(analysis.Diagnostic{ + Pos: assign.Lhs[0].Pos(), + End: assign.Lhs[0].End(), + Message: "Replace m[k]=v loop with maps." + funcName, + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Replace m[k]=v loop with maps." + funcName, + TextEdits: append(importEdits, []analysis.TextEdit{{ + Pos: start, + End: end, + NewText: newText, + }}...), + }}, + }) + + } + + // Find all range loops around m[k] = v. + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.23") { + file := curFile.Node().(*ast.File) + + for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) { + rng := curRange.Node().(*ast.RangeStmt) + + if rng.Tok == token.DEFINE && + rng.Key != nil && + rng.Value != nil && + isAssignBlock(rng.Body) { + // Have: for k, v := range x { lhs = rhs } + + assign := rng.Body.List[0].(*ast.AssignStmt) + if index, ok := assign.Lhs[0].(*ast.IndexExpr); ok && + astutil.EqualSyntax(rng.Key, index.Index) && + astutil.EqualSyntax(rng.Value, assign.Rhs[0]) && + is[*types.Map](typeparams.CoreType(info.TypeOf(index.X))) && + types.Identical(info.TypeOf(index), info.TypeOf(rng.Value)) { // m[k], v + + // Have: for k, v := range x { m[k] = v } + // where there is no implicit conversion. + check(file, curRange, assign, index.X, rng.X) + } + } + } + } + return nil, nil +} + +// assignableToIterSeq2 reports whether t is assignable to +// iter.Seq[K, V] and returns K and V if so. +func assignableToIterSeq2(t types.Type) (k, v types.Type, ok bool) { + // The only named type assignable to iter.Seq2 is iter.Seq2. + if is[*types.Named](t) { + if !typesinternal.IsTypeNamed(t, "iter", "Seq2") { + return + } + t = t.Underlying() + } + + if t, ok := t.(*types.Signature); ok { + // func(yield func(K, V) bool)? + if t.Params().Len() == 1 && t.Results().Len() == 0 { + if yield, ok := t.Params().At(0).Type().(*types.Signature); ok { // sic, no Underlying/CoreType + if yield.Params().Len() == 2 && + yield.Results().Len() == 1 && + types.Identical(yield.Results().At(0).Type(), builtinBool.Type()) { + return yield.Params().At(0).Type(), yield.Params().At(1).Type(), true + } + } + } + } + return +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/minmax.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/minmax.go new file mode 100644 index 00000000..7ebf8373 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/minmax.go @@ -0,0 +1,440 @@ +// Copyright 2024 The Go Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/edge" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +var MinMaxAnalyzer = &analysis.Analyzer{ + Name: "minmax", + Doc: analysisinternal.MustExtractDoc(doc, "minmax"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + typeindexanalyzer.Analyzer, + }, + Run: minmax, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#minmax", +} + +// The minmax pass replaces if/else statements with calls to min or max, +// and removes user-defined min/max functions that are equivalent to built-ins. +// +// If/else replacement patterns: +// +// 1. if a < b { x = a } else { x = b } => x = min(a, b) +// 2. x = a; if a < b { x = b } => x = max(a, b) +// +// Pattern 1 requires that a is not NaN, and pattern 2 requires that b +// is not NaN. Since this is hard to prove, we reject floating-point +// numbers. +// +// Function removal: +// User-defined min/max functions are suggested for removal if they may +// be safely replaced by their built-in namesake. +// +// Variants: +// - all four ordered comparisons +// - "x := a" or "x = a" or "var x = a" in pattern 2 +// - "x < b" or "a < b" in pattern 2 +func minmax(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + // Check for user-defined min/max functions that can be removed + checkUserDefinedMinMax(pass) + + // check is called for all statements of this form: + // if a < b { lhs = rhs } + check := func(file *ast.File, curIfStmt inspector.Cursor, compare *ast.BinaryExpr) { + var ( + ifStmt = curIfStmt.Node().(*ast.IfStmt) + tassign = ifStmt.Body.List[0].(*ast.AssignStmt) + a = compare.X + b = compare.Y + lhs = tassign.Lhs[0] + rhs = tassign.Rhs[0] + sign = isInequality(compare.Op) + + // callArg formats a call argument, preserving comments from [start, end).
+ callArg = func(arg ast.Expr, start, end token.Pos) string { + comments := allComments(file, start, end) + return cond(arg == b, ", ", "") + // second argument needs a comma + cond(comments != "", "\n", "") + // comments need their own line + comments + + astutil.Format(pass.Fset, arg) + } + ) + + if fblock, ok := ifStmt.Else.(*ast.BlockStmt); ok && isAssignBlock(fblock) { + fassign := fblock.List[0].(*ast.AssignStmt) + + // Have: if a < b { lhs = rhs } else { lhs2 = rhs2 } + lhs2 := fassign.Lhs[0] + rhs2 := fassign.Rhs[0] + + // For pattern 1, check that: + // - lhs = lhs2 + // - {rhs,rhs2} = {a,b} + if astutil.EqualSyntax(lhs, lhs2) { + if astutil.EqualSyntax(rhs, a) && astutil.EqualSyntax(rhs2, b) { + sign = +sign + } else if astutil.EqualSyntax(rhs2, a) && astutil.EqualSyntax(rhs, b) { + sign = -sign + } else { + return + } + + sym := cond(sign < 0, "min", "max") + + if !is[*types.Builtin](lookup(pass.TypesInfo, curIfStmt, sym)) { + return // min/max function is shadowed + } + + // pattern 1 + // + // TODO(adonovan): if lhs is declared "var lhs T" on preceding line, + // simplify the whole thing to "lhs := min(a, b)". + pass.Report(analysis.Diagnostic{ + // Highlight the condition a < b. + Pos: compare.Pos(), + End: compare.End(), + Message: fmt.Sprintf("if/else statement can be modernized using %s", sym), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Replace if statement with %s", sym), + TextEdits: []analysis.TextEdit{{ + // Replace IfStmt with lhs = min(a, b). + Pos: ifStmt.Pos(), + End: ifStmt.End(), + NewText: fmt.Appendf(nil, "%s = %s(%s%s)", + astutil.Format(pass.Fset, lhs), + sym, + callArg(a, ifStmt.Pos(), ifStmt.Else.Pos()), + callArg(b, ifStmt.Else.Pos(), ifStmt.End()), + ), + }}, + }}, + }) + } + + } else if prev, ok := curIfStmt.PrevSibling(); ok && isSimpleAssign(prev.Node()) && ifStmt.Else == nil { + fassign := prev.Node().(*ast.AssignStmt) + + // Have: lhs0 = rhs0; if a < b { lhs = rhs } + // + // For pattern 2, check that + // - lhs = lhs0 + // - {a,b} = {rhs,rhs0} or {rhs,lhs0} + // The replacement must use rhs0 not lhs0 though. + // For example, we accept this variant: + // lhs = x; if lhs < y { lhs = y } => lhs = min(x, y), not min(lhs, y) + // + // TODO(adonovan): accept "var lhs0 = rhs0" form too. + lhs0 := fassign.Lhs[0] + rhs0 := fassign.Rhs[0] + + if astutil.EqualSyntax(lhs, lhs0) { + if astutil.EqualSyntax(rhs, a) && (astutil.EqualSyntax(rhs0, b) || astutil.EqualSyntax(lhs0, b)) { + sign = +sign + } else if (astutil.EqualSyntax(rhs0, a) || astutil.EqualSyntax(lhs0, a)) && astutil.EqualSyntax(rhs, b) { + sign = -sign + } else { + return + } + sym := cond(sign < 0, "min", "max") + + if !is[*types.Builtin](lookup(pass.TypesInfo, curIfStmt, sym)) { + return // min/max function is shadowed + } + + // Permit lhs0 to stand for rhs0 in the matching, + // but don't actually reduce to lhs0 = min(lhs0, rhs) + // since the "=" could be a ":=". Use min(rhs0, rhs). + if astutil.EqualSyntax(lhs0, a) { + a = rhs0 + } else if astutil.EqualSyntax(lhs0, b) { + b = rhs0 + } + + // pattern 2 + pass.Report(analysis.Diagnostic{ + // Highlight the condition a < b. + Pos: compare.Pos(), + End: compare.End(), + Message: fmt.Sprintf("if statement can be modernized using %s", sym), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Replace if/else with %s", sym), + TextEdits: []analysis.TextEdit{{ + Pos: fassign.Pos(), + End: ifStmt.End(), + // Replace "x := a; if ... {}" with "x = min(...)", preserving comments. 
+ NewText: fmt.Appendf(nil, "%s %s %s(%s%s)", + astutil.Format(pass.Fset, lhs), + fassign.Tok.String(), + sym, + callArg(a, fassign.Pos(), ifStmt.Pos()), + callArg(b, ifStmt.Pos(), ifStmt.End()), + ), + }}, + }}, + }) + } + } + } + + // Find all "if a < b { lhs = rhs }" statements. + info := pass.TypesInfo + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for curFile := range filesUsing(inspect, info, "go1.21") { + astFile := curFile.Node().(*ast.File) + for curIfStmt := range curFile.Preorder((*ast.IfStmt)(nil)) { + ifStmt := curIfStmt.Node().(*ast.IfStmt) + + // Don't bother handling "if a < b { lhs = rhs }" when it appears + // as the "else" branch of another if-statement. + // if cond { ... } else if a < b { lhs = rhs } + // (This case would require introducing another block + // if cond { ... } else { if a < b { lhs = rhs } } + // and checking that there is no following "else".) + if astutil.IsChildOf(curIfStmt, edge.IfStmt_Else) { + continue + } + + if compare, ok := ifStmt.Cond.(*ast.BinaryExpr); ok && + ifStmt.Init == nil && + isInequality(compare.Op) != 0 && + isAssignBlock(ifStmt.Body) { + // a blank var has no type. + if tLHS := info.TypeOf(ifStmt.Body.List[0].(*ast.AssignStmt).Lhs[0]); tLHS != nil && !maybeNaN(tLHS) { + // Have: if a < b { lhs = rhs } + check(astFile, curIfStmt, compare) + } + } + } + } + return nil, nil +} + +// allComments collects all the comments from start to end. +func allComments(file *ast.File, start, end token.Pos) string { + var buf strings.Builder + for co := range astutil.Comments(file, start, end) { + _, _ = fmt.Fprintf(&buf, "%s\n", co.Text) + } + return buf.String() +} + +// isInequality reports non-zero if tok is one of < <= => >: +// +1 for > and -1 for <. +func isInequality(tok token.Token) int { + switch tok { + case token.LEQ, token.LSS: + return -1 + case token.GEQ, token.GTR: + return +1 + } + return 0 +} + +// isAssignBlock reports whether b is a block of the form { lhs = rhs }. +func isAssignBlock(b *ast.BlockStmt) bool { + if len(b.List) != 1 { + return false + } + // Inv: the sole statement cannot be { lhs := rhs }. + return isSimpleAssign(b.List[0]) +} + +// isSimpleAssign reports whether n has the form "lhs = rhs" or "lhs := rhs". +func isSimpleAssign(n ast.Node) bool { + assign, ok := n.(*ast.AssignStmt) + return ok && + (assign.Tok == token.ASSIGN || assign.Tok == token.DEFINE) && + len(assign.Lhs) == 1 && + len(assign.Rhs) == 1 +} + +// maybeNaN reports whether t is (or may be) a floating-point type. +func maybeNaN(t types.Type) bool { + // For now, we rely on core types. + // TODO(adonovan): In the post-core-types future, + // follow the approach of types.Checker.applyTypeFunc. + t = typeparams.CoreType(t) + if t == nil { + return true // fail safe + } + if basic, ok := t.(*types.Basic); ok && basic.Info()&types.IsFloat != 0 { + return true + } + return false +} + +// checkUserDefinedMinMax looks for user-defined min/max functions that are +// equivalent to the built-in functions and suggests removing them. 
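+//
+// For example (illustrative only), a package-level helper such as
+//
+//	func min(a, b int) int {
+//		if a < b {
+//			return a
+//		}
+//		return b
+//	}
+//
+// is reported as removable in favor of the built-in min (Go 1.21+), while a
+// float64 variant is kept because of NaN handling.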
+func checkUserDefinedMinMax(pass *analysis.Pass) { + index := pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + + // Look up min and max functions by name in package scope + for _, funcName := range []string{"min", "max"} { + if fn, ok := pass.Pkg.Scope().Lookup(funcName).(*types.Func); ok { + // Use typeindex to get the FuncDecl directly + if def, ok := index.Def(fn); ok { + decl := def.Parent().Node().(*ast.FuncDecl) + // Check if this function matches the built-in min/max signature and behavior + if canUseBuiltinMinMax(fn, decl.Body) { + // Expand to include leading doc comment + pos := decl.Pos() + if docs := astutil.DocComment(decl); docs != nil { + pos = docs.Pos() + } + + pass.Report(analysis.Diagnostic{ + Pos: decl.Pos(), + End: decl.End(), + Message: fmt.Sprintf("user-defined %s function is equivalent to built-in %s and can be removed", funcName, funcName), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Remove user-defined %s function", funcName), + TextEdits: []analysis.TextEdit{{ + Pos: pos, + End: decl.End(), + }}, + }}, + }) + } + } + } + } +} + +// canUseBuiltinMinMax reports whether it is safe to replace a call +// to this min or max function by its built-in namesake. +func canUseBuiltinMinMax(fn *types.Func, body *ast.BlockStmt) bool { + sig := fn.Type().(*types.Signature) + + // Only consider the most common case: exactly 2 parameters + if sig.Params().Len() != 2 { + return false + } + + // Check if any parameter might be floating-point + for param := range sig.Params().Variables() { + if maybeNaN(param.Type()) { + return false // Don't suggest removal for float types due to NaN handling + } + } + + // Must have exactly one return value + if sig.Results().Len() != 1 { + return false + } + + // Check that the function body implements the expected min/max logic + if body == nil { + return false + } + + return hasMinMaxLogic(body, fn.Name()) +} + +// hasMinMaxLogic checks if the function body implements simple min/max logic. +func hasMinMaxLogic(body *ast.BlockStmt, funcName string) bool { + // Pattern 1: Single if/else statement + if len(body.List) == 1 { + if ifStmt, ok := body.List[0].(*ast.IfStmt); ok { + // Get the "false" result from the else block + if elseBlock, ok := ifStmt.Else.(*ast.BlockStmt); ok && len(elseBlock.List) == 1 { + if elseRet, ok := elseBlock.List[0].(*ast.ReturnStmt); ok && len(elseRet.Results) == 1 { + return checkMinMaxPattern(ifStmt, elseRet.Results[0], funcName) + } + } + } + } + + // Pattern 2: if statement followed by return + if len(body.List) == 2 { + if ifStmt, ok := body.List[0].(*ast.IfStmt); ok && ifStmt.Else == nil { + if retStmt, ok := body.List[1].(*ast.ReturnStmt); ok && len(retStmt.Results) == 1 { + return checkMinMaxPattern(ifStmt, retStmt.Results[0], funcName) + } + } + } + + return false +} + +// checkMinMaxPattern checks if an if statement implements min/max logic. 
+// ifStmt: the if statement to check +// falseResult: the expression returned when the condition is false +// funcName: "min" or "max" +func checkMinMaxPattern(ifStmt *ast.IfStmt, falseResult ast.Expr, funcName string) bool { + // Must have condition with comparison + cmp, ok := ifStmt.Cond.(*ast.BinaryExpr) + if !ok { + return false + } + + // Check if then branch returns one of the compared values + if len(ifStmt.Body.List) != 1 { + return false + } + + thenRet, ok := ifStmt.Body.List[0].(*ast.ReturnStmt) + if !ok || len(thenRet.Results) != 1 { + return false + } + + // Use the same logic as the existing minmax analyzer + sign := isInequality(cmp.Op) + if sign == 0 { + return false // Not a comparison operator + } + + t := thenRet.Results[0] // "true" result + f := falseResult // "false" result + x := cmp.X // left operand + y := cmp.Y // right operand + + // Check operand order and adjust sign accordingly + if astutil.EqualSyntax(t, x) && astutil.EqualSyntax(f, y) { + sign = +sign + } else if astutil.EqualSyntax(t, y) && astutil.EqualSyntax(f, x) { + sign = -sign + } else { + return false + } + + // Check if the sign matches the function name + return cond(sign < 0, "min", "max") == funcName +} + +// -- utils -- + +func is[T any](x any) bool { + _, ok := x.(T) + return ok +} + +func cond[T any](cond bool, t, f T) T { + if cond { + return t + } else { + return f + } +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/modernize.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/modernize.go new file mode 100644 index 00000000..59adee12 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/modernize.go @@ -0,0 +1,161 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + _ "embed" + "go/ast" + "go/constant" + "go/format" + "go/token" + "go/types" + "iter" + "regexp" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ast/edge" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/moreiters" + "golang.org/x/tools/internal/stdlib" + "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/versions" +) + +//go:embed doc.go +var doc string + +// Suite lists all modernize analyzers. +var Suite = []*analysis.Analyzer{ + AnyAnalyzer, + // AppendClippedAnalyzer, // not nil-preserving! + BLoopAnalyzer, + FmtAppendfAnalyzer, + ForVarAnalyzer, + MapsLoopAnalyzer, + MinMaxAnalyzer, + NewExprAnalyzer, + OmitZeroAnalyzer, + RangeIntAnalyzer, + ReflectTypeForAnalyzer, + SlicesContainsAnalyzer, + // SlicesDeleteAnalyzer, // not nil-preserving! + SlicesSortAnalyzer, + stditeratorsAnalyzer, + StringsCutPrefixAnalyzer, + StringsSeqAnalyzer, + StringsBuilderAnalyzer, + TestingContextAnalyzer, + WaitGroupAnalyzer, +} + +// -- helpers -- + +// skipGenerated decorates pass.Report to suppress diagnostics in generated files. +func skipGenerated(pass *analysis.Pass) { + report := pass.Report + pass.Report = func(diag analysis.Diagnostic) { + generated := pass.ResultOf[generated.Analyzer].(*generated.Result) + if generated.IsGenerated(diag.Pos) { + return // skip + } + report(diag) + } +} + +// formatExprs formats a comma-separated list of expressions. 
+func formatExprs(fset *token.FileSet, exprs []ast.Expr) string { + var buf strings.Builder + for i, e := range exprs { + if i > 0 { + buf.WriteString(", ") + } + format.Node(&buf, fset, e) // ignore errors + } + return buf.String() +} + +// isZeroIntLiteral reports whether e is an integer whose value is 0. +func isZeroIntLiteral(info *types.Info, e ast.Expr) bool { + return isIntLiteral(info, e, 0) +} + +// isIntLiteral reports whether e is an integer with given value. +func isIntLiteral(info *types.Info, e ast.Expr, n int64) bool { + return info.Types[e].Value == constant.MakeInt64(n) +} + +// filesUsing returns a cursor for each *ast.File in the inspector +// that uses at least the specified version of Go (e.g. "go1.24"). +// +// TODO(adonovan): opt: eliminate this function, instead following the +// approach of [fmtappendf], which uses typeindex and [fileUses]. +// See "Tip" at [fileUses] for motivation. +func filesUsing(inspect *inspector.Inspector, info *types.Info, version string) iter.Seq[inspector.Cursor] { + return func(yield func(inspector.Cursor) bool) { + for curFile := range inspect.Root().Children() { + file := curFile.Node().(*ast.File) + if !versions.Before(info.FileVersions[file], version) && !yield(curFile) { + break + } + } + } +} + +// fileUses reports whether the specified file uses at least the +// specified version of Go (e.g. "go1.24"). +// +// Tip: we recommend using this check "late", just before calling +// pass.Report, rather than "early" (when entering each ast.File, or +// each candidate node of interest, during the traversal), because the +// operation is not free, yet is not a highly selective filter: the +// fraction of files that pass most version checks is high and +// increases over time. +func fileUses(info *types.Info, file *ast.File, version string) bool { + return !versions.Before(info.FileVersions[file], version) +} + +// within reports whether the current pass is analyzing one of the +// specified standard packages or their dependencies. +func within(pass *analysis.Pass, pkgs ...string) bool { + path := pass.Pkg.Path() + return analysisinternal.IsStdPackage(path) && + moreiters.Contains(stdlib.Dependencies(pkgs...), path) +} + +// unparenEnclosing removes enclosing parens from cur in +// preparation for a call to [Cursor.ParentEdge]. +func unparenEnclosing(cur inspector.Cursor) inspector.Cursor { + for astutil.IsChildOf(cur, edge.ParenExpr_X) { + cur = cur.Parent() + } + return cur +} + +var ( + builtinAny = types.Universe.Lookup("any") + builtinAppend = types.Universe.Lookup("append") + builtinBool = types.Universe.Lookup("bool") + builtinInt = types.Universe.Lookup("int") + builtinFalse = types.Universe.Lookup("false") + builtinLen = types.Universe.Lookup("len") + builtinMake = types.Universe.Lookup("make") + builtinNew = types.Universe.Lookup("new") + builtinNil = types.Universe.Lookup("nil") + builtinString = types.Universe.Lookup("string") + builtinTrue = types.Universe.Lookup("true") + byteSliceType = types.NewSlice(types.Typ[types.Byte]) + omitemptyRegex = regexp.MustCompile(`(?:^json| json):"[^"]*(,omitempty)(?:"|,[^"]*")\s?`) +) + +// lookup returns the symbol denoted by name at the position of the cursor. 
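+// For example, the newexpr pass uses lookup(info, cur, "new") and compares
+// the result against the universe's built-in new to detect whether the name
+// "new" has been shadowed at that point.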
+func lookup(info *types.Info, cur inspector.Cursor, name string) types.Object { + scope := typesinternal.EnclosingScope(info, cur) + _, obj := scope.LookupParent(name, cur.Node().Pos()) + return obj +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/newexpr.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/newexpr.go new file mode 100644 index 00000000..b8893244 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/newexpr.go @@ -0,0 +1,208 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + _ "embed" + "go/ast" + "go/token" + "go/types" + "strings" + + "fmt" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil" +) + +var NewExprAnalyzer = &analysis.Analyzer{ + Name: "newexpr", + Doc: analysisinternal.MustExtractDoc(doc, "newexpr"), + URL: "https://pkg.go.dev/golang.org/x/tools/gopls/internal/analysis/modernize#newexpr", + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, + FactTypes: []analysis.Fact{&newLike{}}, +} + +func run(pass *analysis.Pass) (any, error) { + var ( + inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + info = pass.TypesInfo + ) + + // Detect functions that are new-like, i.e. have the form: + // + // func f(x T) *T { return &x } + // + // meaning that it is equivalent to new(x), if x has type T. + for curFuncDecl := range inspect.Root().Preorder((*ast.FuncDecl)(nil)) { + decl := curFuncDecl.Node().(*ast.FuncDecl) + fn := info.Defs[decl.Name].(*types.Func) + if decl.Body != nil && len(decl.Body.List) == 1 { + if ret, ok := decl.Body.List[0].(*ast.ReturnStmt); ok && len(ret.Results) == 1 { + if unary, ok := ret.Results[0].(*ast.UnaryExpr); ok && unary.Op == token.AND { + if id, ok := unary.X.(*ast.Ident); ok { + if v, ok := info.Uses[id].(*types.Var); ok { + sig := fn.Signature() + if sig.Results().Len() == 1 && + is[*types.Pointer](sig.Results().At(0).Type()) && // => no iface conversion + sig.Params().Len() == 1 && + sig.Params().At(0) == v { + + // Export a fact for each one. + pass.ExportObjectFact(fn, &newLike{}) + + // Check file version. + file := astutil.EnclosingFile(curFuncDecl) + if !fileUses(info, file, "go1.26") { + continue // new(expr) not available in this file + } + + var edits []analysis.TextEdit + + // If 'new' is not shadowed, replace func body: &x -> new(x). + // This makes it safely and cleanly inlinable. + curRet, _ := curFuncDecl.FindNode(ret) + if lookup(info, curRet, "new") == builtinNew { + edits = []analysis.TextEdit{ + // return &x + // ---- - + // return new(x) + { + Pos: unary.OpPos, + End: unary.OpPos + token.Pos(len("&")), + NewText: []byte("new("), + }, + { + Pos: unary.X.End(), + End: unary.X.End(), + NewText: []byte(")"), + }, + } + } + + // Disabled until we resolve https://go.dev/issue/75726 + // (Go version skew between caller and callee in inliner.) + // TODO(adonovan): fix and reenable. + // + // Also, restore these lines to our section of doc.go: + // //go:fix inline + // ... + // (The directive comment causes the inline analyzer to suggest + // that calls to such functions are inlined.) + if false { + // Add a //go:fix inline annotation, if not already present. 
+ // TODO(adonovan): use ast.ParseDirective when go1.26 is assured. + if !strings.Contains(decl.Doc.Text(), "go:fix inline") { + edits = append(edits, analysis.TextEdit{ + Pos: decl.Pos(), + End: decl.Pos(), + NewText: []byte("//go:fix inline\n"), + }) + } + } + + if len(edits) > 0 { + pass.Report(analysis.Diagnostic{ + Pos: decl.Name.Pos(), + End: decl.Name.End(), + Message: fmt.Sprintf("%s can be an inlinable wrapper around new(expr)", decl.Name), + SuggestedFixes: []analysis.SuggestedFix{ + { + Message: "Make %s an inlinable wrapper around new(expr)", + TextEdits: edits, + }, + }, + }) + } + } + } + } + } + } + } + } + + // Report and transform calls, when safe. + // In effect, this is inlining the new-like function + // even before we have marked the callee with //go:fix inline. + for curCall := range inspect.Root().Preorder((*ast.CallExpr)(nil)) { + call := curCall.Node().(*ast.CallExpr) + var fact newLike + if fn, ok := typeutil.Callee(info, call).(*types.Func); ok && + pass.ImportObjectFact(fn, &fact) { + + // Check file version. + file := astutil.EnclosingFile(curCall) + if !fileUses(info, file, "go1.26") { + continue // new(expr) not available in this file + } + + // Check new is not shadowed. + if lookup(info, curCall, "new") != builtinNew { + continue + } + + // The return type *T must exactly match the argument type T. + // (We formulate it this way--not in terms of the parameter + // type--to support generics.) + var targ types.Type + { + arg := call.Args[0] + tvarg := info.Types[arg] + + // Constants: we must work around the type checker + // bug that causes info.Types to wrongly report the + // "typed" type for an untyped constant. + // (See "historical reasons" in issue go.dev/issue/70638.) + // + // We don't have a reliable way to do this but we can attempt + // to re-typecheck the constant expression on its own, in + // the original lexical environment but not as a part of some + // larger expression that implies a conversion to some "typed" type. + // (For the genesis of this idea see (*state).arguments + // in ../../../../internal/refactor/inline/inline.go.) + if tvarg.Value != nil { + info2 := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)} + if err := types.CheckExpr(token.NewFileSet(), pass.Pkg, token.NoPos, arg, info2); err != nil { + continue // unexpected error + } + tvarg = info2.Types[arg] + } + + targ = types.Default(tvarg.Type) + } + if !types.Identical(types.NewPointer(targ), info.TypeOf(call)) { + continue + } + + pass.Report(analysis.Diagnostic{ + Pos: call.Pos(), + End: call.End(), + Message: fmt.Sprintf("call of %s(x) can be simplified to new(x)", fn.Name()), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Simplify %s(x) to new(x)", fn.Name()), + TextEdits: []analysis.TextEdit{{ + Pos: call.Fun.Pos(), + End: call.Fun.End(), + NewText: []byte("new"), + }}, + }}, + }) + } + } + + return nil, nil +} + +// A newLike fact records that its associated function is "new-like". +type newLike struct{} + +func (*newLike) AFact() {} +func (*newLike) String() string { return "newlike" } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/omitzero.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/omitzero.go new file mode 100644 index 00000000..bd309cf9 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/omitzero.go @@ -0,0 +1,119 @@ +// Copyright 2025 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "go/ast" + "go/types" + "reflect" + "strconv" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + "golang.org/x/tools/internal/astutil" +) + +var OmitZeroAnalyzer = &analysis.Analyzer{ + Name: "omitzero", + Doc: analysisinternal.MustExtractDoc(doc, "omitzero"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + }, + Run: omitzero, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#omitzero", +} + +func checkOmitEmptyField(pass *analysis.Pass, info *types.Info, curField *ast.Field) { + typ := info.TypeOf(curField.Type) + _, ok := typ.Underlying().(*types.Struct) + if !ok { + // Not a struct + return + } + tag := curField.Tag + if tag == nil { + // No tag to check + return + } + // The omitempty tag may be used by other packages besides json, but we should only modify its use with json + tagconv, _ := strconv.Unquote(tag.Value) + match := omitemptyRegex.FindStringSubmatchIndex(tagconv) + if match == nil { + // No omitempty in json tag + return + } + omitEmptyPos, omitEmptyEnd, err := astutil.RangeInStringLiteral(curField.Tag, match[2], match[3]) + if err != nil { + return + } + removePos, removeEnd := omitEmptyPos, omitEmptyEnd + + jsonTag := reflect.StructTag(tagconv).Get("json") + if jsonTag == ",omitempty" { + // Remove the entire struct tag if json is the only package used + if match[1]-match[0] == len(tagconv) { + removePos = curField.Tag.Pos() + removeEnd = curField.Tag.End() + } else { + // Remove the json tag if omitempty is the only field + removePos, err = astutil.PosInStringLiteral(curField.Tag, match[0]) + if err != nil { + return + } + removeEnd, err = astutil.PosInStringLiteral(curField.Tag, match[1]) + if err != nil { + return + } + } + } + pass.Report(analysis.Diagnostic{ + Pos: curField.Tag.Pos(), + End: curField.Tag.End(), + Message: "Omitempty has no effect on nested struct fields", + SuggestedFixes: []analysis.SuggestedFix{ + { + Message: "Remove redundant omitempty tag", + TextEdits: []analysis.TextEdit{ + { + Pos: removePos, + End: removeEnd, + }, + }, + }, + { + Message: "Replace omitempty with omitzero (behavior change)", + TextEdits: []analysis.TextEdit{ + { + Pos: omitEmptyPos, + End: omitEmptyEnd, + NewText: []byte(",omitzero"), + }, + }, + }, + }}) +} + +// The omitzero pass searches for instances of "omitempty" in a json field tag on a +// struct. Since "omitempty" does not have any effect when applied to a struct field, +// it suggests either deleting "omitempty" or replacing it with "omitzero", which +// correctly excludes structs from a json encoding. 
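+//
+// For example (illustrative declaration):
+//
+//	type Outer struct {
+//		Inner Inner `json:"inner,omitempty"`
+//	}
+//
+// Here ",omitempty" never omits Inner; the analyzer offers either to delete
+// it or to substitute ",omitzero" (go1.24+), which does omit zero-valued
+// structs.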
+func omitzero(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + info := pass.TypesInfo + for curFile := range filesUsing(inspect, info, "go1.24") { + for curStruct := range curFile.Preorder((*ast.StructType)(nil)) { + for _, curField := range curStruct.Node().(*ast.StructType).Fields.List { + checkOmitEmptyField(pass, info, curField) + } + } + } + return nil, nil +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/rangeint.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/rangeint.go new file mode 100644 index 00000000..adc840f1 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/rangeint.go @@ -0,0 +1,310 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/edge" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +var RangeIntAnalyzer = &analysis.Analyzer{ + Name: "rangeint", + Doc: analysisinternal.MustExtractDoc(doc, "rangeint"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + typeindexanalyzer.Analyzer, + }, + Run: rangeint, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#rangeint", +} + +// rangeint offers a fix to replace a 3-clause 'for' loop: +// +// for i := 0; i < limit; i++ {} +// +// by a range loop with an integer operand: +// +// for i := range limit {} +// +// Variants: +// - The ':=' may be replaced by '='. +// - The fix may remove "i :=" if it would become unused. +// +// Restrictions: +// - The variable i must not be assigned or address-taken within the +// loop, because a "for range int" loop does not respect assignments +// to the loop index. +// - The limit must not be b.N, to avoid redundancy with bloop's fixes. +// +// Caveats: +// +// The fix causes the limit expression to be evaluated exactly once, +// instead of once per iteration. So, to avoid changing the +// cardinality of side effects, the limit expression must not involve +// function calls (e.g. seq.Len()) or channel receives. Moreover, the +// value of the limit expression must be loop invariant, which in +// practice means it must take one of the following forms: +// +// - a local variable that is assigned only once and not address-taken; +// - a constant; or +// - len(s), where s has the above properties. 
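+//
+// For example (illustrative), a loop such as
+//
+//	for i := 0; i < s.Len(); i++ { ... }
+//
+// is left alone: rewriting it to "for i := range s.Len()" would call Len
+// only once, changing the cardinality of its side effects, and its result
+// may not be loop-invariant.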
+func rangeint(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + info := pass.TypesInfo + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + typeindex := pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + + for curFile := range filesUsing(inspect, info, "go1.22") { + nextLoop: + for curLoop := range curFile.Preorder((*ast.ForStmt)(nil)) { + loop := curLoop.Node().(*ast.ForStmt) + if init, ok := loop.Init.(*ast.AssignStmt); ok && + isSimpleAssign(init) && + is[*ast.Ident](init.Lhs[0]) && + isZeroIntLiteral(info, init.Rhs[0]) { + // Have: for i = 0; ... (or i := 0) + index := init.Lhs[0].(*ast.Ident) + + if compare, ok := loop.Cond.(*ast.BinaryExpr); ok && + compare.Op == token.LSS && + astutil.EqualSyntax(compare.X, init.Lhs[0]) { + // Have: for i = 0; i < limit; ... {} + + limit := compare.Y + + // If limit is "len(slice)", simplify it to "slice". + // + // (Don't replace "for i := 0; i < len(map); i++" + // with "for range m" because it's too hard to prove + // that len(m) is loop-invariant). + if call, ok := limit.(*ast.CallExpr); ok && + typeutil.Callee(info, call) == builtinLen && + is[*types.Slice](info.TypeOf(call.Args[0]).Underlying()) { + limit = call.Args[0] + } + + // Check the form of limit: must be a constant, + // or a local var that is not assigned or address-taken. + limitOK := false + if info.Types[limit].Value != nil { + limitOK = true // constant + } else if id, ok := limit.(*ast.Ident); ok { + if v, ok := info.Uses[id].(*types.Var); ok && + !(v.Exported() && typesinternal.IsPackageLevel(v)) { + // limit is a local or unexported global var. + // (An exported global may have uses we can't see.) + for cur := range typeindex.Uses(v) { + if isScalarLvalue(info, cur) { + // Limit var is assigned or address-taken. + continue nextLoop + } + } + limitOK = true + } + } + if !limitOK { + continue nextLoop + } + + if inc, ok := loop.Post.(*ast.IncDecStmt); ok && + inc.Tok == token.INC && + astutil.EqualSyntax(compare.X, inc.X) { + // Have: for i = 0; i < limit; i++ {} + + // Find references to i within the loop body. + v := info.ObjectOf(index).(*types.Var) + // TODO(adonovan): use go1.25 v.Kind() == types.PackageVar + if typesinternal.IsPackageLevel(v) { + continue nextLoop + } + used := false + for curId := range curLoop.Child(loop.Body).Preorder((*ast.Ident)(nil)) { + id := curId.Node().(*ast.Ident) + if info.Uses[id] == v { + used = true + + // Reject if any is an l-value (assigned or address-taken): + // a "for range int" loop does not respect assignments to + // the loop variable. + if isScalarLvalue(info, curId) { + continue nextLoop + } + } + } + + // If i is no longer used, delete "i := ". + var edits []analysis.TextEdit + if !used && init.Tok == token.DEFINE { + edits = append(edits, analysis.TextEdit{ + Pos: index.Pos(), + End: init.Rhs[0].Pos(), + }) + } + + // If i is used after the loop, + // don't offer a fix, as a range loop + // leaves i with a different final value (limit-1). + if init.Tok == token.ASSIGN { + for curId := range curLoop.Parent().Preorder((*ast.Ident)(nil)) { + id := curId.Node().(*ast.Ident) + if info.Uses[id] == v { + // Is i used after loop? + if id.Pos() > loop.End() { + continue nextLoop + } + // Is i used within a defer statement + // that is within the scope of i? + // var i int + // defer func() { print(i)} + // for i = ... { ... 
} + for curDefer := range curId.Enclosing((*ast.DeferStmt)(nil)) { + if curDefer.Node().Pos() > v.Pos() { + continue nextLoop + } + } + } + } + } + + // If limit is len(slice), + // simplify "range len(slice)" to "range slice". + if call, ok := limit.(*ast.CallExpr); ok && + typeutil.Callee(info, call) == builtinLen && + is[*types.Slice](info.TypeOf(call.Args[0]).Underlying()) { + limit = call.Args[0] + } + + // If the limit is a untyped constant of non-integer type, + // such as "const limit = 1e3", its effective type may + // differ between the two forms. + // In a for loop, it must be comparable with int i, + // for i := 0; i < limit; i++ + // but in a range loop it would become a float, + // for i := range limit {} + // which is a type error. We need to convert it to int + // in this case. + // + // Unfortunately go/types discards the untyped type + // (but see Untyped in golang/go#70638) so we must + // re-type check the expression to detect this case. + var beforeLimit, afterLimit string + if v := info.Types[limit].Value; v != nil { + tVar := info.TypeOf(init.Rhs[0]) + file := curFile.Node().(*ast.File) + // TODO(mkalil): use a types.Qualifier that respects the existing + // imports of this file that are visible (not shadowed) at the current position. + qual := typesinternal.FileQualifier(file, pass.Pkg) + beforeLimit, afterLimit = fmt.Sprintf("%s(", types.TypeString(tVar, qual)), ")" + info2 := &types.Info{Types: make(map[ast.Expr]types.TypeAndValue)} + if types.CheckExpr(pass.Fset, pass.Pkg, limit.Pos(), limit, info2) == nil { + tLimit := types.Default(info2.TypeOf(limit)) + if types.AssignableTo(tLimit, tVar) { + beforeLimit, afterLimit = "", "" + } + } + } + + pass.Report(analysis.Diagnostic{ + Pos: init.Pos(), + End: inc.End(), + Message: "for loop can be modernized using range over int", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Replace for loop with range %s", + astutil.Format(pass.Fset, limit)), + TextEdits: append(edits, []analysis.TextEdit{ + // for i := 0; i < limit; i++ {} + // ----- --- + // ------- + // for i := range limit {} + + // Delete init. + { + Pos: init.Rhs[0].Pos(), + End: limit.Pos(), + NewText: []byte("range "), + }, + // Add "int(" before limit, if needed. + { + Pos: limit.Pos(), + End: limit.Pos(), + NewText: []byte(beforeLimit), + }, + // Delete inc. + { + Pos: limit.End(), + End: inc.End(), + }, + // Add ")" after limit, if needed. + { + Pos: limit.End(), + End: limit.End(), + NewText: []byte(afterLimit), + }, + }...), + }}, + }) + } + } + } + } + } + return nil, nil +} + +// isScalarLvalue reports whether the specified identifier is +// address-taken or appears on the left side of an assignment. +// +// This function is valid only for scalars (x = ...), +// not for aggregates (x.a[i] = ...) +func isScalarLvalue(info *types.Info, curId inspector.Cursor) bool { + // Unfortunately we can't simply use info.Types[e].Assignable() + // as it is always true for a variable even when that variable is + // used only as an r-value. So we must inspect enclosing syntax. + + cur := curId + + // Strip enclosing parens. 
+ ek, _ := cur.ParentEdge() + for ek == edge.ParenExpr_X { + cur = cur.Parent() + ek, _ = cur.ParentEdge() + } + + switch ek { + case edge.AssignStmt_Lhs: + assign := cur.Parent().Node().(*ast.AssignStmt) + if assign.Tok != token.DEFINE { + return true // i = j or i += j + } + id := curId.Node().(*ast.Ident) + if v, ok := info.Defs[id]; ok && v.Pos() != id.Pos() { + return true // reassignment of i (i, j := 1, 2) + } + case edge.IncDecStmt_X: + return true // i++, i-- + case edge.UnaryExpr_X: + if cur.Parent().Node().(*ast.UnaryExpr).Op == token.AND { + return true // &i + } + } + return false +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/reflect.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/reflect.go new file mode 100644 index 00000000..1a4e3eb2 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/reflect.go @@ -0,0 +1,134 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +// This file defines modernizers that use the "reflect" package. + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/edge" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/typesinternal/typeindex" + "golang.org/x/tools/internal/versions" +) + +var ReflectTypeForAnalyzer = &analysis.Analyzer{ + Name: "reflecttypefor", + Doc: analysisinternal.MustExtractDoc(doc, "reflecttypefor"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + typeindexanalyzer.Analyzer, + }, + Run: reflecttypefor, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#reflecttypefor", +} + +func reflecttypefor(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + var ( + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + + reflectTypeOf = index.Object("reflect", "TypeOf") + ) + + for curCall := range index.Calls(reflectTypeOf) { + call := curCall.Node().(*ast.CallExpr) + // Have: reflect.TypeOf(expr) + + expr := call.Args[0] + if !typesinternal.NoEffects(info, expr) { + continue // don't eliminate operand: may have effects + } + + t := info.TypeOf(expr) + var edits []analysis.TextEdit + + // Special case for TypeOf((*T)(nil)).Elem(), + // needed when T is an interface type. + if astutil.IsChildOf(curCall, edge.SelectorExpr_X) { + curSel := unparenEnclosing(curCall).Parent() + if astutil.IsChildOf(curSel, edge.CallExpr_Fun) { + call2 := unparenEnclosing(curSel).Parent().Node().(*ast.CallExpr) + obj := typeutil.Callee(info, call2) + if typesinternal.IsMethodNamed(obj, "reflect", "Type", "Elem") { + if ptr, ok := t.(*types.Pointer); ok { + // Have: reflect.TypeOf(...*T value...).Elem() + // => reflect.TypeFor[T]() + t = ptr.Elem() + edits = []analysis.TextEdit{ + { + // delete .Elem() + Pos: call.End(), + End: call2.End(), + }, + } + } + } + } + } + + // TypeOf(x) where x has an interface type is a + // dynamic operation; don't transform it to TypeFor. + // (edits == nil means "not the Elem() special case".) 
+		if types.IsInterface(t) && edits == nil {
+			continue
+		}
+
+		file := astutil.EnclosingFile(curCall)
+		if versions.Before(info.FileVersions[file], "go1.22") {
+			continue // TypeFor requires go1.22
+		}
+
+		// Format the type as valid Go syntax.
+		// TODO(adonovan): FileQualifier needs to respect
+		// visibility at the current point, and either fail
+		// or edit the imports as needed.
+		qual := typesinternal.FileQualifier(file, pass.Pkg)
+		tstr := types.TypeString(t, qual)
+
+		sel, ok := call.Fun.(*ast.SelectorExpr)
+		if !ok {
+			continue // e.g. reflect was dot-imported
+		}
+
+		pass.Report(analysis.Diagnostic{
+			Pos:     call.Fun.Pos(),
+			End:     call.Fun.End(),
+			Message: "reflect.TypeOf call can be simplified using TypeFor",
+			SuggestedFixes: []analysis.SuggestedFix{{
+				// reflect.TypeOf  (...T value...)
+				//         ------   -------------
+				// reflect.TypeFor[T](           )
+				Message: "Replace TypeOf by TypeFor",
+				TextEdits: append([]analysis.TextEdit{
+					{
+						Pos:     sel.Sel.Pos(),
+						End:     sel.Sel.End(),
+						NewText: []byte("TypeFor[" + tstr + "]"),
+					},
+					// delete (pure) argument
+					{
+						Pos: call.Lparen + 1,
+						End: call.Rparen,
+					},
+				}, edits...),
+			}},
+		})
+	}
+
+	return nil, nil
+}
diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/slices.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/slices.go
new file mode 100644
index 00000000..032f874d
--- /dev/null
+++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/slices.go
@@ -0,0 +1,300 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package modernize
+
+import (
+	"fmt"
+	"go/ast"
+	"go/types"
+	"slices"
+	"strconv"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/inspect"
+	"golang.org/x/tools/go/ast/inspector"
+	"golang.org/x/tools/go/types/typeutil"
+	"golang.org/x/tools/internal/analysisinternal"
+	"golang.org/x/tools/internal/analysisinternal/generated"
+	"golang.org/x/tools/internal/astutil"
+	"golang.org/x/tools/internal/refactor"
+	"golang.org/x/tools/internal/typesinternal"
+)
+
+// Warning: this analyzer is not safe to enable by default.
+var AppendClippedAnalyzer = &analysis.Analyzer{
+	Name: "appendclipped",
+	Doc:  analysisinternal.MustExtractDoc(doc, "appendclipped"),
+	Requires: []*analysis.Analyzer{
+		generated.Analyzer,
+		inspect.Analyzer,
+	},
+	Run: appendclipped,
+	URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#appendclipped",
+}
+
+// The appendclipped pass offers to simplify a tower of append calls:
+//
+//	append(append(append(base, a...), b...), c...)
+//
+// with a call to go1.21's slices.Concat(base, a, b, c), or simpler
+// replacements such as slices.Clone(a) in degenerate cases.
+//
+// We offer bytes.Clone in preference to slices.Clone where
+// appropriate, if the package already imports "bytes";
+// their behaviors are identical.
+//
+// The base expression must denote a clipped slice (see [clippedSlice]
+// for the definition), otherwise the replacement might eliminate intended
+// side effects to the base slice's array.
+//
+// Examples:
+//
+//	append(append(append(x[:0:0], a...), b...), c...) -> slices.Concat(a, b, c)
+//	append(slices.Clip(a), b...)                      -> slices.Concat(a, b)
+//	append([]T{}, a...)                               -> slices.Clone(a)
+//	append([]string(nil), os.Environ()...)            -> os.Environ()
+//
+// The fix does not always preserve the nilness of the base slice when the
+// addends (a, b, c) are all empty (see #73557).
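+//
+// For instance, in a file that already imports "bytes" but not "slices",
+// a clone of a hypothetical []byte variable buf, written as
+//
+//	data := append([]byte(nil), buf...)
+//
+// would be offered the behaviorally identical
+//
+//	data := bytes.Clone(buf)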
+func appendclipped(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + // Skip the analyzer in packages where its + // fixes would create an import cycle. + if within(pass, "slices", "bytes", "runtime") { + return nil, nil + } + + info := pass.TypesInfo + + // sliceArgs is a non-empty (reversed) list of slices to be concatenated. + simplifyAppendEllipsis := func(file *ast.File, call *ast.CallExpr, base ast.Expr, sliceArgs []ast.Expr) { + // Only appends whose base is a clipped slice can be simplified: + // We must conservatively assume an append to an unclipped slice + // such as append(y[:0], x...) is intended to have effects on y. + clipped, empty := clippedSlice(info, base) + if clipped == nil { + return + } + + // If any slice arg has a different type from the base + // (and thus the result) don't offer a fix, to avoid + // changing the return type, e.g: + // + // type S []int + // - x := append([]int(nil), S{}...) // x : []int + // + x := slices.Clone(S{}) // x : S + // + // We could do better by inserting an explicit generic + // instantiation: + // + // x := slices.Clone[[]int](S{}) + // + // but this is often unnecessary and unwanted, such as + // when the value is used an in assignment context that + // provides an explicit type: + // + // var x []int = slices.Clone(S{}) + baseType := info.TypeOf(base) + for _, arg := range sliceArgs { + if !types.Identical(info.TypeOf(arg), baseType) { + return + } + } + + // If the (clipped) base is empty, it may be safely ignored. + // Otherwise treat it (or its unclipped subexpression, if possible) + // as just another arg (the first) to Concat. + // + // TODO(adonovan): not so fast! If all the operands + // are empty, then the nilness of base matters, because + // append preserves nilness whereas Concat does not (#73557). + if !empty { + sliceArgs = append(sliceArgs, clipped) + } + slices.Reverse(sliceArgs) + + // TODO(adonovan): simplify sliceArgs[0] further: slices.Clone(s) -> s + + // Concat of a single (non-trivial) slice degenerates to Clone. + if len(sliceArgs) == 1 { + s := sliceArgs[0] + + // Special case for common but redundant clone of os.Environ(). + // append(zerocap, os.Environ()...) -> os.Environ() + if scall, ok := s.(*ast.CallExpr); ok { + obj := typeutil.Callee(info, scall) + if typesinternal.IsFunctionNamed(obj, "os", "Environ") { + pass.Report(analysis.Diagnostic{ + Pos: call.Pos(), + End: call.End(), + Message: "Redundant clone of os.Environ()", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Eliminate redundant clone", + TextEdits: []analysis.TextEdit{{ + Pos: call.Pos(), + End: call.End(), + NewText: []byte(astutil.Format(pass.Fset, s)), + }}, + }}, + }) + return + } + } + + // If the slice type is []byte, and the file imports + // "bytes" but not "slices", prefer the (behaviorally + // identical) bytes.Clone for local consistency. + // https://go.dev/issue/70815#issuecomment-2671572984 + fileImports := func(path string) bool { + return slices.ContainsFunc(file.Imports, func(spec *ast.ImportSpec) bool { + value, _ := strconv.Unquote(spec.Path.Value) + return value == path + }) + } + clonepkg := cond( + types.Identical(info.TypeOf(call), byteSliceType) && + !fileImports("slices") && fileImports("bytes"), + "bytes", + "slices") + + // append(zerocap, s...) -> slices.Clone(s) or bytes.Clone(s) + // + // This is unsound if s is empty and its nilness + // differs from zerocap (#73557). 
+ prefix, importEdits := refactor.AddImport(info, file, clonepkg, clonepkg, "Clone", call.Pos()) + message := fmt.Sprintf("Replace append with %s.Clone", clonepkg) + pass.Report(analysis.Diagnostic{ + Pos: call.Pos(), + End: call.End(), + Message: message, + SuggestedFixes: []analysis.SuggestedFix{{ + Message: message, + TextEdits: append(importEdits, []analysis.TextEdit{{ + Pos: call.Pos(), + End: call.End(), + NewText: fmt.Appendf(nil, "%sClone(%s)", prefix, astutil.Format(pass.Fset, s)), + }}...), + }}, + }) + return + } + + // append(append(append(base, a...), b..., c...) -> slices.Concat(base, a, b, c) + // + // This is unsound if all slices are empty and base is non-nil (#73557). + prefix, importEdits := refactor.AddImport(info, file, "slices", "slices", "Concat", call.Pos()) + pass.Report(analysis.Diagnostic{ + Pos: call.Pos(), + End: call.End(), + Message: "Replace append with slices.Concat", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Replace append with slices.Concat", + TextEdits: append(importEdits, []analysis.TextEdit{{ + Pos: call.Pos(), + End: call.End(), + NewText: fmt.Appendf(nil, "%sConcat(%s)", prefix, formatExprs(pass.Fset, sliceArgs)), + }}...), + }}, + }) + } + + // Mark nested calls to append so that we don't emit diagnostics for them. + skip := make(map[*ast.CallExpr]bool) + + // Visit calls of form append(x, y...). + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + for curFile := range filesUsing(inspect, info, "go1.21") { + file := curFile.Node().(*ast.File) + + for curCall := range curFile.Preorder((*ast.CallExpr)(nil)) { + call := curCall.Node().(*ast.CallExpr) + if skip[call] { + continue + } + + // Recursively unwrap ellipsis calls to append, so + // append(append(append(base, a...), b..., c...) + // yields (base, [c b a]). + base, slices := ast.Expr(call), []ast.Expr(nil) // base case: (call, nil) + again: + if call, ok := base.(*ast.CallExpr); ok { + if id, ok := call.Fun.(*ast.Ident); ok && + call.Ellipsis.IsValid() && + len(call.Args) == 2 && + info.Uses[id] == builtinAppend { + + // Have: append(base, s...) + base, slices = call.Args[0], append(slices, call.Args[1]) + skip[call] = true + goto again + } + } + + if len(slices) > 0 { + simplifyAppendEllipsis(file, call, base, slices) + } + } + } + return nil, nil +} + +// clippedSlice returns res != nil if e denotes a slice that is +// definitely clipped, that is, its len(s)==cap(s). +// +// The value of res is either the same as e or is a subexpression of e +// that denotes the same slice but without the clipping operation. +// +// In addition, it reports whether the slice is definitely empty. +// +// Examples of clipped slices: +// +// x[:0:0] (empty) +// []T(nil) (empty) +// Slice{} (empty) +// x[:len(x):len(x)] (nonempty) res=x +// x[:k:k] (nonempty) +// slices.Clip(x) (nonempty) res=x +// +// TODO(adonovan): Add a check that the expression x has no side effects in +// case x[:len(x):len(x)] -> x. Now the program behavior may change. 
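+//
+// By contrast, expressions such as s, s[:0], or s[:i] are not clipped:
+// their capacity may exceed their length, so appending to them can
+// overwrite elements of the original backing array, as in this sketch
+// with a hypothetical slice s:
+//
+//	s := []int{1, 2, 3}
+//	_ = append(s[:1], 9) // may write 9 into s[1]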
+func clippedSlice(info *types.Info, e ast.Expr) (res ast.Expr, empty bool) { + switch e := e.(type) { + case *ast.SliceExpr: + // x[:0:0], x[:len(x):len(x)], x[:k:k] + if e.Slice3 && e.High != nil && e.Max != nil && astutil.EqualSyntax(e.High, e.Max) { // x[:k:k] + res = e + empty = isZeroIntLiteral(info, e.High) // x[:0:0] + if call, ok := e.High.(*ast.CallExpr); ok && + typeutil.Callee(info, call) == builtinLen && + astutil.EqualSyntax(call.Args[0], e.X) { + res = e.X // x[:len(x):len(x)] -> x + } + return + } + return + + case *ast.CallExpr: + // []T(nil)? + if info.Types[e.Fun].IsType() && + is[*ast.Ident](e.Args[0]) && + info.Uses[e.Args[0].(*ast.Ident)] == builtinNil { + return e, true + } + + // slices.Clip(x)? + obj := typeutil.Callee(info, e) + if typesinternal.IsFunctionNamed(obj, "slices", "Clip") { + return e.Args[0], false // slices.Clip(x) -> x + } + + case *ast.CompositeLit: + // Slice{}? + if len(e.Elts) == 0 { + return e, true + } + } + return nil, false +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/slicescontains.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/slicescontains.go new file mode 100644 index 00000000..b3c2e562 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/slicescontains.go @@ -0,0 +1,433 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/refactor" + "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +var SlicesContainsAnalyzer = &analysis.Analyzer{ + Name: "slicescontains", + Doc: analysisinternal.MustExtractDoc(doc, "slicescontains"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + typeindexanalyzer.Analyzer, + }, + Run: slicescontains, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicescontains", +} + +// The slicescontains pass identifies loops that can be replaced by a +// call to slices.Contains{,Func}. For example: +// +// for i, elem := range s { +// if elem == needle { +// ... +// break +// } +// } +// +// => +// +// if slices.Contains(s, needle) { ... } +// +// Variants: +// - if the if-condition is f(elem), the replacement +// uses slices.ContainsFunc(s, f). +// - if the if-body is "return true" and the fallthrough +// statement is "return false" (or vice versa), the +// loop becomes "return [!]slices.Contains(...)". +// - if the if-body is "found = true" and the previous +// statement is "found = false" (or vice versa), the +// loop becomes "found = [!]slices.Contains(...)". +// +// It may change cardinality of effects of the "needle" expression. +// (Mostly this appears to be a desirable optimization, avoiding +// redundantly repeated evaluation.) +// +// TODO(adonovan): Add a check that needle/predicate expression from +// if-statement has no effects. Now the program behavior may change. 
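+//
+// For example, a loop of the form below, over a hypothetical slice s and
+// value needle,
+//
+//	for _, elem := range s {
+//		if elem == needle {
+//			return true
+//		}
+//	}
+//	return false
+//
+// would be offered the replacement
+//
+//	return slices.Contains(s, needle)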
+func slicescontains(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + // Skip the analyzer in packages where its + // fixes would create an import cycle. + if within(pass, "slices", "runtime") { + return nil, nil + } + + var ( + inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + ) + + // check is called for each RangeStmt of this form: + // for i, elem := range s { if cond { ... } } + check := func(file *ast.File, curRange inspector.Cursor) { + rng := curRange.Node().(*ast.RangeStmt) + ifStmt := rng.Body.List[0].(*ast.IfStmt) + + // isSliceElem reports whether e denotes the + // current slice element (elem or s[i]). + isSliceElem := func(e ast.Expr) bool { + if rng.Value != nil && astutil.EqualSyntax(e, rng.Value) { + return true // "elem" + } + if x, ok := e.(*ast.IndexExpr); ok && + astutil.EqualSyntax(x.X, rng.X) && + astutil.EqualSyntax(x.Index, rng.Key) { + return true // "s[i]" + } + return false + } + + // Examine the condition for one of these forms: + // + // - if elem or s[i] == needle { ... } => Contains + // - if predicate(s[i] or elem) { ... } => ContainsFunc + var ( + funcName string // "Contains" or "ContainsFunc" + arg2 ast.Expr // second argument to func (needle or predicate) + ) + switch cond := ifStmt.Cond.(type) { + case *ast.BinaryExpr: + if cond.Op == token.EQL { + var elem ast.Expr + if isSliceElem(cond.X) { + funcName = "Contains" + elem = cond.X + arg2 = cond.Y // "if elem == needle" + } else if isSliceElem(cond.Y) { + funcName = "Contains" + elem = cond.Y + arg2 = cond.X // "if needle == elem" + } + + // Reject if elem and needle have different types. + if elem != nil { + tElem := info.TypeOf(elem) + tNeedle := info.TypeOf(arg2) + if !types.Identical(tElem, tNeedle) { + // Avoid ill-typed slices.Contains([]error, any). + if !types.AssignableTo(tNeedle, tElem) { + return + } + // TODO(adonovan): relax this check to allow + // slices.Contains([]error, error(any)), + // inserting an explicit widening conversion + // around the needle. + return + } + } + } + + case *ast.CallExpr: + if len(cond.Args) == 1 && + isSliceElem(cond.Args[0]) && + typeutil.Callee(info, cond) != nil { // not a conversion + + // Attempt to get signature + sig, isSignature := info.TypeOf(cond.Fun).(*types.Signature) + if isSignature { + // skip variadic functions + if sig.Variadic() { + return + } + + // Slice element type must match function parameter type. + var ( + tElem = typeparams.CoreType(info.TypeOf(rng.X)).(*types.Slice).Elem() + tParam = sig.Params().At(0).Type() + ) + if !types.Identical(tElem, tParam) { + return + } + } + + funcName = "ContainsFunc" + arg2 = cond.Fun // "if predicate(elem)" + } + } + if funcName == "" { + return // not a candidate for Contains{,Func} + } + + // body is the "true" body. + body := ifStmt.Body + if len(body.List) == 0 { + // (We could perhaps delete the loop entirely.) + return + } + + // Reject if the body, needle or predicate references either range variable. + usesRangeVar := func(n ast.Node) bool { + cur, ok := curRange.FindNode(n) + if !ok { + panic(fmt.Sprintf("FindNode(%T) failed", n)) + } + return uses(index, cur, info.Defs[rng.Key.(*ast.Ident)]) || + rng.Value != nil && uses(index, cur, info.Defs[rng.Value.(*ast.Ident)]) + } + if usesRangeVar(body) { + // Body uses range var "i" or "elem". 
+			//
+			// (The check for "i" could be relaxed when we
+			// generalize this to support slices.Index;
+			// and the check for "elem" could be relaxed
+			// if "elem" can safely be replaced in the
+			// body by "needle".)
+			return
+		}
+		if usesRangeVar(arg2) {
+			return
+		}
+
+		// Prepare slices.Contains{,Func} call.
+		prefix, importEdits := refactor.AddImport(info, file, "slices", "slices", funcName, rng.Pos())
+		contains := fmt.Sprintf("%s%s(%s, %s)",
+			prefix,
+			funcName,
+			astutil.Format(pass.Fset, rng.X),
+			astutil.Format(pass.Fset, arg2))
+
+		report := func(edits []analysis.TextEdit) {
+			pass.Report(analysis.Diagnostic{
+				Pos:     rng.Pos(),
+				End:     rng.End(),
+				Message: fmt.Sprintf("Loop can be simplified using slices.%s", funcName),
+				SuggestedFixes: []analysis.SuggestedFix{{
+					Message:   "Replace loop by call to slices." + funcName,
+					TextEdits: append(edits, importEdits...),
+				}},
+			})
+		}
+
+		// Last statement of body must return/break out of the loop.
+		//
+		// TODO(adonovan): opt: consider avoiding FindNode with new API of form:
+		//	curRange.Get(edge.RangeStmt_Body, -1).
+		//		Get(edge.BodyStmt_List, 0).
+		//		Get(edge.IfStmt_Body)
+		curBody, _ := curRange.FindNode(body)
+		curLastStmt, _ := curBody.LastChild()
+
+		// Reject if any statement in the body except the
+		// last has a free continuation (continue or break)
+		// that might be affected by melting down the loop.
+		//
+		// TODO(adonovan): relax check by analyzing branch target.
+		for curBodyStmt := range curBody.Children() {
+			if curBodyStmt != curLastStmt {
+				for range curBodyStmt.Preorder((*ast.BranchStmt)(nil), (*ast.ReturnStmt)(nil)) {
+					return
+				}
+			}
+		}
+
+		switch lastStmt := curLastStmt.Node().(type) {
+		case *ast.ReturnStmt:
+			// Have: for ... range seq { if ... { stmts; return x } }
+
+			// Special case:
+			//	body={ return true }  next="return false"  (or negation)
+			// => return [!]slices.Contains(...)
+			if curNext, ok := curRange.NextSibling(); ok {
+				nextStmt := curNext.Node().(ast.Stmt)
+				tval := isReturnTrueOrFalse(info, lastStmt)
+				fval := isReturnTrueOrFalse(info, nextStmt)
+				if len(body.List) == 1 && tval*fval < 0 {
+					// for ... { if ... { return true/false } }
+					// => return [!]slices.Contains(...)
+					report([]analysis.TextEdit{
+						// Delete the range statement and following space.
+						{
+							Pos: rng.Pos(),
+							End: nextStmt.Pos(),
+						},
+						// Change return to [!]slices.Contains(...).
+						{
+							Pos: nextStmt.Pos(),
+							End: nextStmt.End(),
+							NewText: fmt.Appendf(nil, "return %s%s",
+								cond(tval > 0, "", "!"),
+								contains),
+						},
+					})
+					return
+				}
+			}
+
+			// General case:
+			// => if slices.Contains(...) { stmts; return x }
+			report([]analysis.TextEdit{
+				// Replace "for ... { if ... " with "if slices.Contains(...)".
+				{
+					Pos:     rng.Pos(),
+					End:     ifStmt.Body.Pos(),
+					NewText: fmt.Appendf(nil, "if %s ", contains),
+				},
+				// Delete '}' of range statement and preceding space.
+				{
+					Pos: ifStmt.Body.End(),
+					End: rng.End(),
+				},
+			})
+			return
+
+		case *ast.BranchStmt:
+			if lastStmt.Tok == token.BREAK && lastStmt.Label == nil { // unlabeled break
+				// Have: for ... { if ... { stmts; break } }
+
+				var prevStmt ast.Stmt // previous statement to range (if any)
+				if curPrev, ok := curRange.PrevSibling(); ok {
+					// If the RangeStmt's previous sibling is a Stmt,
+					// the RangeStmt must be among the Body list of
+					// a BlockStmt, CaseClause, or CommClause.
+					// In all cases, the prevStmt is the immediate
+					// predecessor of the RangeStmt during execution.
+					//
+					// (This is not true for Stmts in general;
+					// see [Cursor.Children] and #71074.)
+ prevStmt, _ = curPrev.Node().(ast.Stmt) + } + + // Special case: + // prev="lhs = false" body={ lhs = true; break } + // => lhs = slices.Contains(...) (or negation) + if assign, ok := body.List[0].(*ast.AssignStmt); ok && + len(body.List) == 2 && + assign.Tok == token.ASSIGN && + len(assign.Lhs) == 1 && + len(assign.Rhs) == 1 { + + // Have: body={ lhs = rhs; break } + + if prevAssign, ok := prevStmt.(*ast.AssignStmt); ok && + len(prevAssign.Lhs) == 1 && + len(prevAssign.Rhs) == 1 && + astutil.EqualSyntax(prevAssign.Lhs[0], assign.Lhs[0]) && + is[*ast.Ident](assign.Rhs[0]) && + info.Uses[assign.Rhs[0].(*ast.Ident)] == builtinTrue { + + // Have: + // lhs = false + // for ... { if ... { lhs = true; break } } + // => + // lhs = slices.Contains(...) + // + // TODO(adonovan): + // - support "var lhs bool = false" and variants. + // - support negation. + // Both these variants seem quite significant. + // - allow the break to be omitted. + report([]analysis.TextEdit{ + // Replace "rhs" of previous assignment by slices.Contains(...) + { + Pos: prevAssign.Rhs[0].Pos(), + End: prevAssign.Rhs[0].End(), + NewText: []byte(contains), + }, + // Delete the loop and preceding space. + { + Pos: prevAssign.Rhs[0].End(), + End: rng.End(), + }, + }) + return + } + } + + // General case: + // for ... { if ... { stmts; break } } + // => if slices.Contains(...) { stmts } + report([]analysis.TextEdit{ + // Replace "for ... { if ... " with "if slices.Contains(...)". + { + Pos: rng.Pos(), + End: ifStmt.Body.Pos(), + NewText: fmt.Appendf(nil, "if %s ", contains), + }, + // Delete break statement and preceding space. + { + Pos: func() token.Pos { + if len(body.List) > 1 { + beforeBreak, _ := curLastStmt.PrevSibling() + return beforeBreak.Node().End() + } + return lastStmt.Pos() + }(), + End: lastStmt.End(), + }, + // Delete '}' of range statement and preceding space. + { + Pos: ifStmt.Body.End(), + End: rng.End(), + }, + }) + return + } + } + } + + for curFile := range filesUsing(inspect, info, "go1.21") { + file := curFile.Node().(*ast.File) + + for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) { + rng := curRange.Node().(*ast.RangeStmt) + + if is[*ast.Ident](rng.Key) && + rng.Tok == token.DEFINE && + len(rng.Body.List) == 1 && + is[*types.Slice](typeparams.CoreType(info.TypeOf(rng.X))) { + + // Have: + // - for _, elem := range s { S } + // - for i := range s { S } + + if ifStmt, ok := rng.Body.List[0].(*ast.IfStmt); ok && + ifStmt.Init == nil && ifStmt.Else == nil { + + // Have: for i, elem := range s { if cond { ... } } + check(file, curRange) + } + } + } + } + return nil, nil +} + +// -- helpers -- + +// isReturnTrueOrFalse returns nonzero if stmt returns true (+1) or false (-1). +func isReturnTrueOrFalse(info *types.Info, stmt ast.Stmt) int { + if ret, ok := stmt.(*ast.ReturnStmt); ok && len(ret.Results) == 1 { + if id, ok := ret.Results[0].(*ast.Ident); ok { + switch info.Uses[id] { + case builtinTrue: + return +1 + case builtinFalse: + return -1 + } + } + } + return 0 +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/slicesdelete.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/slicesdelete.go new file mode 100644 index 00000000..b3e063db --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/slicesdelete.go @@ -0,0 +1,184 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package modernize + +import ( + "go/ast" + "go/constant" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/refactor" + "golang.org/x/tools/internal/typesinternal" +) + +// Warning: this analyzer is not safe to enable by default (not nil-preserving). +var SlicesDeleteAnalyzer = &analysis.Analyzer{ + Name: "slicesdelete", + Doc: analysisinternal.MustExtractDoc(doc, "slicesdelete"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + }, + Run: slicesdelete, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicesdelete", +} + +// The slicesdelete pass attempts to replace instances of append(s[:i], s[i+k:]...) +// with slices.Delete(s, i, i+k) where k is some positive constant. +// Other variations that will also have suggested replacements include: +// append(s[:i-1], s[i:]...) and append(s[:i+k1], s[i+k2:]) where k2 > k1. +func slicesdelete(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + // Skip the analyzer in packages where its + // fixes would create an import cycle. + if within(pass, "slices", "runtime") { + return nil, nil + } + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + info := pass.TypesInfo + report := func(file *ast.File, call *ast.CallExpr, slice1, slice2 *ast.SliceExpr) { + insert := func(pos token.Pos, text string) analysis.TextEdit { + return analysis.TextEdit{Pos: pos, End: pos, NewText: []byte(text)} + } + isIntExpr := func(e ast.Expr) bool { + return types.Identical(types.Default(info.TypeOf(e)), builtinInt.Type()) + } + isIntShadowed := func() bool { + scope := pass.TypesInfo.Scopes[file].Innermost(call.Lparen) + if _, obj := scope.LookupParent("int", call.Lparen); obj != builtinInt { + return true // int type is shadowed + } + return false + } + + prefix, edits := refactor.AddImport(info, file, "slices", "slices", "Delete", call.Pos()) + // append's indices may be any integer type; slices.Delete requires int. + // Insert int conversions as needed (and if possible). + if isIntShadowed() && (!isIntExpr(slice1.High) || !isIntExpr(slice2.Low)) { + return + } + if !isIntExpr(slice1.High) { + edits = append(edits, + insert(slice1.High.Pos(), "int("), + insert(slice1.High.End(), ")"), + ) + } + if !isIntExpr(slice2.Low) { + edits = append(edits, + insert(slice2.Low.Pos(), "int("), + insert(slice2.Low.End(), ")"), + ) + } + + pass.Report(analysis.Diagnostic{ + Pos: call.Pos(), + End: call.End(), + Message: "Replace append with slices.Delete", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Replace append with slices.Delete", + TextEdits: append(edits, []analysis.TextEdit{ + // Change name of called function. + { + Pos: call.Fun.Pos(), + End: call.Fun.End(), + NewText: []byte(prefix + "Delete"), + }, + // Delete ellipsis. + { + Pos: call.Ellipsis, + End: call.Ellipsis + token.Pos(len("...")), // delete ellipsis + }, + // Remove second slice variable name. + { + Pos: slice2.X.Pos(), + End: slice2.X.End(), + }, + // Insert after first slice variable name. + { + Pos: slice1.X.End(), + NewText: []byte(", "), + }, + // Remove brackets and colons. 
+ { + Pos: slice1.Lbrack, + End: slice1.High.Pos(), + }, + { + Pos: slice1.Rbrack, + End: slice1.Rbrack + 1, + }, + { + Pos: slice2.Lbrack, + End: slice2.Lbrack + 1, + }, + { + Pos: slice2.Low.End(), + End: slice2.Rbrack + 1, + }, + }...), + }}, + }) + } + for curFile := range filesUsing(inspect, info, "go1.21") { + file := curFile.Node().(*ast.File) + for curCall := range curFile.Preorder((*ast.CallExpr)(nil)) { + call := curCall.Node().(*ast.CallExpr) + if id, ok := call.Fun.(*ast.Ident); ok && len(call.Args) == 2 { + // Verify we have append with two slices and ... operator, + // the first slice has no low index and second slice has no + // high index, and not a three-index slice. + if call.Ellipsis.IsValid() && info.Uses[id] == builtinAppend { + slice1, ok1 := call.Args[0].(*ast.SliceExpr) + slice2, ok2 := call.Args[1].(*ast.SliceExpr) + if ok1 && slice1.Low == nil && !slice1.Slice3 && + ok2 && slice2.High == nil && !slice2.Slice3 && + astutil.EqualSyntax(slice1.X, slice2.X) && + typesinternal.NoEffects(info, slice1.X) && + increasingSliceIndices(info, slice1.High, slice2.Low) { + // Have append(s[:a], s[b:]...) where we can verify a < b. + report(file, call, slice1, slice2) + } + } + } + } + } + return nil, nil +} + +// Given two slice indices a and b, returns true if we can verify that a < b. +// It recognizes certain forms such as i+k1 < i+k2 where k1 < k2. +func increasingSliceIndices(info *types.Info, a, b ast.Expr) bool { + // Given an expression of the form i±k, returns (i, k) + // where k is a signed constant. Otherwise it returns (e, 0). + split := func(e ast.Expr) (ast.Expr, constant.Value) { + if binary, ok := e.(*ast.BinaryExpr); ok && (binary.Op == token.SUB || binary.Op == token.ADD) { + // Negate constants if operation is subtract instead of add + if k := info.Types[binary.Y].Value; k != nil { + return binary.X, constant.UnaryOp(binary.Op, k, 0) // i ± k + } + } + return e, constant.MakeInt64(0) + } + + // Handle case where either a or b is a constant + ak := info.Types[a].Value + bk := info.Types[b].Value + if ak != nil || bk != nil { + return ak != nil && bk != nil && constant.Compare(ak, token.LSS, bk) + } + + ai, ak := split(a) + bi, bk := split(b) + return astutil.EqualSyntax(ai, bi) && constant.Compare(ak, token.LSS, bk) +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/sortslice.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/sortslice.go new file mode 100644 index 00000000..66af16d1 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/sortslice.go @@ -0,0 +1,124 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/refactor" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +// (Not to be confused with go/analysis/passes/sortslice.) 
+var SlicesSortAnalyzer = &analysis.Analyzer{ + Name: "slicessort", + Doc: analysisinternal.MustExtractDoc(doc, "slicessort"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + typeindexanalyzer.Analyzer, + }, + Run: slicessort, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#slicessort", +} + +// The slicessort pass replaces sort.Slice(slice, less) with +// slices.Sort(slice) when slice is a []T and less is a FuncLit +// equivalent to cmp.Ordered[T]. +// +// sort.Slice(s, func(i, j int) bool { return s[i] < s[j] }) +// => slices.Sort(s) +// +// There is no slices.SortStable. +// +// TODO(adonovan): support +// +// - sort.Slice(s, func(i, j int) bool { return s[i] ... s[j] }) +// -> slices.SortFunc(s, func(x, y T) int { return x ... y }) +// iff all uses of i, j can be replaced by s[i], s[j] and "<" can be replaced with cmp.Compare. +// +// - As above for sort.SliceStable -> slices.SortStableFunc. +// +// - sort.Sort(x) where x has a named slice type whose Less method is the natural order. +// -> sort.Slice(x) +func slicessort(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + // Skip the analyzer in packages where its + // fixes would create an import cycle. + if within(pass, "slices", "sort", "runtime") { + return nil, nil + } + + var ( + info = pass.TypesInfo + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + sortSlice = index.Object("sort", "Slice") + ) + for curCall := range index.Calls(sortSlice) { + call := curCall.Node().(*ast.CallExpr) + if lit, ok := call.Args[1].(*ast.FuncLit); ok && len(lit.Body.List) == 1 { + sig := info.Types[lit.Type].Type.(*types.Signature) + + // Have: sort.Slice(s, func(i, j int) bool { return ... }) + s := call.Args[0] + i := sig.Params().At(0) + j := sig.Params().At(1) + + if ret, ok := lit.Body.List[0].(*ast.ReturnStmt); ok { + if compare, ok := ret.Results[0].(*ast.BinaryExpr); ok && compare.Op == token.LSS { + // isIndex reports whether e is s[v]. + isIndex := func(e ast.Expr, v *types.Var) bool { + index, ok := e.(*ast.IndexExpr) + return ok && + astutil.EqualSyntax(index.X, s) && + is[*ast.Ident](index.Index) && + info.Uses[index.Index.(*ast.Ident)] == v + } + file := astutil.EnclosingFile(curCall) + if isIndex(compare.X, i) && isIndex(compare.Y, j) && + fileUses(info, file, "go1.21") { + // Have: sort.Slice(s, func(i, j int) bool { return s[i] < s[j] }) + + prefix, importEdits := refactor.AddImport( + info, file, "slices", "slices", "Sort", call.Pos()) + + pass.Report(analysis.Diagnostic{ + // Highlight "sort.Slice". + Pos: call.Fun.Pos(), + End: call.Fun.End(), + Message: "sort.Slice can be modernized using slices.Sort", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Replace sort.Slice call by slices.Sort", + TextEdits: append(importEdits, []analysis.TextEdit{ + { + // Replace sort.Slice with slices.Sort. + Pos: call.Fun.Pos(), + End: call.Fun.End(), + NewText: []byte(prefix + "Sort"), + }, + { + // Eliminate FuncLit. + Pos: call.Args[0].End(), + End: call.Rparen, + }, + }...), + }}, + }) + } + } + } + } + } + return nil, nil +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/stditerators.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/stditerators.go new file mode 100644 index 00000000..20817520 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/stditerators.go @@ -0,0 +1,354 @@ +// Copyright 2025 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ast/edge" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/goplsexport" + "golang.org/x/tools/internal/refactor" + "golang.org/x/tools/internal/stdlib" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +var stditeratorsAnalyzer = &analysis.Analyzer{ + Name: "stditerators", + Doc: analysisinternal.MustExtractDoc(doc, "stditerators"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + typeindexanalyzer.Analyzer, + }, + Run: stditerators, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stditerators", +} + +func init() { + // Export to gopls until this is a published modernizer. + goplsexport.StdIteratorsModernizer = stditeratorsAnalyzer +} + +// stditeratorsTable records std types that have legacy T.{Len,At} +// iteration methods as well as a newer T.All method that returns an +// iter.Seq. +var stditeratorsTable = [...]struct { + pkgpath, typename, lenmethod, atmethod, itermethod, elemname string +}{ + // Example: in go/types, (*Tuple).Variables returns an + // iterator that replaces a loop over (*Tuple).{Len,At}. + // The loop variable is named "v". + {"go/types", "Interface", "NumEmbeddeds", "EmbeddedType", "EmbeddedTypes", "etyp"}, + {"go/types", "Interface", "NumExplicitMethods", "ExplicitMethod", "ExplicitMethods", "method"}, + {"go/types", "Interface", "NumMethods", "Method", "Methods", "method"}, + {"go/types", "MethodSet", "Len", "At", "Methods", "method"}, + {"go/types", "Named", "NumMethods", "Method", "Methods", "method"}, + {"go/types", "Scope", "NumChildren", "Child", "Children", "child"}, + {"go/types", "Struct", "NumFields", "Field", "Fields", "field"}, + {"go/types", "Tuple", "Len", "At", "Variables", "v"}, + {"go/types", "TypeList", "Len", "At", "Types", "t"}, + {"go/types", "TypeParamList", "Len", "At", "TypeParams", "tparam"}, + {"go/types", "Union", "Len", "Term", "Terms", "term"}, + // TODO(adonovan): support Seq2. Bonus: transform uses of both key and value. + // {"reflect", "Value", "NumFields", "Field", "Fields", "field"}, +} + +// stditerators suggests fixes to replace loops using Len/At-style +// iterator APIs by a range loop over an iterator. The set of +// participating types and methods is defined by [iteratorsTable]. +// +// Pattern: +// +// for i := 0; i < x.Len(); i++ { +// use(x.At(i)) +// } +// +// => +// +// for elem := range x.All() { +// use(elem) +// } +// +// Variant: +// +// for i := range x.Len() { ... } +// +// Note: Iterators have a dynamic cost. How do we know that +// the user hasn't intentionally chosen not to use an +// iterator for that reason? We don't want to go fix to +// undo optimizations. Do we need a suppression mechanism? +func stditerators(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + var ( + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + ) + + for _, row := range stditeratorsTable { + // Don't offer fixes within the package + // that defines the iterator in question. 
+ if within(pass, row.pkgpath) { + continue + } + + var ( + lenMethod = index.Selection(row.pkgpath, row.typename, row.lenmethod) + atMethod = index.Selection(row.pkgpath, row.typename, row.atmethod) + ) + + // chooseName returns an appropriate fresh name + // for the index variable of the iterator loop + // whose body is specified. + // + // If the loop body starts with + // + // for ... { e := x.At(i); use(e) } + // + // then chooseName prefers the name e and additionally + // returns the var's symbol. We'll transform this to: + // + // for e := range x.Len() { e := e; use(e) } + // + // which leaves a redundant assignment that a + // subsequent 'forvar' pass will eliminate. + chooseName := func(curBody inspector.Cursor, x ast.Expr, i *types.Var) (string, *types.Var) { + // Is body { elem := x.At(i); ... } ? + body := curBody.Node().(*ast.BlockStmt) + if len(body.List) > 0 { + if assign, ok := body.List[0].(*ast.AssignStmt); ok && + assign.Tok == token.DEFINE && + len(assign.Lhs) == 1 && + len(assign.Rhs) == 1 && + is[*ast.Ident](assign.Lhs[0]) { + // call to x.At(i)? + if call, ok := assign.Rhs[0].(*ast.CallExpr); ok && + typeutil.Callee(info, call) == atMethod && + astutil.EqualSyntax(ast.Unparen(call.Fun).(*ast.SelectorExpr).X, x) && + is[*ast.Ident](call.Args[0]) && + info.Uses[call.Args[0].(*ast.Ident)] == i { + // Have: { elem := x.At(i); ... } + id := assign.Lhs[0].(*ast.Ident) + return id.Name, info.Defs[id].(*types.Var) + } + } + } + + loop := curBody.Parent().Node() + return refactor.FreshName(info.Scopes[loop], loop.Pos(), row.elemname), nil + } + + // Process each call of x.Len(). + nextCall: + for curLenCall := range index.Calls(lenMethod) { + lenSel, ok := ast.Unparen(curLenCall.Node().(*ast.CallExpr).Fun).(*ast.SelectorExpr) + if !ok { + continue + } + // lenSel is "x.Len" + + var ( + rng analysis.Range // where to report diagnostic + curBody inspector.Cursor // loop body + indexVar *types.Var // old loop index var + elemVar *types.Var // existing "elem := x.At(i)" var, if present + elem string // name for new loop var + edits []analysis.TextEdit + ) + + // Analyze enclosing loop. + switch ek, _ := curLenCall.ParentEdge(); ek { + case edge.BinaryExpr_Y: + // pattern 1: for i := 0; i < x.Len(); i++ { ... } + var ( + curCmp = curLenCall.Parent() + cmp = curCmp.Node().(*ast.BinaryExpr) + ) + if cmp.Op != token.LSS || + !astutil.IsChildOf(curCmp, edge.ForStmt_Cond) { + continue + } + if id, ok := cmp.X.(*ast.Ident); ok { + // Have: for _; i < x.Len(); _ { ... } + var ( + v = info.Uses[id].(*types.Var) + curFor = curCmp.Parent() + loop = curFor.Node().(*ast.ForStmt) + ) + if v != isIncrementLoop(info, loop) { + continue + } + // Have: for i := 0; i < x.Len(); i++ { ... }. + // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + rng = analysisinternal.Range(loop.For, loop.Post.End()) + indexVar = v + curBody = curFor.ChildAt(edge.ForStmt_Body, -1) + elem, elemVar = chooseName(curBody, lenSel.X, indexVar) + + // for i := 0; i < x.Len(); i++ { + // ---- ------- --- ----- + // for elem := range x.All() { + edits = []analysis.TextEdit{ + { + Pos: v.Pos(), + End: v.Pos() + token.Pos(len(v.Name())), + NewText: []byte(elem), + }, + { + Pos: loop.Init.(*ast.AssignStmt).Rhs[0].Pos(), + End: cmp.Y.Pos(), + NewText: []byte("range "), + }, + { + Pos: lenSel.Sel.Pos(), + End: lenSel.Sel.End(), + NewText: []byte(row.itermethod), + }, + { + Pos: curLenCall.Node().End(), + End: loop.Post.End(), + }, + } + } + + case edge.RangeStmt_X: + // pattern 2: for i := range x.Len() { ... 
}
+				var (
+					curRange = curLenCall.Parent()
+					loop     = curRange.Node().(*ast.RangeStmt)
+				)
+				if id, ok := loop.Key.(*ast.Ident); ok &&
+					loop.Value == nil &&
+					loop.Tok == token.DEFINE {
+					// Have: for i := range x.Len() { ... }
+					//           ~~~~~~~~~~~~~
+
+					rng = analysisinternal.Range(loop.Range, loop.X.End())
+					indexVar = info.Defs[id].(*types.Var)
+					curBody = curRange.ChildAt(edge.RangeStmt_Body, -1)
+					elem, elemVar = chooseName(curBody, lenSel.X, indexVar)
+
+					// for i    := range x.Len() {
+					//     ----              ---
+					// for elem := range x.All() {
+					edits = []analysis.TextEdit{
+						{
+							Pos:     loop.Key.Pos(),
+							End:     loop.Key.End(),
+							NewText: []byte(elem),
+						},
+						{
+							Pos:     lenSel.Sel.Pos(),
+							End:     lenSel.Sel.End(),
+							NewText: []byte(row.itermethod),
+						},
+					}
+				}
+			}
+
+			if indexVar == nil {
+				continue // no loop of the required form
+			}
+
+			// TODO(adonovan): what about possible
+			// modifications of x within the loop?
+			// Aliasing seems to make a conservative
+			// treatment impossible.
+
+			// Check that all uses of var i within loop body are x.At(i).
+			for curUse := range index.Uses(indexVar) {
+				if !curBody.Contains(curUse) {
+					continue
+				}
+				if ek, argidx := curUse.ParentEdge(); ek != edge.CallExpr_Args || argidx != 0 {
+					continue nextCall // use is not arg of call
+				}
+				curAtCall := curUse.Parent()
+				atCall := curAtCall.Node().(*ast.CallExpr)
+				if typeutil.Callee(info, atCall) != atMethod {
+					continue nextCall // use is not arg of call to T.At
+				}
+				atSel := ast.Unparen(atCall.Fun).(*ast.SelectorExpr)
+
+				// Check that the receivers of the Len and At calls match (syntactically).
+				if !astutil.EqualSyntax(lenSel.X, atSel.X) {
+					continue nextCall
+				}
+
+				// At each point of use, check that
+				// the fresh variable is not shadowed
+				// by an intervening local declaration
+				// (or by the idiomatic elemVar optionally
+				// found by chooseName).
+				if obj := lookup(info, curAtCall, elem); obj != nil && obj != elemVar && obj.Pos() > indexVar.Pos() {
+					// (Ideally, instead of giving up, we would
+					// embellish the name and try again.)
+					continue nextCall
+				}
+
+				// use(x.At(i))
+				//     -------
+				// use(elem   )
+				edits = append(edits, analysis.TextEdit{
+					Pos:     atCall.Pos(),
+					End:     atCall.End(),
+					NewText: []byte(elem),
+				})
+			}
+
+			// Check that the file's Go version is new enough for the iterator method.
+			// (In the long run, version filters are not highly selective,
+			// so there's no need to do them first, especially as this check
+			// may be somewhat expensive.)
+			if v, ok := methodGoVersion(row.pkgpath, row.typename, row.itermethod); !ok {
+				panic("no version found")
+			} else if file := astutil.EnclosingFile(curLenCall); !fileUses(info, file, v.String()) {
+				continue nextCall
+			}
+
+			pass.Report(analysis.Diagnostic{
+				Pos: rng.Pos(),
+				End: rng.End(),
+				Message: fmt.Sprintf("%s/%s loop can be simplified using %s.%s iteration",
+					row.lenmethod, row.atmethod, row.typename, row.itermethod),
+				SuggestedFixes: []analysis.SuggestedFix{{
+					Message: fmt.Sprintf(
+						"Replace %s/%s loop with %s.%s iteration",
+						row.lenmethod, row.atmethod, row.typename, row.itermethod),
+					TextEdits: edits,
+				}},
+			})
+		}
+	}
+	return nil, nil
+}
+
+// -- helpers --
+
+// methodGoVersion reports the version at which the method
+// (pkgpath.recvtype).method appeared in the standard library.
+func methodGoVersion(pkgpath, recvtype, method string) (stdlib.Version, bool) {
+	// TODO(adonovan): opt: this might be inefficient for large packages
+	// like go/types. If so, memoize using a map (and kill two birds with
+	// one stone by also memoizing the 'within' check above).
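+	//
+	// For example, a hypothetical lookup such as
+	//
+	//	v, ok := methodGoVersion("go/types", "Tuple", "Variables")
+	//
+	// is expected to report the release in which (*types.Tuple).Variables
+	// first appeared.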
+ for _, sym := range stdlib.PackageSymbols[pkgpath] { + if sym.Kind == stdlib.Method { + _, recv, name := sym.SplitMethod() + if recv == recvtype && name == method { + return sym.Version, true + } + } + } + return 0, false +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/stringsbuilder.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/stringsbuilder.go new file mode 100644 index 00000000..56d0ba73 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/stringsbuilder.go @@ -0,0 +1,328 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/edge" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/refactor" + "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +var StringsBuilderAnalyzer = &analysis.Analyzer{ + Name: "stringsbuilder", + Doc: analysisinternal.MustExtractDoc(doc, "stringsbuilder"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + typeindexanalyzer.Analyzer, + }, + Run: stringsbuilder, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stringbuilder", +} + +// stringsbuilder replaces string += string in a loop by strings.Builder. +func stringsbuilder(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + // Skip the analyzer in packages where its + // fixes would create an import cycle. + if within(pass, "strings", "runtime") { + return nil, nil + } + + var ( + inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + ) + + // Gather all local string variables that appear on the + // LHS of some string += string assignment. + candidates := make(map[*types.Var]bool) + for curAssign := range inspect.Root().Preorder((*ast.AssignStmt)(nil)) { + assign := curAssign.Node().(*ast.AssignStmt) + if assign.Tok == token.ADD_ASSIGN && is[*ast.Ident](assign.Lhs[0]) { + if v, ok := pass.TypesInfo.Uses[assign.Lhs[0].(*ast.Ident)].(*types.Var); ok && + !typesinternal.IsPackageLevel(v) && // TODO(adonovan): in go1.25, use v.Kind() == types.LocalVar && + types.Identical(v.Type(), builtinString.Type()) { + candidates[v] = true + } + } + } + + // Now check each candidate variable's decl and uses. +nextcand: + for v := range candidates { + var edits []analysis.TextEdit + + // Check declaration of s: + // + // s := expr + // var s [string] [= expr] + // + // and transform to: + // + // var s strings.Builder; s.WriteString(expr) + // + def, ok := index.Def(v) + if !ok { + continue + } + ek, _ := def.ParentEdge() + if ek == edge.AssignStmt_Lhs && + len(def.Parent().Node().(*ast.AssignStmt).Lhs) == 1 { + // Have: s := expr + // => var s strings.Builder; s.WriteString(expr) + + assign := def.Parent().Node().(*ast.AssignStmt) + + // Reject "if s := f(); ..." since in that context + // we can't replace the assign with two statements. 
+ switch def.Parent().Parent().Node().(type) { + case *ast.BlockStmt, *ast.CaseClause, *ast.CommClause: + // OK: these are the parts of syntax that + // allow unrestricted statement lists. + default: + continue + } + + // Add strings import. + prefix, importEdits := refactor.AddImport( + pass.TypesInfo, astutil.EnclosingFile(def), "strings", "strings", "Builder", v.Pos()) + edits = append(edits, importEdits...) + + if isEmptyString(pass.TypesInfo, assign.Rhs[0]) { + // s := "" + // --------------------- + // var s strings.Builder + edits = append(edits, analysis.TextEdit{ + Pos: assign.Pos(), + End: assign.End(), + NewText: fmt.Appendf(nil, "var %[1]s %[2]sBuilder", v.Name(), prefix), + }) + + } else { + // s := expr + // ------------------------------------- - + // var s strings.Builder; s.WriteString(expr) + edits = append(edits, []analysis.TextEdit{ + { + Pos: assign.Pos(), + End: assign.Rhs[0].Pos(), + NewText: fmt.Appendf(nil, "var %[1]s %[2]sBuilder; %[1]s.WriteString(", v.Name(), prefix), + }, + { + Pos: assign.End(), + End: assign.End(), + NewText: []byte(")"), + }, + }...) + + } + + } else if ek == edge.ValueSpec_Names && + len(def.Parent().Node().(*ast.ValueSpec).Names) == 1 { + // Have: var s [string] [= expr] + // => var s strings.Builder; s.WriteString(expr) + + // Add strings import. + prefix, importEdits := refactor.AddImport( + pass.TypesInfo, astutil.EnclosingFile(def), "strings", "strings", "Builder", v.Pos()) + edits = append(edits, importEdits...) + + spec := def.Parent().Node().(*ast.ValueSpec) + decl := def.Parent().Parent().Node().(*ast.GenDecl) + + init := spec.Names[0].End() // start of " = expr" + if spec.Type != nil { + init = spec.Type.End() + } + + // var s [string] + // ---------------- + // var s strings.Builder + edits = append(edits, analysis.TextEdit{ + Pos: spec.Names[0].End(), + End: init, + NewText: fmt.Appendf(nil, " %sBuilder", prefix), + }) + + if len(spec.Values) > 0 && !isEmptyString(pass.TypesInfo, spec.Values[0]) { + // = expr + // ---------------- - + // ; s.WriteString(expr) + edits = append(edits, []analysis.TextEdit{ + { + Pos: init, + End: spec.Values[0].Pos(), + NewText: fmt.Appendf(nil, "; %s.WriteString(", v.Name()), + }, + { + Pos: decl.End(), + End: decl.End(), + NewText: []byte(")"), + }, + }...) + } else { + // delete "= expr" + edits = append(edits, analysis.TextEdit{ + Pos: init, + End: spec.End(), + }) + } + + } else { + continue + } + + // Check uses of s. + // + // - All uses of s except the final one must be of the form + // + // s += expr + // + // Each of these will become s.WriteString(expr). + // At least one of them must be in an intervening loop + // w.r.t. the declaration of s: + // + // var s string + // for ... { s += expr } + // + // - The final use of s must be as an rvalue (e.g. use(s), not &s). + // This will become s.String(). + // + // Perhaps surprisingly, it is fine for there to be an + // intervening loop or lambda w.r.t. the declaration of s: + // + // var s strings.Builder + // for range kSmall { s.WriteString(expr) } + // for range kLarge { use(s.String()) } // called repeatedly + // + // Even though that might cause the s.String() operation to be + // executed repeatedly, this is not a deoptimization because, + // by design, (*strings.Builder).String does not allocate. 
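+		//
+		// Putting the pieces together, a candidate such as (s and parts
+		// are hypothetical)
+		//
+		//	s := ""
+		//	for _, p := range parts { s += p }
+		//	use(s)
+		//
+		// would be offered the rewrite
+		//
+		//	var s strings.Builder
+		//	for _, p := range parts { s.WriteString(p) }
+		//	use(s.String())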
+ var ( + numLoopAssigns int // number of += assignments within a loop + loopAssign *ast.AssignStmt // first += assignment within a loop + seenRvalueUse bool // => we've seen the sole final use of s as an rvalue + ) + for curUse := range index.Uses(v) { + // Strip enclosing parens around Ident. + ek, _ := curUse.ParentEdge() + for ek == edge.ParenExpr_X { + curUse = curUse.Parent() + ek, _ = curUse.ParentEdge() + } + + // The rvalueUse must be the lexically last use. + if seenRvalueUse { + continue nextcand + } + + // intervening reports whether cur has an ancestor of + // one of the given types that is within the scope of v. + intervening := func(types ...ast.Node) bool { + for cur := range curUse.Enclosing(types...) { + if v.Pos() <= cur.Node().Pos() { // in scope of v + return true + } + } + return false + } + + if ek == edge.AssignStmt_Lhs { + assign := curUse.Parent().Node().(*ast.AssignStmt) + if assign.Tok != token.ADD_ASSIGN { + continue nextcand + } + // Have: s += expr + + // At least one of the += operations + // must appear within a loop. + // relative to the declaration of s. + if intervening((*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)) { + numLoopAssigns++ + if loopAssign == nil { + loopAssign = assign + } + } + + // s += expr + // ------------- - + // s.WriteString(expr) + edits = append(edits, []analysis.TextEdit{ + // replace += with .WriteString() + { + Pos: assign.TokPos, + End: assign.Rhs[0].Pos(), + NewText: []byte(".WriteString("), + }, + // insert ")" + { + Pos: assign.End(), + End: assign.End(), + NewText: []byte(")"), + }, + }...) + + } else if ek == edge.UnaryExpr_X && + curUse.Parent().Node().(*ast.UnaryExpr).Op == token.AND { + // Have: use(&s) + continue nextcand // s is used as an lvalue; reject + + } else { + // The only possible l-value uses of a string variable + // are assignments (s=expr, s+=expr, etc) and &s. + // (For strings, we can ignore method calls s.m().) + // All other uses are r-values. + seenRvalueUse = true + + edits = append(edits, analysis.TextEdit{ + // insert ".String()" + Pos: curUse.Node().End(), + End: curUse.Node().End(), + NewText: []byte(".String()"), + }) + } + } + if !seenRvalueUse { + continue nextcand // no rvalue use; reject + } + if numLoopAssigns == 0 { + continue nextcand // no += in a loop; reject + } + + pass.Report(analysis.Diagnostic{ + Pos: loopAssign.Pos(), + End: loopAssign.End(), + Message: "using string += string in a loop is inefficient", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Replace string += string with strings.Builder", + TextEdits: edits, + }}, + }) + } + + return nil, nil +} + +// isEmptyString reports whether e (a string-typed expression) has constant value "". +func isEmptyString(info *types.Info, e ast.Expr) bool { + tv, ok := info.Types[e] + return ok && tv.Value != nil && constant.StringVal(tv.Value) == "" +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/stringscutprefix.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/stringscutprefix.go new file mode 100644 index 00000000..9e76f953 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/stringscutprefix.go @@ -0,0 +1,261 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package modernize + +import ( + "fmt" + "go/ast" + "go/token" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/refactor" + "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +var StringsCutPrefixAnalyzer = &analysis.Analyzer{ + Name: "stringscutprefix", + Doc: analysisinternal.MustExtractDoc(doc, "stringscutprefix"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + typeindexanalyzer.Analyzer, + }, + Run: stringscutprefix, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stringscutprefix", +} + +// stringscutprefix offers a fix to replace an if statement which +// calls to the 2 patterns below with strings.CutPrefix or strings.CutSuffix. +// +// Patterns: +// +// 1. if strings.HasPrefix(s, pre) { use(strings.TrimPrefix(s, pre) } +// => +// if after, ok := strings.CutPrefix(s, pre); ok { use(after) } +// +// 2. if after := strings.TrimPrefix(s, pre); after != s { use(after) } +// => +// if after, ok := strings.CutPrefix(s, pre); ok { use(after) } +// +// Similar patterns apply for CutSuffix. +// +// The use must occur within the first statement of the block, and the offered fix +// only replaces the first occurrence of strings.TrimPrefix/TrimSuffix. +// +// Variants: +// - bytes.HasPrefix/HasSuffix usage as pattern 1. +func stringscutprefix(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + var ( + inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + + stringsTrimPrefix = index.Object("strings", "TrimPrefix") + bytesTrimPrefix = index.Object("bytes", "TrimPrefix") + stringsTrimSuffix = index.Object("strings", "TrimSuffix") + bytesTrimSuffix = index.Object("bytes", "TrimSuffix") + ) + if !index.Used(stringsTrimPrefix, bytesTrimPrefix, stringsTrimSuffix, bytesTrimSuffix) { + return nil, nil + } + + for curFile := range filesUsing(inspect, pass.TypesInfo, "go1.20") { + for curIfStmt := range curFile.Preorder((*ast.IfStmt)(nil)) { + ifStmt := curIfStmt.Node().(*ast.IfStmt) + + // pattern1 + if call, ok := ifStmt.Cond.(*ast.CallExpr); ok && ifStmt.Init == nil && len(ifStmt.Body.List) > 0 { + + obj := typeutil.Callee(info, call) + if !typesinternal.IsFunctionNamed(obj, "strings", "HasPrefix", "HasSuffix") && + !typesinternal.IsFunctionNamed(obj, "bytes", "HasPrefix", "HasSuffix") { + continue + } + isPrefix := strings.HasSuffix(obj.Name(), "Prefix") + + // Replace the first occurrence of strings.TrimPrefix(s, pre) in the first statement only, + // but not later statements in case s or pre are modified by intervening logic (ditto Suffix). + firstStmt := curIfStmt.Child(ifStmt.Body).Child(ifStmt.Body.List[0]) + for curCall := range firstStmt.Preorder((*ast.CallExpr)(nil)) { + call1 := curCall.Node().(*ast.CallExpr) + obj1 := typeutil.Callee(info, call1) + // bytesTrimPrefix or stringsTrimPrefix might be nil if the file doesn't import it, + // so we need to ensure the obj1 is not nil otherwise the call1 is not TrimPrefix and cause a panic (ditto Suffix). 
+ if obj1 == nil || + obj1 != stringsTrimPrefix && obj1 != bytesTrimPrefix && + obj1 != stringsTrimSuffix && obj1 != bytesTrimSuffix { + continue + } + + isPrefix1 := strings.HasSuffix(obj1.Name(), "Prefix") + var cutFuncName, varName, message, fixMessage string + if isPrefix && isPrefix1 { + cutFuncName = "CutPrefix" + varName = "after" + message = "HasPrefix + TrimPrefix can be simplified to CutPrefix" + fixMessage = "Replace HasPrefix/TrimPrefix with CutPrefix" + } else if !isPrefix && !isPrefix1 { + cutFuncName = "CutSuffix" + varName = "before" + message = "HasSuffix + TrimSuffix can be simplified to CutSuffix" + fixMessage = "Replace HasSuffix/TrimSuffix with CutSuffix" + } else { + continue + } + + // Have: if strings.HasPrefix(s0, pre0) { ...strings.TrimPrefix(s, pre)... } (ditto Suffix) + var ( + s0 = call.Args[0] + pre0 = call.Args[1] + s = call1.Args[0] + pre = call1.Args[1] + ) + + // check whether the obj1 uses the exact the same argument with strings.HasPrefix + // shadow variables won't be valid because we only access the first statement (ditto Suffix). + if astutil.EqualSyntax(s0, s) && astutil.EqualSyntax(pre0, pre) { + after := refactor.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), varName) + prefix, importEdits := refactor.AddImport( + info, + curFile.Node().(*ast.File), + obj1.Pkg().Name(), + obj1.Pkg().Path(), + cutFuncName, + call.Pos(), + ) + okVarName := refactor.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), "ok") + pass.Report(analysis.Diagnostic{ + // highlight at HasPrefix call (ditto Suffix). + Pos: call.Pos(), + End: call.End(), + Message: message, + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fixMessage, + // if strings.HasPrefix(s, pre) { use(strings.TrimPrefix(s, pre)) } + // ------------ ----------------- ----- -------------------------- + // if after, ok := strings.CutPrefix(s, pre); ok { use(after) } + // (ditto Suffix) + TextEdits: append(importEdits, []analysis.TextEdit{ + { + Pos: call.Fun.Pos(), + End: call.Fun.Pos(), + NewText: fmt.Appendf(nil, "%s, %s :=", after, okVarName), + }, + { + Pos: call.Fun.Pos(), + End: call.Fun.End(), + NewText: fmt.Appendf(nil, "%s%s", prefix, cutFuncName), + }, + { + Pos: call.End(), + End: call.End(), + NewText: fmt.Appendf(nil, "; %s ", okVarName), + }, + { + Pos: call1.Pos(), + End: call1.End(), + NewText: []byte(after), + }, + }...), + }}}, + ) + break + } + } + } + + // pattern2 + if bin, ok := ifStmt.Cond.(*ast.BinaryExpr); ok && + bin.Op == token.NEQ && + ifStmt.Init != nil && + isSimpleAssign(ifStmt.Init) { + assign := ifStmt.Init.(*ast.AssignStmt) + if call, ok := assign.Rhs[0].(*ast.CallExpr); ok && assign.Tok == token.DEFINE { + lhs := assign.Lhs[0] + obj := typeutil.Callee(info, call) + + if obj == nil || + obj != stringsTrimPrefix && obj != bytesTrimPrefix && obj != stringsTrimSuffix && obj != bytesTrimSuffix { + continue + } + + isPrefix1 := strings.HasSuffix(obj.Name(), "Prefix") + var cutFuncName, message, fixMessage string + if isPrefix1 { + cutFuncName = "CutPrefix" + message = "TrimPrefix can be simplified to CutPrefix" + fixMessage = "Replace TrimPrefix with CutPrefix" + } else { + cutFuncName = "CutSuffix" + message = "TrimSuffix can be simplified to CutSuffix" + fixMessage = "Replace TrimSuffix with CutSuffix" + } + + if astutil.EqualSyntax(lhs, bin.X) && astutil.EqualSyntax(call.Args[0], bin.Y) || + (astutil.EqualSyntax(lhs, bin.Y) && astutil.EqualSyntax(call.Args[0], bin.X)) { + okVarName := refactor.FreshName(info.Scopes[ifStmt], ifStmt.Pos(), "ok") + // Have one of: + // if rest := 
TrimPrefix(s, prefix); rest != s { (ditto Suffix) + // if rest := TrimPrefix(s, prefix); s != rest { (ditto Suffix) + + // We use AddImport not to add an import (since it exists already) + // but to compute the correct prefix in the dot-import case. + prefix, importEdits := refactor.AddImport( + info, + curFile.Node().(*ast.File), + obj.Pkg().Name(), + obj.Pkg().Path(), + cutFuncName, + call.Pos(), + ) + + pass.Report(analysis.Diagnostic{ + // highlight from the init and the condition end. + Pos: ifStmt.Init.Pos(), + End: ifStmt.Cond.End(), + Message: message, + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fixMessage, + // if x := strings.TrimPrefix(s, pre); x != s ... + // ---- ---------- ------ + // if x, ok := strings.CutPrefix (s, pre); ok ... + // (ditto Suffix) + TextEdits: append(importEdits, []analysis.TextEdit{ + { + Pos: assign.Lhs[0].End(), + End: assign.Lhs[0].End(), + NewText: fmt.Appendf(nil, ", %s", okVarName), + }, + { + Pos: call.Fun.Pos(), + End: call.Fun.End(), + NewText: fmt.Appendf(nil, "%s%s", prefix, cutFuncName), + }, + { + Pos: ifStmt.Cond.Pos(), + End: ifStmt.Cond.End(), + NewText: []byte(okVarName), + }, + }...), + }}, + }) + } + } + } + } + } + return nil, nil +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/stringsseq.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/stringsseq.go new file mode 100644 index 00000000..ef2b5463 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/stringsseq.go @@ -0,0 +1,145 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/edge" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +var StringsSeqAnalyzer = &analysis.Analyzer{ + Name: "stringsseq", + Doc: analysisinternal.MustExtractDoc(doc, "stringsseq"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + typeindexanalyzer.Analyzer, + }, + Run: stringsseq, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#stringsseq", +} + +// stringsseq offers a fix to replace a call to strings.Split with +// SplitSeq or strings.Fields with FieldsSeq +// when it is the operand of a range loop, either directly: +// +// for _, line := range strings.Split() {...} +// +// or indirectly, if the variable's sole use is the range statement: +// +// lines := strings.Split() +// for _, line := range lines {...} +// +// Variants: +// - bytes.SplitSeq +// - bytes.FieldsSeq +func stringsseq(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + var ( + inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + + stringsSplit = index.Object("strings", "Split") + stringsFields = index.Object("strings", "Fields") + bytesSplit = index.Object("bytes", "Split") + bytesFields = index.Object("bytes", "Fields") + ) + if !index.Used(stringsSplit, stringsFields, bytesSplit, bytesFields) { + return nil, nil + 
} + + for curFile := range filesUsing(inspect, info, "go1.24") { + for curRange := range curFile.Preorder((*ast.RangeStmt)(nil)) { + rng := curRange.Node().(*ast.RangeStmt) + + // Reject "for i, line := ..." since SplitSeq is not an iter.Seq2. + // (We require that i is blank.) + if id, ok := rng.Key.(*ast.Ident); ok && id.Name != "_" { + continue + } + + // Find the call operand of the range statement, + // whether direct or indirect. + call, ok := rng.X.(*ast.CallExpr) + if !ok { + if id, ok := rng.X.(*ast.Ident); ok { + if v, ok := info.Uses[id].(*types.Var); ok { + if ek, idx := curRange.ParentEdge(); ek == edge.BlockStmt_List && idx > 0 { + curPrev, _ := curRange.PrevSibling() + if assign, ok := curPrev.Node().(*ast.AssignStmt); ok && + assign.Tok == token.DEFINE && + len(assign.Lhs) == 1 && + len(assign.Rhs) == 1 && + info.Defs[assign.Lhs[0].(*ast.Ident)] == v && + soleUseIs(index, v, id) { + // Have: + // lines := ... + // for _, line := range lines {...} + // and no other uses of lines. + call, _ = assign.Rhs[0].(*ast.CallExpr) + } + } + } + } + } + + if call != nil { + var edits []analysis.TextEdit + if rng.Key != nil { + // Delete (blank) RangeStmt.Key: + // for _, line := -> for line := + // for _, _ := -> for + // for _ := -> for + end := rng.Range + if rng.Value != nil { + end = rng.Value.Pos() + } + edits = append(edits, analysis.TextEdit{ + Pos: rng.Key.Pos(), + End: end, + }) + } + + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + continue + } + + switch obj := typeutil.Callee(info, call); obj { + case stringsSplit, stringsFields, bytesSplit, bytesFields: + oldFnName := obj.Name() + seqFnName := fmt.Sprintf("%sSeq", oldFnName) + pass.Report(analysis.Diagnostic{ + Pos: sel.Pos(), + End: sel.End(), + Message: fmt.Sprintf("Ranging over %s is more efficient", seqFnName), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Replace %s with %s", oldFnName, seqFnName), + TextEdits: append(edits, analysis.TextEdit{ + Pos: sel.Sel.Pos(), + End: sel.Sel.End(), + NewText: []byte(seqFnName)}), + }}, + }) + } + } + } + } + return nil, nil +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/testingcontext.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/testingcontext.go new file mode 100644 index 00000000..558cf142 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/testingcontext.go @@ -0,0 +1,253 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package modernize + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "strings" + "unicode" + "unicode/utf8" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/edge" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/typesinternal" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +var TestingContextAnalyzer = &analysis.Analyzer{ + Name: "testingcontext", + Doc: analysisinternal.MustExtractDoc(doc, "testingcontext"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + typeindexanalyzer.Analyzer, + }, + Run: testingContext, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#testingcontext", +} + +// The testingContext pass replaces calls to context.WithCancel from within +// tests to a use of testing.{T,B,F}.Context(), added in Go 1.24. +// +// Specifically, the testingContext pass suggests to replace: +// +// ctx, cancel := context.WithCancel(context.Background()) // or context.TODO +// defer cancel() +// +// with: +// +// ctx := t.Context() +// +// provided: +// +// - ctx and cancel are declared by the assignment +// - the deferred call is the only use of cancel +// - the call is within a test or subtest function +// - the relevant testing.{T,B,F} is named and not shadowed at the call +func testingContext(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + var ( + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + + contextWithCancel = index.Object("context", "WithCancel") + ) + +calls: + for cur := range index.Calls(contextWithCancel) { + call := cur.Node().(*ast.CallExpr) + // Have: context.WithCancel(...) + + arg, ok := call.Args[0].(*ast.CallExpr) + if !ok { + continue + } + if !typesinternal.IsFunctionNamed(typeutil.Callee(info, arg), "context", "Background", "TODO") { + continue + } + // Have: context.WithCancel(context.{Background,TODO}()) + + parent := cur.Parent() + assign, ok := parent.Node().(*ast.AssignStmt) + if !ok || assign.Tok != token.DEFINE { + continue + } + // Have: a, b := context.WithCancel(context.{Background,TODO}()) + + // Check that both a and b are declared, not redeclarations. + var lhs []types.Object + for _, expr := range assign.Lhs { + id, ok := expr.(*ast.Ident) + if !ok { + continue calls + } + obj, ok := info.Defs[id] + if !ok { + continue calls + } + lhs = append(lhs, obj) + } + + next, ok := parent.NextSibling() + if !ok { + continue + } + defr, ok := next.Node().(*ast.DeferStmt) + if !ok { + continue + } + deferId, ok := defr.Call.Fun.(*ast.Ident) + if !ok || !soleUseIs(index, lhs[1], deferId) { + continue // b is used elsewhere + } + // Have: + // a, b := context.WithCancel(context.{Background,TODO}()) + // defer b() + + // Check that we are in a test func. + var testObj types.Object // relevant testing.{T,B,F}, or nil + if curFunc, ok := enclosingFunc(cur); ok { + switch n := curFunc.Node().(type) { + case *ast.FuncLit: + if ek, idx := curFunc.ParentEdge(); ek == edge.CallExpr_Args && idx == 1 { + // Have: call(..., func(...) { ...context.WithCancel(...)... 
}) + obj := typeutil.Callee(info, curFunc.Parent().Node().(*ast.CallExpr)) + if (typesinternal.IsMethodNamed(obj, "testing", "T", "Run") || + typesinternal.IsMethodNamed(obj, "testing", "B", "Run")) && + len(n.Type.Params.List[0].Names) == 1 { + + // Have tb.Run(..., func(..., tb *testing.[TB]) { ...context.WithCancel(...)... } + testObj = info.Defs[n.Type.Params.List[0].Names[0]] + } + } + + case *ast.FuncDecl: + testObj = isTestFn(info, n) + } + } + if testObj != nil && fileUses(info, astutil.EnclosingFile(cur), "go1.24") { + // Have a test function. Check that we can resolve the relevant + // testing.{T,B,F} at the current position. + if _, obj := lhs[0].Parent().LookupParent(testObj.Name(), lhs[0].Pos()); obj == testObj { + pass.Report(analysis.Diagnostic{ + Pos: call.Fun.Pos(), + End: call.Fun.End(), + Message: fmt.Sprintf("context.WithCancel can be modernized using %s.Context", testObj.Name()), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Replace context.WithCancel with %s.Context", testObj.Name()), + TextEdits: []analysis.TextEdit{{ + Pos: assign.Pos(), + End: defr.End(), + NewText: fmt.Appendf(nil, "%s := %s.Context()", lhs[0].Name(), testObj.Name()), + }}, + }}, + }) + } + } + } + return nil, nil +} + +// soleUseIs reports whether id is the sole Ident that uses obj. +// (It returns false if there were no uses of obj.) +func soleUseIs(index *typeindex.Index, obj types.Object, id *ast.Ident) bool { + empty := true + for use := range index.Uses(obj) { + empty = false + if use.Node() != id { + return false + } + } + return !empty +} + +// isTestFn checks whether fn is a test function (TestX, BenchmarkX, FuzzX), +// returning the corresponding types.Object of the *testing.{T,B,F} argument. +// Returns nil if fn is a test function, but the testing.{T,B,F} argument is +// unnamed (or _). +// +// TODO(rfindley): consider handling the case of an unnamed argument, by adding +// an edit to give the argument a name. +// +// Adapted from go/analysis/passes/tests. +// TODO(rfindley): consider refactoring to share logic. +func isTestFn(info *types.Info, fn *ast.FuncDecl) types.Object { + // Want functions with 0 results and 1 parameter. + if fn.Type.Results != nil && len(fn.Type.Results.List) > 0 || + fn.Type.Params == nil || + len(fn.Type.Params.List) != 1 || + len(fn.Type.Params.List[0].Names) != 1 { + + return nil + } + + prefix := testKind(fn.Name.Name) + if prefix == "" { + return nil + } + + if tparams := fn.Type.TypeParams; tparams != nil && len(tparams.List) > 0 { + return nil // test functions must not be generic + } + + obj := info.Defs[fn.Type.Params.List[0].Names[0]] + if obj == nil { + return nil // e.g. _ *testing.T + } + + var name string + switch prefix { + case "Test": + name = "T" + case "Benchmark": + name = "B" + case "Fuzz": + name = "F" + } + + if !typesinternal.IsPointerToNamed(obj.Type(), "testing", name) { + return nil + } + return obj +} + +// testKind returns "Test", "Benchmark", or "Fuzz" if name is a valid resp. +// test, benchmark, or fuzz function name. Otherwise, isTestName returns "". +// +// Adapted from go/analysis/passes/tests.isTestName. +func testKind(name string) string { + var prefix string + switch { + case strings.HasPrefix(name, "Test"): + prefix = "Test" + case strings.HasPrefix(name, "Benchmark"): + prefix = "Benchmark" + case strings.HasPrefix(name, "Fuzz"): + prefix = "Fuzz" + } + if prefix == "" { + return "" + } + suffix := name[len(prefix):] + if len(suffix) == 0 { + // "Test" is ok. 
+ return prefix + } + r, _ := utf8.DecodeRuneInString(suffix) + if unicode.IsLower(r) { + return "" + } + return prefix +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/waitgroup.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/waitgroup.go new file mode 100644 index 00000000..b890f334 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/modernize/waitgroup.go @@ -0,0 +1,175 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modernize + +import ( + "bytes" + "fmt" + "go/ast" + "go/printer" + "slices" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/types/typeutil" + "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/analysisinternal/generated" + typeindexanalyzer "golang.org/x/tools/internal/analysisinternal/typeindex" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/refactor" + "golang.org/x/tools/internal/typesinternal/typeindex" +) + +var WaitGroupAnalyzer = &analysis.Analyzer{ + Name: "waitgroup", + Doc: analysisinternal.MustExtractDoc(doc, "waitgroup"), + Requires: []*analysis.Analyzer{ + generated.Analyzer, + inspect.Analyzer, + typeindexanalyzer.Analyzer, + }, + Run: waitgroup, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/modernize#waitgroup", +} + +// The waitgroup pass replaces old more complex code with +// go1.25 added API WaitGroup.Go. +// +// Patterns: +// +// 1. wg.Add(1); go func() { defer wg.Done(); ... }() +// => +// wg.Go(go func() { ... }) +// +// 2. wg.Add(1); go func() { ...; wg.Done() }() +// => +// wg.Go(go func() { ... }) +// +// The wg.Done must occur within the first statement of the block in a +// defer format or last statement of the block, and the offered fix +// only removes the first/last wg.Done call. It doesn't fix existing +// wrong usage of sync.WaitGroup. +// +// The use of WaitGroup.Go in pattern 1 implicitly introduces a +// 'defer', which may change the behavior in the case of panic from +// the "..." logic. In this instance, the change is safe: before and +// after the transformation, an unhandled panic inevitably results in +// a fatal crash. The fact that the transformed code calls wg.Done() +// before the crash doesn't materially change anything. (If Done had +// other effects, or blocked, or if WaitGroup.Go propagated panics +// from child to parent goroutine, the argument would be different.) +func waitgroup(pass *analysis.Pass) (any, error) { + skipGenerated(pass) + + var ( + index = pass.ResultOf[typeindexanalyzer.Analyzer].(*typeindex.Index) + info = pass.TypesInfo + syncWaitGroupAdd = index.Selection("sync", "WaitGroup", "Add") + syncWaitGroupDone = index.Selection("sync", "WaitGroup", "Done") + ) + if !index.Used(syncWaitGroupDone) { + return nil, nil + } + + for curAddCall := range index.Calls(syncWaitGroupAdd) { + // Extract receiver from wg.Add call. + addCall := curAddCall.Node().(*ast.CallExpr) + if !isIntLiteral(info, addCall.Args[0], 1) { + continue // not a call to wg.Add(1) + } + // Inv: the Args[0] check ensures addCall is not of + // the form sync.WaitGroup.Add(&wg, 1). + addCallRecv := ast.Unparen(addCall.Fun).(*ast.SelectorExpr).X + + // Following statement must be go func() { ... } (). + curAddStmt := curAddCall.Parent() + if !is[*ast.ExprStmt](curAddStmt.Node()) { + continue // unnecessary parens? 
+ } + curNext, ok := curAddCall.Parent().NextSibling() + if !ok { + continue // no successor + } + goStmt, ok := curNext.Node().(*ast.GoStmt) + if !ok { + continue // not a go stmt + } + lit, ok := goStmt.Call.Fun.(*ast.FuncLit) + if !ok || len(goStmt.Call.Args) != 0 { + continue // go argument is not func(){...}() + } + list := lit.Body.List + if len(list) == 0 { + continue + } + + // Body must start with "defer wg.Done()" or end with "wg.Done()". + var doneStmt ast.Stmt + if deferStmt, ok := list[0].(*ast.DeferStmt); ok && + typeutil.Callee(info, deferStmt.Call) == syncWaitGroupDone && + astutil.EqualSyntax(ast.Unparen(deferStmt.Call.Fun).(*ast.SelectorExpr).X, addCallRecv) { + doneStmt = deferStmt // "defer wg.Done()" + + } else if lastStmt, ok := list[len(list)-1].(*ast.ExprStmt); ok { + if doneCall, ok := lastStmt.X.(*ast.CallExpr); ok && + typeutil.Callee(info, doneCall) == syncWaitGroupDone && + astutil.EqualSyntax(ast.Unparen(doneCall.Fun).(*ast.SelectorExpr).X, addCallRecv) { + doneStmt = lastStmt // "wg.Done()" + } + } + if doneStmt == nil { + continue + } + curDoneStmt, ok := curNext.FindNode(doneStmt) + if !ok { + panic("can't find Cursor for 'done' statement") + } + + file := astutil.EnclosingFile(curAddCall) + if !fileUses(info, file, "go1.25") { + continue + } + tokFile := pass.Fset.File(file.Pos()) + + var addCallRecvText bytes.Buffer + err := printer.Fprint(&addCallRecvText, pass.Fset, addCallRecv) + if err != nil { + continue // error getting text for the edit + } + + pass.Report(analysis.Diagnostic{ + Pos: addCall.Pos(), + End: goStmt.End(), + Message: "Goroutine creation can be simplified using WaitGroup.Go", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Simplify by using WaitGroup.Go", + TextEdits: slices.Concat( + // delete "wg.Add(1)" + refactor.DeleteStmt(tokFile, curAddStmt), + // delete "wg.Done()" or "defer wg.Done()" + refactor.DeleteStmt(tokFile, curDoneStmt), + []analysis.TextEdit{ + // go func() + // ------ + // wg.Go(func() + { + Pos: goStmt.Pos(), + End: goStmt.Call.Pos(), + NewText: fmt.Appendf(nil, "%s.Go(", addCallRecvText.String()), + }, + // ... }() + // - + // ... } ) + { + Pos: goStmt.Call.Lparen, + End: goStmt.Call.Rparen, + }, + }, + ), + }}, + }) + } + return nil, nil +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go index af61ae60..8f1cca8f 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go @@ -52,7 +52,7 @@ func runFunc(pass *analysis.Pass, fn *ssa.Function) { // notNil reports an error if v is provably nil. 
notNil := func(stack []fact, instr ssa.Instruction, v ssa.Value, descr string) { if nilnessOf(stack, v) == isnil { - reportf("nilderef", instr.Pos(), descr) + reportf("nilderef", instr.Pos(), "%s", descr) } } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go index 31748795..2b8add30 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go @@ -41,7 +41,7 @@ var Analyzer = &analysis.Analyzer{ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/pkgfact", Run: run, FactTypes: []analysis.Fact{new(pairsFact)}, - ResultType: reflect.TypeOf(map[string]string{}), + ResultType: reflect.TypeFor[map[string]string](), } // A pairsFact is a package-level fact that records diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/printf/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/printf/doc.go index eebf4020..f04e4414 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/printf/doc.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/printf/doc.go @@ -82,6 +82,16 @@ // ... // } // +// A local function may also be inferred as a printf wrapper. If it +// is assigned to a variable, each call made through that variable will +// be checked just like a call to a function: +// +// logf := func(format string, args ...any) { +// message := fmt.Sprintf(format, args...) +// log.Printf("%s: %s", prefix, message) +// } +// logf("%s", 123) // logf format %s has arg 123 of wrong type int +// // # Specifying printf wrappers by flag // // The -funcs flag specifies a comma-separated list of names of diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go index 9ad18a04..d94e592c 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go @@ -19,12 +19,14 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/go/ast/edge" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/fmtstr" "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/typesinternal" "golang.org/x/tools/internal/versions" ) @@ -41,7 +43,7 @@ var Analyzer = &analysis.Analyzer{ URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/printf", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, - ResultType: reflect.TypeOf((*Result)(nil)), + ResultType: reflect.TypeFor[*Result](), FactTypes: []analysis.Fact{new(isWrapper)}, } @@ -70,7 +72,7 @@ func (kind Kind) String() string { // Result is the printf analyzer's result type. Clients may query the result // to learn whether a function behaves like fmt.Print or fmt.Printf. type Result struct { - funcs map[*types.Func]Kind + funcs map[types.Object]Kind } // Kind reports whether fn behaves like fmt.Print or fmt.Printf. 
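To make the new doc.go paragraph above concrete, here is a minimal sketch of a local printf wrapper that the updated pass now checks; the names are hypothetical and the expected diagnostic is shown as a comment:

	package example

	import (
		"fmt"
		"log"
	)

	func serve(prefix string) {
		// logf forwards its format/args to fmt.Sprintf, so the printf pass
		// infers it is printf-like and checks calls made through the variable.
		logf := func(format string, args ...any) {
			message := fmt.Sprintf(format, args...)
			log.Printf("%s: %s", prefix, message)
		}

		logf("%d requests", "ten") // diagnostic: logf format %d has arg "ten" of wrong type string
	}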
@@ -111,149 +113,210 @@ func (f *isWrapper) String() string { func run(pass *analysis.Pass) (any, error) { res := &Result{ - funcs: make(map[*types.Func]Kind), + funcs: make(map[types.Object]Kind), } - findPrintfLike(pass, res) - checkCalls(pass) + findPrintLike(pass, res) + checkCalls(pass, res) return res, nil } -type printfWrapper struct { - obj *types.Func - fdecl *ast.FuncDecl - format *types.Var - args *types.Var +// A wrapper is a candidate print/printf wrapper function. +// +// We represent functions generally as types.Object, not *Func, so +// that we can analyze anonymous functions such as +// +// printf := func(format string, args ...any) {...}, +// +// representing them by the *types.Var symbol for the local variable +// 'printf'. +type wrapper struct { + obj types.Object // *Func or *Var + curBody inspector.Cursor // for *ast.BlockStmt + format *types.Var // optional "format string" parameter in the Func{Decl,Lit} + args *types.Var // "args ...any" parameter in the Func{Decl,Lit} callers []printfCaller - failed bool // if true, not a printf wrapper } type printfCaller struct { - w *printfWrapper + w *wrapper call *ast.CallExpr } -// maybePrintfWrapper decides whether decl (a declared function) may be a wrapper -// around a fmt.Printf or fmt.Print function. If so it returns a printfWrapper -// function describing the declaration. Later processing will analyze the -// graph of potential printf wrappers to pick out the ones that are true wrappers. -// A function may be a Printf or Print wrapper if its last argument is ...interface{}. -// If the next-to-last argument is a string, then this may be a Printf wrapper. -// Otherwise it may be a Print wrapper. -func maybePrintfWrapper(info *types.Info, decl ast.Decl) *printfWrapper { - // Look for functions with final argument type ...interface{}. - fdecl, ok := decl.(*ast.FuncDecl) - if !ok || fdecl.Body == nil { - return nil - } - fn, ok := info.Defs[fdecl.Name].(*types.Func) - // Type information may be incomplete. - if !ok { - return nil - } - - sig := fn.Type().(*types.Signature) +// formatArgsParams returns the "format string" and "args ...any" +// parameters of a potential print or printf wrapper function. +// (The format is nil in the print-like case.) +func formatArgsParams(sig *types.Signature) (format, args *types.Var) { if !sig.Variadic() { - return nil // not variadic + return nil, nil // not variadic } params := sig.Params() nparams := params.Len() // variadic => nonzero - // Check final parameter is "args ...interface{}". - args := params.At(nparams - 1) - iface, ok := types.Unalias(args.Type().(*types.Slice).Elem()).(*types.Interface) - if !ok || !iface.Empty() { - return nil - } - // Is second last param 'format string'? - var format *types.Var if nparams >= 2 { if p := params.At(nparams - 2); p.Type() == types.Typ[types.String] { format = p } } - return &printfWrapper{ - obj: fn, - fdecl: fdecl, - format: format, - args: args, + // Check final parameter is "args ...any". + // (variadic => slice) + args = params.At(nparams - 1) + iface, ok := types.Unalias(args.Type().(*types.Slice).Elem()).(*types.Interface) + if !ok || !iface.Empty() { + return nil, nil } + + return format, args } -// findPrintfLike scans the entire package to find printf-like functions. -func findPrintfLike(pass *analysis.Pass, res *Result) (any, error) { - // Gather potential wrappers and call graph between them. 
- byObj := make(map[*types.Func]*printfWrapper) - var wrappers []*printfWrapper - for _, file := range pass.Files { - for _, decl := range file.Decls { - w := maybePrintfWrapper(pass.TypesInfo, decl) - if w == nil { - continue +// findPrintLike scans the entire package to find print or printf-like functions. +// When it returns, all such functions have been identified. +func findPrintLike(pass *analysis.Pass, res *Result) { + var ( + inspect = pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + info = pass.TypesInfo + ) + + // Pass 1: gather candidate wrapper functions (and populate wrappers). + var ( + wrappers []*wrapper + byObj = make(map[types.Object]*wrapper) + ) + for cur := range inspect.Root().Preorder((*ast.FuncDecl)(nil), (*ast.FuncLit)(nil)) { + var ( + curBody inspector.Cursor // for *ast.BlockStmt + sig *types.Signature + obj types.Object + ) + switch f := cur.Node().(type) { + case *ast.FuncDecl: + // named function or method: + // + // func wrapf(format string, args ...any) {...} + if f.Body != nil { + curBody = cur.ChildAt(edge.FuncDecl_Body, -1) + obj = info.Defs[f.Name] + sig = obj.Type().(*types.Signature) } - byObj[w.obj] = w - wrappers = append(wrappers, w) - } - } - // Walk the graph to figure out which are really printf wrappers. - for _, w := range wrappers { - // Scan function for calls that could be to other printf-like functions. - ast.Inspect(w.fdecl.Body, func(n ast.Node) bool { - if w.failed { - return false + case *ast.FuncLit: + // anonymous function directly assigned to a variable: + // + // var wrapf = func(format string, args ...any) {...} + // wrapf := func(format string, args ...any) {...} + // wrapf = func(format string, args ...any) {...} + // + // The LHS may also be a struct field x.wrapf or + // an imported var pkg.Wrapf. + // + sig = info.TypeOf(f).(*types.Signature) + curBody = cur.ChildAt(edge.FuncLit_Body, -1) + var lhs ast.Expr + switch ek, idx := cur.ParentEdge(); ek { + case edge.ValueSpec_Values: + curName := cur.Parent().ChildAt(edge.ValueSpec_Names, idx) + lhs = curName.Node().(*ast.Ident) + case edge.AssignStmt_Rhs: + curLhs := cur.Parent().ChildAt(edge.AssignStmt_Lhs, idx) + lhs = curLhs.Node().(ast.Expr) } - // TODO: Relax these checks; issue 26555. - if assign, ok := n.(*ast.AssignStmt); ok { - for _, lhs := range assign.Lhs { - if match(pass.TypesInfo, lhs, w.format) || - match(pass.TypesInfo, lhs, w.args) { - // Modifies the format - // string or args in - // some way, so not a - // simple wrapper. - w.failed = true - return false - } + switch lhs := lhs.(type) { + case *ast.Ident: + // variable: wrapf = func(...) + obj = info.ObjectOf(lhs).(*types.Var) + case *ast.SelectorExpr: + if sel, ok := info.Selections[lhs]; ok { + // struct field: x.wrapf = func(...) + obj = sel.Obj().(*types.Var) + } else { + // imported var: pkg.Wrapf = func(...) + obj = info.Uses[lhs.Sel].(*types.Var) } } - if un, ok := n.(*ast.UnaryExpr); ok && un.Op == token.AND { - if match(pass.TypesInfo, un.X, w.format) || - match(pass.TypesInfo, un.X, w.args) { - // Taking the address of the - // format string or args, - // so not a simple wrapper. - w.failed = true - return false + } + if obj != nil { + format, args := formatArgsParams(sig) + if args != nil { + // obj (the symbol for a function/method, or variable + // assigned to an anonymous function) is a potential + // print or printf wrapper. + // + // Later processing will analyze the graph of potential + // wrappers and their function bodies to pick out the + // ones that are true wrappers. 
+ w := &wrapper{ + obj: obj, + curBody: curBody, + format: format, // non-nil => printf + args: args, } + byObj[w.obj] = w + wrappers = append(wrappers, w) } + } + } - call, ok := n.(*ast.CallExpr) - if !ok || len(call.Args) == 0 || !match(pass.TypesInfo, call.Args[len(call.Args)-1], w.args) { - return true - } + // Pass 2: scan the body of each wrapper function + // for calls to other printf-like functions. + // + // Also, reject tricky cases where the parameters + // are potentially mutated by AssignStmt or UnaryExpr. + // TODO: Relax these checks; issue 26555. + for _, w := range wrappers { + scan: + for cur := range w.curBody.Preorder( + (*ast.AssignStmt)(nil), + (*ast.UnaryExpr)(nil), + (*ast.CallExpr)(nil), + ) { + switch n := cur.Node().(type) { + case *ast.AssignStmt: + // If the wrapper updates format or args + // it is not a simple wrapper. + for _, lhs := range n.Lhs { + if w.format != nil && match(info, lhs, w.format) || + match(info, lhs, w.args) { + break scan + } + } - fn, kind := printfNameAndKind(pass, call) - if kind != 0 { - checkPrintfFwd(pass, w, call, kind, res) - return true - } + case *ast.UnaryExpr: + // If the wrapper computes &format or &args, + // it is not a simple wrapper. + if n.Op == token.AND && + (w.format != nil && match(info, n.X, w.format) || + match(info, n.X, w.args)) { + break scan + } - // If the call is to another function in this package, - // maybe we will find out it is printf-like later. - // Remember this call for later checking. - if fn != nil && fn.Pkg() == pass.Pkg && byObj[fn] != nil { - callee := byObj[fn] - callee.callers = append(callee.callers, printfCaller{w, call}) + case *ast.CallExpr: + if len(n.Args) > 0 && match(info, n.Args[len(n.Args)-1], w.args) { + if callee := typeutil.Callee(pass.TypesInfo, n); callee != nil { + + // Call from one wrapper candidate to another? + // Record the edge so that if callee is found to be + // a true wrapper, w will be too. + if w2, ok := byObj[callee]; ok { + w2.callers = append(w2.callers, printfCaller{w, n}) + } + + // Is the candidate a true wrapper, because it calls + // a known print{,f}-like function from the allowlist + // or an imported fact, or another wrapper found + // to be a true wrapper? + // If so, convert all w's callers to kind. + kind := callKind(pass, callee, res) + if kind != KindNone { + checkForward(pass, w, n, kind, res) + } + } + } } - - return true - }) + } } - return nil, nil } func match(info *types.Info, arg ast.Expr, param *types.Var) bool { @@ -261,9 +324,9 @@ func match(info *types.Info, arg ast.Expr, param *types.Var) bool { return ok && info.ObjectOf(id) == param } -// checkPrintfFwd checks that a printf-forwarding wrapper is forwarding correctly. +// checkForward checks that a forwarding wrapper is forwarding correctly. // It diagnoses writing fmt.Printf(format, args) instead of fmt.Printf(format, args...). -func checkPrintfFwd(pass *analysis.Pass, w *printfWrapper, call *ast.CallExpr, kind Kind, res *Result) { +func checkForward(pass *analysis.Pass, w *wrapper, call *ast.CallExpr, kind Kind, res *Result) { matched := kind == KindPrint || kind != KindNone && len(call.Args) >= 2 && match(pass.TypesInfo, call.Args[len(call.Args)-2], w.format) if !matched { @@ -292,18 +355,39 @@ func checkPrintfFwd(pass *analysis.Pass, w *printfWrapper, call *ast.CallExpr, k pass.ReportRangef(call, "missing ... 
in args forwarded to %s-like function", desc) return } - fn := w.obj - var fact isWrapper - if !pass.ImportObjectFact(fn, &fact) { - fact.Kind = kind - pass.ExportObjectFact(fn, &fact) - res.funcs[fn] = kind + + // If the candidate's print{,f} status becomes known, + // propagate it back to all its so-far known callers. + if res.funcs[w.obj] != kind { + res.funcs[w.obj] = kind + + // Export a fact. + // (This is a no-op for local symbols.) + // We can't export facts on a symbol of another package, + // but we can treat the symbol as a wrapper within + // the current analysis unit. + if w.obj.Pkg() == pass.Pkg { + // Facts are associated with origins. + pass.ExportObjectFact(origin(w.obj), &isWrapper{Kind: kind}) + } + + // Propagate kind back to known callers. for _, caller := range w.callers { - checkPrintfFwd(pass, caller.w, caller.call, kind, res) + checkForward(pass, caller.w, caller.call, kind, res) } } } +func origin(obj types.Object) types.Object { + switch obj := obj.(type) { + case *types.Func: + return obj.Origin() + case *types.Var: + return obj.Origin() + } + return obj +} + // isPrint records the print functions. // If a key ends in 'f' then it is assumed to be a formatted print. // @@ -412,7 +496,7 @@ func stringConstantExpr(pass *analysis.Pass, expr ast.Expr) (string, bool) { // checkCalls triggers the print-specific checks for calls that invoke a print // function. -func checkCalls(pass *analysis.Pass) { +func checkCalls(pass *analysis.Pass, res *Result) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ (*ast.File)(nil), @@ -426,48 +510,60 @@ func checkCalls(pass *analysis.Pass) { fileVersion = versions.Lang(versions.FileVersion(pass.TypesInfo, n)) case *ast.CallExpr: - fn, kind := printfNameAndKind(pass, n) - switch kind { - case KindPrintf, KindErrorf: - checkPrintf(pass, fileVersion, kind, n, fn.FullName()) - case KindPrint: - checkPrint(pass, n, fn.FullName()) + if callee := typeutil.Callee(pass.TypesInfo, n); callee != nil { + kind := callKind(pass, callee, res) + switch kind { + case KindPrintf, KindErrorf: + checkPrintf(pass, fileVersion, kind, n, fullname(callee)) + case KindPrint: + checkPrint(pass, n, fullname(callee)) + } } } }) } -func printfNameAndKind(pass *analysis.Pass, call *ast.CallExpr) (fn *types.Func, kind Kind) { - fn, _ = typeutil.Callee(pass.TypesInfo, call).(*types.Func) - if fn == nil { - return nil, 0 +func fullname(obj types.Object) string { + if fn, ok := obj.(*types.Func); ok { + return fn.FullName() } + return obj.Name() +} - // Facts are associated with generic declarations, not instantiations. - fn = fn.Origin() - - _, ok := isPrint[fn.FullName()] +// callKind returns the symbol of the called function +// and its print/printf kind, if any. +// (The symbol may be a var for an anonymous function.) +// The result is memoized in res.funcs. +func callKind(pass *analysis.Pass, obj types.Object, res *Result) Kind { + kind, ok := res.funcs[obj] if !ok { - // Next look up just "printf", for use with -printf.funcs. - _, ok = isPrint[strings.ToLower(fn.Name())] - } - if ok { - if fn.FullName() == "fmt.Errorf" { - kind = KindErrorf - } else if strings.HasSuffix(fn.Name(), "f") { - kind = KindPrintf + // cache miss + _, ok := isPrint[fullname(obj)] + if !ok { + // Next look up just "printf", for use with -printf.funcs. 
+ _, ok = isPrint[strings.ToLower(obj.Name())] + } + if ok { + // well-known printf functions + if fullname(obj) == "fmt.Errorf" { + kind = KindErrorf + } else if strings.HasSuffix(obj.Name(), "f") { + kind = KindPrintf + } else { + kind = KindPrint + } } else { - kind = KindPrint + // imported wrappers + // Facts are associated with generic declarations, not instantiations. + obj = origin(obj) + var fact isWrapper + if pass.ImportObjectFact(obj, &fact) { + kind = fact.Kind + } } - return fn, kind + res.funcs[obj] = kind // cache } - - var fact isWrapper - if pass.ImportObjectFact(fn, &fact) { - return fn, fact.Kind - } - - return fn, KindNone + return kind } // isFormatter reports whether t could satisfy fmt.Formatter. @@ -490,7 +586,7 @@ func isFormatter(typ types.Type) bool { sig := fn.Type().(*types.Signature) return sig.Params().Len() == 2 && sig.Results().Len() == 0 && - analysisinternal.IsTypeNamed(sig.Params().At(0).Type(), "fmt", "State") && + typesinternal.IsTypeNamed(sig.Params().At(0).Type(), "fmt", "State") && types.Identical(sig.Params().At(1).Type(), types.Typ[types.Rune]) } @@ -729,7 +825,7 @@ func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, rng analysis.Range, ma if reason != "" { details = " (" + reason + ")" } - pass.ReportRangef(rng, "%s format %s uses non-int %s%s as argument of *", name, operation.Text, analysisinternal.Format(pass.Fset, arg), details) + pass.ReportRangef(rng, "%s format %s uses non-int %s%s as argument of *", name, operation.Text, astutil.Format(pass.Fset, arg), details) return false } } @@ -756,7 +852,7 @@ func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, rng analysis.Range, ma } arg := call.Args[verbArgIndex] if isFunctionValue(pass, arg) && verb != 'p' && verb != 'T' { - pass.ReportRangef(rng, "%s format %s arg %s is a func value, not called", name, operation.Text, analysisinternal.Format(pass.Fset, arg)) + pass.ReportRangef(rng, "%s format %s arg %s is a func value, not called", name, operation.Text, astutil.Format(pass.Fset, arg)) return false } if reason, ok := matchArgType(pass, v.typ, arg); !ok { @@ -768,14 +864,14 @@ func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, rng analysis.Range, ma if reason != "" { details = " (" + reason + ")" } - pass.ReportRangef(rng, "%s format %s has arg %s of wrong type %s%s", name, operation.Text, analysisinternal.Format(pass.Fset, arg), typeString, details) + pass.ReportRangef(rng, "%s format %s has arg %s of wrong type %s%s", name, operation.Text, astutil.Format(pass.Fset, arg), typeString, details) return false } // Detect recursive formatting via value's String/Error methods. // The '#' flag suppresses the methods, except with %x, %X, and %q. 
if v.typ&argString != 0 && v.verb != 'T' && (!strings.Contains(operation.Flags, "#") || strings.ContainsRune("qxX", v.verb)) { if methodName, ok := recursiveStringer(pass, arg); ok { - pass.ReportRangef(rng, "%s format %s with arg %s causes recursive %s method call", name, operation.Text, analysisinternal.Format(pass.Fset, arg), methodName) + pass.ReportRangef(rng, "%s format %s with arg %s causes recursive %s method call", name, operation.Text, astutil.Format(pass.Fset, arg), methodName) return false } } @@ -927,7 +1023,7 @@ func checkPrint(pass *analysis.Pass, call *ast.CallExpr, name string) { if sel, ok := call.Args[0].(*ast.SelectorExpr); ok { if x, ok := sel.X.(*ast.Ident); ok { if x.Name == "os" && strings.HasPrefix(sel.Sel.Name, "Std") { - pass.ReportRangef(call, "%s does not take io.Writer but has first arg %s", name, analysisinternal.Format(pass.Fset, call.Args[0])) + pass.ReportRangef(call, "%s does not take io.Writer but has first arg %s", name, astutil.Format(pass.Fset, call.Args[0])) } } } @@ -961,10 +1057,10 @@ func checkPrint(pass *analysis.Pass, call *ast.CallExpr, name string) { } for _, arg := range args { if isFunctionValue(pass, arg) { - pass.ReportRangef(call, "%s arg %s is a func value, not called", name, analysisinternal.Format(pass.Fset, arg)) + pass.ReportRangef(call, "%s arg %s is a func value, not called", name, astutil.Format(pass.Fset, arg)) } if methodName, ok := recursiveStringer(pass, arg); ok { - pass.ReportRangef(call, "%s arg %s causes recursive call to %s method", name, analysisinternal.Format(pass.Fset, arg), methodName) + pass.ReportRangef(call, "%s arg %s causes recursive call to %s method", name, astutil.Format(pass.Fset, arg), methodName) } } } @@ -992,7 +1088,7 @@ func (ss stringSet) String() string { } func (ss stringSet) Set(flag string) error { - for _, name := range strings.Split(flag, ",") { + for name := range strings.SplitSeq(flag, ",") { if len(name) == 0 { return fmt.Errorf("empty string") } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go index d0632dbd..5626ac1c 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go @@ -14,7 +14,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -50,7 +50,7 @@ func run(pass *analysis.Pass) (any, error) { } case *ast.CallExpr: obj := typeutil.Callee(pass.TypesInfo, n) - if analysisinternal.IsFunctionNamed(obj, "reflect", "DeepEqual") && (isReflectValue(pass, n.Args[0]) || isReflectValue(pass, n.Args[1])) { + if typesinternal.IsFunctionNamed(obj, "reflect", "DeepEqual") && (isReflectValue(pass, n.Args[0]) || isReflectValue(pass, n.Args[1])) { pass.ReportRangef(n, "avoid using reflect.DeepEqual with reflect.Value") } } @@ -65,7 +65,7 @@ func isReflectValue(pass *analysis.Pass, e ast.Expr) bool { return false } // See if the type is reflect.Value - if !analysisinternal.IsTypeNamed(tv.Type, "reflect", "Value") { + if !typesinternal.IsTypeNamed(tv.Type, "reflect", "Value") { return false } if _, ok := e.(*ast.CompositeLit); ok { diff --git 
a/tools/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go index 57987b3d..36692732 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go @@ -20,7 +20,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/typeparams" ) @@ -123,7 +123,7 @@ func checkLongShift(pass *analysis.Pass, node ast.Node, x, y ast.Expr) { } } if amt >= minSize { - ident := analysisinternal.Format(pass.Fset, x) + ident := astutil.Format(pass.Fset, x) qualifier := "" if len(sizes) > 1 { qualifier = "may be " diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go index 78a2fa5e..c339fa06 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go @@ -20,7 +20,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -36,7 +36,7 @@ var Analyzer = &analysis.Analyzer{ } func run(pass *analysis.Pass) (any, error) { - if !analysisinternal.Imports(pass.Pkg, "os/signal") { + if !typesinternal.Imports(pass.Pkg, "os/signal") { return nil, nil // doesn't directly import signal } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go index c1ac9604..cc58396a 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go @@ -20,7 +20,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/astutil" "golang.org/x/tools/internal/typesinternal" ) @@ -115,10 +115,10 @@ func run(pass *analysis.Pass) (any, error) { default: if unknownArg == nil { pass.ReportRangef(arg, "%s arg %q should be a string or a slog.Attr (possible missing key or value)", - shortName(fn), analysisinternal.Format(pass.Fset, arg)) + shortName(fn), astutil.Format(pass.Fset, arg)) } else { pass.ReportRangef(arg, "%s arg %q should probably be a string or a slog.Attr (previous arg %q cannot be a key)", - shortName(fn), analysisinternal.Format(pass.Fset, arg), analysisinternal.Format(pass.Fset, unknownArg)) + shortName(fn), astutil.Format(pass.Fset, arg), astutil.Format(pass.Fset, unknownArg)) } // Stop here so we report at most one missing key per call. return @@ -158,7 +158,7 @@ func run(pass *analysis.Pass) (any, error) { } func isAttr(t types.Type) bool { - return analysisinternal.IsTypeNamed(t, "log/slog", "Attr") + return typesinternal.IsTypeNamed(t, "log/slog", "Attr") } // shortName returns a name for the function that is shorter than FullName. 
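The flag-parsing change in printf.go above (strings.Split → strings.SplitSeq) is the same rewrite the new stringsseq pass suggests for range loops; a minimal sketch, assuming Go 1.24+ and a hypothetical comma-separated input:

	package main

	import (
		"fmt"
		"strings"
	)

	func main() {
		input := "a,b,c" // hypothetical input

		// Before: Split allocates a []string that is only ranged over once.
		for _, name := range strings.Split(input, ",") {
			fmt.Println(name)
		}

		// After: SplitSeq yields an iter.Seq[string]; no intermediate slice.
		for name := range strings.SplitSeq(input, ",") {
			fmt.Println(name)
		}
	}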
diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go index 9fe0d209..2b188204 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go @@ -17,7 +17,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/typesinternal" ) const Doc = `check the argument type of sort.Slice @@ -34,7 +34,7 @@ var Analyzer = &analysis.Analyzer{ } func run(pass *analysis.Pass) (any, error) { - if !analysisinternal.Imports(pass.Pkg, "sort") { + if !typesinternal.Imports(pass.Pkg, "sort") { return nil, nil // doesn't directly import sort } @@ -47,7 +47,7 @@ func run(pass *analysis.Pass) (any, error) { inspect.Preorder(nodeFilter, func(n ast.Node) { call := n.(*ast.CallExpr) obj := typeutil.Callee(pass.TypesInfo, call) - if !analysisinternal.IsFunctionNamed(obj, "sort", "Slice", "SliceStable", "SliceIsSorted") { + if !typesinternal.IsFunctionNamed(obj, "sort", "Slice", "SliceStable", "SliceIsSorted") { return } callee := obj.(*types.Func) diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/stdversion/stdversion.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/stdversion/stdversion.go index 429125a8..31472195 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/stdversion/stdversion.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/stdversion/stdversion.go @@ -10,7 +10,6 @@ import ( "go/ast" "go/build" "go/types" - "regexp" "slices" "golang.org/x/tools/go/analysis" @@ -114,11 +113,6 @@ func run(pass *analysis.Pass) (any, error) { return nil, nil } -// Matches cgo generated comment as well as the proposed standard: -// -// https://golang.org/s/generatedcode -var generatedRx = regexp.MustCompile(`// .*DO NOT EDIT\.?`) - // origin returns the original uninstantiated symbol for obj. func origin(obj types.Object) types.Object { switch obj := obj.(type) { diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go index 7dbff1e4..7a02d85c 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go @@ -15,7 +15,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/refactor" "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/typesinternal" ) @@ -198,7 +198,7 @@ func run(pass *analysis.Pass) (any, error) { // the type has methods, as some {String,GoString,Format} // may change the behavior of fmt.Sprint. 
if len(ttypes) == 1 && len(vtypes) == 1 && types.NewMethodSet(V0).Len() == 0 { - _, prefix, importEdits := analysisinternal.AddImport(pass.TypesInfo, file, "fmt", "fmt", "Sprint", arg.Pos()) + prefix, importEdits := refactor.AddImport(pass.TypesInfo, file, "fmt", "fmt", "Sprint", arg.Pos()) if types.Identical(T0, types.Typ[types.String]) { // string(x) -> fmt.Sprint(x) addFix("Format the number as a decimal", append(importEdits, diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go index cc90f733..826add2c 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go @@ -17,6 +17,8 @@ import ( "strconv" "strings" + "fmt" + "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" @@ -100,7 +102,11 @@ func checkCanonicalFieldTag(pass *analysis.Pass, field *types.Var, tag string, s } if err := validateStructTag(tag); err != nil { - pass.Reportf(field.Pos(), "struct field tag %#q not compatible with reflect.StructTag.Get: %s", tag, err) + pass.Report(analysis.Diagnostic{ + Pos: field.Pos(), + End: field.Pos() + token.Pos(len(field.Name())), + Message: fmt.Sprintf("struct field tag %#q not compatible with reflect.StructTag.Get: %s", tag, err), + }) } // Check for use of json or xml tags with unexported fields. @@ -122,7 +128,11 @@ func checkCanonicalFieldTag(pass *analysis.Pass, field *types.Var, tag string, s // ignored. case "", "-": default: - pass.Reportf(field.Pos(), "struct field %s has %s tag but is not exported", field.Name(), enc) + pass.Report(analysis.Diagnostic{ + Pos: field.Pos(), + End: field.Pos() + token.Pos(len(field.Name())), + Message: fmt.Sprintf("struct field %s has %s tag but is not exported", field.Name(), enc), + }) return } } @@ -190,7 +200,11 @@ func checkTagDuplicates(pass *analysis.Pass, tag, key string, nearest, field *ty alsoPos.Filename = rel } - pass.Reportf(nearest.Pos(), "struct field %s repeats %s tag %q also at %s", field.Name(), key, val, alsoPos) + pass.Report(analysis.Diagnostic{ + Pos: nearest.Pos(), + End: nearest.Pos() + token.Pos(len(nearest.Name())), + Message: fmt.Sprintf("struct field %s repeats %s tag %q also at %s", field.Name(), key, val, alsoPos), + }) } else { seen.Set(key, val, level, field.Pos()) } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go index 360ba0e7..400a6960 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go @@ -16,7 +16,6 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/typesinternal" ) @@ -40,7 +39,7 @@ var Analyzer = &analysis.Analyzer{ func run(pass *analysis.Pass) (any, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - if !analysisinternal.Imports(pass.Pkg, "testing") { + if !typesinternal.Imports(pass.Pkg, "testing") { return nil, nil } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go 
b/tools/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go index d4e9b025..1c0e92d0 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go @@ -17,6 +17,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -258,7 +259,7 @@ func isTestingType(typ types.Type, testingType string) bool { if !ok { return false } - return analysisinternal.IsTypeNamed(ptr.Elem(), "testing", testingType) + return typesinternal.IsTypeNamed(ptr.Elem(), "testing", testingType) } // Validate that fuzz target function's arguments are of accepted types. diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/timeformat/timeformat.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/timeformat/timeformat.go index 4fdbb2b5..db91d37c 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/timeformat/timeformat.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/timeformat/timeformat.go @@ -19,7 +19,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/typesinternal" ) const badFormat = "2006-02-01" @@ -50,8 +50,8 @@ func run(pass *analysis.Pass) (any, error) { inspect.Preorder(nodeFilter, func(n ast.Node) { call := n.(*ast.CallExpr) obj := typeutil.Callee(pass.TypesInfo, call) - if !analysisinternal.IsMethodNamed(obj, "time", "Time", "Format") && - !analysisinternal.IsFunctionNamed(obj, "time", "Parse") { + if !typesinternal.IsMethodNamed(obj, "time", "Time", "Format") && + !typesinternal.IsFunctionNamed(obj, "time", "Parse") { return } if len(call.Args) > 0 { diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go index 57c6da64..778010bc 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go @@ -16,7 +16,7 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -105,7 +105,7 @@ func isSafeUintptr(info *types.Info, x ast.Expr) bool { } switch sel.Sel.Name { case "Pointer", "UnsafeAddr": - if analysisinternal.IsTypeNamed(info.Types[sel.X].Type, "reflect", "Value") { + if typesinternal.IsTypeNamed(info.Types[sel.X].Type, "reflect", "Value") { return true } } @@ -153,5 +153,5 @@ func hasBasicType(info *types.Info, x ast.Expr, kind types.BasicKind) bool { // isReflectHeader reports whether t is reflect.SliceHeader or reflect.StringHeader. 
func isReflectHeader(t types.Type) bool { - return analysisinternal.IsTypeNamed(t, "reflect", "SliceHeader", "StringHeader") + return typesinternal.IsTypeNamed(t, "reflect", "SliceHeader", "StringHeader") } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go index 556ffed7..ed4cf7ae 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go @@ -188,7 +188,7 @@ func (ss *stringSetFlag) String() string { func (ss *stringSetFlag) Set(s string) error { m := make(map[string]bool) // clobber previous value if s != "" { - for _, name := range strings.Split(s, ",") { + for name := range strings.SplitSeq(s, ",") { if name == "" { continue // TODO: report error? proceed? } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/waitgroup/waitgroup.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/waitgroup/waitgroup.go index 14c6986e..5ed1814f 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/waitgroup/waitgroup.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/waitgroup/waitgroup.go @@ -16,7 +16,7 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/internal/typesinternal" ) //go:embed doc.go @@ -31,7 +31,7 @@ var Analyzer = &analysis.Analyzer{ } func run(pass *analysis.Pass) (any, error) { - if !analysisinternal.Imports(pass.Pkg, "sync") { + if !typesinternal.Imports(pass.Pkg, "sync") { return nil, nil // doesn't directly import sync } @@ -44,7 +44,7 @@ func run(pass *analysis.Pass) (any, error) { if push { call := n.(*ast.CallExpr) obj := typeutil.Callee(pass.TypesInfo, call) - if analysisinternal.IsMethodNamed(obj, "sync", "WaitGroup", "Add") && + if typesinternal.IsMethodNamed(obj, "sync", "WaitGroup", "Add") && hasSuffix(stack, wantSuffix) && backindex(stack, 1) == backindex(stack, 2).(*ast.BlockStmt).List[0] { // ExprStmt must be Block's first stmt diff --git a/tools/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/tools/vendor/golang.org/x/tools/go/ast/astutil/imports.go index 5e5601aa..5bacc0fa 100644 --- a/tools/vendor/golang.org/x/tools/go/ast/astutil/imports.go +++ b/tools/vendor/golang.org/x/tools/go/ast/astutil/imports.go @@ -209,48 +209,46 @@ func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) // DeleteNamedImport deletes the import with the given name and path from the file f, if present. // If there are duplicate import declarations, all matching ones are deleted. func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) { - var delspecs []*ast.ImportSpec - var delcomments []*ast.CommentGroup + var ( + delspecs = make(map[*ast.ImportSpec]bool) + delcomments = make(map[*ast.CommentGroup]bool) + ) // Find the import nodes that import path, if any. for i := 0; i < len(f.Decls); i++ { - decl := f.Decls[i] - gen, ok := decl.(*ast.GenDecl) + gen, ok := f.Decls[i].(*ast.GenDecl) if !ok || gen.Tok != token.IMPORT { continue } for j := 0; j < len(gen.Specs); j++ { - spec := gen.Specs[j] - impspec := spec.(*ast.ImportSpec) + impspec := gen.Specs[j].(*ast.ImportSpec) if importName(impspec) != name || importPath(impspec) != path { continue } // We found an import spec that imports path. 
// Delete it. - delspecs = append(delspecs, impspec) + delspecs[impspec] = true deleted = true - copy(gen.Specs[j:], gen.Specs[j+1:]) - gen.Specs = gen.Specs[:len(gen.Specs)-1] + gen.Specs = slices.Delete(gen.Specs, j, j+1) // If this was the last import spec in this decl, // delete the decl, too. if len(gen.Specs) == 0 { - copy(f.Decls[i:], f.Decls[i+1:]) - f.Decls = f.Decls[:len(f.Decls)-1] + f.Decls = slices.Delete(f.Decls, i, i+1) i-- break } else if len(gen.Specs) == 1 { if impspec.Doc != nil { - delcomments = append(delcomments, impspec.Doc) + delcomments[impspec.Doc] = true } if impspec.Comment != nil { - delcomments = append(delcomments, impspec.Comment) + delcomments[impspec.Comment] = true } for _, cg := range f.Comments { // Found comment on the same line as the import spec. if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line { - delcomments = append(delcomments, cg) + delcomments[cg] = true break } } @@ -294,38 +292,21 @@ func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (del } // Delete imports from f.Imports. - for i := 0; i < len(f.Imports); i++ { - imp := f.Imports[i] - for j, del := range delspecs { - if imp == del { - copy(f.Imports[i:], f.Imports[i+1:]) - f.Imports = f.Imports[:len(f.Imports)-1] - copy(delspecs[j:], delspecs[j+1:]) - delspecs = delspecs[:len(delspecs)-1] - i-- - break - } - } + before := len(f.Imports) + f.Imports = slices.DeleteFunc(f.Imports, func(imp *ast.ImportSpec) bool { + _, ok := delspecs[imp] + return ok + }) + if len(f.Imports)+len(delspecs) != before { + // This can happen when the AST is invalid (i.e. imports differ between f.Decls and f.Imports). + panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs)) } // Delete comments from f.Comments. - for i := 0; i < len(f.Comments); i++ { - cg := f.Comments[i] - for j, del := range delcomments { - if cg == del { - copy(f.Comments[i:], f.Comments[i+1:]) - f.Comments = f.Comments[:len(f.Comments)-1] - copy(delcomments[j:], delcomments[j+1:]) - delcomments = delcomments[:len(delcomments)-1] - i-- - break - } - } - } - - if len(delspecs) > 0 { - panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs)) - } + f.Comments = slices.DeleteFunc(f.Comments, func(cg *ast.CommentGroup) bool { + _, ok := delcomments[cg] + return ok + }) return } diff --git a/tools/vendor/golang.org/x/tools/go/buildutil/allpackages.go b/tools/vendor/golang.org/x/tools/go/buildutil/allpackages.go index 32886a71..8a7f0fcc 100644 --- a/tools/vendor/golang.org/x/tools/go/buildutil/allpackages.go +++ b/tools/vendor/golang.org/x/tools/go/buildutil/allpackages.go @@ -175,7 +175,7 @@ func ExpandPatterns(ctxt *build.Context, patterns []string) map[string]bool { for _, pkg := range all { doPkg(pkg, neg) } - } else if dir := strings.TrimSuffix(arg, "/..."); dir != arg { + } else if dir, ok := strings.CutSuffix(arg, "/..."); ok { // dir/... matches all packages beneath dir for _, pkg := range all { if strings.HasPrefix(pkg, dir) && diff --git a/tools/vendor/golang.org/x/tools/go/buildutil/tags.go b/tools/vendor/golang.org/x/tools/go/buildutil/tags.go index 410c8e72..f66cd5df 100644 --- a/tools/vendor/golang.org/x/tools/go/buildutil/tags.go +++ b/tools/vendor/golang.org/x/tools/go/buildutil/tags.go @@ -43,7 +43,7 @@ func (v *TagsFlag) Set(s string) error { // Starting in Go 1.13, the -tags flag is a comma-separated list of build tags. 
*v = []string{} - for _, s := range strings.Split(s, ",") { + for s := range strings.SplitSeq(s, ",") { if s != "" { *v = append(*v, s) } diff --git a/tools/vendor/golang.org/x/tools/go/cfg/cfg.go b/tools/vendor/golang.org/x/tools/go/cfg/cfg.go index 1f208716..29a39f69 100644 --- a/tools/vendor/golang.org/x/tools/go/cfg/cfg.go +++ b/tools/vendor/golang.org/x/tools/go/cfg/cfg.go @@ -53,7 +53,6 @@ import ( // // The entry point is Blocks[0]; there may be multiple return blocks. type CFG struct { - fset *token.FileSet Blocks []*Block // block[0] is entry; order otherwise undefined } diff --git a/tools/vendor/golang.org/x/tools/go/packages/golist.go b/tools/vendor/golang.org/x/tools/go/packages/golist.go index 89f89dd2..680a70ca 100644 --- a/tools/vendor/golang.org/x/tools/go/packages/golist.go +++ b/tools/vendor/golang.org/x/tools/go/packages/golist.go @@ -364,12 +364,6 @@ type jsonPackage struct { DepsErrors []*packagesinternal.PackageError } -type jsonPackageError struct { - ImportStack []string - Pos string - Err string -} - func otherFiles(p *jsonPackage) [][]string { return [][]string{p.CFiles, p.CXXFiles, p.MFiles, p.HFiles, p.FFiles, p.SFiles, p.SwigFiles, p.SwigCXXFiles, p.SysoFiles} } diff --git a/tools/vendor/golang.org/x/tools/go/ssa/builder.go b/tools/vendor/golang.org/x/tools/go/ssa/builder.go index a5ef8fb4..41857ffb 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/builder.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/builder.go @@ -380,7 +380,13 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ } case "new": - return emitNew(fn, typeparams.MustDeref(typ), pos, "new") + alloc := emitNew(fn, typeparams.MustDeref(typ), pos, "new") + if !fn.info.Types[args[0]].IsType() { + // new(expr), requires go1.26 + v := b.expr(fn, args[0]) + emitStore(fn, alloc, v, pos) + } + return alloc case "len", "cap": // Special case: len or cap of an array or *array is diff --git a/tools/vendor/golang.org/x/tools/go/ssa/subst.go b/tools/vendor/golang.org/x/tools/go/ssa/subst.go index 362dce12..2c465ec0 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/subst.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/subst.go @@ -567,76 +567,3 @@ func (subst *subster) signature(t *types.Signature) types.Type { } return t } - -// reaches returns true if a type t reaches any type t' s.t. c[t'] == true. -// It updates c to cache results. -// -// reaches is currently only part of the wellFormed debug logic, and -// in practice c is initially only type parameters. It is not currently -// relied on in production. -func reaches(t types.Type, c map[types.Type]bool) (res bool) { - if c, ok := c[t]; ok { - return c - } - - // c is populated with temporary false entries as types are visited. - // This avoids repeat visits and break cycles. 
- c[t] = false - defer func() { - c[t] = res - }() - - switch t := t.(type) { - case *types.TypeParam, *types.Basic: - return false - case *types.Array: - return reaches(t.Elem(), c) - case *types.Slice: - return reaches(t.Elem(), c) - case *types.Pointer: - return reaches(t.Elem(), c) - case *types.Tuple: - for i := 0; i < t.Len(); i++ { - if reaches(t.At(i).Type(), c) { - return true - } - } - case *types.Struct: - for i := 0; i < t.NumFields(); i++ { - if reaches(t.Field(i).Type(), c) { - return true - } - } - case *types.Map: - return reaches(t.Key(), c) || reaches(t.Elem(), c) - case *types.Chan: - return reaches(t.Elem(), c) - case *types.Signature: - if t.Recv() != nil && reaches(t.Recv().Type(), c) { - return true - } - return reaches(t.Params(), c) || reaches(t.Results(), c) - case *types.Union: - for i := 0; i < t.Len(); i++ { - if reaches(t.Term(i).Type(), c) { - return true - } - } - case *types.Interface: - for i := 0; i < t.NumEmbeddeds(); i++ { - if reaches(t.Embedded(i), c) { - return true - } - } - for i := 0; i < t.NumExplicitMethods(); i++ { - if reaches(t.ExplicitMethod(i).Type(), c) { - return true - } - } - case *types.Named, *types.Alias: - return reaches(t.Underlying(), c) - default: - panic("unreachable") - } - return false -} diff --git a/tools/vendor/golang.org/x/tools/imports/forward.go b/tools/vendor/golang.org/x/tools/imports/forward.go index cb6db889..22ae7777 100644 --- a/tools/vendor/golang.org/x/tools/imports/forward.go +++ b/tools/vendor/golang.org/x/tools/imports/forward.go @@ -69,9 +69,3 @@ func Process(filename string, src []byte, opt *Options) ([]byte, error) { } return intimp.Process(filename, src, intopt) } - -// VendorlessPath returns the devendorized version of the import path ipath. -// For example, VendorlessPath("foo/bar/vendor/a/b") returns "a/b". -func VendorlessPath(ipath string) string { - return intimp.VendorlessPath(ipath) -} diff --git a/tools/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go b/tools/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go index e48dc3f3..2b4a8ebb 100644 --- a/tools/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go +++ b/tools/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go @@ -7,96 +7,23 @@ package analysisinternal import ( - "bytes" "cmp" "fmt" "go/ast" - "go/printer" - "go/scanner" "go/token" "go/types" - "iter" - pathpkg "path" "slices" "strings" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/moreiters" - "golang.org/x/tools/internal/typesinternal" ) -// Deprecated: this heuristic is ill-defined. -// TODO(adonovan): move to sole use in gopls/internal/cache. -func TypeErrorEndPos(fset *token.FileSet, src []byte, start token.Pos) token.Pos { - // Get the end position for the type error. - file := fset.File(start) - if file == nil { - return start - } - if offset := file.PositionFor(start, false).Offset; offset > len(src) { - return start - } else { - src = src[offset:] - } - - // Attempt to find a reasonable end position for the type error. - // - // TODO(rfindley): the heuristic implemented here is unclear. It looks like - // it seeks the end of the primary operand starting at start, but that is not - // quite implemented (for example, given a func literal this heuristic will - // return the range of the func keyword). - // - // We should formalize this heuristic, or deprecate it by finally proposing - // to add end position to all type checker errors. 
- // - // Nevertheless, ensure that the end position at least spans the current - // token at the cursor (this was golang/go#69505). - end := start - { - var s scanner.Scanner - fset := token.NewFileSet() - f := fset.AddFile("", fset.Base(), len(src)) - s.Init(f, src, nil /* no error handler */, scanner.ScanComments) - pos, tok, lit := s.Scan() - if tok != token.SEMICOLON && token.Pos(f.Base()) <= pos && pos <= token.Pos(f.Base()+f.Size()) { - off := file.Offset(pos) + len(lit) - src = src[off:] - end += token.Pos(off) - } - } - - // Look for bytes that might terminate the current operand. See note above: - // this is imprecise. - if width := bytes.IndexAny(src, " \n,():;[]+-*/"); width > 0 { - end += token.Pos(width) - } - return end -} - -// WalkASTWithParent walks the AST rooted at n. The semantics are -// similar to ast.Inspect except it does not call f(nil). -func WalkASTWithParent(n ast.Node, f func(n ast.Node, parent ast.Node) bool) { - var ancestors []ast.Node - ast.Inspect(n, func(n ast.Node) (recurse bool) { - if n == nil { - ancestors = ancestors[:len(ancestors)-1] - return false - } - - var parent ast.Node - if len(ancestors) > 0 { - parent = ancestors[len(ancestors)-1] - } - ancestors = append(ancestors, n) - return f(n, parent) - }) -} - // MatchingIdents finds the names of all identifiers in 'node' that match any of the given types. // 'pos' represents the position at which the identifiers may be inserted. 'pos' must be within // the scope of each of identifier we select. Otherwise, we will insert a variable at 'pos' that // is unrecognized. +// +// TODO(adonovan): this is only used by gopls/internal/analysis/fill{returns,struct}. Move closer. func MatchingIdents(typs []types.Type, node ast.Node, pos token.Pos, info *types.Info, pkg *types.Package) map[types.Type][]string { // Initialize matches to contain the variable types we are searching for. @@ -212,185 +139,6 @@ func CheckReadable(pass *analysis.Pass, filename string) error { return fmt.Errorf("Pass.ReadFile: %s is not among OtherFiles, IgnoredFiles, or names of Files", filename) } -// AddImport checks whether this file already imports pkgpath and -// that import is in scope at pos. If so, it returns the name under -// which it was imported and a zero edit. Otherwise, it adds a new -// import of pkgpath, using a name derived from the preferred name, -// and returns the chosen name, a prefix to be concatenated with member -// to form a qualified name, and the edit for the new import. -// -// In the special case that pkgpath is dot-imported then member, the -// identifier for which the import is being added, is consulted. If -// member is not shadowed at pos, AddImport returns (".", "", nil). -// (AddImport accepts the caller's implicit claim that the imported -// package declares member.) -// -// It does not mutate its arguments. -func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member string, pos token.Pos) (name, prefix string, newImport []analysis.TextEdit) { - // Find innermost enclosing lexical block. - scope := info.Scopes[file].Innermost(pos) - if scope == nil { - panic("no enclosing lexical block") - } - - // Is there an existing import of this package? - // If so, are we in its scope? (not shadowed) - for _, spec := range file.Imports { - pkgname := info.PkgNameOf(spec) - if pkgname != nil && pkgname.Imported().Path() == pkgpath { - name = pkgname.Name() - if name == "." { - // The scope of ident must be the file scope. 
- if s, _ := scope.LookupParent(member, pos); s == info.Scopes[file] { - return name, "", nil - } - } else if _, obj := scope.LookupParent(name, pos); obj == pkgname { - return name, name + ".", nil - } - } - } - - // We must add a new import. - // Ensure we have a fresh name. - newName := FreshName(scope, pos, preferredName) - - // Create a new import declaration either before the first existing - // declaration (which must exist), including its comments; or - // inside the declaration, if it is an import group. - // - // Use a renaming import whenever the preferred name is not - // available, or the chosen name does not match the last - // segment of its path. - newText := fmt.Sprintf("%q", pkgpath) - if newName != preferredName || newName != pathpkg.Base(pkgpath) { - newText = fmt.Sprintf("%s %q", newName, pkgpath) - } - decl0 := file.Decls[0] - var before ast.Node = decl0 - switch decl0 := decl0.(type) { - case *ast.GenDecl: - if decl0.Doc != nil { - before = decl0.Doc - } - case *ast.FuncDecl: - if decl0.Doc != nil { - before = decl0.Doc - } - } - // If the first decl is an import group, add this new import at the end. - if gd, ok := before.(*ast.GenDecl); ok && gd.Tok == token.IMPORT && gd.Rparen.IsValid() { - pos = gd.Rparen - // if it's a std lib, we should append it at the beginning of import group. - // otherwise we may see the std package is put at the last behind a 3rd module which doesn't follow our convention. - // besides, gofmt doesn't help in this case. - if IsStdPackage(pkgpath) && len(gd.Specs) != 0 { - pos = gd.Specs[0].Pos() - newText += "\n\t" - } else { - newText = "\t" + newText + "\n" - } - } else { - pos = before.Pos() - newText = "import " + newText + "\n\n" - } - return newName, newName + ".", []analysis.TextEdit{{ - Pos: pos, - End: pos, - NewText: []byte(newText), - }} -} - -// FreshName returns the name of an identifier that is undefined -// at the specified position, based on the preferred name. -func FreshName(scope *types.Scope, pos token.Pos, preferred string) string { - newName := preferred - for i := 0; ; i++ { - if _, obj := scope.LookupParent(newName, pos); obj == nil { - break // fresh - } - newName = fmt.Sprintf("%s%d", preferred, i) - } - return newName -} - -// Format returns a string representation of the node n. -func Format(fset *token.FileSet, n ast.Node) string { - var buf strings.Builder - printer.Fprint(&buf, fset, n) // ignore errors - return buf.String() -} - -// Imports returns true if path is imported by pkg. -func Imports(pkg *types.Package, path string) bool { - for _, imp := range pkg.Imports() { - if imp.Path() == path { - return true - } - } - return false -} - -// IsTypeNamed reports whether t is (or is an alias for) a -// package-level defined type with the given package path and one of -// the given names. It returns false if t is nil. -// -// This function avoids allocating the concatenation of "pkg.Name", -// which is important for the performance of syntax matching. -func IsTypeNamed(t types.Type, pkgPath string, names ...string) bool { - if named, ok := types.Unalias(t).(*types.Named); ok { - tname := named.Obj() - return tname != nil && - typesinternal.IsPackageLevel(tname) && - tname.Pkg().Path() == pkgPath && - slices.Contains(names, tname.Name()) - } - return false -} - -// IsPointerToNamed reports whether t is (or is an alias for) a pointer to a -// package-level defined type with the given package path and one of the given -// names. It returns false if t is not a pointer type. 
-func IsPointerToNamed(t types.Type, pkgPath string, names ...string) bool { - r := typesinternal.Unpointer(t) - if r == t { - return false - } - return IsTypeNamed(r, pkgPath, names...) -} - -// IsFunctionNamed reports whether obj is a package-level function -// defined in the given package and has one of the given names. -// It returns false if obj is nil. -// -// This function avoids allocating the concatenation of "pkg.Name", -// which is important for the performance of syntax matching. -func IsFunctionNamed(obj types.Object, pkgPath string, names ...string) bool { - f, ok := obj.(*types.Func) - return ok && - typesinternal.IsPackageLevel(obj) && - f.Pkg().Path() == pkgPath && - f.Type().(*types.Signature).Recv() == nil && - slices.Contains(names, f.Name()) -} - -// IsMethodNamed reports whether obj is a method defined on a -// package-level type with the given package and type name, and has -// one of the given names. It returns false if obj is nil. -// -// This function avoids allocating the concatenation of "pkg.TypeName.Name", -// which is important for the performance of syntax matching. -func IsMethodNamed(obj types.Object, pkgPath string, typeName string, names ...string) bool { - if fn, ok := obj.(*types.Func); ok { - if recv := fn.Type().(*types.Signature).Recv(); recv != nil { - _, T := typesinternal.ReceiverNamed(recv) - return T != nil && - IsTypeNamed(T, pkgPath, typeName) && - slices.Contains(names, fn.Name()) - } - } - return false -} - // ValidateFixes validates the set of fixes for a single diagnostic. // Any error indicates a bug in the originating analyzer. // @@ -493,6 +241,20 @@ func validateFix(fset *token.FileSet, fix *analysis.SuggestedFix) error { return nil } +// Range returns an [analysis.Range] for the specified start and end positions. +func Range(pos, end token.Pos) analysis.Range { + return tokenRange{pos, end} +} + +// tokenRange is an implementation of the [analysis.Range] interface. +type tokenRange struct{ StartPos, EndPos token.Pos } + +func (r tokenRange) Pos() token.Pos { return r.StartPos } +func (r tokenRange) End() token.Pos { return r.EndPos } + +// TODO(adonovan): the import-related functions below don't depend on +// analysis (or even on go/types or go/ast). Move somewhere more logical. + // CanImport reports whether one package is allowed to import another. // // TODO(adonovan): allow customization of the accessibility relation @@ -520,133 +282,6 @@ func CanImport(from, to string) bool { return true } -// DeleteStmt returns the edits to remove the [ast.Stmt] identified by -// curStmt, if it is contained within a BlockStmt, CaseClause, -// CommClause, or is the STMT in switch STMT; ... {...}. It returns nil otherwise. -func DeleteStmt(fset *token.FileSet, curStmt inspector.Cursor) []analysis.TextEdit { - stmt := curStmt.Node().(ast.Stmt) - // if the stmt is on a line by itself delete the whole line - // otherwise just delete the statement. - - // this logic would be a lot simpler with the file contents, and somewhat simpler - // if the cursors included the comments. 
- - tokFile := fset.File(stmt.Pos()) - lineOf := tokFile.Line - stmtStartLine, stmtEndLine := lineOf(stmt.Pos()), lineOf(stmt.End()) - - var from, to token.Pos - // bounds of adjacent syntax/comments on same line, if any - limits := func(left, right token.Pos) { - if lineOf(left) == stmtStartLine { - from = left - } - if lineOf(right) == stmtEndLine { - to = right - } - } - // TODO(pjw): there are other places a statement might be removed: - // IfStmt = "if" [ SimpleStmt ";" ] Expression Block [ "else" ( IfStmt | Block ) ] . - // (removing the blocks requires more rewriting than this routine would do) - // CommCase = "case" ( SendStmt | RecvStmt ) | "default" . - // (removing the stmt requires more rewriting, and it's unclear what the user means) - switch parent := curStmt.Parent().Node().(type) { - case *ast.SwitchStmt: - limits(parent.Switch, parent.Body.Lbrace) - case *ast.TypeSwitchStmt: - limits(parent.Switch, parent.Body.Lbrace) - if parent.Assign == stmt { - return nil // don't let the user break the type switch - } - case *ast.BlockStmt: - limits(parent.Lbrace, parent.Rbrace) - case *ast.CommClause: - limits(parent.Colon, curStmt.Parent().Parent().Node().(*ast.BlockStmt).Rbrace) - if parent.Comm == stmt { - return nil // maybe the user meant to remove the entire CommClause? - } - case *ast.CaseClause: - limits(parent.Colon, curStmt.Parent().Parent().Node().(*ast.BlockStmt).Rbrace) - case *ast.ForStmt: - limits(parent.For, parent.Body.Lbrace) - - default: - return nil // not one of ours - } - - if prev, found := curStmt.PrevSibling(); found && lineOf(prev.Node().End()) == stmtStartLine { - from = prev.Node().End() // preceding statement ends on same line - } - if next, found := curStmt.NextSibling(); found && lineOf(next.Node().Pos()) == stmtEndLine { - to = next.Node().Pos() // following statement begins on same line - } - // and now for the comments -Outer: - for _, cg := range enclosingFile(curStmt).Comments { - for _, co := range cg.List { - if lineOf(co.End()) < stmtStartLine { - continue - } else if lineOf(co.Pos()) > stmtEndLine { - break Outer // no more are possible - } - if lineOf(co.End()) == stmtStartLine && co.End() < stmt.Pos() { - if !from.IsValid() || co.End() > from { - from = co.End() - continue // maybe there are more - } - } - if lineOf(co.Pos()) == stmtEndLine && co.Pos() > stmt.End() { - if !to.IsValid() || co.Pos() < to { - to = co.Pos() - continue // maybe there are more - } - } - } - } - // if either from or to is valid, just remove the statement - // otherwise remove the line - edit := analysis.TextEdit{Pos: stmt.Pos(), End: stmt.End()} - if from.IsValid() || to.IsValid() { - // remove just the statement. - // we can't tell if there is a ; or whitespace right after the statement - // ideally we'd like to remove the former and leave the latter - // (if gofmt has run, there likely won't be a ;) - // In type switches we know there's a semicolon somewhere after the statement, - // but the extra work for this special case is not worth it, as gofmt will fix it. - return []analysis.TextEdit{edit} - } - // remove the whole line - for lineOf(edit.Pos) == stmtStartLine { - edit.Pos-- - } - edit.Pos++ // get back tostmtStartLine - for lineOf(edit.End) == stmtEndLine { - edit.End++ - } - return []analysis.TextEdit{edit} -} - -// Comments returns an iterator over the comments overlapping the specified interval. -func Comments(file *ast.File, start, end token.Pos) iter.Seq[*ast.Comment] { - // TODO(adonovan): optimize use binary O(log n) instead of linear O(n) search. 
- return func(yield func(*ast.Comment) bool) { - for _, cg := range file.Comments { - for _, co := range cg.List { - if co.Pos() > end { - return - } - if co.End() < start { - continue - } - - if !yield(co) { - return - } - } - } - } -} - // IsStdPackage reports whether the specified package path belongs to a // package in the standard library (including internal dependencies). func IsStdPackage(path string) bool { @@ -658,20 +293,3 @@ func IsStdPackage(path string) bool { } return !strings.Contains(path[:slash], ".") && path != "testdata" } - -// Range returns an [analysis.Range] for the specified start and end positions. -func Range(pos, end token.Pos) analysis.Range { - return tokenRange{pos, end} -} - -// tokenRange is an implementation of the [analysis.Range] interface. -type tokenRange struct{ StartPos, EndPos token.Pos } - -func (r tokenRange) Pos() token.Pos { return r.StartPos } -func (r tokenRange) End() token.Pos { return r.EndPos } - -// enclosingFile returns the syntax tree for the file enclosing c. -func enclosingFile(c inspector.Cursor) *ast.File { - c, _ = moreiters.First(c.Enclosing((*ast.File)(nil))) - return c.Node().(*ast.File) -} diff --git a/tools/vendor/golang.org/x/tools/internal/analysisinternal/extractdoc.go b/tools/vendor/golang.org/x/tools/internal/analysisinternal/extractdoc.go index 39507723..bfb5900f 100644 --- a/tools/vendor/golang.org/x/tools/internal/analysisinternal/extractdoc.go +++ b/tools/vendor/golang.org/x/tools/internal/analysisinternal/extractdoc.go @@ -97,7 +97,7 @@ func ExtractDoc(content, name string) (string, error) { if f.Doc == nil { return "", fmt.Errorf("Go source file has no package doc comment") } - for _, section := range strings.Split(f.Doc.Text(), "\n# ") { + for section := range strings.SplitSeq(f.Doc.Text(), "\n# ") { if body := strings.TrimPrefix(section, "Analyzer "+name); body != section && body != "" && body[0] == '\r' || body[0] == '\n' { diff --git a/tools/vendor/golang.org/x/tools/internal/analysisinternal/generated/generated.go b/tools/vendor/golang.org/x/tools/internal/analysisinternal/generated/generated.go new file mode 100644 index 00000000..13e1b690 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/internal/analysisinternal/generated/generated.go @@ -0,0 +1,41 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package generated defines an analyzer whose result makes it +// convenient to skip diagnostics within generated files. +package generated + +import ( + "go/ast" + "go/token" + "reflect" + + "golang.org/x/tools/go/analysis" +) + +var Analyzer = &analysis.Analyzer{ + Name: "generated", + Doc: "detect which Go files are generated", + URL: "https://pkg.go.dev/golang.org/x/tools/internal/analysisinternal/generated", + ResultType: reflect.TypeFor[*Result](), + Run: func(pass *analysis.Pass) (any, error) { + set := make(map[*token.File]bool) + for _, file := range pass.Files { + if ast.IsGenerated(file) { + set[pass.Fset.File(file.FileStart)] = true + } + } + return &Result{fset: pass.Fset, generatedFiles: set}, nil + }, +} + +type Result struct { + fset *token.FileSet + generatedFiles map[*token.File]bool +} + +// IsGenerated reports whether the position is within a generated file. 
+func (r *Result) IsGenerated(pos token.Pos) bool { + return r.generatedFiles[r.fset.File(pos)] +} diff --git a/tools/vendor/golang.org/x/tools/internal/astutil/comment.go b/tools/vendor/golang.org/x/tools/internal/astutil/comment.go index ee4be23f..7e52aeaa 100644 --- a/tools/vendor/golang.org/x/tools/internal/astutil/comment.go +++ b/tools/vendor/golang.org/x/tools/internal/astutil/comment.go @@ -7,6 +7,7 @@ package astutil import ( "go/ast" "go/token" + "iter" "strings" ) @@ -15,7 +16,7 @@ import ( // https://go.dev/wiki/Deprecated, or "" if the documented symbol is not // deprecated. func Deprecation(doc *ast.CommentGroup) string { - for _, p := range strings.Split(doc.Text(), "\n\n") { + for p := range strings.SplitSeq(doc.Text(), "\n\n") { // There is still some ambiguity for deprecation message. This function // only returns the paragraph introduced by "Deprecated: ". More // information related to the deprecation may follow in additional @@ -111,3 +112,24 @@ func Directives(g *ast.CommentGroup) (res []*Directive) { } return } + +// Comments returns an iterator over the comments overlapping the specified interval. +func Comments(file *ast.File, start, end token.Pos) iter.Seq[*ast.Comment] { + // TODO(adonovan): optimize use binary O(log n) instead of linear O(n) search. + return func(yield func(*ast.Comment) bool) { + for _, cg := range file.Comments { + for _, co := range cg.List { + if co.Pos() > end { + return + } + if co.End() < start { + continue + } + + if !yield(co) { + return + } + } + } + } +} diff --git a/tools/vendor/golang.org/x/tools/internal/astutil/equal.go b/tools/vendor/golang.org/x/tools/internal/astutil/equal.go index c945de02..210f3923 100644 --- a/tools/vendor/golang.org/x/tools/internal/astutil/equal.go +++ b/tools/vendor/golang.org/x/tools/internal/astutil/equal.go @@ -26,6 +26,14 @@ func Equal(x, y ast.Node, identical func(x, y *ast.Ident) bool) bool { return equal(reflect.ValueOf(x), reflect.ValueOf(y), identical) } +// EqualSyntax reports whether x and y are equal. +// Identifiers are considered equal if they are spelled the same. +// Comments are ignored. +func EqualSyntax(x, y ast.Expr) bool { + sameName := func(x, y *ast.Ident) bool { return x.Name == y.Name } + return Equal(x, y, sameName) +} + func equal(x, y reflect.Value, identical func(x, y *ast.Ident) bool) bool { // Ensure types are the same if x.Type() != y.Type() { diff --git a/tools/vendor/golang.org/x/tools/internal/astutil/util.go b/tools/vendor/golang.org/x/tools/internal/astutil/util.go index 14189155..a1c09835 100644 --- a/tools/vendor/golang.org/x/tools/internal/astutil/util.go +++ b/tools/vendor/golang.org/x/tools/internal/astutil/util.go @@ -6,7 +6,13 @@ package astutil import ( "go/ast" + "go/printer" "go/token" + "strings" + + "golang.org/x/tools/go/ast/edge" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/moreiters" ) // PreorderStack traverses the tree rooted at root, @@ -67,3 +73,47 @@ func NodeContains(n ast.Node, pos token.Pos) bool { } return start <= pos && pos <= end } + +// IsChildOf reports whether cur.ParentEdge is ek. +// +// TODO(adonovan): promote to a method of Cursor. +func IsChildOf(cur inspector.Cursor, ek edge.Kind) bool { + got, _ := cur.ParentEdge() + return got == ek +} + +// EnclosingFile returns the syntax tree for the file enclosing c. +// +// TODO(adonovan): promote this to a method of Cursor. 
+func EnclosingFile(c inspector.Cursor) *ast.File { + c, _ = moreiters.First(c.Enclosing((*ast.File)(nil))) + return c.Node().(*ast.File) +} + +// DocComment returns the doc comment for a node, if any. +func DocComment(n ast.Node) *ast.CommentGroup { + switch n := n.(type) { + case *ast.FuncDecl: + return n.Doc + case *ast.GenDecl: + return n.Doc + case *ast.ValueSpec: + return n.Doc + case *ast.TypeSpec: + return n.Doc + case *ast.File: + return n.Doc + case *ast.ImportSpec: + return n.Doc + case *ast.Field: + return n.Doc + } + return nil +} + +// Format returns a string representation of the node n. +func Format(fset *token.FileSet, n ast.Node) string { + var buf strings.Builder + printer.Fprint(&buf, fset, n) // ignore errors + return buf.String() +} diff --git a/tools/vendor/golang.org/x/tools/internal/event/core/event.go b/tools/vendor/golang.org/x/tools/internal/event/core/event.go index a6cf0e64..ade5d1e7 100644 --- a/tools/vendor/golang.org/x/tools/internal/event/core/event.go +++ b/tools/vendor/golang.org/x/tools/internal/event/core/event.go @@ -28,11 +28,6 @@ type Event struct { dynamic []label.Label // dynamically sized storage for remaining labels } -// eventLabelMap implements label.Map for a the labels of an Event. -type eventLabelMap struct { - event Event -} - func (ev Event) At() time.Time { return ev.at } func (ev Event) Format(f fmt.State, r rune) { diff --git a/tools/vendor/golang.org/x/tools/internal/gcimporter/iexport.go b/tools/vendor/golang.org/x/tools/internal/gcimporter/iexport.go index 780873e3..4a4357d2 100644 --- a/tools/vendor/golang.org/x/tools/internal/gcimporter/iexport.go +++ b/tools/vendor/golang.org/x/tools/internal/gcimporter/iexport.go @@ -569,7 +569,6 @@ func (p *iexporter) exportName(obj types.Object) (res string) { type iexporter struct { fset *token.FileSet - out *bytes.Buffer version int shallow bool // don't put types from other packages in the index diff --git a/tools/vendor/golang.org/x/tools/internal/goplsexport/export.go b/tools/vendor/golang.org/x/tools/internal/goplsexport/export.go new file mode 100644 index 00000000..d7c2b9f3 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/internal/goplsexport/export.go @@ -0,0 +1,14 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package goplsexport provides various backdoors to not-yet-published +// parts of x/tools that are needed by gopls. 
+package goplsexport + +import "golang.org/x/tools/go/analysis" + +var ( + ErrorsAsTypeModernizer *analysis.Analyzer // = modernize.errorsastypeAnalyzer + StdIteratorsModernizer *analysis.Analyzer // = modernize.stditeratorsAnalyer +) diff --git a/tools/vendor/golang.org/x/tools/internal/imports/fix.go b/tools/vendor/golang.org/x/tools/internal/imports/fix.go index 50b6ca51..1b4dc0cb 100644 --- a/tools/vendor/golang.org/x/tools/internal/imports/fix.go +++ b/tools/vendor/golang.org/x/tools/internal/imports/fix.go @@ -16,6 +16,7 @@ import ( "go/types" "io/fs" "io/ioutil" + "maps" "os" "path" "path/filepath" @@ -27,8 +28,6 @@ import ( "unicode" "unicode/utf8" - "maps" - "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/gocommand" @@ -43,7 +42,7 @@ var importToGroup = []func(localPrefix, importPath string) (num int, ok bool){ if localPrefix == "" { return } - for _, p := range strings.Split(localPrefix, ",") { + for p := range strings.SplitSeq(localPrefix, ",") { if strings.HasPrefix(importPath, p) || strings.TrimSuffix(p, "/") == importPath { return 3, true } @@ -1251,7 +1250,6 @@ func ImportPathToAssumedName(importPath string) string { // gopathResolver implements resolver for GOPATH workspaces. type gopathResolver struct { env *ProcessEnv - walked bool cache *DirInfoCache scanSema chan struct{} // scanSema prevents concurrent scans. } diff --git a/tools/vendor/golang.org/x/tools/internal/modindex/symbols.go b/tools/vendor/golang.org/x/tools/internal/modindex/symbols.go index fe24db9b..8e9702d8 100644 --- a/tools/vendor/golang.org/x/tools/internal/modindex/symbols.go +++ b/tools/vendor/golang.org/x/tools/internal/modindex/symbols.go @@ -206,8 +206,7 @@ func isDeprecated(doc *ast.CommentGroup) bool { // go.dev/wiki/Deprecated Paragraph starting 'Deprecated:' // This code fails for /* Deprecated: */, but it's the code from // gopls/internal/analysis/deprecated - lines := strings.Split(doc.Text(), "\n\n") - for _, line := range lines { + for line := range strings.SplitSeq(doc.Text(), "\n\n") { if strings.HasPrefix(line, "Deprecated:") { return true } diff --git a/tools/vendor/golang.org/x/tools/internal/refactor/delete.go b/tools/vendor/golang.org/x/tools/internal/refactor/delete.go new file mode 100644 index 00000000..aa8ba5af --- /dev/null +++ b/tools/vendor/golang.org/x/tools/internal/refactor/delete.go @@ -0,0 +1,433 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package refactor + +// This file defines operations for computing deletion edits. + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "slices" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ast/edge" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/internal/astutil" + "golang.org/x/tools/internal/typesinternal" +) + +// DeleteVar returns edits to delete the declaration of a variable +// whose defining identifier is curId. +// +// It handles variants including: +// - GenDecl > ValueSpec versus AssignStmt; +// - RHS expression has effects, or not; +// - entire statement/declaration may be eliminated; +// and removes associated comments. +// +// If it cannot make the necessary edits, such as for a function +// parameter or result, it returns nil. 
+func DeleteVar(tokFile *token.File, info *types.Info, curId inspector.Cursor) []analysis.TextEdit { + switch ek, _ := curId.ParentEdge(); ek { + case edge.ValueSpec_Names: + return deleteVarFromValueSpec(tokFile, info, curId) + + case edge.AssignStmt_Lhs: + return deleteVarFromAssignStmt(tokFile, info, curId) + } + + // e.g. function receiver, parameter, or result, + // or "switch v := expr.(T) {}" (which has no object). + return nil +} + +// Precondition: curId is Ident beneath ValueSpec.Names beneath GenDecl. +// +// See also [deleteVarFromAssignStmt], which has parallel structure. +func deleteVarFromValueSpec(tokFile *token.File, info *types.Info, curIdent inspector.Cursor) []analysis.TextEdit { + var ( + id = curIdent.Node().(*ast.Ident) + curSpec = curIdent.Parent() + spec = curSpec.Node().(*ast.ValueSpec) + ) + + declaresOtherNames := slices.ContainsFunc(spec.Names, func(name *ast.Ident) bool { + return name != id && name.Name != "_" + }) + noRHSEffects := !slices.ContainsFunc(spec.Values, func(rhs ast.Expr) bool { + return !typesinternal.NoEffects(info, rhs) + }) + if !declaresOtherNames && noRHSEffects { + // The spec is no longer needed, either to declare + // other variables, or for its side effects. + return DeleteSpec(tokFile, curSpec) + } + + // The spec is still needed, either for + // at least one LHS, or for effects on RHS. + // Blank out or delete just one LHS. + + _, index := curIdent.ParentEdge() // index of LHS within ValueSpec.Names + + // If there is no RHS, we can delete the LHS. + if len(spec.Values) == 0 { + var pos, end token.Pos + if index == len(spec.Names)-1 { + // Delete final name. + // + // var _, lhs1 T + // ------ + pos = spec.Names[index-1].End() + end = spec.Names[index].End() + } else { + // Delete non-final name. + // + // var lhs0, _ T + // ------ + pos = spec.Names[index].Pos() + end = spec.Names[index+1].Pos() + } + return []analysis.TextEdit{{ + Pos: pos, + End: end, + }} + } + + // If the assignment is 1:1 and the RHS has no effects, + // we can delete the LHS and its corresponding RHS. + if len(spec.Names) == len(spec.Values) && + typesinternal.NoEffects(info, spec.Values[index]) { + + if index == len(spec.Names)-1 { + // Delete final items. + // + // var _, lhs1 = rhs0, rhs1 + // ------ ------ + return []analysis.TextEdit{ + { + Pos: spec.Names[index-1].End(), + End: spec.Names[index].End(), + }, + { + Pos: spec.Values[index-1].End(), + End: spec.Values[index].End(), + }, + } + } else { + // Delete non-final items. + // + // var lhs0, _ = rhs0, rhs1 + // ------ ------ + return []analysis.TextEdit{ + { + Pos: spec.Names[index].Pos(), + End: spec.Names[index+1].Pos(), + }, + { + Pos: spec.Values[index].Pos(), + End: spec.Values[index+1].Pos(), + }, + } + } + } + + // We cannot delete the RHS. + // Blank out the LHS. + return []analysis.TextEdit{{ + Pos: id.Pos(), + End: id.End(), + NewText: []byte("_"), + }} +} + +// Precondition: curId is Ident beneath AssignStmt.Lhs. +// +// See also [deleteVarFromValueSpec], which has parallel structure. 
+func deleteVarFromAssignStmt(tokFile *token.File, info *types.Info, curIdent inspector.Cursor) []analysis.TextEdit { + var ( + id = curIdent.Node().(*ast.Ident) + curStmt = curIdent.Parent() + assign = curStmt.Node().(*ast.AssignStmt) + ) + + declaresOtherNames := slices.ContainsFunc(assign.Lhs, func(lhs ast.Expr) bool { + lhsId, ok := lhs.(*ast.Ident) + return ok && lhsId != id && lhsId.Name != "_" + }) + noRHSEffects := !slices.ContainsFunc(assign.Rhs, func(rhs ast.Expr) bool { + return !typesinternal.NoEffects(info, rhs) + }) + if !declaresOtherNames && noRHSEffects { + // The assignment is no longer needed, either to + // declare other variables, or for its side effects. + if edits := DeleteStmt(tokFile, curStmt); edits != nil { + return edits + } + // Statement could not not be deleted in this context. + // Fall back to conservative deletion. + } + + // The assign is still needed, either for + // at least one LHS, or for effects on RHS, + // or because it cannot deleted because of its context. + // Blank out or delete just one LHS. + + // If the assignment is 1:1 and the RHS has no effects, + // we can delete the LHS and its corresponding RHS. + _, index := curIdent.ParentEdge() + if len(assign.Lhs) > 1 && + len(assign.Lhs) == len(assign.Rhs) && + typesinternal.NoEffects(info, assign.Rhs[index]) { + + if index == len(assign.Lhs)-1 { + // Delete final items. + // + // _, lhs1 := rhs0, rhs1 + // ------ ------ + return []analysis.TextEdit{ + { + Pos: assign.Lhs[index-1].End(), + End: assign.Lhs[index].End(), + }, + { + Pos: assign.Rhs[index-1].End(), + End: assign.Rhs[index].End(), + }, + } + } else { + // Delete non-final items. + // + // lhs0, _ := rhs0, rhs1 + // ------ ------ + return []analysis.TextEdit{ + { + Pos: assign.Lhs[index].Pos(), + End: assign.Lhs[index+1].Pos(), + }, + { + Pos: assign.Rhs[index].Pos(), + End: assign.Rhs[index+1].Pos(), + }, + } + } + } + + // We cannot delete the RHS. + // Blank out the LHS. + edits := []analysis.TextEdit{{ + Pos: id.Pos(), + End: id.End(), + NewText: []byte("_"), + }} + + // If this eliminates the final variable declared by + // an := statement, we need to turn it into an = + // assignment to avoid a "no new variables on left + // side of :=" error. + if !declaresOtherNames { + edits = append(edits, analysis.TextEdit{ + Pos: assign.TokPos, + End: assign.TokPos + token.Pos(len(":=")), + NewText: []byte("="), + }) + } + + return edits +} + +// DeleteSpec returns edits to delete the ValueSpec identified by curSpec. +// +// TODO(adonovan): add test suite. Test for consts as well. +func DeleteSpec(tokFile *token.File, curSpec inspector.Cursor) []analysis.TextEdit { + var ( + spec = curSpec.Node().(*ast.ValueSpec) + curDecl = curSpec.Parent() + decl = curDecl.Node().(*ast.GenDecl) + ) + + // If it is the sole spec in the decl, + // delete the entire decl. + if len(decl.Specs) == 1 { + return DeleteDecl(tokFile, curDecl) + } + + // Delete the spec and its comments. + _, index := curSpec.ParentEdge() // index of ValueSpec within GenDecl.Specs + pos, end := spec.Pos(), spec.End() + if spec.Doc != nil { + pos = spec.Doc.Pos() // leading comment + } + if index == len(decl.Specs)-1 { + // Delete final spec. + if spec.Comment != nil { + // var (v int // comment \n) + end = spec.Comment.End() + } + } else { + // Delete non-final spec. + // var ( a T; b T ) + // ----- + end = decl.Specs[index+1].Pos() + } + return []analysis.TextEdit{{ + Pos: pos, + End: end, + }} +} + +// DeleteDecl returns edits to delete the ast.Decl identified by curDecl. 
+// +// TODO(adonovan): add test suite. +func DeleteDecl(tokFile *token.File, curDecl inspector.Cursor) []analysis.TextEdit { + decl := curDecl.Node().(ast.Decl) + + ek, _ := curDecl.ParentEdge() + switch ek { + case edge.DeclStmt_Decl: + return DeleteStmt(tokFile, curDecl.Parent()) + + case edge.File_Decls: + pos, end := decl.Pos(), decl.End() + if doc := astutil.DocComment(decl); doc != nil { + pos = doc.Pos() + } + + // Delete free-floating comments on same line as rparen. + // var (...) // comment + var ( + file = curDecl.Parent().Node().(*ast.File) + lineOf = tokFile.Line + declEndLine = lineOf(decl.End()) + ) + for _, cg := range file.Comments { + for _, c := range cg.List { + if c.Pos() < end { + continue // too early + } + commentEndLine := lineOf(c.End()) + if commentEndLine > declEndLine { + break // too late + } else if lineOf(c.Pos()) == declEndLine && commentEndLine == declEndLine { + end = c.End() + } + } + } + + return []analysis.TextEdit{{ + Pos: pos, + End: end, + }} + + default: + panic(fmt.Sprintf("Decl parent is %v, want DeclStmt or File", ek)) + } +} + +// DeleteStmt returns the edits to remove the [ast.Stmt] identified by +// curStmt, if it is contained within a BlockStmt, CaseClause, +// CommClause, or is the STMT in switch STMT; ... {...}. It returns nil otherwise. +func DeleteStmt(tokFile *token.File, curStmt inspector.Cursor) []analysis.TextEdit { + stmt := curStmt.Node().(ast.Stmt) + // if the stmt is on a line by itself delete the whole line + // otherwise just delete the statement. + + // this logic would be a lot simpler with the file contents, and somewhat simpler + // if the cursors included the comments. + + lineOf := tokFile.Line + stmtStartLine, stmtEndLine := lineOf(stmt.Pos()), lineOf(stmt.End()) + + var from, to token.Pos + // bounds of adjacent syntax/comments on same line, if any + limits := func(left, right token.Pos) { + if lineOf(left) == stmtStartLine { + from = left + } + if lineOf(right) == stmtEndLine { + to = right + } + } + // TODO(pjw): there are other places a statement might be removed: + // IfStmt = "if" [ SimpleStmt ";" ] Expression Block [ "else" ( IfStmt | Block ) ] . + // (removing the blocks requires more rewriting than this routine would do) + // CommCase = "case" ( SendStmt | RecvStmt ) | "default" . + // (removing the stmt requires more rewriting, and it's unclear what the user means) + switch parent := curStmt.Parent().Node().(type) { + case *ast.SwitchStmt: + limits(parent.Switch, parent.Body.Lbrace) + case *ast.TypeSwitchStmt: + limits(parent.Switch, parent.Body.Lbrace) + if parent.Assign == stmt { + return nil // don't let the user break the type switch + } + case *ast.BlockStmt: + limits(parent.Lbrace, parent.Rbrace) + case *ast.CommClause: + limits(parent.Colon, curStmt.Parent().Parent().Node().(*ast.BlockStmt).Rbrace) + if parent.Comm == stmt { + return nil // maybe the user meant to remove the entire CommClause? 
+ } + case *ast.CaseClause: + limits(parent.Colon, curStmt.Parent().Parent().Node().(*ast.BlockStmt).Rbrace) + case *ast.ForStmt: + limits(parent.For, parent.Body.Lbrace) + + default: + return nil // not one of ours + } + + if prev, found := curStmt.PrevSibling(); found && lineOf(prev.Node().End()) == stmtStartLine { + from = prev.Node().End() // preceding statement ends on same line + } + if next, found := curStmt.NextSibling(); found && lineOf(next.Node().Pos()) == stmtEndLine { + to = next.Node().Pos() // following statement begins on same line + } + // and now for the comments +Outer: + for _, cg := range astutil.EnclosingFile(curStmt).Comments { + for _, co := range cg.List { + if lineOf(co.End()) < stmtStartLine { + continue + } else if lineOf(co.Pos()) > stmtEndLine { + break Outer // no more are possible + } + if lineOf(co.End()) == stmtStartLine && co.End() < stmt.Pos() { + if !from.IsValid() || co.End() > from { + from = co.End() + continue // maybe there are more + } + } + if lineOf(co.Pos()) == stmtEndLine && co.Pos() > stmt.End() { + if !to.IsValid() || co.Pos() < to { + to = co.Pos() + continue // maybe there are more + } + } + } + } + // if either from or to is valid, just remove the statement + // otherwise remove the line + edit := analysis.TextEdit{Pos: stmt.Pos(), End: stmt.End()} + if from.IsValid() || to.IsValid() { + // remove just the statement. + // we can't tell if there is a ; or whitespace right after the statement + // ideally we'd like to remove the former and leave the latter + // (if gofmt has run, there likely won't be a ;) + // In type switches we know there's a semicolon somewhere after the statement, + // but the extra work for this special case is not worth it, as gofmt will fix it. + return []analysis.TextEdit{edit} + } + // remove the whole line + for lineOf(edit.Pos) == stmtStartLine { + edit.Pos-- + } + edit.Pos++ // get back tostmtStartLine + for lineOf(edit.End) == stmtEndLine { + edit.End++ + } + return []analysis.TextEdit{edit} +} diff --git a/tools/vendor/golang.org/x/tools/internal/refactor/imports.go b/tools/vendor/golang.org/x/tools/internal/refactor/imports.go new file mode 100644 index 00000000..1ba3a960 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/internal/refactor/imports.go @@ -0,0 +1,127 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package refactor + +// This file defines operations for computing edits to imports. + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + pathpkg "path" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/internal/analysisinternal" +) + +// AddImport returns the prefix (either "pkg." or "") that should be +// used to qualify references to the desired symbol (member) imported +// from the specified package, plus any necessary edits to the file's +// import declaration to add a new import. +// +// If the import already exists, and is accessible at pos, AddImport +// returns the existing name and no edits. (If the existing import is +// a dot import, the prefix is "".) +// +// Otherwise, it adds a new import, using a local name derived from +// the preferred name. To request a blank import, use a preferredName +// of "_", and discard the prefix result; member is ignored in this +// case. +// +// AddImport accepts the caller's implicit claim that the imported +// package declares member. +// +// AddImport does not mutate its arguments. 
+func AddImport(info *types.Info, file *ast.File, preferredName, pkgpath, member string, pos token.Pos) (prefix string, edits []analysis.TextEdit) { + // Find innermost enclosing lexical block. + scope := info.Scopes[file].Innermost(pos) + if scope == nil { + panic("no enclosing lexical block") + } + + // Is there an existing import of this package? + // If so, are we in its scope? (not shadowed) + for _, spec := range file.Imports { + pkgname := info.PkgNameOf(spec) + if pkgname != nil && pkgname.Imported().Path() == pkgpath { + name := pkgname.Name() + if preferredName == "_" { + // Request for blank import; any existing import will do. + return "", nil + } + if name == "." { + // The scope of ident must be the file scope. + if s, _ := scope.LookupParent(member, pos); s == info.Scopes[file] { + return "", nil + } + } else if _, obj := scope.LookupParent(name, pos); obj == pkgname { + return name + ".", nil + } + } + } + + // We must add a new import. + + // Ensure we have a fresh name. + newName := preferredName + if preferredName != "_" { + newName = FreshName(scope, pos, preferredName) + } + + // Create a new import declaration either before the first existing + // declaration (which must exist), including its comments; or + // inside the declaration, if it is an import group. + // + // Use a renaming import whenever the preferred name is not + // available, or the chosen name does not match the last + // segment of its path. + newText := fmt.Sprintf("%q", pkgpath) + if newName != preferredName || newName != pathpkg.Base(pkgpath) { + newText = fmt.Sprintf("%s %q", newName, pkgpath) + } + + decl0 := file.Decls[0] + var before ast.Node = decl0 + switch decl0 := decl0.(type) { + case *ast.GenDecl: + if decl0.Doc != nil { + before = decl0.Doc + } + case *ast.FuncDecl: + if decl0.Doc != nil { + before = decl0.Doc + } + } + if gd, ok := before.(*ast.GenDecl); ok && gd.Tok == token.IMPORT && gd.Rparen.IsValid() { + // Have existing grouped import ( ... ) decl. + if analysisinternal.IsStdPackage(pkgpath) && len(gd.Specs) > 0 { + // Add spec for a std package before + // first existing spec, followed by + // a blank line if the next one is non-std. + first := gd.Specs[0].(*ast.ImportSpec) + pos = first.Pos() + if !analysisinternal.IsStdPackage(first.Path.Value) { + newText += "\n" + } + newText += "\n\t" + } else { + // Add spec at end of group. + pos = gd.Rparen + newText = "\t" + newText + "\n" + } + } else { + // No import decl, or non-grouped import. + // Add a new import decl before first decl. + // (gofmt will merge multiple import decls.) + pos = before.Pos() + newText = "import " + newText + "\n\n" + } + return newName + ".", []analysis.TextEdit{{ + Pos: pos, + End: pos, + NewText: []byte(newText), + }} +} diff --git a/tools/vendor/golang.org/x/tools/internal/refactor/refactor.go b/tools/vendor/golang.org/x/tools/internal/refactor/refactor.go new file mode 100644 index 00000000..27b97508 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/internal/refactor/refactor.go @@ -0,0 +1,29 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package refactor provides operators to compute common textual edits +// for refactoring tools. +// +// This package should not use features of the analysis API +// other than [analysis.TextEdit]. 
+package refactor + +import ( + "fmt" + "go/token" + "go/types" +) + +// FreshName returns the name of an identifier that is undefined +// at the specified position, based on the preferred name. +func FreshName(scope *types.Scope, pos token.Pos, preferred string) string { + newName := preferred + for i := 0; ; i++ { + if _, obj := scope.LookupParent(newName, pos); obj == nil { + break // fresh + } + newName = fmt.Sprintf("%s%d", preferred, i) + } + return newName +} diff --git a/tools/vendor/golang.org/x/tools/internal/typesinternal/fx.go b/tools/vendor/golang.org/x/tools/internal/typesinternal/fx.go new file mode 100644 index 00000000..93acff21 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/internal/typesinternal/fx.go @@ -0,0 +1,49 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typesinternal + +import ( + "go/ast" + "go/token" + "go/types" +) + +// NoEffects reports whether the expression has no side effects, i.e., it +// does not modify the memory state. This function is conservative: it may +// return false even when the expression has no effect. +func NoEffects(info *types.Info, expr ast.Expr) bool { + noEffects := true + ast.Inspect(expr, func(n ast.Node) bool { + switch v := n.(type) { + case nil, *ast.Ident, *ast.BasicLit, *ast.BinaryExpr, *ast.ParenExpr, + *ast.SelectorExpr, *ast.IndexExpr, *ast.SliceExpr, *ast.TypeAssertExpr, + *ast.StarExpr, *ast.CompositeLit, *ast.ArrayType, *ast.StructType, + *ast.MapType, *ast.InterfaceType, *ast.KeyValueExpr: + // No effect + case *ast.UnaryExpr: + // Channel send <-ch has effects + if v.Op == token.ARROW { + noEffects = false + } + case *ast.CallExpr: + // Type conversion has no effects + if !info.Types[v.Fun].IsType() { + // TODO(adonovan): Add a case for built-in functions without side + // effects (by using callsPureBuiltin from tools/internal/refactor/inline) + + noEffects = false + } + case *ast.FuncLit: + // A FuncLit has no effects, but do not descend into it. + return false + default: + // All other expressions have effects + noEffects = false + } + + return noEffects + }) + return noEffects +} diff --git a/tools/vendor/golang.org/x/tools/internal/typesinternal/isnamed.go b/tools/vendor/golang.org/x/tools/internal/typesinternal/isnamed.go new file mode 100644 index 00000000..f2affec4 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/internal/typesinternal/isnamed.go @@ -0,0 +1,71 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typesinternal + +import ( + "go/types" + "slices" +) + +// IsTypeNamed reports whether t is (or is an alias for) a +// package-level defined type with the given package path and one of +// the given names. It returns false if t is nil. +// +// This function avoids allocating the concatenation of "pkg.Name", +// which is important for the performance of syntax matching. +func IsTypeNamed(t types.Type, pkgPath string, names ...string) bool { + if named, ok := types.Unalias(t).(*types.Named); ok { + tname := named.Obj() + return tname != nil && + IsPackageLevel(tname) && + tname.Pkg().Path() == pkgPath && + slices.Contains(names, tname.Name()) + } + return false +} + +// IsPointerToNamed reports whether t is (or is an alias for) a pointer to a +// package-level defined type with the given package path and one of the given +// names. 
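A small, hypothetical demonstration of FreshName's candidate sequence (preferred, preferred0, preferred1, ...). The scope is built by hand purely to show the behaviour; since refactor is internal to x/tools, this sketch likewise compiles only inside that module.

package main

import (
	"fmt"
	"go/token"
	"go/types"

	"golang.org/x/tools/internal/refactor" // internal: importable only within x/tools
)

func main() {
	// A scope that already declares "err" and "err0".
	scope := types.NewScope(types.Universe, token.NoPos, token.NoPos, "demo")
	for _, name := range []string{"err", "err0"} {
		scope.Insert(types.NewVar(token.NoPos, nil, name, types.Typ[types.Int]))
	}

	// Candidates are tried in order: err, err0, err1, ...; the first name
	// not found by LookupParent wins. token.NoPos disables position filtering.
	fmt.Println(refactor.FreshName(scope, token.NoPos, "err")) // err1
}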
It returns false if t is not a pointer type. +func IsPointerToNamed(t types.Type, pkgPath string, names ...string) bool { + r := Unpointer(t) + if r == t { + return false + } + return IsTypeNamed(r, pkgPath, names...) +} + +// IsFunctionNamed reports whether obj is a package-level function +// defined in the given package and has one of the given names. +// It returns false if obj is nil. +// +// This function avoids allocating the concatenation of "pkg.Name", +// which is important for the performance of syntax matching. +func IsFunctionNamed(obj types.Object, pkgPath string, names ...string) bool { + f, ok := obj.(*types.Func) + return ok && + IsPackageLevel(obj) && + f.Pkg().Path() == pkgPath && + f.Type().(*types.Signature).Recv() == nil && + slices.Contains(names, f.Name()) +} + +// IsMethodNamed reports whether obj is a method defined on a +// package-level type with the given package and type name, and has +// one of the given names. It returns false if obj is nil. +// +// This function avoids allocating the concatenation of "pkg.TypeName.Name", +// which is important for the performance of syntax matching. +func IsMethodNamed(obj types.Object, pkgPath string, typeName string, names ...string) bool { + if fn, ok := obj.(*types.Func); ok { + if recv := fn.Type().(*types.Signature).Recv(); recv != nil { + _, T := ReceiverNamed(recv) + return T != nil && + IsTypeNamed(T, pkgPath, typeName) && + slices.Contains(names, fn.Name()) + } + } + return false +} diff --git a/tools/vendor/golang.org/x/tools/internal/typesinternal/qualifier.go b/tools/vendor/golang.org/x/tools/internal/typesinternal/qualifier.go index b64f714e..64f47919 100644 --- a/tools/vendor/golang.org/x/tools/internal/typesinternal/qualifier.go +++ b/tools/vendor/golang.org/x/tools/internal/typesinternal/qualifier.go @@ -15,6 +15,14 @@ import ( // file. // If the same package is imported multiple times, the last appearance is // recorded. +// +// TODO(adonovan): this function ignores the effect of shadowing. It +// should accept a [token.Pos] and a [types.Info] and compute only the +// set of imports that are not shadowed at that point, analogous to +// [analysisinternal.AddImport]. It could also compute (as a side +// effect) the set of additional imports required to ensure that there +// is an accessible import for each necessary package, making it +// converge even more closely with AddImport. func FileQualifier(f *ast.File, pkg *types.Package) types.Qualifier { // Construct mapping of import paths to their defined names. // It is only necessary to look at renaming imports. diff --git a/tools/vendor/golang.org/x/tools/internal/typesinternal/types.go b/tools/vendor/golang.org/x/tools/internal/typesinternal/types.go index a5cd7e8d..fef74a78 100644 --- a/tools/vendor/golang.org/x/tools/internal/typesinternal/types.go +++ b/tools/vendor/golang.org/x/tools/internal/typesinternal/types.go @@ -2,8 +2,20 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package typesinternal provides access to internal go/types APIs that are not -// yet exported. +// Package typesinternal provides helpful operators for dealing with +// go/types: +// +// - operators for querying typed syntax trees (e.g. [Imports], [IsFunctionNamed]); +// - functions for converting types to strings or syntax (e.g. [TypeExpr], FileQualifier]); +// - helpers for working with the [go/types] API (e.g. [NewTypesInfo]); +// - access to internal go/types APIs that are not yet +// exported (e.g. 
[SetUsesCgo], [ErrorCodeStartEnd], [VarKind]); and +// - common algorithms related to types (e.g. [TooNewStdSymbols]). +// +// See also: +// - [golang.org/x/tools/internal/astutil], for operations on untyped syntax; +// - [golang.org/x/tools/internal/analysisinernal], for helpers for analyzers; +// - [golang.org/x/tools/internal/refactor], for operators to compute text edits. package typesinternal import ( @@ -13,6 +25,7 @@ import ( "reflect" "unsafe" + "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/internal/aliases" ) @@ -60,6 +73,9 @@ func ErrorCodeStartEnd(err types.Error) (code ErrorCode, start, end token.Pos, o // which is often excessive.) // // If pkg is nil, it is equivalent to [*types.Package.Name]. +// +// TODO(adonovan): all uses of this with TypeString should be +// eliminated when https://go.dev/issues/75604 is resolved. func NameRelativeTo(pkg *types.Package) types.Qualifier { return func(other *types.Package) string { if pkg != nil && pkg == other { @@ -153,3 +169,31 @@ func NewTypesInfo() *types.Info { FileVersions: map[*ast.File]string{}, } } + +// EnclosingScope returns the innermost block logically enclosing the cursor. +func EnclosingScope(info *types.Info, cur inspector.Cursor) *types.Scope { + for cur := range cur.Enclosing() { + n := cur.Node() + // A function's Scope is associated with its FuncType. + switch f := n.(type) { + case *ast.FuncDecl: + n = f.Type + case *ast.FuncLit: + n = f.Type + } + if b := info.Scopes[n]; b != nil { + return b + } + } + panic("no Scope for *ast.File") +} + +// Imports reports whether path is imported by pkg. +func Imports(pkg *types.Package, path string) bool { + for _, imp := range pkg.Imports() { + if imp.Path() == path { + return true + } + } + return false +} diff --git a/tools/vendor/golang.org/x/tools/internal/typesinternal/zerovalue.go b/tools/vendor/golang.org/x/tools/internal/typesinternal/zerovalue.go index d272949c..453bba2a 100644 --- a/tools/vendor/golang.org/x/tools/internal/typesinternal/zerovalue.go +++ b/tools/vendor/golang.org/x/tools/internal/typesinternal/zerovalue.go @@ -204,23 +204,12 @@ func ZeroExpr(t types.Type, qual types.Qualifier) (_ ast.Expr, isValid bool) { } } -// IsZeroExpr uses simple syntactic heuristics to report whether expr -// is a obvious zero value, such as 0, "", nil, or false. -// It cannot do better without type information. -func IsZeroExpr(expr ast.Expr) bool { - switch e := expr.(type) { - case *ast.BasicLit: - return e.Value == "0" || e.Value == `""` - case *ast.Ident: - return e.Name == "nil" || e.Name == "false" - default: - return false - } -} - // TypeExpr returns syntax for the specified type. References to named types // are qualified by an appropriate (optional) qualifier function. // It may panic for types such as Tuple or Union. +// +// See also https://go.dev/issues/75604, which will provide a robust +// Type-to-valid-Go-syntax formatter. func TypeExpr(t types.Type, qual types.Qualifier) ast.Expr { switch t := t.(type) { case *types.Basic: diff --git a/tools/vendor/google.golang.org/protobuf/encoding/protowire/wire.go b/tools/vendor/google.golang.org/protobuf/encoding/protowire/wire.go index e942bc98..743bfb81 100644 --- a/tools/vendor/google.golang.org/protobuf/encoding/protowire/wire.go +++ b/tools/vendor/google.golang.org/protobuf/encoding/protowire/wire.go @@ -371,7 +371,31 @@ func ConsumeVarint(b []byte) (v uint64, n int) { func SizeVarint(v uint64) int { // This computes 1 + (bits.Len64(v)-1)/7. 
// 9/64 is a good enough approximation of 1/7 - return int(9*uint32(bits.Len64(v))+64) / 64 + // + // The Go compiler can translate the bits.LeadingZeros64 call into the LZCNT + // instruction, which is very fast on CPUs from the last few years. The + // specific way of expressing the calculation matches C++ Protobuf, see + // https://godbolt.org/z/4P3h53oM4 for the C++ code and how gcc/clang + // optimize that function for GOAMD64=v1 and GOAMD64=v3 (-march=haswell). + + // By OR'ing v with 1, we guarantee that v is never 0, without changing the + // result of SizeVarint. LZCNT is not defined for 0, meaning the compiler + // needs to add extra instructions to handle that case. + // + // The Go compiler currently (go1.24.4) does not make use of this knowledge. + // This opportunity (removing the XOR instruction, which handles the 0 case) + // results in a small (1%) performance win across CPU architectures. + // + // Independently of avoiding the 0 case, we need the v |= 1 line because + // it allows the Go compiler to eliminate an extra XCHGL barrier. + v |= 1 + + // It would be clearer to write log2value := 63 - uint32(...), but + // writing uint32(...) ^ 63 is much more efficient (-14% ARM, -20% Intel). + // Proof of identity for our value range [0..63]: + // https://go.dev/play/p/Pdn9hEWYakX + log2value := uint32(bits.LeadingZeros64(v)) ^ 63 + return int((log2value*9 + (64 + 9)) / 64) } // AppendFixed32 appends v to b as a little-endian uint32. diff --git a/tools/vendor/google.golang.org/protobuf/internal/editiondefaults/editions_defaults.binpb b/tools/vendor/google.golang.org/protobuf/internal/editiondefaults/editions_defaults.binpb index 323829da..04696351 100644 Binary files a/tools/vendor/google.golang.org/protobuf/internal/editiondefaults/editions_defaults.binpb and b/tools/vendor/google.golang.org/protobuf/internal/editiondefaults/editions_defaults.binpb differ diff --git a/tools/vendor/google.golang.org/protobuf/internal/filedesc/editions.go b/tools/vendor/google.golang.org/protobuf/internal/filedesc/editions.go index b08b7183..a0aad277 100644 --- a/tools/vendor/google.golang.org/protobuf/internal/filedesc/editions.go +++ b/tools/vendor/google.golang.org/protobuf/internal/filedesc/editions.go @@ -72,6 +72,9 @@ func unmarshalFeatureSet(b []byte, parent EditionFeatures) EditionFeatures { case genid.FeatureSet_EnforceNamingStyle_field_number: // EnforceNamingStyle is enforced in protoc, languages other than C++ // are not supposed to do anything with this feature. + case genid.FeatureSet_DefaultSymbolVisibility_field_number: + // DefaultSymbolVisibility is enforced in protoc, runtimes should not + // inspect this value. default: panic(fmt.Sprintf("unkown field number %d while unmarshalling FeatureSet", num)) } diff --git a/tools/vendor/google.golang.org/protobuf/internal/filedesc/presence.go b/tools/vendor/google.golang.org/protobuf/internal/filedesc/presence.go new file mode 100644 index 00000000..a12ec979 --- /dev/null +++ b/tools/vendor/google.golang.org/protobuf/internal/filedesc/presence.go @@ -0,0 +1,33 @@ +// Copyright 2025 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package filedesc + +import "google.golang.org/protobuf/reflect/protoreflect" + +// UsePresenceForField reports whether the presence bitmap should be used for +// the specified field. 
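Since the comment above packs in a fair amount of bit-twiddling, here is a small standalone check (not part of the patch) that the LeadingZeros64-based formula agrees with the obvious count of 7-bit groups across the byte-size boundaries; it uses only the public protowire API.

package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/protowire"
)

// refSize is the straightforward reference: one output byte per 7-bit group.
func refSize(v uint64) int {
	n := 1
	for v >= 0x80 {
		v >>= 7
		n++
	}
	return n
}

func main() {
	// Every power of two plus the boundary values, including 0
	// (which the v |= 1 trick maps to the 1-byte case).
	cases := []uint64{0, 1, 127, 128, 16383, 16384, 1<<63 - 1, ^uint64(0)}
	for i := 0; i < 64; i++ {
		cases = append(cases, uint64(1)<<i)
	}
	ok := true
	for _, v := range cases {
		if got, want := protowire.SizeVarint(v), refSize(v); got != want {
			ok = false
			fmt.Printf("SizeVarint(%#x) = %d, want %d\n", v, got, want)
		}
	}
	fmt.Println("all sizes match:", ok)
}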
+func UsePresenceForField(fd protoreflect.FieldDescriptor) (usePresence, canBeLazy bool) { + switch { + case fd.ContainingOneof() != nil && !fd.ContainingOneof().IsSynthetic(): + // Oneof fields never use the presence bitmap. + // + // Synthetic oneofs are an exception: Those are used to implement proto3 + // optional fields and hence should follow non-oneof field semantics. + return false, false + + case fd.IsMap(): + // Map-typed fields never use the presence bitmap. + return false, false + + case fd.Kind() == protoreflect.MessageKind || fd.Kind() == protoreflect.GroupKind: + // Lazy fields always use the presence bitmap (only messages can be lazy). + isLazy := fd.(interface{ IsLazy() bool }).IsLazy() + return isLazy, isLazy + + default: + // If the field has presence, use the presence bitmap. + return fd.HasPresence(), false + } +} diff --git a/tools/vendor/google.golang.org/protobuf/internal/genid/api_gen.go b/tools/vendor/google.golang.org/protobuf/internal/genid/api_gen.go index df8f9185..3ceb6fa7 100644 --- a/tools/vendor/google.golang.org/protobuf/internal/genid/api_gen.go +++ b/tools/vendor/google.golang.org/protobuf/internal/genid/api_gen.go @@ -27,6 +27,7 @@ const ( Api_SourceContext_field_name protoreflect.Name = "source_context" Api_Mixins_field_name protoreflect.Name = "mixins" Api_Syntax_field_name protoreflect.Name = "syntax" + Api_Edition_field_name protoreflect.Name = "edition" Api_Name_field_fullname protoreflect.FullName = "google.protobuf.Api.name" Api_Methods_field_fullname protoreflect.FullName = "google.protobuf.Api.methods" @@ -35,6 +36,7 @@ const ( Api_SourceContext_field_fullname protoreflect.FullName = "google.protobuf.Api.source_context" Api_Mixins_field_fullname protoreflect.FullName = "google.protobuf.Api.mixins" Api_Syntax_field_fullname protoreflect.FullName = "google.protobuf.Api.syntax" + Api_Edition_field_fullname protoreflect.FullName = "google.protobuf.Api.edition" ) // Field numbers for google.protobuf.Api. @@ -46,6 +48,7 @@ const ( Api_SourceContext_field_number protoreflect.FieldNumber = 5 Api_Mixins_field_number protoreflect.FieldNumber = 6 Api_Syntax_field_number protoreflect.FieldNumber = 7 + Api_Edition_field_number protoreflect.FieldNumber = 8 ) // Names for google.protobuf.Method. @@ -63,6 +66,7 @@ const ( Method_ResponseStreaming_field_name protoreflect.Name = "response_streaming" Method_Options_field_name protoreflect.Name = "options" Method_Syntax_field_name protoreflect.Name = "syntax" + Method_Edition_field_name protoreflect.Name = "edition" Method_Name_field_fullname protoreflect.FullName = "google.protobuf.Method.name" Method_RequestTypeUrl_field_fullname protoreflect.FullName = "google.protobuf.Method.request_type_url" @@ -71,6 +75,7 @@ const ( Method_ResponseStreaming_field_fullname protoreflect.FullName = "google.protobuf.Method.response_streaming" Method_Options_field_fullname protoreflect.FullName = "google.protobuf.Method.options" Method_Syntax_field_fullname protoreflect.FullName = "google.protobuf.Method.syntax" + Method_Edition_field_fullname protoreflect.FullName = "google.protobuf.Method.edition" ) // Field numbers for google.protobuf.Method. @@ -82,6 +87,7 @@ const ( Method_ResponseStreaming_field_number protoreflect.FieldNumber = 5 Method_Options_field_number protoreflect.FieldNumber = 6 Method_Syntax_field_number protoreflect.FieldNumber = 7 + Method_Edition_field_number protoreflect.FieldNumber = 8 ) // Names for google.protobuf.Mixin. 
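As a rough illustration of the branches above (a sketch only: filedesc is internal to protobuf-go, so this compiles only from within that module), two well-known descriptor.proto fields exercise the explicit-presence scalar and repeated cases.

package main

import (
	"fmt"

	"google.golang.org/protobuf/internal/filedesc" // internal: importable only within protobuf-go
	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	fields := (&descriptorpb.FileDescriptorProto{}).ProtoReflect().Descriptor().Fields()

	// "name" is an explicit-presence scalar: uses the presence bit, never lazy.
	usePresence, canBeLazy := filedesc.UsePresenceForField(fields.ByName("name"))
	fmt.Println(usePresence, canBeLazy) // true false

	// "dependency" is repeated: no presence bit, never lazy.
	usePresence, canBeLazy = filedesc.UsePresenceForField(fields.ByName("dependency"))
	fmt.Println(usePresence, canBeLazy) // false false
}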
diff --git a/tools/vendor/google.golang.org/protobuf/internal/genid/descriptor_gen.go b/tools/vendor/google.golang.org/protobuf/internal/genid/descriptor_gen.go index 39524782..950a6a32 100644 --- a/tools/vendor/google.golang.org/protobuf/internal/genid/descriptor_gen.go +++ b/tools/vendor/google.golang.org/protobuf/internal/genid/descriptor_gen.go @@ -34,6 +34,19 @@ const ( Edition_EDITION_MAX_enum_value = 2147483647 ) +// Full and short names for google.protobuf.SymbolVisibility. +const ( + SymbolVisibility_enum_fullname = "google.protobuf.SymbolVisibility" + SymbolVisibility_enum_name = "SymbolVisibility" +) + +// Enum values for google.protobuf.SymbolVisibility. +const ( + SymbolVisibility_VISIBILITY_UNSET_enum_value = 0 + SymbolVisibility_VISIBILITY_LOCAL_enum_value = 1 + SymbolVisibility_VISIBILITY_EXPORT_enum_value = 2 +) + // Names for google.protobuf.FileDescriptorSet. const ( FileDescriptorSet_message_name protoreflect.Name = "FileDescriptorSet" @@ -65,6 +78,7 @@ const ( FileDescriptorProto_Dependency_field_name protoreflect.Name = "dependency" FileDescriptorProto_PublicDependency_field_name protoreflect.Name = "public_dependency" FileDescriptorProto_WeakDependency_field_name protoreflect.Name = "weak_dependency" + FileDescriptorProto_OptionDependency_field_name protoreflect.Name = "option_dependency" FileDescriptorProto_MessageType_field_name protoreflect.Name = "message_type" FileDescriptorProto_EnumType_field_name protoreflect.Name = "enum_type" FileDescriptorProto_Service_field_name protoreflect.Name = "service" @@ -79,6 +93,7 @@ const ( FileDescriptorProto_Dependency_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.dependency" FileDescriptorProto_PublicDependency_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.public_dependency" FileDescriptorProto_WeakDependency_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.weak_dependency" + FileDescriptorProto_OptionDependency_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.option_dependency" FileDescriptorProto_MessageType_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.message_type" FileDescriptorProto_EnumType_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.enum_type" FileDescriptorProto_Service_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.service" @@ -96,6 +111,7 @@ const ( FileDescriptorProto_Dependency_field_number protoreflect.FieldNumber = 3 FileDescriptorProto_PublicDependency_field_number protoreflect.FieldNumber = 10 FileDescriptorProto_WeakDependency_field_number protoreflect.FieldNumber = 11 + FileDescriptorProto_OptionDependency_field_number protoreflect.FieldNumber = 15 FileDescriptorProto_MessageType_field_number protoreflect.FieldNumber = 4 FileDescriptorProto_EnumType_field_number protoreflect.FieldNumber = 5 FileDescriptorProto_Service_field_number protoreflect.FieldNumber = 6 @@ -124,6 +140,7 @@ const ( DescriptorProto_Options_field_name protoreflect.Name = "options" DescriptorProto_ReservedRange_field_name protoreflect.Name = "reserved_range" DescriptorProto_ReservedName_field_name protoreflect.Name = "reserved_name" + DescriptorProto_Visibility_field_name protoreflect.Name = "visibility" DescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.name" DescriptorProto_Field_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.field" @@ -135,6 +152,7 @@ const ( 
DescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.options" DescriptorProto_ReservedRange_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.reserved_range" DescriptorProto_ReservedName_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.reserved_name" + DescriptorProto_Visibility_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.visibility" ) // Field numbers for google.protobuf.DescriptorProto. @@ -149,6 +167,7 @@ const ( DescriptorProto_Options_field_number protoreflect.FieldNumber = 7 DescriptorProto_ReservedRange_field_number protoreflect.FieldNumber = 9 DescriptorProto_ReservedName_field_number protoreflect.FieldNumber = 10 + DescriptorProto_Visibility_field_number protoreflect.FieldNumber = 11 ) // Names for google.protobuf.DescriptorProto.ExtensionRange. @@ -388,12 +407,14 @@ const ( EnumDescriptorProto_Options_field_name protoreflect.Name = "options" EnumDescriptorProto_ReservedRange_field_name protoreflect.Name = "reserved_range" EnumDescriptorProto_ReservedName_field_name protoreflect.Name = "reserved_name" + EnumDescriptorProto_Visibility_field_name protoreflect.Name = "visibility" EnumDescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.name" EnumDescriptorProto_Value_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.value" EnumDescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.options" EnumDescriptorProto_ReservedRange_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.reserved_range" EnumDescriptorProto_ReservedName_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.reserved_name" + EnumDescriptorProto_Visibility_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.visibility" ) // Field numbers for google.protobuf.EnumDescriptorProto. @@ -403,6 +424,7 @@ const ( EnumDescriptorProto_Options_field_number protoreflect.FieldNumber = 3 EnumDescriptorProto_ReservedRange_field_number protoreflect.FieldNumber = 4 EnumDescriptorProto_ReservedName_field_number protoreflect.FieldNumber = 5 + EnumDescriptorProto_Visibility_field_number protoreflect.FieldNumber = 6 ) // Names for google.protobuf.EnumDescriptorProto.EnumReservedRange. @@ -1008,32 +1030,35 @@ const ( // Field names for google.protobuf.FeatureSet. 
const ( - FeatureSet_FieldPresence_field_name protoreflect.Name = "field_presence" - FeatureSet_EnumType_field_name protoreflect.Name = "enum_type" - FeatureSet_RepeatedFieldEncoding_field_name protoreflect.Name = "repeated_field_encoding" - FeatureSet_Utf8Validation_field_name protoreflect.Name = "utf8_validation" - FeatureSet_MessageEncoding_field_name protoreflect.Name = "message_encoding" - FeatureSet_JsonFormat_field_name protoreflect.Name = "json_format" - FeatureSet_EnforceNamingStyle_field_name protoreflect.Name = "enforce_naming_style" - - FeatureSet_FieldPresence_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.field_presence" - FeatureSet_EnumType_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.enum_type" - FeatureSet_RepeatedFieldEncoding_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.repeated_field_encoding" - FeatureSet_Utf8Validation_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.utf8_validation" - FeatureSet_MessageEncoding_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.message_encoding" - FeatureSet_JsonFormat_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.json_format" - FeatureSet_EnforceNamingStyle_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.enforce_naming_style" + FeatureSet_FieldPresence_field_name protoreflect.Name = "field_presence" + FeatureSet_EnumType_field_name protoreflect.Name = "enum_type" + FeatureSet_RepeatedFieldEncoding_field_name protoreflect.Name = "repeated_field_encoding" + FeatureSet_Utf8Validation_field_name protoreflect.Name = "utf8_validation" + FeatureSet_MessageEncoding_field_name protoreflect.Name = "message_encoding" + FeatureSet_JsonFormat_field_name protoreflect.Name = "json_format" + FeatureSet_EnforceNamingStyle_field_name protoreflect.Name = "enforce_naming_style" + FeatureSet_DefaultSymbolVisibility_field_name protoreflect.Name = "default_symbol_visibility" + + FeatureSet_FieldPresence_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.field_presence" + FeatureSet_EnumType_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.enum_type" + FeatureSet_RepeatedFieldEncoding_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.repeated_field_encoding" + FeatureSet_Utf8Validation_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.utf8_validation" + FeatureSet_MessageEncoding_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.message_encoding" + FeatureSet_JsonFormat_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.json_format" + FeatureSet_EnforceNamingStyle_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.enforce_naming_style" + FeatureSet_DefaultSymbolVisibility_field_fullname protoreflect.FullName = "google.protobuf.FeatureSet.default_symbol_visibility" ) // Field numbers for google.protobuf.FeatureSet. 
const ( - FeatureSet_FieldPresence_field_number protoreflect.FieldNumber = 1 - FeatureSet_EnumType_field_number protoreflect.FieldNumber = 2 - FeatureSet_RepeatedFieldEncoding_field_number protoreflect.FieldNumber = 3 - FeatureSet_Utf8Validation_field_number protoreflect.FieldNumber = 4 - FeatureSet_MessageEncoding_field_number protoreflect.FieldNumber = 5 - FeatureSet_JsonFormat_field_number protoreflect.FieldNumber = 6 - FeatureSet_EnforceNamingStyle_field_number protoreflect.FieldNumber = 7 + FeatureSet_FieldPresence_field_number protoreflect.FieldNumber = 1 + FeatureSet_EnumType_field_number protoreflect.FieldNumber = 2 + FeatureSet_RepeatedFieldEncoding_field_number protoreflect.FieldNumber = 3 + FeatureSet_Utf8Validation_field_number protoreflect.FieldNumber = 4 + FeatureSet_MessageEncoding_field_number protoreflect.FieldNumber = 5 + FeatureSet_JsonFormat_field_number protoreflect.FieldNumber = 6 + FeatureSet_EnforceNamingStyle_field_number protoreflect.FieldNumber = 7 + FeatureSet_DefaultSymbolVisibility_field_number protoreflect.FieldNumber = 8 ) // Full and short names for google.protobuf.FeatureSet.FieldPresence. @@ -1128,6 +1153,27 @@ const ( FeatureSet_STYLE_LEGACY_enum_value = 2 ) +// Names for google.protobuf.FeatureSet.VisibilityFeature. +const ( + FeatureSet_VisibilityFeature_message_name protoreflect.Name = "VisibilityFeature" + FeatureSet_VisibilityFeature_message_fullname protoreflect.FullName = "google.protobuf.FeatureSet.VisibilityFeature" +) + +// Full and short names for google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility. +const ( + FeatureSet_VisibilityFeature_DefaultSymbolVisibility_enum_fullname = "google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility" + FeatureSet_VisibilityFeature_DefaultSymbolVisibility_enum_name = "DefaultSymbolVisibility" +) + +// Enum values for google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility. +const ( + FeatureSet_VisibilityFeature_DEFAULT_SYMBOL_VISIBILITY_UNKNOWN_enum_value = 0 + FeatureSet_VisibilityFeature_EXPORT_ALL_enum_value = 1 + FeatureSet_VisibilityFeature_EXPORT_TOP_LEVEL_enum_value = 2 + FeatureSet_VisibilityFeature_LOCAL_ALL_enum_value = 3 + FeatureSet_VisibilityFeature_STRICT_enum_value = 4 +) + // Names for google.protobuf.FeatureSetDefaults. const ( FeatureSetDefaults_message_name protoreflect.Name = "FeatureSetDefaults" diff --git a/tools/vendor/google.golang.org/protobuf/internal/impl/codec_message_opaque.go b/tools/vendor/google.golang.org/protobuf/internal/impl/codec_message_opaque.go index 41c1f74e..bdad12a9 100644 --- a/tools/vendor/google.golang.org/protobuf/internal/impl/codec_message_opaque.go +++ b/tools/vendor/google.golang.org/protobuf/internal/impl/codec_message_opaque.go @@ -11,6 +11,7 @@ import ( "google.golang.org/protobuf/encoding/protowire" "google.golang.org/protobuf/internal/encoding/messageset" + "google.golang.org/protobuf/internal/filedesc" "google.golang.org/protobuf/internal/order" "google.golang.org/protobuf/reflect/protoreflect" piface "google.golang.org/protobuf/runtime/protoiface" @@ -80,7 +81,7 @@ func (mi *MessageInfo) makeOpaqueCoderMethods(t reflect.Type, si opaqueStructInf // permit us to skip over definitely-unset fields at marshal time. 
var hasPresence bool - hasPresence, cf.isLazy = usePresenceForField(si, fd) + hasPresence, cf.isLazy = filedesc.UsePresenceForField(fd) if hasPresence { cf.presenceIndex, mi.presenceSize = presenceIndex(mi.Desc, fd) diff --git a/tools/vendor/google.golang.org/protobuf/internal/impl/message_opaque.go b/tools/vendor/google.golang.org/protobuf/internal/impl/message_opaque.go index dd55e8e0..5a439daa 100644 --- a/tools/vendor/google.golang.org/protobuf/internal/impl/message_opaque.go +++ b/tools/vendor/google.golang.org/protobuf/internal/impl/message_opaque.go @@ -11,6 +11,7 @@ import ( "strings" "sync/atomic" + "google.golang.org/protobuf/internal/filedesc" "google.golang.org/protobuf/reflect/protoreflect" ) @@ -53,7 +54,7 @@ func opaqueInitHook(mi *MessageInfo) bool { fd := fds.Get(i) fs := si.fieldsByNumber[fd.Number()] var fi fieldInfo - usePresence, _ := usePresenceForField(si, fd) + usePresence, _ := filedesc.UsePresenceForField(fd) switch { case fd.ContainingOneof() != nil && !fd.ContainingOneof().IsSynthetic(): @@ -343,17 +344,15 @@ func (mi *MessageInfo) fieldInfoForMessageListOpaqueNoPresence(si opaqueStructIn if p.IsNil() { return false } - sp := p.Apply(fieldOffset).AtomicGetPointer() - if sp.IsNil() { + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if rv.IsNil() { return false } - rv := sp.AsValueOf(fs.Type.Elem()) return rv.Elem().Len() > 0 }, clear: func(p pointer) { - sp := p.Apply(fieldOffset).AtomicGetPointer() - if !sp.IsNil() { - rv := sp.AsValueOf(fs.Type.Elem()) + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if !rv.IsNil() { rv.Elem().Set(reflect.Zero(rv.Type().Elem())) } }, @@ -361,11 +360,10 @@ func (mi *MessageInfo) fieldInfoForMessageListOpaqueNoPresence(si opaqueStructIn if p.IsNil() { return conv.Zero() } - sp := p.Apply(fieldOffset).AtomicGetPointer() - if sp.IsNil() { + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if rv.IsNil() { return conv.Zero() } - rv := sp.AsValueOf(fs.Type.Elem()) if rv.Elem().Len() == 0 { return conv.Zero() } @@ -598,30 +596,3 @@ func (mi *MessageInfo) clearPresent(p pointer, index uint32) { func (mi *MessageInfo) present(p pointer, index uint32) bool { return p.Apply(mi.presenceOffset).PresenceInfo().Present(index) } - -// usePresenceForField implements the somewhat intricate logic of when -// the presence bitmap is used for a field. The main logic is that a -// field that is optional or that can be lazy will use the presence -// bit, but for proto2, also maps have a presence bit. It also records -// if the field can ever be lazy, which is true if we have a -// lazyOffset and the field is a message or a slice of messages. A -// field that is lazy will always need a presence bit. Oneofs are not -// lazy and do not use presence, unless they are a synthetic oneof, -// which is a proto3 optional field. For proto3 optionals, we use the -// presence and they can also be lazy when applicable (a message). -func usePresenceForField(si opaqueStructInfo, fd protoreflect.FieldDescriptor) (usePresence, canBeLazy bool) { - hasLazyField := fd.(interface{ IsLazy() bool }).IsLazy() - - // Non-oneof scalar fields with explicit field presence use the presence array. 
- usesPresenceArray := fd.HasPresence() && fd.Message() == nil && (fd.ContainingOneof() == nil || fd.ContainingOneof().IsSynthetic()) - switch { - case fd.ContainingOneof() != nil && !fd.ContainingOneof().IsSynthetic(): - return false, false - case fd.IsMap(): - return false, false - case fd.Kind() == protoreflect.MessageKind || fd.Kind() == protoreflect.GroupKind: - return hasLazyField, hasLazyField - default: - return usesPresenceArray || (hasLazyField && fd.HasPresence()), false - } -} diff --git a/tools/vendor/google.golang.org/protobuf/internal/impl/presence.go b/tools/vendor/google.golang.org/protobuf/internal/impl/presence.go index 914cb1de..443afe81 100644 --- a/tools/vendor/google.golang.org/protobuf/internal/impl/presence.go +++ b/tools/vendor/google.golang.org/protobuf/internal/impl/presence.go @@ -32,9 +32,6 @@ func (p presence) toElem(num uint32) (ret *uint32) { // Present checks for the presence of a specific field number in a presence set. func (p presence) Present(num uint32) bool { - if p.P == nil { - return false - } return Export{}.Present(p.toElem(num), num) } diff --git a/tools/vendor/google.golang.org/protobuf/internal/version/version.go b/tools/vendor/google.golang.org/protobuf/internal/version/version.go index aac1cb18..697d1c14 100644 --- a/tools/vendor/google.golang.org/protobuf/internal/version/version.go +++ b/tools/vendor/google.golang.org/protobuf/internal/version/version.go @@ -52,7 +52,7 @@ import ( const ( Major = 1 Minor = 36 - Patch = 6 + Patch = 8 PreRelease = "" ) diff --git a/tools/vendor/google.golang.org/protobuf/reflect/protoreflect/source_gen.go b/tools/vendor/google.golang.org/protobuf/reflect/protoreflect/source_gen.go index a4a0a297..730331e6 100644 --- a/tools/vendor/google.golang.org/protobuf/reflect/protoreflect/source_gen.go +++ b/tools/vendor/google.golang.org/protobuf/reflect/protoreflect/source_gen.go @@ -21,6 +21,8 @@ func (p *SourcePath) appendFileDescriptorProto(b []byte) []byte { b = p.appendRepeatedField(b, "public_dependency", nil) case 11: b = p.appendRepeatedField(b, "weak_dependency", nil) + case 15: + b = p.appendRepeatedField(b, "option_dependency", nil) case 4: b = p.appendRepeatedField(b, "message_type", (*SourcePath).appendDescriptorProto) case 5: @@ -66,6 +68,8 @@ func (p *SourcePath) appendDescriptorProto(b []byte) []byte { b = p.appendRepeatedField(b, "reserved_range", (*SourcePath).appendDescriptorProto_ReservedRange) case 10: b = p.appendRepeatedField(b, "reserved_name", nil) + case 11: + b = p.appendSingularField(b, "visibility", nil) } return b } @@ -85,6 +89,8 @@ func (p *SourcePath) appendEnumDescriptorProto(b []byte) []byte { b = p.appendRepeatedField(b, "reserved_range", (*SourcePath).appendEnumDescriptorProto_EnumReservedRange) case 5: b = p.appendRepeatedField(b, "reserved_name", nil) + case 6: + b = p.appendSingularField(b, "visibility", nil) } return b } @@ -400,6 +406,8 @@ func (p *SourcePath) appendFeatureSet(b []byte) []byte { b = p.appendSingularField(b, "json_format", nil) case 7: b = p.appendSingularField(b, "enforce_naming_style", nil) + case 8: + b = p.appendSingularField(b, "default_symbol_visibility", nil) } return b } diff --git a/tools/vendor/google.golang.org/protobuf/types/descriptorpb/descriptor.pb.go b/tools/vendor/google.golang.org/protobuf/types/descriptorpb/descriptor.pb.go index 7fe280f1..4eacb523 100644 --- a/tools/vendor/google.golang.org/protobuf/types/descriptorpb/descriptor.pb.go +++ b/tools/vendor/google.golang.org/protobuf/types/descriptorpb/descriptor.pb.go @@ -151,6 +151,70 @@ func 
(Edition) EnumDescriptor() ([]byte, []int) { return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{0} } +// Describes the 'visibility' of a symbol with respect to the proto import +// system. Symbols can only be imported when the visibility rules do not prevent +// it (ex: local symbols cannot be imported). Visibility modifiers can only set +// on `message` and `enum` as they are the only types available to be referenced +// from other files. +type SymbolVisibility int32 + +const ( + SymbolVisibility_VISIBILITY_UNSET SymbolVisibility = 0 + SymbolVisibility_VISIBILITY_LOCAL SymbolVisibility = 1 + SymbolVisibility_VISIBILITY_EXPORT SymbolVisibility = 2 +) + +// Enum value maps for SymbolVisibility. +var ( + SymbolVisibility_name = map[int32]string{ + 0: "VISIBILITY_UNSET", + 1: "VISIBILITY_LOCAL", + 2: "VISIBILITY_EXPORT", + } + SymbolVisibility_value = map[string]int32{ + "VISIBILITY_UNSET": 0, + "VISIBILITY_LOCAL": 1, + "VISIBILITY_EXPORT": 2, + } +) + +func (x SymbolVisibility) Enum() *SymbolVisibility { + p := new(SymbolVisibility) + *p = x + return p +} + +func (x SymbolVisibility) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (SymbolVisibility) Descriptor() protoreflect.EnumDescriptor { + return file_google_protobuf_descriptor_proto_enumTypes[1].Descriptor() +} + +func (SymbolVisibility) Type() protoreflect.EnumType { + return &file_google_protobuf_descriptor_proto_enumTypes[1] +} + +func (x SymbolVisibility) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Do not use. +func (x *SymbolVisibility) UnmarshalJSON(b []byte) error { + num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) + if err != nil { + return err + } + *x = SymbolVisibility(num) + return nil +} + +// Deprecated: Use SymbolVisibility.Descriptor instead. +func (SymbolVisibility) EnumDescriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{1} +} + // The verification state of the extension range. 
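For readers skimming the generated code, a quick sketch of how the new SymbolVisibility enum is consumed through the usual protobuf-go accessors. DescriptorProto.Visibility and GetVisibility are added further down in this same file, and the example assumes the vendored protobuf-go release (v1.36.8) or newer.

package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	// Mark a message descriptor as exported; Enum() returns a pointer,
	// as usual for optional enum fields in the open API.
	d := &descriptorpb.DescriptorProto{
		Name:       proto.String("Exported"),
		Visibility: descriptorpb.SymbolVisibility_VISIBILITY_EXPORT.Enum(),
	}
	fmt.Println(d.GetVisibility())                                            // VISIBILITY_EXPORT
	fmt.Println(descriptorpb.SymbolVisibility_name[int32(d.GetVisibility())]) // VISIBILITY_EXPORT
}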
type ExtensionRangeOptions_VerificationState int32 @@ -183,11 +247,11 @@ func (x ExtensionRangeOptions_VerificationState) String() string { } func (ExtensionRangeOptions_VerificationState) Descriptor() protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[1].Descriptor() + return file_google_protobuf_descriptor_proto_enumTypes[2].Descriptor() } func (ExtensionRangeOptions_VerificationState) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[1] + return &file_google_protobuf_descriptor_proto_enumTypes[2] } func (x ExtensionRangeOptions_VerificationState) Number() protoreflect.EnumNumber { @@ -299,11 +363,11 @@ func (x FieldDescriptorProto_Type) String() string { } func (FieldDescriptorProto_Type) Descriptor() protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[2].Descriptor() + return file_google_protobuf_descriptor_proto_enumTypes[3].Descriptor() } func (FieldDescriptorProto_Type) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[2] + return &file_google_protobuf_descriptor_proto_enumTypes[3] } func (x FieldDescriptorProto_Type) Number() protoreflect.EnumNumber { @@ -362,11 +426,11 @@ func (x FieldDescriptorProto_Label) String() string { } func (FieldDescriptorProto_Label) Descriptor() protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[3].Descriptor() + return file_google_protobuf_descriptor_proto_enumTypes[4].Descriptor() } func (FieldDescriptorProto_Label) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[3] + return &file_google_protobuf_descriptor_proto_enumTypes[4] } func (x FieldDescriptorProto_Label) Number() protoreflect.EnumNumber { @@ -423,11 +487,11 @@ func (x FileOptions_OptimizeMode) String() string { } func (FileOptions_OptimizeMode) Descriptor() protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[4].Descriptor() + return file_google_protobuf_descriptor_proto_enumTypes[5].Descriptor() } func (FileOptions_OptimizeMode) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[4] + return &file_google_protobuf_descriptor_proto_enumTypes[5] } func (x FileOptions_OptimizeMode) Number() protoreflect.EnumNumber { @@ -489,11 +553,11 @@ func (x FieldOptions_CType) String() string { } func (FieldOptions_CType) Descriptor() protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[5].Descriptor() + return file_google_protobuf_descriptor_proto_enumTypes[6].Descriptor() } func (FieldOptions_CType) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[5] + return &file_google_protobuf_descriptor_proto_enumTypes[6] } func (x FieldOptions_CType) Number() protoreflect.EnumNumber { @@ -551,11 +615,11 @@ func (x FieldOptions_JSType) String() string { } func (FieldOptions_JSType) Descriptor() protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[6].Descriptor() + return file_google_protobuf_descriptor_proto_enumTypes[7].Descriptor() } func (FieldOptions_JSType) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[6] + return &file_google_protobuf_descriptor_proto_enumTypes[7] } func (x FieldOptions_JSType) Number() protoreflect.EnumNumber { @@ -611,11 +675,11 @@ func (x FieldOptions_OptionRetention) String() string { } func (FieldOptions_OptionRetention) Descriptor() 
protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[7].Descriptor() + return file_google_protobuf_descriptor_proto_enumTypes[8].Descriptor() } func (FieldOptions_OptionRetention) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[7] + return &file_google_protobuf_descriptor_proto_enumTypes[8] } func (x FieldOptions_OptionRetention) Number() protoreflect.EnumNumber { @@ -694,11 +758,11 @@ func (x FieldOptions_OptionTargetType) String() string { } func (FieldOptions_OptionTargetType) Descriptor() protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[8].Descriptor() + return file_google_protobuf_descriptor_proto_enumTypes[9].Descriptor() } func (FieldOptions_OptionTargetType) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[8] + return &file_google_protobuf_descriptor_proto_enumTypes[9] } func (x FieldOptions_OptionTargetType) Number() protoreflect.EnumNumber { @@ -756,11 +820,11 @@ func (x MethodOptions_IdempotencyLevel) String() string { } func (MethodOptions_IdempotencyLevel) Descriptor() protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[9].Descriptor() + return file_google_protobuf_descriptor_proto_enumTypes[10].Descriptor() } func (MethodOptions_IdempotencyLevel) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[9] + return &file_google_protobuf_descriptor_proto_enumTypes[10] } func (x MethodOptions_IdempotencyLevel) Number() protoreflect.EnumNumber { @@ -818,11 +882,11 @@ func (x FeatureSet_FieldPresence) String() string { } func (FeatureSet_FieldPresence) Descriptor() protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[10].Descriptor() + return file_google_protobuf_descriptor_proto_enumTypes[11].Descriptor() } func (FeatureSet_FieldPresence) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[10] + return &file_google_protobuf_descriptor_proto_enumTypes[11] } func (x FeatureSet_FieldPresence) Number() protoreflect.EnumNumber { @@ -877,11 +941,11 @@ func (x FeatureSet_EnumType) String() string { } func (FeatureSet_EnumType) Descriptor() protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[11].Descriptor() + return file_google_protobuf_descriptor_proto_enumTypes[12].Descriptor() } func (FeatureSet_EnumType) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[11] + return &file_google_protobuf_descriptor_proto_enumTypes[12] } func (x FeatureSet_EnumType) Number() protoreflect.EnumNumber { @@ -936,11 +1000,11 @@ func (x FeatureSet_RepeatedFieldEncoding) String() string { } func (FeatureSet_RepeatedFieldEncoding) Descriptor() protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[12].Descriptor() + return file_google_protobuf_descriptor_proto_enumTypes[13].Descriptor() } func (FeatureSet_RepeatedFieldEncoding) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[12] + return &file_google_protobuf_descriptor_proto_enumTypes[13] } func (x FeatureSet_RepeatedFieldEncoding) Number() protoreflect.EnumNumber { @@ -995,11 +1059,11 @@ func (x FeatureSet_Utf8Validation) String() string { } func (FeatureSet_Utf8Validation) Descriptor() protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[13].Descriptor() + return 
file_google_protobuf_descriptor_proto_enumTypes[14].Descriptor() } func (FeatureSet_Utf8Validation) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[13] + return &file_google_protobuf_descriptor_proto_enumTypes[14] } func (x FeatureSet_Utf8Validation) Number() protoreflect.EnumNumber { @@ -1054,11 +1118,11 @@ func (x FeatureSet_MessageEncoding) String() string { } func (FeatureSet_MessageEncoding) Descriptor() protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[14].Descriptor() + return file_google_protobuf_descriptor_proto_enumTypes[15].Descriptor() } func (FeatureSet_MessageEncoding) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[14] + return &file_google_protobuf_descriptor_proto_enumTypes[15] } func (x FeatureSet_MessageEncoding) Number() protoreflect.EnumNumber { @@ -1113,11 +1177,11 @@ func (x FeatureSet_JsonFormat) String() string { } func (FeatureSet_JsonFormat) Descriptor() protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[15].Descriptor() + return file_google_protobuf_descriptor_proto_enumTypes[16].Descriptor() } func (FeatureSet_JsonFormat) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[15] + return &file_google_protobuf_descriptor_proto_enumTypes[16] } func (x FeatureSet_JsonFormat) Number() protoreflect.EnumNumber { @@ -1172,11 +1236,11 @@ func (x FeatureSet_EnforceNamingStyle) String() string { } func (FeatureSet_EnforceNamingStyle) Descriptor() protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[16].Descriptor() + return file_google_protobuf_descriptor_proto_enumTypes[17].Descriptor() } func (FeatureSet_EnforceNamingStyle) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[16] + return &file_google_protobuf_descriptor_proto_enumTypes[17] } func (x FeatureSet_EnforceNamingStyle) Number() protoreflect.EnumNumber { @@ -1198,6 +1262,77 @@ func (FeatureSet_EnforceNamingStyle) EnumDescriptor() ([]byte, []int) { return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{19, 6} } +type FeatureSet_VisibilityFeature_DefaultSymbolVisibility int32 + +const ( + FeatureSet_VisibilityFeature_DEFAULT_SYMBOL_VISIBILITY_UNKNOWN FeatureSet_VisibilityFeature_DefaultSymbolVisibility = 0 + // Default pre-EDITION_2024, all UNSET visibility are export. + FeatureSet_VisibilityFeature_EXPORT_ALL FeatureSet_VisibilityFeature_DefaultSymbolVisibility = 1 + // All top-level symbols default to export, nested default to local. + FeatureSet_VisibilityFeature_EXPORT_TOP_LEVEL FeatureSet_VisibilityFeature_DefaultSymbolVisibility = 2 + // All symbols default to local. + FeatureSet_VisibilityFeature_LOCAL_ALL FeatureSet_VisibilityFeature_DefaultSymbolVisibility = 3 + // All symbols local by default. Nested types cannot be exported. + // With special case caveat for message { enum {} reserved 1 to max; } + // This is the recommended setting for new protos. + FeatureSet_VisibilityFeature_STRICT FeatureSet_VisibilityFeature_DefaultSymbolVisibility = 4 +) + +// Enum value maps for FeatureSet_VisibilityFeature_DefaultSymbolVisibility. 
+var ( + FeatureSet_VisibilityFeature_DefaultSymbolVisibility_name = map[int32]string{ + 0: "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN", + 1: "EXPORT_ALL", + 2: "EXPORT_TOP_LEVEL", + 3: "LOCAL_ALL", + 4: "STRICT", + } + FeatureSet_VisibilityFeature_DefaultSymbolVisibility_value = map[string]int32{ + "DEFAULT_SYMBOL_VISIBILITY_UNKNOWN": 0, + "EXPORT_ALL": 1, + "EXPORT_TOP_LEVEL": 2, + "LOCAL_ALL": 3, + "STRICT": 4, + } +) + +func (x FeatureSet_VisibilityFeature_DefaultSymbolVisibility) Enum() *FeatureSet_VisibilityFeature_DefaultSymbolVisibility { + p := new(FeatureSet_VisibilityFeature_DefaultSymbolVisibility) + *p = x + return p +} + +func (x FeatureSet_VisibilityFeature_DefaultSymbolVisibility) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (FeatureSet_VisibilityFeature_DefaultSymbolVisibility) Descriptor() protoreflect.EnumDescriptor { + return file_google_protobuf_descriptor_proto_enumTypes[18].Descriptor() +} + +func (FeatureSet_VisibilityFeature_DefaultSymbolVisibility) Type() protoreflect.EnumType { + return &file_google_protobuf_descriptor_proto_enumTypes[18] +} + +func (x FeatureSet_VisibilityFeature_DefaultSymbolVisibility) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Do not use. +func (x *FeatureSet_VisibilityFeature_DefaultSymbolVisibility) UnmarshalJSON(b []byte) error { + num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) + if err != nil { + return err + } + *x = FeatureSet_VisibilityFeature_DefaultSymbolVisibility(num) + return nil +} + +// Deprecated: Use FeatureSet_VisibilityFeature_DefaultSymbolVisibility.Descriptor instead. +func (FeatureSet_VisibilityFeature_DefaultSymbolVisibility) EnumDescriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{19, 0, 0} +} + // Represents the identified object's effect on the element in the original // .proto file. type GeneratedCodeInfo_Annotation_Semantic int32 @@ -1236,11 +1371,11 @@ func (x GeneratedCodeInfo_Annotation_Semantic) String() string { } func (GeneratedCodeInfo_Annotation_Semantic) Descriptor() protoreflect.EnumDescriptor { - return file_google_protobuf_descriptor_proto_enumTypes[17].Descriptor() + return file_google_protobuf_descriptor_proto_enumTypes[19].Descriptor() } func (GeneratedCodeInfo_Annotation_Semantic) Type() protoreflect.EnumType { - return &file_google_protobuf_descriptor_proto_enumTypes[17] + return &file_google_protobuf_descriptor_proto_enumTypes[19] } func (x GeneratedCodeInfo_Annotation_Semantic) Number() protoreflect.EnumNumber { @@ -1321,6 +1456,9 @@ type FileDescriptorProto struct { // Indexes of the weak imported files in the dependency list. // For Google-internal migration only. Do not use. WeakDependency []int32 `protobuf:"varint,11,rep,name=weak_dependency,json=weakDependency" json:"weak_dependency,omitempty"` + // Names of files imported by this file purely for the purpose of providing + // option extensions. These are excluded from the dependency list above. + OptionDependency []string `protobuf:"bytes,15,rep,name=option_dependency,json=optionDependency" json:"option_dependency,omitempty"` // All top-level definitions in this file. 
MessageType []*DescriptorProto `protobuf:"bytes,4,rep,name=message_type,json=messageType" json:"message_type,omitempty"` EnumType []*EnumDescriptorProto `protobuf:"bytes,5,rep,name=enum_type,json=enumType" json:"enum_type,omitempty"` @@ -1414,6 +1552,13 @@ func (x *FileDescriptorProto) GetWeakDependency() []int32 { return nil } +func (x *FileDescriptorProto) GetOptionDependency() []string { + if x != nil { + return x.OptionDependency + } + return nil +} + func (x *FileDescriptorProto) GetMessageType() []*DescriptorProto { if x != nil { return x.MessageType @@ -1484,7 +1629,9 @@ type DescriptorProto struct { ReservedRange []*DescriptorProto_ReservedRange `protobuf:"bytes,9,rep,name=reserved_range,json=reservedRange" json:"reserved_range,omitempty"` // Reserved field names, which may not be used by fields in the same message. // A given name may only be reserved once. - ReservedName []string `protobuf:"bytes,10,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` + ReservedName []string `protobuf:"bytes,10,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` + // Support for `export` and `local` keywords on enums. + Visibility *SymbolVisibility `protobuf:"varint,11,opt,name=visibility,enum=google.protobuf.SymbolVisibility" json:"visibility,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -1589,6 +1736,13 @@ func (x *DescriptorProto) GetReservedName() []string { return nil } +func (x *DescriptorProto) GetVisibility() SymbolVisibility { + if x != nil && x.Visibility != nil { + return *x.Visibility + } + return SymbolVisibility_VISIBILITY_UNSET +} + type ExtensionRangeOptions struct { state protoimpl.MessageState `protogen:"open.v1"` // The parser stores options it doesn't recognize here. See above. @@ -1901,7 +2055,9 @@ type EnumDescriptorProto struct { ReservedRange []*EnumDescriptorProto_EnumReservedRange `protobuf:"bytes,4,rep,name=reserved_range,json=reservedRange" json:"reserved_range,omitempty"` // Reserved enum value names, which may not be reused. A given name may only // be reserved once. - ReservedName []string `protobuf:"bytes,5,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` + ReservedName []string `protobuf:"bytes,5,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` + // Support for `export` and `local` keywords on enums. + Visibility *SymbolVisibility `protobuf:"varint,6,opt,name=visibility,enum=google.protobuf.SymbolVisibility" json:"visibility,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -1971,6 +2127,13 @@ func (x *EnumDescriptorProto) GetReservedName() []string { return nil } +func (x *EnumDescriptorProto) GetVisibility() SymbolVisibility { + if x != nil && x.Visibility != nil { + return *x.Visibility + } + return SymbolVisibility_VISIBILITY_UNSET +} + // Describes a value within an enum. type EnumValueDescriptorProto struct { state protoimpl.MessageState `protogen:"open.v1"` @@ -2710,7 +2873,10 @@ type FieldOptions struct { // for accessors, or it will be completely ignored; in the very least, this // is a formalization for deprecating fields. Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // DEPRECATED. DO NOT USE! // For Google-internal migration only. Do not use. + // + // Deprecated: Marked as deprecated in google/protobuf/descriptor.proto. 
Weak *bool `protobuf:"varint,10,opt,name=weak,def=0" json:"weak,omitempty"` // Indicate that the field value should not be printed out when using debug // formats, e.g. when the field contains sensitive credentials. @@ -2814,6 +2980,7 @@ func (x *FieldOptions) GetDeprecated() bool { return Default_FieldOptions_Deprecated } +// Deprecated: Marked as deprecated in google/protobuf/descriptor.proto. func (x *FieldOptions) GetWeak() bool { if x != nil && x.Weak != nil { return *x.Weak @@ -3392,17 +3559,18 @@ func (x *UninterpretedOption) GetAggregateValue() string { // be designed and implemented to handle this, hopefully before we ever hit a // conflict here. type FeatureSet struct { - state protoimpl.MessageState `protogen:"open.v1"` - FieldPresence *FeatureSet_FieldPresence `protobuf:"varint,1,opt,name=field_presence,json=fieldPresence,enum=google.protobuf.FeatureSet_FieldPresence" json:"field_presence,omitempty"` - EnumType *FeatureSet_EnumType `protobuf:"varint,2,opt,name=enum_type,json=enumType,enum=google.protobuf.FeatureSet_EnumType" json:"enum_type,omitempty"` - RepeatedFieldEncoding *FeatureSet_RepeatedFieldEncoding `protobuf:"varint,3,opt,name=repeated_field_encoding,json=repeatedFieldEncoding,enum=google.protobuf.FeatureSet_RepeatedFieldEncoding" json:"repeated_field_encoding,omitempty"` - Utf8Validation *FeatureSet_Utf8Validation `protobuf:"varint,4,opt,name=utf8_validation,json=utf8Validation,enum=google.protobuf.FeatureSet_Utf8Validation" json:"utf8_validation,omitempty"` - MessageEncoding *FeatureSet_MessageEncoding `protobuf:"varint,5,opt,name=message_encoding,json=messageEncoding,enum=google.protobuf.FeatureSet_MessageEncoding" json:"message_encoding,omitempty"` - JsonFormat *FeatureSet_JsonFormat `protobuf:"varint,6,opt,name=json_format,json=jsonFormat,enum=google.protobuf.FeatureSet_JsonFormat" json:"json_format,omitempty"` - EnforceNamingStyle *FeatureSet_EnforceNamingStyle `protobuf:"varint,7,opt,name=enforce_naming_style,json=enforceNamingStyle,enum=google.protobuf.FeatureSet_EnforceNamingStyle" json:"enforce_naming_style,omitempty"` - extensionFields protoimpl.ExtensionFields - unknownFields protoimpl.UnknownFields - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + FieldPresence *FeatureSet_FieldPresence `protobuf:"varint,1,opt,name=field_presence,json=fieldPresence,enum=google.protobuf.FeatureSet_FieldPresence" json:"field_presence,omitempty"` + EnumType *FeatureSet_EnumType `protobuf:"varint,2,opt,name=enum_type,json=enumType,enum=google.protobuf.FeatureSet_EnumType" json:"enum_type,omitempty"` + RepeatedFieldEncoding *FeatureSet_RepeatedFieldEncoding `protobuf:"varint,3,opt,name=repeated_field_encoding,json=repeatedFieldEncoding,enum=google.protobuf.FeatureSet_RepeatedFieldEncoding" json:"repeated_field_encoding,omitempty"` + Utf8Validation *FeatureSet_Utf8Validation `protobuf:"varint,4,opt,name=utf8_validation,json=utf8Validation,enum=google.protobuf.FeatureSet_Utf8Validation" json:"utf8_validation,omitempty"` + MessageEncoding *FeatureSet_MessageEncoding `protobuf:"varint,5,opt,name=message_encoding,json=messageEncoding,enum=google.protobuf.FeatureSet_MessageEncoding" json:"message_encoding,omitempty"` + JsonFormat *FeatureSet_JsonFormat `protobuf:"varint,6,opt,name=json_format,json=jsonFormat,enum=google.protobuf.FeatureSet_JsonFormat" json:"json_format,omitempty"` + EnforceNamingStyle *FeatureSet_EnforceNamingStyle 
`protobuf:"varint,7,opt,name=enforce_naming_style,json=enforceNamingStyle,enum=google.protobuf.FeatureSet_EnforceNamingStyle" json:"enforce_naming_style,omitempty"` + DefaultSymbolVisibility *FeatureSet_VisibilityFeature_DefaultSymbolVisibility `protobuf:"varint,8,opt,name=default_symbol_visibility,json=defaultSymbolVisibility,enum=google.protobuf.FeatureSet_VisibilityFeature_DefaultSymbolVisibility" json:"default_symbol_visibility,omitempty"` + extensionFields protoimpl.ExtensionFields + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *FeatureSet) Reset() { @@ -3484,6 +3652,13 @@ func (x *FeatureSet) GetEnforceNamingStyle() FeatureSet_EnforceNamingStyle { return FeatureSet_ENFORCE_NAMING_STYLE_UNKNOWN } +func (x *FeatureSet) GetDefaultSymbolVisibility() FeatureSet_VisibilityFeature_DefaultSymbolVisibility { + if x != nil && x.DefaultSymbolVisibility != nil { + return *x.DefaultSymbolVisibility + } + return FeatureSet_VisibilityFeature_DEFAULT_SYMBOL_VISIBILITY_UNKNOWN +} + // A compiled specification for the defaults of a set of features. These // messages are generated from FeatureSet extensions and can be used to seed // feature resolution. The resolution with this object becomes a simple search @@ -4144,6 +4319,42 @@ func (x *UninterpretedOption_NamePart) GetIsExtension() bool { return false } +type FeatureSet_VisibilityFeature struct { + state protoimpl.MessageState `protogen:"open.v1"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *FeatureSet_VisibilityFeature) Reset() { + *x = FeatureSet_VisibilityFeature{} + mi := &file_google_protobuf_descriptor_proto_msgTypes[30] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *FeatureSet_VisibilityFeature) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FeatureSet_VisibilityFeature) ProtoMessage() {} + +func (x *FeatureSet_VisibilityFeature) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[30] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FeatureSet_VisibilityFeature.ProtoReflect.Descriptor instead. +func (*FeatureSet_VisibilityFeature) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{19, 0} +} + // A map from every known edition with a unique set of defaults to its // defaults. Not all editions may be contained here. 
For a given edition, // the defaults at the closest matching edition ordered at or before it should @@ -4161,7 +4372,7 @@ type FeatureSetDefaults_FeatureSetEditionDefault struct { func (x *FeatureSetDefaults_FeatureSetEditionDefault) Reset() { *x = FeatureSetDefaults_FeatureSetEditionDefault{} - mi := &file_google_protobuf_descriptor_proto_msgTypes[30] + mi := &file_google_protobuf_descriptor_proto_msgTypes[31] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -4173,7 +4384,7 @@ func (x *FeatureSetDefaults_FeatureSetEditionDefault) String() string { func (*FeatureSetDefaults_FeatureSetEditionDefault) ProtoMessage() {} func (x *FeatureSetDefaults_FeatureSetEditionDefault) ProtoReflect() protoreflect.Message { - mi := &file_google_protobuf_descriptor_proto_msgTypes[30] + mi := &file_google_protobuf_descriptor_proto_msgTypes[31] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -4309,7 +4520,7 @@ type SourceCodeInfo_Location struct { func (x *SourceCodeInfo_Location) Reset() { *x = SourceCodeInfo_Location{} - mi := &file_google_protobuf_descriptor_proto_msgTypes[31] + mi := &file_google_protobuf_descriptor_proto_msgTypes[32] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -4321,7 +4532,7 @@ func (x *SourceCodeInfo_Location) String() string { func (*SourceCodeInfo_Location) ProtoMessage() {} func (x *SourceCodeInfo_Location) ProtoReflect() protoreflect.Message { - mi := &file_google_protobuf_descriptor_proto_msgTypes[31] + mi := &file_google_protobuf_descriptor_proto_msgTypes[32] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -4393,7 +4604,7 @@ type GeneratedCodeInfo_Annotation struct { func (x *GeneratedCodeInfo_Annotation) Reset() { *x = GeneratedCodeInfo_Annotation{} - mi := &file_google_protobuf_descriptor_proto_msgTypes[32] + mi := &file_google_protobuf_descriptor_proto_msgTypes[33] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -4405,7 +4616,7 @@ func (x *GeneratedCodeInfo_Annotation) String() string { func (*GeneratedCodeInfo_Annotation) ProtoMessage() {} func (x *GeneratedCodeInfo_Annotation) ProtoReflect() protoreflect.Message { - mi := &file_google_protobuf_descriptor_proto_msgTypes[32] + mi := &file_google_protobuf_descriptor_proto_msgTypes[33] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -4462,7 +4673,7 @@ const file_google_protobuf_descriptor_proto_rawDesc = "" + "\n" + " google/protobuf/descriptor.proto\x12\x0fgoogle.protobuf\"[\n" + "\x11FileDescriptorSet\x128\n" + - "\x04file\x18\x01 \x03(\v2$.google.protobuf.FileDescriptorProtoR\x04file*\f\b\x80\xec\xca\xff\x01\x10\x81\xec\xca\xff\x01\"\x98\x05\n" + + "\x04file\x18\x01 \x03(\v2$.google.protobuf.FileDescriptorProtoR\x04file*\f\b\x80\xec\xca\xff\x01\x10\x81\xec\xca\xff\x01\"\xc5\x05\n" + "\x13FileDescriptorProto\x12\x12\n" + "\x04name\x18\x01 \x01(\tR\x04name\x12\x18\n" + "\apackage\x18\x02 \x01(\tR\apackage\x12\x1e\n" + @@ -4471,7 +4682,8 @@ const file_google_protobuf_descriptor_proto_rawDesc = "" + "dependency\x12+\n" + "\x11public_dependency\x18\n" + " \x03(\x05R\x10publicDependency\x12'\n" + - "\x0fweak_dependency\x18\v \x03(\x05R\x0eweakDependency\x12C\n" + + "\x0fweak_dependency\x18\v \x03(\x05R\x0eweakDependency\x12+\n" + + "\x11option_dependency\x18\x0f \x03(\tR\x10optionDependency\x12C\n" + "\fmessage_type\x18\x04 \x03(\v2 
.google.protobuf.DescriptorProtoR\vmessageType\x12A\n" + "\tenum_type\x18\x05 \x03(\v2$.google.protobuf.EnumDescriptorProtoR\benumType\x12A\n" + "\aservice\x18\x06 \x03(\v2'.google.protobuf.ServiceDescriptorProtoR\aservice\x12C\n" + @@ -4479,7 +4691,7 @@ const file_google_protobuf_descriptor_proto_rawDesc = "" + "\aoptions\x18\b \x01(\v2\x1c.google.protobuf.FileOptionsR\aoptions\x12I\n" + "\x10source_code_info\x18\t \x01(\v2\x1f.google.protobuf.SourceCodeInfoR\x0esourceCodeInfo\x12\x16\n" + "\x06syntax\x18\f \x01(\tR\x06syntax\x122\n" + - "\aedition\x18\x0e \x01(\x0e2\x18.google.protobuf.EditionR\aedition\"\xb9\x06\n" + + "\aedition\x18\x0e \x01(\x0e2\x18.google.protobuf.EditionR\aedition\"\xfc\x06\n" + "\x0fDescriptorProto\x12\x12\n" + "\x04name\x18\x01 \x01(\tR\x04name\x12;\n" + "\x05field\x18\x02 \x03(\v2%.google.protobuf.FieldDescriptorProtoR\x05field\x12C\n" + @@ -4493,7 +4705,10 @@ const file_google_protobuf_descriptor_proto_rawDesc = "" + "\aoptions\x18\a \x01(\v2\x1f.google.protobuf.MessageOptionsR\aoptions\x12U\n" + "\x0ereserved_range\x18\t \x03(\v2..google.protobuf.DescriptorProto.ReservedRangeR\rreservedRange\x12#\n" + "\rreserved_name\x18\n" + - " \x03(\tR\freservedName\x1az\n" + + " \x03(\tR\freservedName\x12A\n" + + "\n" + + "visibility\x18\v \x01(\x0e2!.google.protobuf.SymbolVisibilityR\n" + + "visibility\x1az\n" + "\x0eExtensionRange\x12\x14\n" + "\x05start\x18\x01 \x01(\x05R\x05start\x12\x10\n" + "\x03end\x18\x02 \x01(\x05R\x03end\x12@\n" + @@ -4562,13 +4777,16 @@ const file_google_protobuf_descriptor_proto_rawDesc = "" + "\x0eLABEL_REQUIRED\x10\x02\"c\n" + "\x14OneofDescriptorProto\x12\x12\n" + "\x04name\x18\x01 \x01(\tR\x04name\x127\n" + - "\aoptions\x18\x02 \x01(\v2\x1d.google.protobuf.OneofOptionsR\aoptions\"\xe3\x02\n" + + "\aoptions\x18\x02 \x01(\v2\x1d.google.protobuf.OneofOptionsR\aoptions\"\xa6\x03\n" + "\x13EnumDescriptorProto\x12\x12\n" + "\x04name\x18\x01 \x01(\tR\x04name\x12?\n" + "\x05value\x18\x02 \x03(\v2).google.protobuf.EnumValueDescriptorProtoR\x05value\x126\n" + "\aoptions\x18\x03 \x01(\v2\x1c.google.protobuf.EnumOptionsR\aoptions\x12]\n" + "\x0ereserved_range\x18\x04 \x03(\v26.google.protobuf.EnumDescriptorProto.EnumReservedRangeR\rreservedRange\x12#\n" + - "\rreserved_name\x18\x05 \x03(\tR\freservedName\x1a;\n" + + "\rreserved_name\x18\x05 \x03(\tR\freservedName\x12A\n" + + "\n" + + "visibility\x18\x06 \x01(\x0e2!.google.protobuf.SymbolVisibilityR\n" + + "visibility\x1a;\n" + "\x11EnumReservedRange\x12\x14\n" + "\x05start\x18\x01 \x01(\x05R\x05start\x12\x10\n" + "\x03end\x18\x02 \x01(\x05R\x03end\"\x83\x01\n" + @@ -4629,7 +4847,7 @@ const file_google_protobuf_descriptor_proto_rawDesc = "" + "&deprecated_legacy_json_field_conflicts\x18\v \x01(\bB\x02\x18\x01R\"deprecatedLegacyJsonFieldConflicts\x127\n" + "\bfeatures\x18\f \x01(\v2\x1b.google.protobuf.FeatureSetR\bfeatures\x12X\n" + "\x14uninterpreted_option\x18\xe7\a \x03(\v2$.google.protobuf.UninterpretedOptionR\x13uninterpretedOption*\t\b\xe8\a\x10\x80\x80\x80\x80\x02J\x04\b\x04\x10\x05J\x04\b\x05\x10\x06J\x04\b\x06\x10\aJ\x04\b\b\x10\tJ\x04\b\t\x10\n" + - "\"\x9d\r\n" + + "\"\xa1\r\n" + "\fFieldOptions\x12A\n" + "\x05ctype\x18\x01 \x01(\x0e2#.google.protobuf.FieldOptions.CType:\x06STRINGR\x05ctype\x12\x16\n" + "\x06packed\x18\x02 \x01(\bR\x06packed\x12G\n" + @@ -4638,9 +4856,9 @@ const file_google_protobuf_descriptor_proto_rawDesc = "" + "\x0funverified_lazy\x18\x0f \x01(\b:\x05falseR\x0eunverifiedLazy\x12%\n" + "\n" + "deprecated\x18\x03 \x01(\b:\x05falseR\n" + - "deprecated\x12\x19\n" + + 
"deprecated\x12\x1d\n" + "\x04weak\x18\n" + - " \x01(\b:\x05falseR\x04weak\x12(\n" + + " \x01(\b:\x05falseB\x02\x18\x01R\x04weak\x12(\n" + "\fdebug_redact\x18\x10 \x01(\b:\x05falseR\vdebugRedact\x12K\n" + "\tretention\x18\x11 \x01(\x0e2-.google.protobuf.FieldOptions.OptionRetentionR\tretention\x12H\n" + "\atargets\x18\x13 \x03(\x0e2..google.protobuf.FieldOptions.OptionTargetTypeR\atargets\x12W\n" + @@ -4728,7 +4946,7 @@ const file_google_protobuf_descriptor_proto_rawDesc = "" + "\x0faggregate_value\x18\b \x01(\tR\x0eaggregateValue\x1aJ\n" + "\bNamePart\x12\x1b\n" + "\tname_part\x18\x01 \x02(\tR\bnamePart\x12!\n" + - "\fis_extension\x18\x02 \x02(\bR\visExtension\"\xae\f\n" + + "\fis_extension\x18\x02 \x02(\bR\visExtension\"\x8e\x0f\n" + "\n" + "FeatureSet\x12\x91\x01\n" + "\x0efield_presence\x18\x01 \x01(\x0e2).google.protobuf.FeatureSet.FieldPresenceB?\x88\x01\x01\x98\x01\x04\x98\x01\x01\xa2\x01\r\x12\bEXPLICIT\x18\x84\a\xa2\x01\r\x12\bIMPLICIT\x18\xe7\a\xa2\x01\r\x12\bEXPLICIT\x18\xe8\a\xb2\x01\x03\b\xe8\aR\rfieldPresence\x12l\n" + @@ -4739,7 +4957,18 @@ const file_google_protobuf_descriptor_proto_rawDesc = "" + "\vjson_format\x18\x06 \x01(\x0e2&.google.protobuf.FeatureSet.JsonFormatB9\x88\x01\x01\x98\x01\x03\x98\x01\x06\x98\x01\x01\xa2\x01\x17\x12\x12LEGACY_BEST_EFFORT\x18\x84\a\xa2\x01\n" + "\x12\x05ALLOW\x18\xe7\a\xb2\x01\x03\b\xe8\aR\n" + "jsonFormat\x12\xab\x01\n" + - "\x14enforce_naming_style\x18\a \x01(\x0e2..google.protobuf.FeatureSet.EnforceNamingStyleBI\x88\x01\x02\x98\x01\x01\x98\x01\x02\x98\x01\x03\x98\x01\x04\x98\x01\x05\x98\x01\x06\x98\x01\a\x98\x01\b\x98\x01\t\xa2\x01\x11\x12\fSTYLE_LEGACY\x18\x84\a\xa2\x01\x0e\x12\tSTYLE2024\x18\xe9\a\xb2\x01\x03\b\xe9\aR\x12enforceNamingStyle\"\\\n" + + "\x14enforce_naming_style\x18\a \x01(\x0e2..google.protobuf.FeatureSet.EnforceNamingStyleBI\x88\x01\x02\x98\x01\x01\x98\x01\x02\x98\x01\x03\x98\x01\x04\x98\x01\x05\x98\x01\x06\x98\x01\a\x98\x01\b\x98\x01\t\xa2\x01\x11\x12\fSTYLE_LEGACY\x18\x84\a\xa2\x01\x0e\x12\tSTYLE2024\x18\xe9\a\xb2\x01\x03\b\xe9\aR\x12enforceNamingStyle\x12\xb9\x01\n" + + "\x19default_symbol_visibility\x18\b \x01(\x0e2E.google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibilityB6\x88\x01\x02\x98\x01\x01\xa2\x01\x0f\x12\n" + + "EXPORT_ALL\x18\x84\a\xa2\x01\x15\x12\x10EXPORT_TOP_LEVEL\x18\xe9\a\xb2\x01\x03\b\xe9\aR\x17defaultSymbolVisibility\x1a\xa1\x01\n" + + "\x11VisibilityFeature\"\x81\x01\n" + + "\x17DefaultSymbolVisibility\x12%\n" + + "!DEFAULT_SYMBOL_VISIBILITY_UNKNOWN\x10\x00\x12\x0e\n" + + "\n" + + "EXPORT_ALL\x10\x01\x12\x14\n" + + "\x10EXPORT_TOP_LEVEL\x10\x02\x12\r\n" + + "\tLOCAL_ALL\x10\x03\x12\n" + + "\n" + + "\x06STRICT\x10\x04J\b\b\x01\x10\x80\x80\x80\x80\x02\"\\\n" + "\rFieldPresence\x12\x1a\n" + "\x16FIELD_PRESENCE_UNKNOWN\x10\x00\x12\f\n" + "\bEXPLICIT\x10\x01\x12\f\n" + @@ -4817,7 +5046,11 @@ const file_google_protobuf_descriptor_proto_rawDesc = "" + "\x17EDITION_99997_TEST_ONLY\x10\x9d\x8d\x06\x12\x1d\n" + "\x17EDITION_99998_TEST_ONLY\x10\x9e\x8d\x06\x12\x1d\n" + "\x17EDITION_99999_TEST_ONLY\x10\x9f\x8d\x06\x12\x13\n" + - "\vEDITION_MAX\x10\xff\xff\xff\xff\aB~\n" + + "\vEDITION_MAX\x10\xff\xff\xff\xff\a*U\n" + + "\x10SymbolVisibility\x12\x14\n" + + "\x10VISIBILITY_UNSET\x10\x00\x12\x14\n" + + "\x10VISIBILITY_LOCAL\x10\x01\x12\x15\n" + + "\x11VISIBILITY_EXPORT\x10\x02B~\n" + "\x13com.google.protobufB\x10DescriptorProtosH\x01Z-google.golang.org/protobuf/types/descriptorpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1aGoogle.Protobuf.Reflection" var ( @@ -4832,145 +5065,151 @@ func 
file_google_protobuf_descriptor_proto_rawDescGZIP() []byte { return file_google_protobuf_descriptor_proto_rawDescData } -var file_google_protobuf_descriptor_proto_enumTypes = make([]protoimpl.EnumInfo, 18) -var file_google_protobuf_descriptor_proto_msgTypes = make([]protoimpl.MessageInfo, 33) +var file_google_protobuf_descriptor_proto_enumTypes = make([]protoimpl.EnumInfo, 20) +var file_google_protobuf_descriptor_proto_msgTypes = make([]protoimpl.MessageInfo, 34) var file_google_protobuf_descriptor_proto_goTypes = []any{ - (Edition)(0), // 0: google.protobuf.Edition - (ExtensionRangeOptions_VerificationState)(0), // 1: google.protobuf.ExtensionRangeOptions.VerificationState - (FieldDescriptorProto_Type)(0), // 2: google.protobuf.FieldDescriptorProto.Type - (FieldDescriptorProto_Label)(0), // 3: google.protobuf.FieldDescriptorProto.Label - (FileOptions_OptimizeMode)(0), // 4: google.protobuf.FileOptions.OptimizeMode - (FieldOptions_CType)(0), // 5: google.protobuf.FieldOptions.CType - (FieldOptions_JSType)(0), // 6: google.protobuf.FieldOptions.JSType - (FieldOptions_OptionRetention)(0), // 7: google.protobuf.FieldOptions.OptionRetention - (FieldOptions_OptionTargetType)(0), // 8: google.protobuf.FieldOptions.OptionTargetType - (MethodOptions_IdempotencyLevel)(0), // 9: google.protobuf.MethodOptions.IdempotencyLevel - (FeatureSet_FieldPresence)(0), // 10: google.protobuf.FeatureSet.FieldPresence - (FeatureSet_EnumType)(0), // 11: google.protobuf.FeatureSet.EnumType - (FeatureSet_RepeatedFieldEncoding)(0), // 12: google.protobuf.FeatureSet.RepeatedFieldEncoding - (FeatureSet_Utf8Validation)(0), // 13: google.protobuf.FeatureSet.Utf8Validation - (FeatureSet_MessageEncoding)(0), // 14: google.protobuf.FeatureSet.MessageEncoding - (FeatureSet_JsonFormat)(0), // 15: google.protobuf.FeatureSet.JsonFormat - (FeatureSet_EnforceNamingStyle)(0), // 16: google.protobuf.FeatureSet.EnforceNamingStyle - (GeneratedCodeInfo_Annotation_Semantic)(0), // 17: google.protobuf.GeneratedCodeInfo.Annotation.Semantic - (*FileDescriptorSet)(nil), // 18: google.protobuf.FileDescriptorSet - (*FileDescriptorProto)(nil), // 19: google.protobuf.FileDescriptorProto - (*DescriptorProto)(nil), // 20: google.protobuf.DescriptorProto - (*ExtensionRangeOptions)(nil), // 21: google.protobuf.ExtensionRangeOptions - (*FieldDescriptorProto)(nil), // 22: google.protobuf.FieldDescriptorProto - (*OneofDescriptorProto)(nil), // 23: google.protobuf.OneofDescriptorProto - (*EnumDescriptorProto)(nil), // 24: google.protobuf.EnumDescriptorProto - (*EnumValueDescriptorProto)(nil), // 25: google.protobuf.EnumValueDescriptorProto - (*ServiceDescriptorProto)(nil), // 26: google.protobuf.ServiceDescriptorProto - (*MethodDescriptorProto)(nil), // 27: google.protobuf.MethodDescriptorProto - (*FileOptions)(nil), // 28: google.protobuf.FileOptions - (*MessageOptions)(nil), // 29: google.protobuf.MessageOptions - (*FieldOptions)(nil), // 30: google.protobuf.FieldOptions - (*OneofOptions)(nil), // 31: google.protobuf.OneofOptions - (*EnumOptions)(nil), // 32: google.protobuf.EnumOptions - (*EnumValueOptions)(nil), // 33: google.protobuf.EnumValueOptions - (*ServiceOptions)(nil), // 34: google.protobuf.ServiceOptions - (*MethodOptions)(nil), // 35: google.protobuf.MethodOptions - (*UninterpretedOption)(nil), // 36: google.protobuf.UninterpretedOption - (*FeatureSet)(nil), // 37: google.protobuf.FeatureSet - (*FeatureSetDefaults)(nil), // 38: google.protobuf.FeatureSetDefaults - (*SourceCodeInfo)(nil), // 39: google.protobuf.SourceCodeInfo - 
(*GeneratedCodeInfo)(nil), // 40: google.protobuf.GeneratedCodeInfo - (*DescriptorProto_ExtensionRange)(nil), // 41: google.protobuf.DescriptorProto.ExtensionRange - (*DescriptorProto_ReservedRange)(nil), // 42: google.protobuf.DescriptorProto.ReservedRange - (*ExtensionRangeOptions_Declaration)(nil), // 43: google.protobuf.ExtensionRangeOptions.Declaration - (*EnumDescriptorProto_EnumReservedRange)(nil), // 44: google.protobuf.EnumDescriptorProto.EnumReservedRange - (*FieldOptions_EditionDefault)(nil), // 45: google.protobuf.FieldOptions.EditionDefault - (*FieldOptions_FeatureSupport)(nil), // 46: google.protobuf.FieldOptions.FeatureSupport - (*UninterpretedOption_NamePart)(nil), // 47: google.protobuf.UninterpretedOption.NamePart - (*FeatureSetDefaults_FeatureSetEditionDefault)(nil), // 48: google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault - (*SourceCodeInfo_Location)(nil), // 49: google.protobuf.SourceCodeInfo.Location - (*GeneratedCodeInfo_Annotation)(nil), // 50: google.protobuf.GeneratedCodeInfo.Annotation + (Edition)(0), // 0: google.protobuf.Edition + (SymbolVisibility)(0), // 1: google.protobuf.SymbolVisibility + (ExtensionRangeOptions_VerificationState)(0), // 2: google.protobuf.ExtensionRangeOptions.VerificationState + (FieldDescriptorProto_Type)(0), // 3: google.protobuf.FieldDescriptorProto.Type + (FieldDescriptorProto_Label)(0), // 4: google.protobuf.FieldDescriptorProto.Label + (FileOptions_OptimizeMode)(0), // 5: google.protobuf.FileOptions.OptimizeMode + (FieldOptions_CType)(0), // 6: google.protobuf.FieldOptions.CType + (FieldOptions_JSType)(0), // 7: google.protobuf.FieldOptions.JSType + (FieldOptions_OptionRetention)(0), // 8: google.protobuf.FieldOptions.OptionRetention + (FieldOptions_OptionTargetType)(0), // 9: google.protobuf.FieldOptions.OptionTargetType + (MethodOptions_IdempotencyLevel)(0), // 10: google.protobuf.MethodOptions.IdempotencyLevel + (FeatureSet_FieldPresence)(0), // 11: google.protobuf.FeatureSet.FieldPresence + (FeatureSet_EnumType)(0), // 12: google.protobuf.FeatureSet.EnumType + (FeatureSet_RepeatedFieldEncoding)(0), // 13: google.protobuf.FeatureSet.RepeatedFieldEncoding + (FeatureSet_Utf8Validation)(0), // 14: google.protobuf.FeatureSet.Utf8Validation + (FeatureSet_MessageEncoding)(0), // 15: google.protobuf.FeatureSet.MessageEncoding + (FeatureSet_JsonFormat)(0), // 16: google.protobuf.FeatureSet.JsonFormat + (FeatureSet_EnforceNamingStyle)(0), // 17: google.protobuf.FeatureSet.EnforceNamingStyle + (FeatureSet_VisibilityFeature_DefaultSymbolVisibility)(0), // 18: google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility + (GeneratedCodeInfo_Annotation_Semantic)(0), // 19: google.protobuf.GeneratedCodeInfo.Annotation.Semantic + (*FileDescriptorSet)(nil), // 20: google.protobuf.FileDescriptorSet + (*FileDescriptorProto)(nil), // 21: google.protobuf.FileDescriptorProto + (*DescriptorProto)(nil), // 22: google.protobuf.DescriptorProto + (*ExtensionRangeOptions)(nil), // 23: google.protobuf.ExtensionRangeOptions + (*FieldDescriptorProto)(nil), // 24: google.protobuf.FieldDescriptorProto + (*OneofDescriptorProto)(nil), // 25: google.protobuf.OneofDescriptorProto + (*EnumDescriptorProto)(nil), // 26: google.protobuf.EnumDescriptorProto + (*EnumValueDescriptorProto)(nil), // 27: google.protobuf.EnumValueDescriptorProto + (*ServiceDescriptorProto)(nil), // 28: google.protobuf.ServiceDescriptorProto + (*MethodDescriptorProto)(nil), // 29: google.protobuf.MethodDescriptorProto + (*FileOptions)(nil), // 30: google.protobuf.FileOptions 
+ (*MessageOptions)(nil), // 31: google.protobuf.MessageOptions + (*FieldOptions)(nil), // 32: google.protobuf.FieldOptions + (*OneofOptions)(nil), // 33: google.protobuf.OneofOptions + (*EnumOptions)(nil), // 34: google.protobuf.EnumOptions + (*EnumValueOptions)(nil), // 35: google.protobuf.EnumValueOptions + (*ServiceOptions)(nil), // 36: google.protobuf.ServiceOptions + (*MethodOptions)(nil), // 37: google.protobuf.MethodOptions + (*UninterpretedOption)(nil), // 38: google.protobuf.UninterpretedOption + (*FeatureSet)(nil), // 39: google.protobuf.FeatureSet + (*FeatureSetDefaults)(nil), // 40: google.protobuf.FeatureSetDefaults + (*SourceCodeInfo)(nil), // 41: google.protobuf.SourceCodeInfo + (*GeneratedCodeInfo)(nil), // 42: google.protobuf.GeneratedCodeInfo + (*DescriptorProto_ExtensionRange)(nil), // 43: google.protobuf.DescriptorProto.ExtensionRange + (*DescriptorProto_ReservedRange)(nil), // 44: google.protobuf.DescriptorProto.ReservedRange + (*ExtensionRangeOptions_Declaration)(nil), // 45: google.protobuf.ExtensionRangeOptions.Declaration + (*EnumDescriptorProto_EnumReservedRange)(nil), // 46: google.protobuf.EnumDescriptorProto.EnumReservedRange + (*FieldOptions_EditionDefault)(nil), // 47: google.protobuf.FieldOptions.EditionDefault + (*FieldOptions_FeatureSupport)(nil), // 48: google.protobuf.FieldOptions.FeatureSupport + (*UninterpretedOption_NamePart)(nil), // 49: google.protobuf.UninterpretedOption.NamePart + (*FeatureSet_VisibilityFeature)(nil), // 50: google.protobuf.FeatureSet.VisibilityFeature + (*FeatureSetDefaults_FeatureSetEditionDefault)(nil), // 51: google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + (*SourceCodeInfo_Location)(nil), // 52: google.protobuf.SourceCodeInfo.Location + (*GeneratedCodeInfo_Annotation)(nil), // 53: google.protobuf.GeneratedCodeInfo.Annotation } var file_google_protobuf_descriptor_proto_depIdxs = []int32{ - 19, // 0: google.protobuf.FileDescriptorSet.file:type_name -> google.protobuf.FileDescriptorProto - 20, // 1: google.protobuf.FileDescriptorProto.message_type:type_name -> google.protobuf.DescriptorProto - 24, // 2: google.protobuf.FileDescriptorProto.enum_type:type_name -> google.protobuf.EnumDescriptorProto - 26, // 3: google.protobuf.FileDescriptorProto.service:type_name -> google.protobuf.ServiceDescriptorProto - 22, // 4: google.protobuf.FileDescriptorProto.extension:type_name -> google.protobuf.FieldDescriptorProto - 28, // 5: google.protobuf.FileDescriptorProto.options:type_name -> google.protobuf.FileOptions - 39, // 6: google.protobuf.FileDescriptorProto.source_code_info:type_name -> google.protobuf.SourceCodeInfo + 21, // 0: google.protobuf.FileDescriptorSet.file:type_name -> google.protobuf.FileDescriptorProto + 22, // 1: google.protobuf.FileDescriptorProto.message_type:type_name -> google.protobuf.DescriptorProto + 26, // 2: google.protobuf.FileDescriptorProto.enum_type:type_name -> google.protobuf.EnumDescriptorProto + 28, // 3: google.protobuf.FileDescriptorProto.service:type_name -> google.protobuf.ServiceDescriptorProto + 24, // 4: google.protobuf.FileDescriptorProto.extension:type_name -> google.protobuf.FieldDescriptorProto + 30, // 5: google.protobuf.FileDescriptorProto.options:type_name -> google.protobuf.FileOptions + 41, // 6: google.protobuf.FileDescriptorProto.source_code_info:type_name -> google.protobuf.SourceCodeInfo 0, // 7: google.protobuf.FileDescriptorProto.edition:type_name -> google.protobuf.Edition - 22, // 8: google.protobuf.DescriptorProto.field:type_name -> 
google.protobuf.FieldDescriptorProto - 22, // 9: google.protobuf.DescriptorProto.extension:type_name -> google.protobuf.FieldDescriptorProto - 20, // 10: google.protobuf.DescriptorProto.nested_type:type_name -> google.protobuf.DescriptorProto - 24, // 11: google.protobuf.DescriptorProto.enum_type:type_name -> google.protobuf.EnumDescriptorProto - 41, // 12: google.protobuf.DescriptorProto.extension_range:type_name -> google.protobuf.DescriptorProto.ExtensionRange - 23, // 13: google.protobuf.DescriptorProto.oneof_decl:type_name -> google.protobuf.OneofDescriptorProto - 29, // 14: google.protobuf.DescriptorProto.options:type_name -> google.protobuf.MessageOptions - 42, // 15: google.protobuf.DescriptorProto.reserved_range:type_name -> google.protobuf.DescriptorProto.ReservedRange - 36, // 16: google.protobuf.ExtensionRangeOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption - 43, // 17: google.protobuf.ExtensionRangeOptions.declaration:type_name -> google.protobuf.ExtensionRangeOptions.Declaration - 37, // 18: google.protobuf.ExtensionRangeOptions.features:type_name -> google.protobuf.FeatureSet - 1, // 19: google.protobuf.ExtensionRangeOptions.verification:type_name -> google.protobuf.ExtensionRangeOptions.VerificationState - 3, // 20: google.protobuf.FieldDescriptorProto.label:type_name -> google.protobuf.FieldDescriptorProto.Label - 2, // 21: google.protobuf.FieldDescriptorProto.type:type_name -> google.protobuf.FieldDescriptorProto.Type - 30, // 22: google.protobuf.FieldDescriptorProto.options:type_name -> google.protobuf.FieldOptions - 31, // 23: google.protobuf.OneofDescriptorProto.options:type_name -> google.protobuf.OneofOptions - 25, // 24: google.protobuf.EnumDescriptorProto.value:type_name -> google.protobuf.EnumValueDescriptorProto - 32, // 25: google.protobuf.EnumDescriptorProto.options:type_name -> google.protobuf.EnumOptions - 44, // 26: google.protobuf.EnumDescriptorProto.reserved_range:type_name -> google.protobuf.EnumDescriptorProto.EnumReservedRange - 33, // 27: google.protobuf.EnumValueDescriptorProto.options:type_name -> google.protobuf.EnumValueOptions - 27, // 28: google.protobuf.ServiceDescriptorProto.method:type_name -> google.protobuf.MethodDescriptorProto - 34, // 29: google.protobuf.ServiceDescriptorProto.options:type_name -> google.protobuf.ServiceOptions - 35, // 30: google.protobuf.MethodDescriptorProto.options:type_name -> google.protobuf.MethodOptions - 4, // 31: google.protobuf.FileOptions.optimize_for:type_name -> google.protobuf.FileOptions.OptimizeMode - 37, // 32: google.protobuf.FileOptions.features:type_name -> google.protobuf.FeatureSet - 36, // 33: google.protobuf.FileOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption - 37, // 34: google.protobuf.MessageOptions.features:type_name -> google.protobuf.FeatureSet - 36, // 35: google.protobuf.MessageOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption - 5, // 36: google.protobuf.FieldOptions.ctype:type_name -> google.protobuf.FieldOptions.CType - 6, // 37: google.protobuf.FieldOptions.jstype:type_name -> google.protobuf.FieldOptions.JSType - 7, // 38: google.protobuf.FieldOptions.retention:type_name -> google.protobuf.FieldOptions.OptionRetention - 8, // 39: google.protobuf.FieldOptions.targets:type_name -> google.protobuf.FieldOptions.OptionTargetType - 45, // 40: google.protobuf.FieldOptions.edition_defaults:type_name -> google.protobuf.FieldOptions.EditionDefault - 37, // 41: google.protobuf.FieldOptions.features:type_name -> 
google.protobuf.FeatureSet - 46, // 42: google.protobuf.FieldOptions.feature_support:type_name -> google.protobuf.FieldOptions.FeatureSupport - 36, // 43: google.protobuf.FieldOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption - 37, // 44: google.protobuf.OneofOptions.features:type_name -> google.protobuf.FeatureSet - 36, // 45: google.protobuf.OneofOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption - 37, // 46: google.protobuf.EnumOptions.features:type_name -> google.protobuf.FeatureSet - 36, // 47: google.protobuf.EnumOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption - 37, // 48: google.protobuf.EnumValueOptions.features:type_name -> google.protobuf.FeatureSet - 46, // 49: google.protobuf.EnumValueOptions.feature_support:type_name -> google.protobuf.FieldOptions.FeatureSupport - 36, // 50: google.protobuf.EnumValueOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption - 37, // 51: google.protobuf.ServiceOptions.features:type_name -> google.protobuf.FeatureSet - 36, // 52: google.protobuf.ServiceOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption - 9, // 53: google.protobuf.MethodOptions.idempotency_level:type_name -> google.protobuf.MethodOptions.IdempotencyLevel - 37, // 54: google.protobuf.MethodOptions.features:type_name -> google.protobuf.FeatureSet - 36, // 55: google.protobuf.MethodOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption - 47, // 56: google.protobuf.UninterpretedOption.name:type_name -> google.protobuf.UninterpretedOption.NamePart - 10, // 57: google.protobuf.FeatureSet.field_presence:type_name -> google.protobuf.FeatureSet.FieldPresence - 11, // 58: google.protobuf.FeatureSet.enum_type:type_name -> google.protobuf.FeatureSet.EnumType - 12, // 59: google.protobuf.FeatureSet.repeated_field_encoding:type_name -> google.protobuf.FeatureSet.RepeatedFieldEncoding - 13, // 60: google.protobuf.FeatureSet.utf8_validation:type_name -> google.protobuf.FeatureSet.Utf8Validation - 14, // 61: google.protobuf.FeatureSet.message_encoding:type_name -> google.protobuf.FeatureSet.MessageEncoding - 15, // 62: google.protobuf.FeatureSet.json_format:type_name -> google.protobuf.FeatureSet.JsonFormat - 16, // 63: google.protobuf.FeatureSet.enforce_naming_style:type_name -> google.protobuf.FeatureSet.EnforceNamingStyle - 48, // 64: google.protobuf.FeatureSetDefaults.defaults:type_name -> google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault - 0, // 65: google.protobuf.FeatureSetDefaults.minimum_edition:type_name -> google.protobuf.Edition - 0, // 66: google.protobuf.FeatureSetDefaults.maximum_edition:type_name -> google.protobuf.Edition - 49, // 67: google.protobuf.SourceCodeInfo.location:type_name -> google.protobuf.SourceCodeInfo.Location - 50, // 68: google.protobuf.GeneratedCodeInfo.annotation:type_name -> google.protobuf.GeneratedCodeInfo.Annotation - 21, // 69: google.protobuf.DescriptorProto.ExtensionRange.options:type_name -> google.protobuf.ExtensionRangeOptions - 0, // 70: google.protobuf.FieldOptions.EditionDefault.edition:type_name -> google.protobuf.Edition - 0, // 71: google.protobuf.FieldOptions.FeatureSupport.edition_introduced:type_name -> google.protobuf.Edition - 0, // 72: google.protobuf.FieldOptions.FeatureSupport.edition_deprecated:type_name -> google.protobuf.Edition - 0, // 73: google.protobuf.FieldOptions.FeatureSupport.edition_removed:type_name -> google.protobuf.Edition - 0, // 74: 
google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.edition:type_name -> google.protobuf.Edition - 37, // 75: google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.overridable_features:type_name -> google.protobuf.FeatureSet - 37, // 76: google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.fixed_features:type_name -> google.protobuf.FeatureSet - 17, // 77: google.protobuf.GeneratedCodeInfo.Annotation.semantic:type_name -> google.protobuf.GeneratedCodeInfo.Annotation.Semantic - 78, // [78:78] is the sub-list for method output_type - 78, // [78:78] is the sub-list for method input_type - 78, // [78:78] is the sub-list for extension type_name - 78, // [78:78] is the sub-list for extension extendee - 0, // [0:78] is the sub-list for field type_name + 24, // 8: google.protobuf.DescriptorProto.field:type_name -> google.protobuf.FieldDescriptorProto + 24, // 9: google.protobuf.DescriptorProto.extension:type_name -> google.protobuf.FieldDescriptorProto + 22, // 10: google.protobuf.DescriptorProto.nested_type:type_name -> google.protobuf.DescriptorProto + 26, // 11: google.protobuf.DescriptorProto.enum_type:type_name -> google.protobuf.EnumDescriptorProto + 43, // 12: google.protobuf.DescriptorProto.extension_range:type_name -> google.protobuf.DescriptorProto.ExtensionRange + 25, // 13: google.protobuf.DescriptorProto.oneof_decl:type_name -> google.protobuf.OneofDescriptorProto + 31, // 14: google.protobuf.DescriptorProto.options:type_name -> google.protobuf.MessageOptions + 44, // 15: google.protobuf.DescriptorProto.reserved_range:type_name -> google.protobuf.DescriptorProto.ReservedRange + 1, // 16: google.protobuf.DescriptorProto.visibility:type_name -> google.protobuf.SymbolVisibility + 38, // 17: google.protobuf.ExtensionRangeOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 45, // 18: google.protobuf.ExtensionRangeOptions.declaration:type_name -> google.protobuf.ExtensionRangeOptions.Declaration + 39, // 19: google.protobuf.ExtensionRangeOptions.features:type_name -> google.protobuf.FeatureSet + 2, // 20: google.protobuf.ExtensionRangeOptions.verification:type_name -> google.protobuf.ExtensionRangeOptions.VerificationState + 4, // 21: google.protobuf.FieldDescriptorProto.label:type_name -> google.protobuf.FieldDescriptorProto.Label + 3, // 22: google.protobuf.FieldDescriptorProto.type:type_name -> google.protobuf.FieldDescriptorProto.Type + 32, // 23: google.protobuf.FieldDescriptorProto.options:type_name -> google.protobuf.FieldOptions + 33, // 24: google.protobuf.OneofDescriptorProto.options:type_name -> google.protobuf.OneofOptions + 27, // 25: google.protobuf.EnumDescriptorProto.value:type_name -> google.protobuf.EnumValueDescriptorProto + 34, // 26: google.protobuf.EnumDescriptorProto.options:type_name -> google.protobuf.EnumOptions + 46, // 27: google.protobuf.EnumDescriptorProto.reserved_range:type_name -> google.protobuf.EnumDescriptorProto.EnumReservedRange + 1, // 28: google.protobuf.EnumDescriptorProto.visibility:type_name -> google.protobuf.SymbolVisibility + 35, // 29: google.protobuf.EnumValueDescriptorProto.options:type_name -> google.protobuf.EnumValueOptions + 29, // 30: google.protobuf.ServiceDescriptorProto.method:type_name -> google.protobuf.MethodDescriptorProto + 36, // 31: google.protobuf.ServiceDescriptorProto.options:type_name -> google.protobuf.ServiceOptions + 37, // 32: google.protobuf.MethodDescriptorProto.options:type_name -> google.protobuf.MethodOptions + 5, // 33: 
google.protobuf.FileOptions.optimize_for:type_name -> google.protobuf.FileOptions.OptimizeMode + 39, // 34: google.protobuf.FileOptions.features:type_name -> google.protobuf.FeatureSet + 38, // 35: google.protobuf.FileOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 39, // 36: google.protobuf.MessageOptions.features:type_name -> google.protobuf.FeatureSet + 38, // 37: google.protobuf.MessageOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 6, // 38: google.protobuf.FieldOptions.ctype:type_name -> google.protobuf.FieldOptions.CType + 7, // 39: google.protobuf.FieldOptions.jstype:type_name -> google.protobuf.FieldOptions.JSType + 8, // 40: google.protobuf.FieldOptions.retention:type_name -> google.protobuf.FieldOptions.OptionRetention + 9, // 41: google.protobuf.FieldOptions.targets:type_name -> google.protobuf.FieldOptions.OptionTargetType + 47, // 42: google.protobuf.FieldOptions.edition_defaults:type_name -> google.protobuf.FieldOptions.EditionDefault + 39, // 43: google.protobuf.FieldOptions.features:type_name -> google.protobuf.FeatureSet + 48, // 44: google.protobuf.FieldOptions.feature_support:type_name -> google.protobuf.FieldOptions.FeatureSupport + 38, // 45: google.protobuf.FieldOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 39, // 46: google.protobuf.OneofOptions.features:type_name -> google.protobuf.FeatureSet + 38, // 47: google.protobuf.OneofOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 39, // 48: google.protobuf.EnumOptions.features:type_name -> google.protobuf.FeatureSet + 38, // 49: google.protobuf.EnumOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 39, // 50: google.protobuf.EnumValueOptions.features:type_name -> google.protobuf.FeatureSet + 48, // 51: google.protobuf.EnumValueOptions.feature_support:type_name -> google.protobuf.FieldOptions.FeatureSupport + 38, // 52: google.protobuf.EnumValueOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 39, // 53: google.protobuf.ServiceOptions.features:type_name -> google.protobuf.FeatureSet + 38, // 54: google.protobuf.ServiceOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 10, // 55: google.protobuf.MethodOptions.idempotency_level:type_name -> google.protobuf.MethodOptions.IdempotencyLevel + 39, // 56: google.protobuf.MethodOptions.features:type_name -> google.protobuf.FeatureSet + 38, // 57: google.protobuf.MethodOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 49, // 58: google.protobuf.UninterpretedOption.name:type_name -> google.protobuf.UninterpretedOption.NamePart + 11, // 59: google.protobuf.FeatureSet.field_presence:type_name -> google.protobuf.FeatureSet.FieldPresence + 12, // 60: google.protobuf.FeatureSet.enum_type:type_name -> google.protobuf.FeatureSet.EnumType + 13, // 61: google.protobuf.FeatureSet.repeated_field_encoding:type_name -> google.protobuf.FeatureSet.RepeatedFieldEncoding + 14, // 62: google.protobuf.FeatureSet.utf8_validation:type_name -> google.protobuf.FeatureSet.Utf8Validation + 15, // 63: google.protobuf.FeatureSet.message_encoding:type_name -> google.protobuf.FeatureSet.MessageEncoding + 16, // 64: google.protobuf.FeatureSet.json_format:type_name -> google.protobuf.FeatureSet.JsonFormat + 17, // 65: google.protobuf.FeatureSet.enforce_naming_style:type_name -> google.protobuf.FeatureSet.EnforceNamingStyle + 18, // 66: 
google.protobuf.FeatureSet.default_symbol_visibility:type_name -> google.protobuf.FeatureSet.VisibilityFeature.DefaultSymbolVisibility + 51, // 67: google.protobuf.FeatureSetDefaults.defaults:type_name -> google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault + 0, // 68: google.protobuf.FeatureSetDefaults.minimum_edition:type_name -> google.protobuf.Edition + 0, // 69: google.protobuf.FeatureSetDefaults.maximum_edition:type_name -> google.protobuf.Edition + 52, // 70: google.protobuf.SourceCodeInfo.location:type_name -> google.protobuf.SourceCodeInfo.Location + 53, // 71: google.protobuf.GeneratedCodeInfo.annotation:type_name -> google.protobuf.GeneratedCodeInfo.Annotation + 23, // 72: google.protobuf.DescriptorProto.ExtensionRange.options:type_name -> google.protobuf.ExtensionRangeOptions + 0, // 73: google.protobuf.FieldOptions.EditionDefault.edition:type_name -> google.protobuf.Edition + 0, // 74: google.protobuf.FieldOptions.FeatureSupport.edition_introduced:type_name -> google.protobuf.Edition + 0, // 75: google.protobuf.FieldOptions.FeatureSupport.edition_deprecated:type_name -> google.protobuf.Edition + 0, // 76: google.protobuf.FieldOptions.FeatureSupport.edition_removed:type_name -> google.protobuf.Edition + 0, // 77: google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.edition:type_name -> google.protobuf.Edition + 39, // 78: google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.overridable_features:type_name -> google.protobuf.FeatureSet + 39, // 79: google.protobuf.FeatureSetDefaults.FeatureSetEditionDefault.fixed_features:type_name -> google.protobuf.FeatureSet + 19, // 80: google.protobuf.GeneratedCodeInfo.Annotation.semantic:type_name -> google.protobuf.GeneratedCodeInfo.Annotation.Semantic + 81, // [81:81] is the sub-list for method output_type + 81, // [81:81] is the sub-list for method input_type + 81, // [81:81] is the sub-list for extension type_name + 81, // [81:81] is the sub-list for extension extendee + 0, // [0:81] is the sub-list for field type_name } func init() { file_google_protobuf_descriptor_proto_init() } @@ -4983,8 +5222,8 @@ func file_google_protobuf_descriptor_proto_init() { File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: unsafe.Slice(unsafe.StringData(file_google_protobuf_descriptor_proto_rawDesc), len(file_google_protobuf_descriptor_proto_rawDesc)), - NumEnums: 18, - NumMessages: 33, + NumEnums: 20, + NumMessages: 34, NumExtensions: 0, NumServices: 0, }, diff --git a/tools/vendor/modules.txt b/tools/vendor/modules.txt index b5369a01..5d3c437a 100644 --- a/tools/vendor/modules.txt +++ b/tools/vendor/modules.txt @@ -20,8 +20,8 @@ dev.gaijin.team/go/golib/fields # github.com/4meepo/tagalign v1.4.3 ## explicit; go 1.22.0 github.com/4meepo/tagalign -# github.com/Abirdcfly/dupword v0.1.6 -## explicit; go 1.23.0 +# github.com/Abirdcfly/dupword v0.1.7 +## explicit; go 1.24.0 github.com/Abirdcfly/dupword # github.com/AdminBenni/iota-mixing v1.0.0 ## explicit; go 1.23.3 @@ -51,7 +51,7 @@ github.com/BurntSushi/toml/internal # github.com/Djarvur/go-err113 v0.1.1 ## explicit; go 1.23.0 github.com/Djarvur/go-err113 -# github.com/Masterminds/semver/v3 v3.3.1 +# github.com/Masterminds/semver/v3 v3.4.0 ## explicit; go 1.21 github.com/Masterminds/semver/v3 # github.com/MirrexOne/unqueryvet v1.2.1 @@ -91,11 +91,11 @@ github.com/alingse/asasalint ## explicit; go 1.22.0 github.com/alingse/nilnesserr github.com/alingse/nilnesserr/internal/typeparams -# github.com/ashanbrown/forbidigo/v2 v2.1.0 -## explicit; go 1.18 
+# github.com/ashanbrown/forbidigo/v2 v2.3.0 +## explicit; go 1.24.0 github.com/ashanbrown/forbidigo/v2/forbidigo -# github.com/ashanbrown/makezero/v2 v2.0.1 -## explicit; go 1.18 +# github.com/ashanbrown/makezero/v2 v2.1.0 +## explicit; go 1.24.0 github.com/ashanbrown/makezero/v2/makezero # github.com/aymanbagabas/go-osc52/v2 v2.0.1 ## explicit; go 1.16 @@ -112,7 +112,7 @@ github.com/blizzy78/varnamelen # github.com/bombsimon/wsl/v4 v4.7.0 ## explicit; go 1.23 github.com/bombsimon/wsl/v4 -# github.com/bombsimon/wsl/v5 v5.2.0 +# github.com/bombsimon/wsl/v5 v5.3.0 ## explicit; go 1.23.0 github.com/bombsimon/wsl/v5 # github.com/breml/bidichk v0.3.3 @@ -130,8 +130,8 @@ github.com/butuzov/ireturn/analyzer/internal/types ## explicit; go 1.19 github.com/butuzov/mirror github.com/butuzov/mirror/internal/checker -# github.com/catenacyber/perfsprint v0.9.1 -## explicit; go 1.22.0 +# github.com/catenacyber/perfsprint v0.10.0 +## explicit; go 1.24.0 github.com/catenacyber/perfsprint/analyzer # github.com/ccojocar/zxcvbn-go v1.0.4 ## explicit; go 1.20 @@ -147,8 +147,8 @@ github.com/ccojocar/zxcvbn-go/utils/math # github.com/cespare/xxhash/v2 v2.3.0 ## explicit; go 1.11 github.com/cespare/xxhash/v2 -# github.com/charithe/durationcheck v0.0.10 -## explicit; go 1.14 +# github.com/charithe/durationcheck v0.0.11 +## explicit; go 1.16 github.com/charithe/durationcheck # github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc ## explicit; go 1.18 @@ -221,11 +221,11 @@ github.com/fsnotify/fsnotify # github.com/fzipp/gocyclo v0.6.0 ## explicit; go 1.18 github.com/fzipp/gocyclo -# github.com/ghostiam/protogetter v0.3.16 -## explicit; go 1.23.0 +# github.com/ghostiam/protogetter v0.3.17 +## explicit; go 1.24.0 github.com/ghostiam/protogetter -# github.com/go-critic/go-critic v0.13.0 -## explicit; go 1.23.0 +# github.com/go-critic/go-critic v0.14.2 +## explicit; go 1.24.0 github.com/go-critic/go-critic/checkers github.com/go-critic/go-critic/checkers/internal/astwalk github.com/go-critic/go-critic/checkers/internal/lintutil @@ -269,7 +269,7 @@ github.com/gobwas/glob/syntax/ast github.com/gobwas/glob/syntax/lexer github.com/gobwas/glob/util/runes github.com/gobwas/glob/util/strings -# github.com/godoc-lint/godoc-lint v0.10.0 +# github.com/godoc-lint/godoc-lint v0.10.1 ## explicit; go 1.24 github.com/godoc-lint/godoc-lint/pkg/analysis github.com/godoc-lint/godoc-lint/pkg/check @@ -285,8 +285,8 @@ github.com/godoc-lint/godoc-lint/pkg/config github.com/godoc-lint/godoc-lint/pkg/inspect github.com/godoc-lint/godoc-lint/pkg/model github.com/godoc-lint/godoc-lint/pkg/util -# github.com/gofrs/flock v0.12.1 -## explicit; go 1.21.0 +# github.com/gofrs/flock v0.13.0 +## explicit; go 1.24.0 github.com/gofrs/flock # github.com/golang/protobuf v1.5.3 ## explicit; go 1.9 @@ -312,7 +312,7 @@ github.com/golangci/go-printf-func-name/pkg/analyzer # github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d ## explicit; go 1.22.0 github.com/golangci/gofmt/gofmt -# github.com/golangci/golangci-lint/v2 v2.5.0 +# github.com/golangci/golangci-lint/v2 v2.6.2 ## explicit; go 1.24.0 github.com/golangci/golangci-lint/v2/cmd/golangci-lint github.com/golangci/golangci-lint/v2/internal/cache @@ -423,6 +423,7 @@ github.com/golangci/golangci-lint/v2/pkg/golinters/makezero github.com/golangci/golangci-lint/v2/pkg/golinters/mirror github.com/golangci/golangci-lint/v2/pkg/golinters/misspell github.com/golangci/golangci-lint/v2/pkg/golinters/mnd +github.com/golangci/golangci-lint/v2/pkg/golinters/modernize 
github.com/golangci/golangci-lint/v2/pkg/golinters/musttag github.com/golangci/golangci-lint/v2/pkg/golinters/nakedret github.com/golangci/golangci-lint/v2/pkg/golinters/nestif @@ -486,9 +487,6 @@ github.com/golangci/golines # github.com/golangci/misspell v0.7.0 ## explicit; go 1.23.0 github.com/golangci/misspell -# github.com/golangci/nilerr v0.0.0-20250918000102-015671e622fe -## explicit; go 1.24.0 -github.com/golangci/nilerr # github.com/golangci/plugin-module-register v0.1.2 ## explicit; go 1.23.0 github.com/golangci/plugin-module-register/register @@ -521,6 +519,9 @@ github.com/gostaticanalysis/comment/passes/commentmap # github.com/gostaticanalysis/forcetypeassert v0.2.0 ## explicit; go 1.23.0 github.com/gostaticanalysis/forcetypeassert +# github.com/gostaticanalysis/nilerr v0.1.2 +## explicit; go 1.23.0 +github.com/gostaticanalysis/nilerr # github.com/hashicorp/go-immutable-radix/v2 v2.1.0 ## explicit; go 1.18 github.com/hashicorp/go-immutable-radix/v2 @@ -563,8 +564,8 @@ github.com/jjti/go-spancheck # github.com/julz/importas v0.2.0 ## explicit; go 1.20 github.com/julz/importas -# github.com/karamaru-alpha/copyloopvar v1.2.1 -## explicit; go 1.21 +# github.com/karamaru-alpha/copyloopvar v1.2.2 +## explicit; go 1.24.0 github.com/karamaru-alpha/copyloopvar # github.com/kisielk/errcheck v1.9.0 ## explicit; go 1.22.0 @@ -575,17 +576,17 @@ github.com/kkHAIKE/contextcheck # github.com/kulti/thelper v0.7.1 ## explicit; go 1.24.0 github.com/kulti/thelper/pkg/analyzer -# github.com/kunwardeep/paralleltest v1.0.14 +# github.com/kunwardeep/paralleltest v1.0.15 ## explicit; go 1.23.0 github.com/kunwardeep/paralleltest/pkg/paralleltest # github.com/lasiar/canonicalheader v1.1.2 ## explicit; go 1.22.0 github.com/lasiar/canonicalheader -# github.com/ldez/exptostd v0.4.4 -## explicit; go 1.23.0 +# github.com/ldez/exptostd v0.4.5 +## explicit; go 1.24.0 github.com/ldez/exptostd -# github.com/ldez/gomoddirectives v0.7.0 -## explicit; go 1.23.0 +# github.com/ldez/gomoddirectives v0.7.1 +## explicit; go 1.24.0 github.com/ldez/gomoddirectives # github.com/ldez/grignotin v0.10.1 ## explicit; go 1.24.0 @@ -622,11 +623,11 @@ github.com/manuelarte/embeddedstructfieldcheck/internal ## explicit; go 1.23.0 github.com/manuelarte/funcorder/analyzer github.com/manuelarte/funcorder/internal -# github.com/maratori/testableexamples v1.0.0 -## explicit; go 1.19 +# github.com/maratori/testableexamples v1.0.1 +## explicit; go 1.22.0 github.com/maratori/testableexamples/pkg/testableexamples -# github.com/maratori/testpackage v1.1.1 -## explicit; go 1.20 +# github.com/maratori/testpackage v1.1.2 +## explicit; go 1.22.0 github.com/maratori/testpackage/pkg/testpackage # github.com/matoous/godox v1.1.0 ## explicit; go 1.18 @@ -677,7 +678,7 @@ github.com/nishanths/exhaustive # github.com/nishanths/predeclared v0.2.2 ## explicit; go 1.14 github.com/nishanths/predeclared/passes/predeclared -# github.com/nunnatsa/ginkgolinter v0.21.0 +# github.com/nunnatsa/ginkgolinter v0.21.2 ## explicit; go 1.24.0 github.com/nunnatsa/ginkgolinter github.com/nunnatsa/ginkgolinter/config @@ -731,7 +732,7 @@ github.com/prometheus/common/model github.com/prometheus/procfs github.com/prometheus/procfs/internal/fs github.com/prometheus/procfs/internal/util -# github.com/quasilyte/go-ruleguard v0.4.4 +# github.com/quasilyte/go-ruleguard v0.4.5 ## explicit; go 1.22.0 github.com/quasilyte/go-ruleguard/internal/goenv github.com/quasilyte/go-ruleguard/internal/golist @@ -748,7 +749,7 @@ 
github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrconv github.com/quasilyte/go-ruleguard/ruleguard/quasigo/stdlib/qstrings github.com/quasilyte/go-ruleguard/ruleguard/textmatch github.com/quasilyte/go-ruleguard/ruleguard/typematch -# github.com/quasilyte/go-ruleguard/dsl v0.3.22 +# github.com/quasilyte/go-ruleguard/dsl v0.3.23 ## explicit; go 1.15 github.com/quasilyte/go-ruleguard/dsl github.com/quasilyte/go-ruleguard/dsl/types @@ -797,7 +798,7 @@ github.com/sashamelentyev/interfacebloat/pkg/analyzer ## explicit; go 1.23.0 github.com/sashamelentyev/usestdlibvars/pkg/analyzer github.com/sashamelentyev/usestdlibvars/pkg/analyzer/internal/mapping -# github.com/securego/gosec/v2 v2.22.8 +# github.com/securego/gosec/v2 v2.22.10 ## explicit; go 1.24.0 github.com/securego/gosec/v2 github.com/securego/gosec/v2/analyzers @@ -925,10 +926,10 @@ go-simpler.org/musttag # go-simpler.org/sloglint v0.11.1 ## explicit; go 1.23.0 go-simpler.org/sloglint -# go.augendre.info/arangolint v0.2.0 -## explicit; go 1.23.0 +# go.augendre.info/arangolint v0.3.1 +## explicit; go 1.24.0 go.augendre.info/arangolint/pkg/analyzer -# go.augendre.info/fatcontext v0.8.1 +# go.augendre.info/fatcontext v0.9.0 ## explicit; go 1.23.0 go.augendre.info/fatcontext/pkg/analyzer # go.uber.org/automaxprocs v1.6.0 @@ -950,25 +951,25 @@ go.uber.org/zap/internal/exit go.uber.org/zap/internal/pool go.uber.org/zap/internal/stacktrace go.uber.org/zap/zapcore -# golang.org/x/exp/typeparams v0.0.0-20250911091902-df9299821621 +# golang.org/x/exp/typeparams v0.0.0-20251023183803-a4bb9ffd2546 ## explicit; go 1.24.0 golang.org/x/exp/typeparams -# golang.org/x/mod v0.28.0 +# golang.org/x/mod v0.29.0 ## explicit; go 1.24.0 golang.org/x/mod/internal/lazyregexp golang.org/x/mod/modfile golang.org/x/mod/module golang.org/x/mod/semver golang.org/x/mod/sumdb/dirhash -# golang.org/x/sync v0.17.0 +# golang.org/x/sync v0.18.0 ## explicit; go 1.24.0 golang.org/x/sync/errgroup golang.org/x/sync/semaphore -# golang.org/x/sys v0.36.0 +# golang.org/x/sys v0.37.0 ## explicit; go 1.24.0 golang.org/x/sys/unix golang.org/x/sys/windows -# golang.org/x/text v0.29.0 +# golang.org/x/text v0.30.0 ## explicit; go 1.24.0 golang.org/x/text/feature/plural golang.org/x/text/internal @@ -986,7 +987,7 @@ golang.org/x/text/runes golang.org/x/text/transform golang.org/x/text/unicode/norm golang.org/x/text/width -# golang.org/x/tools v0.37.0 +# golang.org/x/tools v0.38.0 ## explicit; go 1.24.0 golang.org/x/tools/go/analysis golang.org/x/tools/go/analysis/passes/appends @@ -1016,6 +1017,7 @@ golang.org/x/tools/go/analysis/passes/inspect golang.org/x/tools/go/analysis/passes/internal/analysisutil golang.org/x/tools/go/analysis/passes/loopclosure golang.org/x/tools/go/analysis/passes/lostcancel +golang.org/x/tools/go/analysis/passes/modernize golang.org/x/tools/go/analysis/passes/nilfunc golang.org/x/tools/go/analysis/passes/nilness golang.org/x/tools/go/analysis/passes/pkgfact @@ -1055,6 +1057,7 @@ golang.org/x/tools/go/types/typeutil golang.org/x/tools/imports golang.org/x/tools/internal/aliases golang.org/x/tools/internal/analysisinternal +golang.org/x/tools/internal/analysisinternal/generated golang.org/x/tools/internal/analysisinternal/typeindex golang.org/x/tools/internal/astutil golang.org/x/tools/internal/event @@ -1065,18 +1068,20 @@ golang.org/x/tools/internal/fmtstr golang.org/x/tools/internal/gcimporter golang.org/x/tools/internal/gocommand golang.org/x/tools/internal/gopathwalk +golang.org/x/tools/internal/goplsexport golang.org/x/tools/internal/imports 
golang.org/x/tools/internal/modindex golang.org/x/tools/internal/moreiters golang.org/x/tools/internal/packagesinternal golang.org/x/tools/internal/pkgbits +golang.org/x/tools/internal/refactor golang.org/x/tools/internal/stdlib golang.org/x/tools/internal/typeparams golang.org/x/tools/internal/typesinternal golang.org/x/tools/internal/typesinternal/typeindex golang.org/x/tools/internal/versions -# google.golang.org/protobuf v1.36.6 -## explicit; go 1.22 +# google.golang.org/protobuf v1.36.8 +## explicit; go 1.23 google.golang.org/protobuf/encoding/prototext google.golang.org/protobuf/encoding/protowire google.golang.org/protobuf/internal/descfmt @@ -1312,13 +1317,13 @@ honnef.co/go/tools/stylecheck/st1021 honnef.co/go/tools/stylecheck/st1022 honnef.co/go/tools/stylecheck/st1023 honnef.co/go/tools/unused -# mvdan.cc/gofumpt v0.9.1 +# mvdan.cc/gofumpt v0.9.2 ## explicit; go 1.24.0 mvdan.cc/gofumpt/format mvdan.cc/gofumpt/internal/govendor/go/doc/comment mvdan.cc/gofumpt/internal/govendor/go/format mvdan.cc/gofumpt/internal/govendor/go/printer mvdan.cc/gofumpt/internal/version -# mvdan.cc/unparam v0.0.0-20250301125049-0df0534333a4 -## explicit; go 1.23 +# mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15 +## explicit; go 1.24.0 mvdan.cc/unparam/check diff --git a/tools/vendor/mvdan.cc/gofumpt/format/format.go b/tools/vendor/mvdan.cc/gofumpt/format/format.go index a60119c1..879969da 100644 --- a/tools/vendor/mvdan.cc/gofumpt/format/format.go +++ b/tools/vendor/mvdan.cc/gofumpt/format/format.go @@ -711,6 +711,10 @@ func (f *fumpter) applyPre(c *astutil.Cursor) { if len(node.Results) > 0 { break } + // Clothing naked returns is disabled by default. + if !f.ExtraRules { + break + } results := f.parentFuncTypes[len(f.parentFuncTypes)-1].Results if results.NumFields() == 0 { break diff --git a/tools/vendor/mvdan.cc/unparam/check/check.go b/tools/vendor/mvdan.cc/unparam/check/check.go index 38421a09..7ca02c29 100644 --- a/tools/vendor/mvdan.cc/unparam/check/check.go +++ b/tools/vendor/mvdan.cc/unparam/check/check.go @@ -18,6 +18,7 @@ import ( "os" "path/filepath" "regexp" + "slices" "sort" "strings" @@ -138,7 +139,7 @@ type Issue struct { func (i Issue) Pos() token.Pos { return i.pos } func (i Issue) Message() string { return i.fname + " - " + i.msg } -// Program supplies Checker with the needed *loader.Program. +// Packages supplies Checker with the loaded packages. func (c *Checker) Packages(pkgs []*packages.Package) { c.pkgs = pkgs } @@ -153,7 +154,7 @@ func (c *Checker) CheckExportedFuncs(exported bool) { c.exported = exported } -func (c *Checker) debug(format string, a ...interface{}) { +func (c *Checker) debug(format string, a ...any) { if c.debugLog != nil { fmt.Fprintf(c.debugLog, format, a...) } @@ -387,23 +388,14 @@ func (c *Checker) Check() ([]Issue, error) { return c.issues, nil } -func stringsContains(list []string, elem string) bool { - for _, e := range list { - if e == elem { - return true - } - } - return false -} - func (c *Checker) addImplementing(named *types.Named, iface *types.Interface) { if named == nil || iface == nil { return } list := c.typesImplementing[named] - for i := 0; i < iface.NumMethods(); i++ { - name := iface.Method(i).Name() - if !stringsContains(list, name) { + for method := range iface.Methods() { + name := method.Name() + if !slices.Contains(list, name) { list = append(list, name) } } @@ -456,7 +448,7 @@ func findFunction(freeVars map[*ssa.FreeVar]*ssa.Function, value ssa.Value) *ssa } // addIssue records a newly found unused parameter. 
-func (c *Checker) addIssue(fn *ssa.Function, pos token.Pos, format string, args ...interface{}) { +func (c *Checker) addIssue(fn *ssa.Function, pos token.Pos, format string, args ...any) { c.issues = append(c.issues, Issue{ pos: pos, fname: fn.RelString(fn.Package().Pkg), @@ -485,7 +477,7 @@ func (c *Checker) checkFunc(fn *ssa.Function, pkg *packages.Package) { } if recv := fn.Signature.Recv(); recv != nil { named := findNamed(recv.Type()) - if stringsContains(c.typesImplementing[named], fn.Name()) { + if slices.Contains(c.typesImplementing[named], fn.Name()) { c.debug(" skip - method required to implement an interface\n") return } @@ -623,8 +615,8 @@ func containsTypeParam(t types.Type) bool { return true case *types.Struct: nf := t.NumFields() - for i := 0; i < nf; i++ { - if containsTypeParam(t.Field(nf).Type()) { + for i := range nf { + if containsTypeParam(t.Field(i).Type()) { return true } } @@ -632,8 +624,8 @@ func containsTypeParam(t types.Type) bool { return containsTypeParam(t.Elem()) case *types.Named: args := t.TypeArgs() - for i := 0; i < args.Len(); i++ { - if containsTypeParam(args.At(i)) { + for t0 := range args.Types() { + if containsTypeParam(t0) { return true } }
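The descriptorpb regeneration above (google.golang.org/protobuf v1.36.6 -> v1.36.8 per modules.txt) adds the repeated `option_dependency` field (number 15) on FileDescriptorProto, `visibility` fields on DescriptorProto and EnumDescriptorProto backed by the new top-level SymbolVisibility enum, and the `default_symbol_visibility` feature nested under FeatureSet.VisibilityFeature. Below is a minimal sketch of how a consumer could read the new accessors, assuming the bumped module; `describeVisibility` is a hypothetical helper, not part of this change:

    // Sketch only: exercises the accessors generated in the diff above.
    package main

    import (
        "fmt"

        "google.golang.org/protobuf/types/descriptorpb"
    )

    func describeVisibility(fd *descriptorpb.FileDescriptorProto) {
        // New repeated field alongside the existing dependency lists.
        fmt.Println("option deps:", fd.GetOptionDependency())

        for _, m := range fd.GetMessageType() {
            // GetVisibility returns SymbolVisibility_VISIBILITY_UNSET when the field is absent.
            fmt.Printf("message %s: %v\n", m.GetName(), m.GetVisibility())
        }
        for _, e := range fd.GetEnumType() {
            fmt.Printf("enum %s: %v\n", e.GetName(), e.GetVisibility())
        }
    }

    func main() {
        describeVisibility(&descriptorpb.FileDescriptorProto{})
    }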
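In the gofumpt hunk, clothing naked returns moves behind ExtraRules, so the default formatter no longer rewrites bare returns. The input below is illustrative only (the function is made up) and assumes gofumpt's extra rules are what expand a bare return to name its results; without them, code like this is now left as-is:

    package main

    import "fmt"

    // split has named results, so its bare return is a "naked return".
    func split(n int) (lo, hi int) {
        lo, hi = n/2, n-n/2
        return // with extra rules this would become: return lo, hi
    }

    func main() {
        fmt.Println(split(5))
    }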
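The unparam changes replace the private stringsContains helper with slices.Contains, switch counted loops to Go 1.22 range-over-int, adopt the go/types iterator methods (matching the module's new go 1.24.0 floor in modules.txt), and fix the struct-field loop that previously indexed t.Field(nf) instead of t.Field(i). A standalone sketch of the first two idioms, with made-up data:

    package main

    import (
        "fmt"
        "slices"
    )

    func main() {
        methods := []string{"Read", "Write", "Close"}

        // slices.Contains replaces the removed stringsContains helper.
        fmt.Println(slices.Contains(methods, "Close")) // true

        // Range over an integer (Go 1.22+) replaces the counted for loop.
        for i := range len(methods) {
            fmt.Println(i, methods[i])
        }
    }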