diff --git a/tools/go.mod b/tools/go.mod
index ed724bc675..cf36abf1ae 100644
--- a/tools/go.mod
+++ b/tools/go.mod
@@ -4,50 +4,53 @@ go 1.19
 
 require (
 	github.com/cfergeau/gomod2rpmdeps v0.0.0-20210223144124-2042c4850ca8
-	github.com/golangci/golangci-lint v1.51.2
+	github.com/golangci/golangci-lint v1.53.3
 	github.com/randall77/makefat v0.0.0-20210315173500-7ddd0e42c844
-	golang.org/x/tools v0.6.0
+	golang.org/x/tools v0.11.0
 )
 
 require (
 	4d63.com/gocheckcompilerdirectives v1.2.1 // indirect
 	4d63.com/gochecknoglobals v0.2.1 // indirect
-	github.com/Abirdcfly/dupword v0.0.9 // indirect
-	github.com/Antonboom/errname v0.1.7 // indirect
-	github.com/Antonboom/nilnil v0.1.1 // indirect
-	github.com/BurntSushi/toml v1.2.1 // indirect
+	github.com/4meepo/tagalign v1.2.2 // indirect
+	github.com/Abirdcfly/dupword v0.0.11 // indirect
+	github.com/Antonboom/errname v0.1.10 // indirect
+	github.com/Antonboom/nilnil v0.1.5 // indirect
+	github.com/BurntSushi/toml v1.3.2 // indirect
 	github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 // indirect
 	github.com/GaijinEntertainment/go-exhaustruct/v2 v2.3.0 // indirect
 	github.com/Masterminds/semver v1.5.0 // indirect
-	github.com/OpenPeeDeeP/depguard v1.1.1 // indirect
+	github.com/OpenPeeDeeP/depguard/v2 v2.1.0 // indirect
+	github.com/alexkohler/nakedret/v2 v2.0.2 // indirect
 	github.com/alexkohler/prealloc v1.0.0 // indirect
 	github.com/alingse/asasalint v0.0.11 // indirect
-	github.com/ashanbrown/forbidigo v1.4.0 // indirect
+	github.com/ashanbrown/forbidigo v1.5.3 // indirect
 	github.com/ashanbrown/makezero v1.1.1 // indirect
 	github.com/beorn7/perks v1.0.1 // indirect
-	github.com/bkielbasa/cyclop v1.2.0 // indirect
+	github.com/bkielbasa/cyclop v1.2.1 // indirect
 	github.com/blizzy78/varnamelen v0.8.0 // indirect
 	github.com/bombsimon/wsl/v3 v3.4.0 // indirect
-	github.com/breml/bidichk v0.2.3 // indirect
-	github.com/breml/errchkjson v0.3.0 // indirect
-	github.com/butuzov/ireturn v0.1.1 // indirect
+	github.com/breml/bidichk v0.2.4 // indirect
+	github.com/breml/errchkjson v0.3.1 // indirect
+	github.com/butuzov/ireturn v0.2.0 // indirect
+	github.com/butuzov/mirror v1.1.0 // indirect
 	github.com/cespare/xxhash/v2 v2.1.2 // indirect
-	github.com/charithe/durationcheck v0.0.9 // indirect
-	github.com/chavacava/garif v0.0.0-20221024190013-b3ef35877348 // indirect
+	github.com/charithe/durationcheck v0.0.10 // indirect
+	github.com/chavacava/garif v0.0.0-20230227094218-b8c73b2037b8 // indirect
 	github.com/curioswitch/go-reassign v0.2.0 // indirect
-	github.com/daixiang0/gci v0.9.1 // indirect
+	github.com/daixiang0/gci v0.10.1 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/denis-tingaikin/go-header v0.4.3 // indirect
 	github.com/esimonov/ifshort v1.0.4 // indirect
 	github.com/ettle/strcase v0.1.1 // indirect
-	github.com/fatih/color v1.14.1 // indirect
+	github.com/fatih/color v1.15.0 // indirect
 	github.com/fatih/structtag v1.2.0 // indirect
 	github.com/firefart/nonamedreturns v1.0.4 // indirect
 	github.com/fsnotify/fsnotify v1.5.4 // indirect
 	github.com/fzipp/gocyclo v0.6.0 // indirect
-	github.com/go-critic/go-critic v0.6.7 // indirect
+	github.com/go-critic/go-critic v0.8.1 // indirect
 	github.com/go-toolsmith/astcast v1.1.0 // indirect
-	github.com/go-toolsmith/astcopy v1.0.3 // indirect
+	github.com/go-toolsmith/astcopy v1.1.0 // indirect
 	github.com/go-toolsmith/astequal v1.1.0 // indirect
 	github.com/go-toolsmith/astfmt v1.1.0 // indirect
 	github.com/go-toolsmith/astp v1.1.0 // indirect
@@ -67,7 +70,7 @@ require (
 	github.com/golangci/revgrep v0.0.0-20220804021717-745bb2f7c2e6 // indirect
 	github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 // indirect
 	github.com/google/go-cmp v0.5.9 // indirect
-	github.com/gordonklaus/ineffassign v0.0.0-20230107090616-13ace0543b28 // indirect
+	github.com/gordonklaus/ineffassign v0.0.0-20230610083614-0e73809eb601 // indirect
 	github.com/gostaticanalysis/analysisutil v0.7.1 // indirect
 	github.com/gostaticanalysis/comment v1.4.2 // indirect
 	github.com/gostaticanalysis/forcetypeassert v0.1.0 // indirect
@@ -77,107 +80,107 @@ require (
 	github.com/hashicorp/go-version v1.6.0 // indirect
 	github.com/hashicorp/hcl v1.0.0 // indirect
 	github.com/hexops/gotextdiff v1.0.3 // indirect
-	github.com/inconshreveable/mousetrap v1.0.1 // indirect
+	github.com/inconshreveable/mousetrap v1.1.0 // indirect
 	github.com/jgautheron/goconst v1.5.1 // indirect
 	github.com/jingyugao/rowserrcheck v1.1.1 // indirect
 	github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af // indirect
 	github.com/julz/importas v0.1.0 // indirect
-	github.com/junk1tm/musttag v0.4.5 // indirect
 	github.com/kisielk/errcheck v1.6.3 // indirect
 	github.com/kisielk/gotool v1.0.0 // indirect
-	github.com/kkHAIKE/contextcheck v1.1.3 // indirect
+	github.com/kkHAIKE/contextcheck v1.1.4 // indirect
 	github.com/kulti/thelper v0.6.3 // indirect
-	github.com/kunwardeep/paralleltest v1.0.6 // indirect
+	github.com/kunwardeep/paralleltest v1.0.7 // indirect
 	github.com/kyoh86/exportloopref v0.1.11 // indirect
 	github.com/ldez/gomoddirectives v0.2.3 // indirect
-	github.com/ldez/tagliatelle v0.4.0 // indirect
+	github.com/ldez/tagliatelle v0.5.0 // indirect
 	github.com/leonklingele/grouper v1.1.1 // indirect
 	github.com/lufeee/execinquery v1.2.1 // indirect
 	github.com/magiconair/properties v1.8.6 // indirect
 	github.com/maratori/testableexamples v1.0.0 // indirect
-	github.com/maratori/testpackage v1.1.0 // indirect
-	github.com/matoous/godox v0.0.0-20210227103229-6504466cf951 // indirect
+	github.com/maratori/testpackage v1.1.1 // indirect
+	github.com/matoous/godox v0.0.0-20230222163458-006bad1f9d26 // indirect
 	github.com/mattn/go-colorable v0.1.13 // indirect
 	github.com/mattn/go-isatty v0.0.17 // indirect
 	github.com/mattn/go-runewidth v0.0.9 // indirect
 	github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
 	github.com/mbilski/exhaustivestruct v1.2.0 // indirect
-	github.com/mgechev/revive v1.2.5 // indirect
+	github.com/mgechev/revive v1.3.2 // indirect
 	github.com/mitchellh/go-homedir v1.1.0 // indirect
 	github.com/mitchellh/mapstructure v1.5.0 // indirect
-	github.com/moricho/tparallel v0.2.1 // indirect
+	github.com/moricho/tparallel v0.3.1 // indirect
 	github.com/nakabonne/nestif v0.3.1 // indirect
 	github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354 // indirect
-	github.com/nishanths/exhaustive v0.9.5 // indirect
+	github.com/nishanths/exhaustive v0.11.0 // indirect
 	github.com/nishanths/predeclared v0.2.2 // indirect
-	github.com/nunnatsa/ginkgolinter v0.8.1 // indirect
+	github.com/nunnatsa/ginkgolinter v0.12.1 // indirect
 	github.com/olekukonko/tablewriter v0.0.5 // indirect
 	github.com/pelletier/go-toml v1.9.5 // indirect
 	github.com/pelletier/go-toml/v2 v2.0.5 // indirect
-	github.com/pkg/errors v0.9.1 // indirect
 	github.com/pmezard/go-difflib v1.0.0 // indirect
-	github.com/polyfloyd/go-errorlint v1.1.0 // indirect
+	github.com/polyfloyd/go-errorlint v1.4.2 // indirect
 	github.com/prometheus/client_golang v1.12.1 // indirect
 	github.com/prometheus/client_model v0.2.0 // indirect
 	github.com/prometheus/common v0.32.1 // indirect
 	github.com/prometheus/procfs v0.7.3 // indirect
 	github.com/quasilyte/go-ruleguard v0.3.19 // indirect
 	github.com/quasilyte/gogrep v0.5.0 // indirect
-	github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95 // indirect
+	github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect
 	github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect
 	github.com/ryancurrah/gomodguard v1.3.0 // indirect
 	github.com/ryanrolds/sqlclosecheck v0.4.0 // indirect
 	github.com/sanposhiho/wastedassign/v2 v2.0.7 // indirect
 	github.com/sashamelentyev/interfacebloat v1.1.0 // indirect
 	github.com/sashamelentyev/usestdlibvars v1.23.0 // indirect
-	github.com/securego/gosec/v2 v2.15.0 // indirect
+	github.com/securego/gosec/v2 v2.16.0 // indirect
 	github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c // indirect
-	github.com/sirupsen/logrus v1.9.0 // indirect
-	github.com/sivchari/containedctx v1.0.2 // indirect
+	github.com/sirupsen/logrus v1.9.3 // indirect
+	github.com/sivchari/containedctx v1.0.3 // indirect
 	github.com/sivchari/nosnakecase v1.7.0 // indirect
 	github.com/sivchari/tenv v1.7.1 // indirect
-	github.com/sonatard/noctx v0.0.1 // indirect
+	github.com/sonatard/noctx v0.0.2 // indirect
 	github.com/sourcegraph/go-diff v0.7.0 // indirect
 	github.com/spf13/afero v1.8.2 // indirect
 	github.com/spf13/cast v1.5.0 // indirect
-	github.com/spf13/cobra v1.6.1 // indirect
+	github.com/spf13/cobra v1.7.0 // indirect
 	github.com/spf13/jwalterweatherman v1.1.0 // indirect
 	github.com/spf13/pflag v1.0.5 // indirect
 	github.com/spf13/viper v1.12.0 // indirect
 	github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect
 	github.com/stbenjam/no-sprintf-host-port v0.1.1 // indirect
 	github.com/stretchr/objx v0.5.0 // indirect
-	github.com/stretchr/testify v1.8.1 // indirect
+	github.com/stretchr/testify v1.8.4 // indirect
 	github.com/subosito/gotenv v1.4.1 // indirect
 	github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c // indirect
-	github.com/tdakkota/asciicheck v0.1.1 // indirect
+	github.com/tdakkota/asciicheck v0.2.0 // indirect
 	github.com/tetafro/godot v1.4.11 // indirect
-	github.com/timakin/bodyclose v0.0.0-20221125081123-e39cf3fc478e // indirect
-	github.com/timonwong/loggercheck v0.9.3 // indirect
-	github.com/tomarrell/wrapcheck/v2 v2.8.0 // indirect
+	github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966 // indirect
+	github.com/timonwong/loggercheck v0.9.4 // indirect
+	github.com/tomarrell/wrapcheck/v2 v2.8.1 // indirect
 	github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect
 	github.com/ultraware/funlen v0.0.3 // indirect
 	github.com/ultraware/whitespace v0.0.5 // indirect
 	github.com/uudashr/gocognit v1.0.6 // indirect
+	github.com/xen0n/gosmopolitan v1.2.1 // indirect
 	github.com/yagipy/maintidx v1.0.0 // indirect
 	github.com/yeya24/promlinter v0.2.0 // indirect
+	github.com/ykadowak/zerologlint v0.1.2 // indirect
 	gitlab.com/bosi/decorder v0.2.3 // indirect
+	go.tmz.dev/musttag v0.7.0 // indirect
 	go.uber.org/atomic v1.7.0 // indirect
 	go.uber.org/multierr v1.6.0 // indirect
-	go.uber.org/zap v1.17.0 // indirect
-	golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e // indirect
-	golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9 // indirect
-	golang.org/x/mod v0.8.0 // indirect
-	golang.org/x/net v0.7.0 // indirect
-	golang.org/x/sync v0.1.0 // indirect
-	golang.org/x/sys v0.5.0 // indirect
-	golang.org/x/text v0.7.0 // indirect
+	go.uber.org/zap v1.24.0 // indirect
+	golang.org/x/exp v0.0.0-20230510235704-dd950f8aeaea // indirect
+	golang.org/x/exp/typeparams v0.0.0-20230224173230-c95f2b4c22f2 // indirect
+	golang.org/x/mod v0.12.0 // indirect
+	golang.org/x/sync v0.3.0 // indirect
+	golang.org/x/sys v0.10.0 // indirect
+	golang.org/x/text v0.11.0 // indirect
 	google.golang.org/protobuf v1.28.0 // indirect
 	gopkg.in/ini.v1 v1.67.0 // indirect
 	gopkg.in/yaml.v2 v2.4.0 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
-	honnef.co/go/tools v0.4.2 // indirect
-	mvdan.cc/gofumpt v0.4.0 // indirect
+	honnef.co/go/tools v0.4.3 // indirect
+	mvdan.cc/gofumpt v0.5.0 // indirect
 	mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed // indirect
 	mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b // indirect
 	mvdan.cc/unparam v0.0.0-20221223090309-7455f1af531d // indirect
diff --git a/tools/go.sum b/tools/go.sum
index feb51093e0..dc0ec6b17b 100644
--- a/tools/go.sum
+++ b/tools/go.sum
@@ -40,15 +40,17 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX
 cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
 cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo=
 dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
-github.com/Abirdcfly/dupword v0.0.9 h1:MxprGjKq3yDBICXDgEEsyGirIXfMYXkLNT/agPsE1tk=
-github.com/Abirdcfly/dupword v0.0.9/go.mod h1:PzmHVLLZ27MvHSzV7eFmMXSFArWXZPZmfuuziuUrf2g=
-github.com/Antonboom/errname v0.1.7 h1:mBBDKvEYwPl4WFFNwec1CZO096G6vzK9vvDQzAwkako=
-github.com/Antonboom/errname v0.1.7/go.mod h1:g0ONh16msHIPgJSGsecu1G/dcF2hlYR/0SddnIAGavU=
-github.com/Antonboom/nilnil v0.1.1 h1:PHhrh5ANKFWRBh7TdYmyyq2gyT2lotnvFvvFbylF81Q=
-github.com/Antonboom/nilnil v0.1.1/go.mod h1:L1jBqoWM7AOeTD+tSquifKSesRHs4ZdaxvZR+xdJEaI=
+github.com/4meepo/tagalign v1.2.2 h1:kQeUTkFTaBRtd/7jm8OKJl9iHk0gAO+TDFPHGSna0aw=
+github.com/4meepo/tagalign v1.2.2/go.mod h1:Q9c1rYMZJc9dPRkbQPpcBNCLEmY2njbAsXhQOZFE2dE=
+github.com/Abirdcfly/dupword v0.0.11 h1:z6v8rMETchZXUIuHxYNmlUAuKuB21PeaSymTed16wgU=
+github.com/Abirdcfly/dupword v0.0.11/go.mod h1:wH8mVGuf3CP5fsBTkfWwwwKTjDnVVCxtU8d8rgeVYXA=
+github.com/Antonboom/errname v0.1.10 h1:RZ7cYo/GuZqjr1nuJLNe8ZH+a+Jd9DaZzttWzak9Bls=
+github.com/Antonboom/errname v0.1.10/go.mod h1:xLeiCIrvVNpUtsN0wxAh05bNIZpqE22/qDMnTBTttiA=
+github.com/Antonboom/nilnil v0.1.5 h1:X2JAdEVcbPaOom2TUa1FxZ3uyuUlex0XMLGYMemu6l0=
+github.com/Antonboom/nilnil v0.1.5/go.mod h1:I24toVuBKhfP5teihGWctrRiPbRKHwZIFOvc6v3HZXk=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak=
-github.com/BurntSushi/toml v1.2.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
+github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8=
+github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
 github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
 github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 h1:sHglBQTwgx+rWPdisA5ynNEsoARbiCBOyGcJM4/OzsM=
 github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs=
@@ -56,47 +58,52 @@ github.com/GaijinEntertainment/go-exhaustruct/v2 v2.3.0 h1:+r1rSv4gvYn0wmRjC8X7I
 github.com/GaijinEntertainment/go-exhaustruct/v2 v2.3.0/go.mod h1:b3g59n2Y+T5xmcxJL+UEG2f8cQploZm1mR/v6BW0mU0=
github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= -github.com/OpenPeeDeeP/depguard v1.1.1 h1:TSUznLjvp/4IUP+OQ0t/4jF4QUyxIcVX8YnghZdunyA= -github.com/OpenPeeDeeP/depguard v1.1.1/go.mod h1:JtAMzWkmFEzDPyAd+W0NHl1lvpQKTvT9jnRVsohBKpc= +github.com/OpenPeeDeeP/depguard/v2 v2.1.0 h1:aQl70G173h/GZYhWf36aE5H0KaujXfVMnn/f1kSDVYY= +github.com/OpenPeeDeeP/depguard/v2 v2.1.0/go.mod h1:PUBgk35fX4i7JDmwzlJwJ+GMe6NfO1723wmJMgPThNQ= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/alexkohler/nakedret/v2 v2.0.2 h1:qnXuZNvv3/AxkAb22q/sEsEpcA99YxLFACDtEw9TPxE= +github.com/alexkohler/nakedret/v2 v2.0.2/go.mod h1:2b8Gkk0GsOrqQv/gPWjNLDSKwG8I5moSXG1K4VIBcTQ= github.com/alexkohler/prealloc v1.0.0 h1:Hbq0/3fJPQhNkN0dR95AVrr6R7tou91y0uHG5pOcUuw= github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cvfHR56wJVlKE= github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw= github.com/alingse/asasalint v0.0.11/go.mod h1:nCaoMhw7a9kSJObvQyVzNTPBDbNpdocqrSP7t/cW5+I= -github.com/ashanbrown/forbidigo v1.4.0 h1:spdPbupaSqtWORq1Q4eHBoPBmHtwVyLKwaedbSLc5Sw= -github.com/ashanbrown/forbidigo v1.4.0/go.mod h1:IvgwB5Y4fzqSAj/WVXKWigoTkB0dzI2FBbpKWuh7ph8= +github.com/ashanbrown/forbidigo v1.5.3 h1:jfg+fkm/snMx+V9FBwsl1d340BV/99kZGv5jN9hBoXk= +github.com/ashanbrown/forbidigo v1.5.3/go.mod h1:Y8j9jy9ZYAEHXdu723cUlraTqbzjKF1MUyfOKL+AjcU= github.com/ashanbrown/makezero v1.1.1 h1:iCQ87C0V0vSyO+M9E/FZYbu65auqH0lnsOkf5FcB28s= github.com/ashanbrown/makezero v1.1.1/go.mod h1:i1bJLCRSCHOcOa9Y6MyF2FTfMZMFdHvxKHxgO5Z1axI= +github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/bkielbasa/cyclop v1.2.0 h1:7Jmnh0yL2DjKfw28p86YTd/B4lRGcNuu12sKE35sM7A= -github.com/bkielbasa/cyclop v1.2.0/go.mod h1:qOI0yy6A7dYC4Zgsa72Ppm9kONl0RoIlPbzot9mhmeI= +github.com/bkielbasa/cyclop v1.2.1 h1:AeF71HZDob1P2/pRm1so9cd1alZnrpyc4q2uP2l0gJY= +github.com/bkielbasa/cyclop v1.2.1/go.mod h1:K/dT/M0FPAiYjBgQGau7tz+3TMh4FWAEqlMhzFWCrgM= github.com/blizzy78/varnamelen v0.8.0 h1:oqSblyuQvFsW1hbBHh1zfwrKe3kcSj0rnXkKzsQ089M= github.com/blizzy78/varnamelen v0.8.0/go.mod h1:V9TzQZ4fLJ1DSrjVDfl89H7aMnTvKkApdHeyESmyR7k= github.com/bombsimon/wsl/v3 v3.4.0 h1:RkSxjT3tmlptwfgEgTgU+KYKLI35p/tviNXNXiL2aNU= github.com/bombsimon/wsl/v3 v3.4.0/go.mod h1:KkIB+TXkqy6MvK9BDZVbZxKNYsE1/oLRJbIFtf14qqo= -github.com/breml/bidichk v0.2.3 h1:qe6ggxpTfA8E75hdjWPZ581sY3a2lnl0IRxLQFelECI= -github.com/breml/bidichk v0.2.3/go.mod h1:8u2C6DnAy0g2cEq+k/A2+tr9O1s+vHGxWn0LTc70T2A= 
-github.com/breml/errchkjson v0.3.0 h1:YdDqhfqMT+I1vIxPSas44P+9Z9HzJwCeAzjB8PxP1xw= -github.com/breml/errchkjson v0.3.0/go.mod h1:9Cogkyv9gcT8HREpzi3TiqBxCqDzo8awa92zSDFcofU= -github.com/butuzov/ireturn v0.1.1 h1:QvrO2QF2+/Cx1WA/vETCIYBKtRjc30vesdoPUNo1EbY= -github.com/butuzov/ireturn v0.1.1/go.mod h1:Wh6Zl3IMtTpaIKbmwzqi6olnM9ptYQxxVacMsOEFPoc= +github.com/breml/bidichk v0.2.4 h1:i3yedFWWQ7YzjdZJHnPo9d/xURinSq3OM+gyM43K4/8= +github.com/breml/bidichk v0.2.4/go.mod h1:7Zk0kRFt1LIZxtQdl9W9JwGAcLTTkOs+tN7wuEYGJ3s= +github.com/breml/errchkjson v0.3.1 h1:hlIeXuspTyt8Y/UmP5qy1JocGNR00KQHgfaNtRAjoxQ= +github.com/breml/errchkjson v0.3.1/go.mod h1:XroxrzKjdiutFyW3nWhw34VGg7kiMsDQox73yWCGI2U= +github.com/butuzov/ireturn v0.2.0 h1:kCHi+YzC150GE98WFuZQu9yrTn6GEydO2AuPLbTgnO4= +github.com/butuzov/ireturn v0.2.0/go.mod h1:Wh6Zl3IMtTpaIKbmwzqi6olnM9ptYQxxVacMsOEFPoc= +github.com/butuzov/mirror v1.1.0 h1:ZqX54gBVMXu78QLoiqdwpl2mgmoOJTk7s4p4o+0avZI= +github.com/butuzov/mirror v1.1.0/go.mod h1:8Q0BdQU6rC6WILDiBM60DBfvV78OLJmMmixe7GF45AE= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.1.2 h1:YRXhKfTDauu4ajMg1TPgFO5jnlC2HCbmLXMcTG5cbYE= github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cfergeau/gomod2rpmdeps v0.0.0-20210223144124-2042c4850ca8 h1:wueFIqVEP3VJnJTKQ/cU3XB4XuBuuq4XdsbnszU7xEA= github.com/cfergeau/gomod2rpmdeps v0.0.0-20210223144124-2042c4850ca8/go.mod h1:0vvDpGoZYCjudlASJzqx+hOZjwhE+Q7jdJ8fIhkqsak= -github.com/charithe/durationcheck v0.0.9 h1:mPP4ucLrf/rKZiIG/a9IPXHGlh8p4CzgpyTy6EEutYk= -github.com/charithe/durationcheck v0.0.9/go.mod h1:SSbRIBVfMjCi/kEB6K65XEA83D6prSM8ap1UCpNKtgg= -github.com/chavacava/garif v0.0.0-20221024190013-b3ef35877348 h1:cy5GCEZLUCshCGCRRUjxHrDUqkB4l5cuUt3ShEckQEo= -github.com/chavacava/garif v0.0.0-20221024190013-b3ef35877348/go.mod h1:f/miWtG3SSuTxKsNK3o58H1xl+XV6ZIfbC6p7lPPB8U= +github.com/charithe/durationcheck v0.0.10 h1:wgw73BiocdBDQPik+zcEoBG/ob8uyBHf2iyoHGPf5w4= +github.com/charithe/durationcheck v0.0.10/go.mod h1:bCWXb7gYRysD1CU3C+u4ceO49LoGOY1C1L6uouGNreQ= +github.com/chavacava/garif v0.0.0-20230227094218-b8c73b2037b8 h1:W9o46d2kbNL06lq7UNDPV0zYLzkrde/bjIqO02eoll0= +github.com/chavacava/garif v0.0.0-20230227094218-b8c73b2037b8/go.mod h1:gakxgyXaaPkxvLw1XQxNGK4I37ys9iBRzNUx/B7pUCo= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -107,8 +114,8 @@ github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnht github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/curioswitch/go-reassign v0.2.0 h1:G9UZyOcpk/d7Gd6mqYgd8XYWFMw/znxwGDUstnC9DIo= github.com/curioswitch/go-reassign v0.2.0/go.mod h1:x6OpXuWvgfQaMGks2BZybTngWjT84hqJfKoO8Tt/Roc= -github.com/daixiang0/gci v0.9.1 h1:jBrwBmBZTDsGsXiaCTLIe9diotp1X4X64zodFrh7l+c= -github.com/daixiang0/gci v0.9.1/go.mod h1:EpVfrztufwVgQRXjnX4zuNinEpLj5OmMjtu/+MB0V0c= +github.com/daixiang0/gci v0.10.1 h1:eheNA3ljF6SxnPD/vE4lCBusVHmV3Rs3dkKvFrJ7MR0= +github.com/daixiang0/gci v0.10.1/go.mod h1:xtHP9N7AHdNvtRNfcx9gwTDfw7FRJx4bZUsiEfiNNAI= 
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -124,19 +131,19 @@ github.com/esimonov/ifshort v1.0.4 h1:6SID4yGWfRae/M7hkVDVVyppy8q/v9OuxNdmjLQStB github.com/esimonov/ifshort v1.0.4/go.mod h1:Pe8zjlRrJ80+q2CxHLfEOfTwxCZ4O+MuhcHcfgNWTk0= github.com/ettle/strcase v0.1.1 h1:htFueZyVeE1XNnMEfbqp5r67qAN/4r6ya1ysq8Q+Zcw= github.com/ettle/strcase v0.1.1/go.mod h1:hzDLsPC7/lwKyBOywSHEP89nt2pDgdy+No1NBA9o9VY= -github.com/fatih/color v1.14.1 h1:qfhVLaG5s+nCROl1zJsZRxFeYrHLqWroPOQ8BWiNb4w= -github.com/fatih/color v1.14.1/go.mod h1:2oHN61fhTpgcxD3TSWCgKDiH1+x4OiDVVGH8WlgGZGg= +github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= +github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= github.com/firefart/nonamedreturns v1.0.4 h1:abzI1p7mAEPYuR4A+VLKn4eNDOycjYo2phmY9sfv40Y= github.com/firefart/nonamedreturns v1.0.4/go.mod h1:TDhe/tjI1BXo48CmYbUduTV7BdIga8MAO/xbKdcVsGI= -github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= +github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY= github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI= github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= github.com/fzipp/gocyclo v0.6.0 h1:lsblElZG7d3ALtGMx9fmxeTKZaLLpU8mET09yN4BBLo= github.com/fzipp/gocyclo v0.6.0/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA= -github.com/go-critic/go-critic v0.6.7 h1:1evPrElnLQ2LZtJfmNDzlieDhjnq36SLgNzisx06oPM= -github.com/go-critic/go-critic v0.6.7/go.mod h1:fYZUijFdcnxgx6wPjQA2QEjIRaNCT0gO8bhexy6/QmE= +github.com/go-critic/go-critic v0.8.1 h1:16omCF1gN3gTzt4j4J6fKI/HnRojhEp+Eks6EuKw3vw= +github.com/go-critic/go-critic v0.8.1/go.mod h1:kpzXl09SIJX1cr9TB/g/sAG+eFEl7ZS9f9cqvZtyNl0= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -146,13 +153,13 @@ github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vb github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= -github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= +github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= github.com/go-toolsmith/astcast v1.1.0 h1:+JN9xZV1A+Re+95pgnMgDboWNVnIMMQXwfBwLRPgSC8= github.com/go-toolsmith/astcast v1.1.0/go.mod h1:qdcuFWeGGS2xX5bLM/c3U9lewg7+Zu4mr+xPwZIB4ZU= -github.com/go-toolsmith/astcopy v1.0.3 h1:r0bgSRlMOAgO+BdQnVAcpMSMkrQCnV6ZJmIkrJgcJj0= 
-github.com/go-toolsmith/astcopy v1.0.3/go.mod h1:4TcEdbElGc9twQEYpVo/aieIXfHhiuLh4aLAck6dO7Y= -github.com/go-toolsmith/astequal v1.0.2/go.mod h1:9Ai4UglvtR+4up+bAD4+hCj7iTo4m/OXVTSLnCyTAx4= +github.com/go-toolsmith/astcopy v1.1.0 h1:YGwBN0WM+ekI/6SS6+52zLDEf8Yvp3n2seZITCUBt5s= +github.com/go-toolsmith/astcopy v1.1.0/go.mod h1:hXM6gan18VA1T/daUEHCFcYiW8Ai1tIwIzHY6srfEAw= github.com/go-toolsmith/astequal v1.0.3/go.mod h1:9Ai4UglvtR+4up+bAD4+hCj7iTo4m/OXVTSLnCyTAx4= github.com/go-toolsmith/astequal v1.1.0 h1:kHKm1AWqClYn15R0K1KKE4RG614D46n+nqUQ06E1dTw= github.com/go-toolsmith/astequal v1.1.0/go.mod h1:sedf7VIdCL22LD8qIvv7Nn9MuWJruQA/ysswh64lffQ= @@ -160,7 +167,7 @@ github.com/go-toolsmith/astfmt v1.1.0 h1:iJVPDPp6/7AaeLJEruMsBUlOYCmvg0MoCfJprsO github.com/go-toolsmith/astfmt v1.1.0/go.mod h1:OrcLlRwu0CuiIBp/8b5PYF9ktGVZUjlNMV634mhwuQ4= github.com/go-toolsmith/astp v1.1.0 h1:dXPuCl6u2llURjdPLLDxJeZInAeZ0/eZwFJmqZMnpQA= github.com/go-toolsmith/astp v1.1.0/go.mod h1:0T1xFGz9hicKs8Z5MfAqSUitoUYS30pDMsRVIDHs8CA= -github.com/go-toolsmith/pkgload v1.0.2-0.20220101231613-e814995d17c5 h1:eD9POs68PHkwrx7hAB78z1cb6PfGq/jyWn3wJywsH1o= +github.com/go-toolsmith/pkgload v1.2.2 h1:0CtmHq/02QhxcF7E9N5LIFcYFsMR5rdovfqTtRKkgIk= github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= github.com/go-toolsmith/strparse v1.1.0 h1:GAioeZUK9TGxnLS+qfdqNbA4z0SSm5zVNtCQiyP2Bvw= github.com/go-toolsmith/strparse v1.1.0/go.mod h1:7ksGy58fsaQkGQlY8WVoBFNyEPMGuJin1rfoPS4lBSQ= @@ -209,8 +216,8 @@ github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe h1:6RGUuS7EGotKx6 github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe/go.mod h1:gjqyPShc/m8pEMpk0a3SeagVb0kaqvhscv+i9jI5ZhQ= github.com/golangci/gofmt v0.0.0-20220901101216-f2edd75033f2 h1:amWTbTGqOZ71ruzrdA+Nx5WA3tV1N0goTspwmKCQvBY= github.com/golangci/gofmt v0.0.0-20220901101216-f2edd75033f2/go.mod h1:9wOXstvyDRshQ9LggQuzBCGysxs3b6Uo/1MvYCR2NMs= -github.com/golangci/golangci-lint v1.51.2 h1:yIcsT1X9ZYHdSpeWXRT1ORC/FPGSqDHbHsu9uk4FK7M= -github.com/golangci/golangci-lint v1.51.2/go.mod h1:KH9Q7/3glwpYSknxUgUyLlAv46A8fsSKo1hH2wDvkr8= +github.com/golangci/golangci-lint v1.53.3 h1:CUcRafczT4t1F+mvdkUm6KuOpxUZTl0yWN/rSU6sSMo= +github.com/golangci/golangci-lint v1.53.3/go.mod h1:W4Gg3ONq6p3Jl+0s/h9Gr0j7yEgHJWWZO2bHl2tBUXM= github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA= github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0/go.mod h1:66R6K6P6VWk9I95jvqGxkqJxVWGFy9XlDwLwVz1RCFg= github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA= @@ -233,6 +240,7 @@ github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= @@ -250,18 +258,16 @@ github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod 
h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38 h1:yAJXTCF9TqKcTiHJAE8dj7HMvPfh66eeA2JYW7eFpSE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= -github.com/gordonklaus/ineffassign v0.0.0-20230107090616-13ace0543b28 h1:9alfqbrhuD+9fLZ4iaAVwhlp5PEhmnBt7yvK2Oy5C1U= -github.com/gordonklaus/ineffassign v0.0.0-20230107090616-13ace0543b28/go.mod h1:Qcp2HIAYhR7mNUVSIxZww3Guk4it82ghYcEXIAk+QT0= -github.com/gostaticanalysis/analysisutil v0.0.3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= -github.com/gostaticanalysis/analysisutil v0.1.0/go.mod h1:dMhHRU9KTiDcuLGdy87/2gTR8WruwYZrKdRq9m1O6uw= +github.com/gordonklaus/ineffassign v0.0.0-20230610083614-0e73809eb601 h1:mrEEilTAUmaAORhssPPkxj84TsHrPMLBGW2Z4SoTxm8= +github.com/gordonklaus/ineffassign v0.0.0-20230610083614-0e73809eb601/go.mod h1:Qcp2HIAYhR7mNUVSIxZww3Guk4it82ghYcEXIAk+QT0= github.com/gostaticanalysis/analysisutil v0.7.1 h1:ZMCjoue3DtDWQ5WyU16YbjbQEQ3VuzwxALrpYd+HeKk= github.com/gostaticanalysis/analysisutil v0.7.1/go.mod h1:v21E3hY37WKMGSnbsw2S/ojApNWb6C1//mXO48CXbVc= -github.com/gostaticanalysis/comment v1.3.0/go.mod h1:xMicKDx7XRXYdVwY9f9wQpDJVnqWxw9wCauCMKp+IBI= github.com/gostaticanalysis/comment v1.4.1/go.mod h1:ih6ZxzTHLdadaiSnF5WY3dxUoXfXAlTaRzuaNDlSado= github.com/gostaticanalysis/comment v1.4.2 h1:hlnx5+S2fY9Zo9ePo4AhgYsYHbM2+eAv8m/s1JiCd6Q= github.com/gostaticanalysis/comment v1.4.2/go.mod h1:KLUTGDv6HOCotCH8h2erHKmpci2ZoR8VPu34YA2uzdM= @@ -286,8 +292,8 @@ github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUq github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc= -github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jgautheron/goconst v1.5.1 h1:HxVbL1MhydKs8R8n/HE5NPvzfaYmQJA3o879lE4+WcM= github.com/jgautheron/goconst v1.5.1/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4= github.com/jingyugao/rowserrcheck v1.1.1 h1:zibz55j/MJtLsjP1OF4bSdgXxwL1b+Vn7Tjzq7gFzUs= @@ -305,33 +311,31 @@ github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7V github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/julz/importas v0.1.0 h1:F78HnrsjY3cR7j0etXy5+TU1Zuy7Xt08X/1aJnH5xXY= 
github.com/julz/importas v0.1.0/go.mod h1:oSFU2R4XK/P7kNBrnL/FEQlDGN1/6WoxXEjSSXO0DV0= -github.com/junk1tm/musttag v0.4.5 h1:d+mpJ1vn6WFEVKHwkgJiIedis1u/EawKOuUTygAUtCo= -github.com/junk1tm/musttag v0.4.5/go.mod h1:XkcL/9O6RmD88JBXb+I15nYRl9W4ExhgQeCBEhfMC8U= github.com/kisielk/errcheck v1.6.3 h1:dEKh+GLHcWm2oN34nMvDzn1sqI0i0WxPvrgiJA5JuM8= github.com/kisielk/errcheck v1.6.3/go.mod h1:nXw/i/MfnvRHqXa7XXmQMUB0oNFGuBrNI8d8NLy0LPw= github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kkHAIKE/contextcheck v1.1.3 h1:l4pNvrb8JSwRd51ojtcOxOeHJzHek+MtOyXbaR0uvmw= -github.com/kkHAIKE/contextcheck v1.1.3/go.mod h1:PG/cwd6c0705/LM0KTr1acO2gORUxkSVWyLJOFW5qoo= +github.com/kkHAIKE/contextcheck v1.1.4 h1:B6zAaLhOEEcjvUgIYEqystmnFk1Oemn8bvJhbt0GMb8= +github.com/kkHAIKE/contextcheck v1.1.4/go.mod h1:1+i/gWqokIa+dm31mqGLZhZJ7Uh44DJGZVmr6QRBNJg= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kulti/thelper v0.6.3 h1:ElhKf+AlItIu+xGnI990no4cE2+XaSu1ULymV2Yulxs= github.com/kulti/thelper v0.6.3/go.mod h1:DsqKShOvP40epevkFrvIwkCMNYxMeTNjdWL4dqWHZ6I= -github.com/kunwardeep/paralleltest v1.0.6 h1:FCKYMF1OF2+RveWlABsdnmsvJrei5aoyZoaGS+Ugg8g= -github.com/kunwardeep/paralleltest v1.0.6/go.mod h1:Y0Y0XISdZM5IKm3TREQMZ6iteqn1YuwCsJO/0kL9Zes= +github.com/kunwardeep/paralleltest v1.0.7 h1:2uCk94js0+nVNQoHZNLBkAR1DQJrVzw6T0RMzJn55dQ= +github.com/kunwardeep/paralleltest v1.0.7/go.mod h1:2C7s65hONVqY7Q5Efj5aLzRCNLjw2h4eMc9EcypGjcY= github.com/kyoh86/exportloopref v0.1.11 h1:1Z0bcmTypkL3Q4k+IDHMWTcnCliEZcaPiIe0/ymEyhQ= github.com/kyoh86/exportloopref v0.1.11/go.mod h1:qkV4UF1zGl6EkF1ox8L5t9SwyeBAZ3qLMd6up458uqA= github.com/ldez/gomoddirectives v0.2.3 h1:y7MBaisZVDYmKvt9/l1mjNCiSA1BVn34U0ObUcJwlhA= github.com/ldez/gomoddirectives v0.2.3/go.mod h1:cpgBogWITnCfRq2qGoDkKMEVSaarhdBr6g8G04uz6d0= -github.com/ldez/tagliatelle v0.4.0 h1:sylp7d9kh6AdXN2DpVGHBRb5guTVAgOxqNGhbqc4b1c= -github.com/ldez/tagliatelle v0.4.0/go.mod h1:mNtTfrHy2haaBAw+VT7IBV6VXBThS7TCreYWbBcJ87I= +github.com/ldez/tagliatelle v0.5.0 h1:epgfuYt9v0CG3fms0pEgIMNPuFf/LpPIfjk4kyqSioo= +github.com/ldez/tagliatelle v0.5.0/go.mod h1:rj1HmWiL1MiKQuOONhd09iySTEkUuE/8+5jtPYz9xa4= github.com/leonklingele/grouper v1.1.1 h1:suWXRU57D4/Enn6pXR0QVqqWWrnJ9Osrz+5rjt8ivzU= github.com/leonklingele/grouper v1.1.1/go.mod h1:uk3I3uDfi9B6PeUjsCKi6ndcf63Uy7snXgR4yDYQVDY= github.com/lufeee/execinquery v1.2.1 h1:hf0Ems4SHcUGBxpGN7Jz78z1ppVkP/837ZlETPCEtOM= @@ -340,10 +344,10 @@ github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamh github.com/magiconair/properties v1.8.6/go.mod 
h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/maratori/testableexamples v1.0.0 h1:dU5alXRrD8WKSjOUnmJZuzdxWOEQ57+7s93SLMxb2vI= github.com/maratori/testableexamples v1.0.0/go.mod h1:4rhjL1n20TUTT4vdh3RDqSizKLyXp7K2u6HgraZCGzE= -github.com/maratori/testpackage v1.1.0 h1:GJY4wlzQhuBusMF1oahQCBtUV/AQ/k69IZ68vxaac2Q= -github.com/maratori/testpackage v1.1.0/go.mod h1:PeAhzU8qkCwdGEMTEupsHJNlQu2gZopMC6RjbhmHeDc= -github.com/matoous/godox v0.0.0-20210227103229-6504466cf951 h1:pWxk9e//NbPwfxat7RXkts09K+dEBJWakUWwICVqYbA= -github.com/matoous/godox v0.0.0-20210227103229-6504466cf951/go.mod h1:1BELzlh859Sh1c6+90blK8lbYy0kwQf1bYlBhBysy1s= +github.com/maratori/testpackage v1.1.1 h1:S58XVV5AD7HADMmD0fNnziNHqKvSdDuEKdPD1rNTU04= +github.com/maratori/testpackage v1.1.1/go.mod h1:s4gRK/ym6AMrqpOa/kEbQTV4Q4jb7WeLZzVhVVVOQMc= +github.com/matoous/godox v0.0.0-20230222163458-006bad1f9d26 h1:gWg6ZQ4JhDfJPqlo2srm/LN17lpybq15AryXIRcWYLE= +github.com/matoous/godox v0.0.0-20230222163458-006bad1f9d26/go.mod h1:1BELzlh859Sh1c6+90blK8lbYy0kwQf1bYlBhBysy1s= github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= @@ -357,8 +361,8 @@ github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0j github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/mbilski/exhaustivestruct v1.2.0 h1:wCBmUnSYufAHO6J4AVWY6ff+oxWxsVFrwgOdMUQePUo= github.com/mbilski/exhaustivestruct v1.2.0/go.mod h1:OeTBVxQWoEmB2J2JCHmXWPJ0aksxSUOUy+nvtVEfzXc= -github.com/mgechev/revive v1.2.5 h1:UF9AR8pOAuwNmhXj2odp4mxv9Nx2qUIwVz8ZsU+Mbec= -github.com/mgechev/revive v1.2.5/go.mod h1:nFOXent79jMTISAfOAasKfy0Z2Ejq0WX7Qn/KAdYopI= +github.com/mgechev/revive v1.3.2 h1:Wb8NQKBaALBJ3xrrj4zpwJwqwNA6nDpyJSEQWcCka6U= +github.com/mgechev/revive v1.3.2/go.mod h1:UCLtc7o5vg5aXCwdUTU1kEBQ1v+YXPAkYDIDXbrs5I0= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= @@ -368,8 +372,8 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= -github.com/moricho/tparallel v0.2.1 h1:95FytivzT6rYzdJLdtfn6m1bfFJylOJK41+lgv/EHf4= -github.com/moricho/tparallel v0.2.1/go.mod h1:fXEIZxG2vdfl0ZF8b42f5a78EhjjD5mX8qUplsoSU4k= +github.com/moricho/tparallel v0.3.1 h1:fQKD4U1wRMAYNngDonW5XupoB/ZGJHdpzrWqgyg9krA= +github.com/moricho/tparallel v0.3.1/go.mod h1:leENX2cUv7Sv2qDgdi0D0fCftN8fRC67Bcn8pqzeYNI= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/nakabonne/nestif v0.3.1 h1:wm28nZjhQY5HyYPx+weN3Q65k6ilSBxDb8v5S81B81U= @@ -377,16 +381,16 @@ github.com/nakabonne/nestif v0.3.1/go.mod h1:9EtoZochLn5iUprVDmDjqGKPofoUEBL8U4N github.com/nbutton23/zxcvbn-go 
v0.0.0-20210217022336-fa2cb2858354 h1:4kuARK6Y6FxaNu/BnU2OAaLF86eTVhP2hjTB6iMvItA= github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354/go.mod h1:KSVJerMDfblTH7p5MZaTt+8zaT2iEk3AkVb9PQdZuE8= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= -github.com/nishanths/exhaustive v0.9.5 h1:TzssWan6orBiLYVqewCG8faud9qlFntJE30ACpzmGME= -github.com/nishanths/exhaustive v0.9.5/go.mod h1:IbwrGdVMizvDcIxPYGVdQn5BqWJaOwpCvg4RGb8r/TA= +github.com/nishanths/exhaustive v0.11.0 h1:T3I8nUGhl/Cwu5Z2hfc92l0e04D2GEW6e0l8pzda2l0= +github.com/nishanths/exhaustive v0.11.0/go.mod h1:RqwDsZ1xY0dNdqHho2z6X+bgzizwbLYOWnZbbl2wLB4= github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm/w98Vk= github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c= -github.com/nunnatsa/ginkgolinter v0.8.1 h1:/y4o/0hV+ruUHj4xXh89xlFjoaitnI4LnkpuYs02q1c= -github.com/nunnatsa/ginkgolinter v0.8.1/go.mod h1:FYYLtszIdmzCH8XMaMPyxPVXZ7VCaIm55bA+gugx+14= +github.com/nunnatsa/ginkgolinter v0.12.1 h1:vwOqb5Nu05OikTXqhvLdHCGcx5uthIYIl0t79UVrERQ= +github.com/nunnatsa/ginkgolinter v0.12.1/go.mod h1:AK8Ab1PypVrcGUusuKD8RDcl2KgsIwvNaaxAlyHSzso= github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= -github.com/onsi/ginkgo/v2 v2.8.0 h1:pAM+oBNPrpXRs+E/8spkeGx9QgekbRVyr74EUvRVOUI= -github.com/onsi/gomega v1.26.0 h1:03cDLK28U6hWvCAns6NeydX3zIm4SF3ci69ulidS32Q= +github.com/onsi/ginkgo/v2 v2.9.4 h1:xR7vG4IXt5RWx6FfIjyAtsoMAtnc3C/rFXBBd2AjZwE= +github.com/onsi/gomega v1.27.6 h1:ENqfyGeS5AX/rlXDd/ETokDz93u0YufY1Pgxuy/PvWE= github.com/otiai10/copy v1.2.0 h1:HvG945u96iNadPoG2/Ja2+AUJeW5YuFQMixq9yirC+k= github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw= github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE= @@ -404,8 +408,8 @@ github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/polyfloyd/go-errorlint v1.1.0 h1:VKoEFg5yxSgJ2yFPVhxW7oGz+f8/OVcuMeNvcPIi6Eg= -github.com/polyfloyd/go-errorlint v1.1.0/go.mod h1:Uss7Bc/izYG0leCMRx3WVlrpqWedSZk7V/FUQW6VJ6U= +github.com/polyfloyd/go-errorlint v1.4.2 h1:CU+O4181IxFDdPH6t/HT7IiDj1I7zxNi1RIUxYwn8d0= +github.com/polyfloyd/go-errorlint v1.4.2/go.mod h1:k6fU/+fQe38ednoZS51T7gSIGQW1y94d6TkSr35OzH8= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= @@ -432,14 +436,14 @@ github.com/quasilyte/go-ruleguard v0.3.19 h1:tfMnabXle/HzOb5Xe9CUZYWXKfkS1KwRmZy github.com/quasilyte/go-ruleguard v0.3.19/go.mod h1:lHSn69Scl48I7Gt9cX3VrbsZYvYiBYszZOZW4A+oTEw= github.com/quasilyte/gogrep v0.5.0 h1:eTKODPXbI8ffJMN+W2aE0+oL0z/nh8/5eNdiO34SOAo= github.com/quasilyte/gogrep v0.5.0/go.mod h1:Cm9lpz9NZjEoL1tgZ2OgeUKPIxL1meE7eo60Z6Sk+Ng= -github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95 h1:L8QM9bvf68pVdQ3bCFZMDmnt9yqcMBro1pC7F+IPYMY= 
-github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= +github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 h1:TCg2WBOl980XxGFEZSS6KlBGIV0diGdySzxATTWoqaU= +github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 h1:M8mH9eK4OUR4lu7Gd+PU1fV2/qnDNfzT635KRSObncs= github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567/go.mod h1:DWNGW8A4Y+GyBgPuaQJuWiy0XYftx4Xm/y5Jqk9I6VQ= github.com/randall77/makefat v0.0.0-20210315173500-7ddd0e42c844 h1:GranzK4hv1/pqTIhMTXt2X8MmMOuH3hMeUR0o9SP5yc= github.com/randall77/makefat v0.0.0-20210315173500-7ddd0e42c844/go.mod h1:T1TLSfyWVBRXVGzWd0o9BI4kfoO9InEgfQe4NV3mLz8= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= +github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryancurrah/gomodguard v1.3.0 h1:q15RT/pd6UggBXVBuLps8BXRvl5GPBcwVA7BJHMLuTw= github.com/ryancurrah/gomodguard v1.3.0/go.mod h1:ggBxb3luypPEzqVtq33ee7YSN35V28XeGnid8dnni50= @@ -451,8 +455,8 @@ github.com/sashamelentyev/interfacebloat v1.1.0 h1:xdRdJp0irL086OyW1H/RTZTr1h/tM github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84dxiN1iBi9+G8zZIhPVoNjQ= github.com/sashamelentyev/usestdlibvars v1.23.0 h1:01h+/2Kd+NblNItNeux0veSL5cBF1jbEOPrEhDzGYq0= github.com/sashamelentyev/usestdlibvars v1.23.0/go.mod h1:YPwr/Y1LATzHI93CqoPUN/2BzGQ/6N/cl/KwgR0B/aU= -github.com/securego/gosec/v2 v2.15.0 h1:v4Ym7FF58/jlykYmmhZ7mTm7FQvN/setNm++0fgIAtw= -github.com/securego/gosec/v2 v2.15.0/go.mod h1:VOjTrZOkUtSDt2QLSJmQBMWnvwiQPEjg0l+5juIqGk8= +github.com/securego/gosec/v2 v2.16.0 h1:Pi0JKoasQQ3NnoRao/ww/N/XdynIB9NRYYZT5CyOs5U= +github.com/securego/gosec/v2 v2.16.0/go.mod h1:xvLcVZqUfo4aAQu56TNv7/Ltz6emAOQAEsrZrt7uGlI= github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c h1:W65qqJCIOVP4jpqPQ0YvHYKwcMEMVWIzWC5iNQQfBTU= github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c/go.mod h1:/PevMnwAxekIXwN8qQyfc5gl2NlkB3CQlkizAbOkeBs= github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= @@ -460,24 +464,24 @@ github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOms github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= -github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= -github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/sivchari/containedctx v1.0.2 h1:0hLQKpgC53OVF1VT7CeoFHk9YKstur1XOgfYIc1yrHI= -github.com/sivchari/containedctx v1.0.2/go.mod h1:PwZOeqm4/DLoJOqMSIJs3aKqXRX4YO+uXww087KZ7Bw= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+Wwfd0XE= +github.com/sivchari/containedctx v1.0.3/go.mod 
h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4= github.com/sivchari/nosnakecase v1.7.0 h1:7QkpWIRMe8x25gckkFd2A5Pi6Ymo0qgr4JrhGt95do8= github.com/sivchari/nosnakecase v1.7.0/go.mod h1:CwDzrzPea40/GB6uynrNLiorAlgFRvRbFSgJx2Gs+QY= github.com/sivchari/tenv v1.7.1 h1:PSpuD4bu6fSmtWMxSGWcvqUUgIn7k3yOJhOIzVWn8Ak= github.com/sivchari/tenv v1.7.1/go.mod h1:64yStXKSOxDfX47NlhVwND4dHwfZDdbp2Lyl018Icvg= -github.com/sonatard/noctx v0.0.1 h1:VC1Qhl6Oxx9vvWo3UDgrGXYCeKCe3Wbw7qAWL6FrmTY= -github.com/sonatard/noctx v0.0.1/go.mod h1:9D2D/EoULe8Yy2joDHJj7bv3sZoq9AaSb8B4lqBjiZI= +github.com/sonatard/noctx v0.0.2 h1:L7Dz4De2zDQhW8S0t+KUjY0MAQJd6SgVwhzNIc4ok00= +github.com/sonatard/noctx v0.0.2/go.mod h1:kzFz+CzWSjQ2OzIm46uJZoXuBpa2+0y3T36U18dWqIo= github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0= github.com/sourcegraph/go-diff v0.7.0/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs= github.com/spf13/afero v1.8.2 h1:xehSyVa0YnHWsJ49JFljMpg1HX19V6NDZ1fkm1Xznbo= github.com/spf13/afero v1.8.2/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo= github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= -github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA= -github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY= +github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I= +github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= @@ -501,26 +505,27 @@ github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5 github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= -github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs= github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c h1:+aPplBwWcHBo6q9xrfWdMrT9o4kltkmmvpemgIjep/8= github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c/go.mod h1:SbErYREK7xXdsRiigaQiQkI9McGRzYMvlKYaP3Nimdk= -github.com/tdakkota/asciicheck v0.1.1 h1:PKzG7JUTUmVspQTDqtkX9eSiLGossXTybutHwTXuO0A= -github.com/tdakkota/asciicheck v0.1.1/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM= +github.com/tdakkota/asciicheck v0.2.0 h1:o8jvnUANo0qXtnslk2d3nMKTFNlOnJjRrNcj0j9qkHM= +github.com/tdakkota/asciicheck v0.2.0/go.mod h1:Qb7Y9EgjCLJGup51gDHFzbI08/gbGhL/UVhYIPWG2rg= github.com/tenntenn/modver v1.0.1 h1:2klLppGhDgzJrScMpkj9Ujy3rXPUspSjAcev9tSEBgA= github.com/tenntenn/modver v1.0.1/go.mod 
h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0= github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3 h1:f+jULpRQGxTSkNYKJ51yaw6ChIqO+Je8UqsTKN/cDag= github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY= github.com/tetafro/godot v1.4.11 h1:BVoBIqAf/2QdbFmSwAWnaIqDivZdOV0ZRwEm6jivLKw= github.com/tetafro/godot v1.4.11/go.mod h1:LR3CJpxDVGlYOWn3ZZg1PgNZdTUvzsZWu8xaEohUpn8= -github.com/timakin/bodyclose v0.0.0-20221125081123-e39cf3fc478e h1:MV6KaVu/hzByHP0UvJ4HcMGE/8a6A4Rggc/0wx2AvJo= -github.com/timakin/bodyclose v0.0.0-20221125081123-e39cf3fc478e/go.mod h1:27bSVNWSBOHm+qRp1T9qzaIpsWEP6TbUnei/43HK+PQ= -github.com/timonwong/loggercheck v0.9.3 h1:ecACo9fNiHxX4/Bc02rW2+kaJIAMAes7qJ7JKxt0EZI= -github.com/timonwong/loggercheck v0.9.3/go.mod h1:wUqnk9yAOIKtGA39l1KLE9Iz0QiTocu/YZoOf+OzFdw= -github.com/tomarrell/wrapcheck/v2 v2.8.0 h1:qDzbir0xmoE+aNxGCPrn+rUSxAX+nG6vREgbbXAR81I= -github.com/tomarrell/wrapcheck/v2 v2.8.0/go.mod h1:ao7l5p0aOlUNJKI0qVwB4Yjlqutd0IvAB9Rdwyilxvg= +github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966 h1:quvGphlmUVU+nhpFa4gg4yJyTRJ13reZMDHrKwYw53M= +github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966/go.mod h1:27bSVNWSBOHm+qRp1T9qzaIpsWEP6TbUnei/43HK+PQ= +github.com/timonwong/loggercheck v0.9.4 h1:HKKhqrjcVj8sxL7K77beXh0adEm6DLjV/QOGeMXEVi4= +github.com/timonwong/loggercheck v0.9.4/go.mod h1:caz4zlPcgvpEkXgVnAJGowHAMW2NwHaNlpS8xDbVhTg= +github.com/tomarrell/wrapcheck/v2 v2.8.1 h1:HxSqDSN0sAt0yJYsrcYVoEeyM4aI9yAm3KQpIXDJRhQ= +github.com/tomarrell/wrapcheck/v2 v2.8.1/go.mod h1:/n2Q3NZ4XFT50ho6Hbxg+RV1uyo2Uow/Vdm9NQcl5SE= github.com/tommy-muehle/go-mnd/v2 v2.5.1 h1:NowYhSdyE/1zwK9QCLeRb6USWdoif80Ie+v+yU8u1Zw= github.com/tommy-muehle/go-mnd/v2 v2.5.1/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= github.com/ultraware/funlen v0.0.3 h1:5ylVWm8wsNwH5aWo9438pwvsK0QiqVuUrt9bn7S/iLA= @@ -529,10 +534,14 @@ github.com/ultraware/whitespace v0.0.5 h1:hh+/cpIcopyMYbZNVov9iSxvJU3OYQg78Sfaqz github.com/ultraware/whitespace v0.0.5/go.mod h1:aVMh/gQve5Maj9hQ/hg+F75lr/X5A89uZnzAmWSineA= github.com/uudashr/gocognit v1.0.6 h1:2Cgi6MweCsdB6kpcVQp7EW4U23iBFQWfTXiWlyp842Y= github.com/uudashr/gocognit v1.0.6/go.mod h1:nAIUuVBnYU7pcninia3BHOvQkpQCeO76Uscky5BOwcY= +github.com/xen0n/gosmopolitan v1.2.1 h1:3pttnTuFumELBRSh+KQs1zcz4fN6Zy7aB0xlnQSn1Iw= +github.com/xen0n/gosmopolitan v1.2.1/go.mod h1:JsHq/Brs1o050OOdmzHeOr0N7OtlnKRAGAsElF8xBQA= github.com/yagipy/maintidx v1.0.0 h1:h5NvIsCz+nRDapQ0exNv4aJ0yXSI0420omVANTv3GJM= github.com/yagipy/maintidx v1.0.0/go.mod h1:0qNf/I/CCZXSMhsRsrEPDZ+DkekpKLXAJfsTACwgXLk= github.com/yeya24/promlinter v0.2.0 h1:xFKDQ82orCU5jQujdaD8stOHiv8UN68BSdn2a8u8Y3o= github.com/yeya24/promlinter v0.2.0/go.mod h1:u54lkmBOZrpEbQQ6gox2zWKKLKu2SGe+2KOiextY+IA= +github.com/ykadowak/zerologlint v0.1.2 h1:Um4P5RMmelfjQqQJKtE8ZW+dLZrXrENeIzWWKw800U4= +github.com/ykadowak/zerologlint v0.1.2/go.mod h1:KaUskqF3e/v59oPmdq1U1DnKcuHokl2/K1U4pmIELKg= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -542,18 +551,22 @@ github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= gitlab.com/bosi/decorder v0.2.3 h1:gX4/RgK16ijY8V+BRQHAySfQAb354T7/xQpDB2n10P0= 
gitlab.com/bosi/decorder v0.2.3/go.mod h1:9K1RB5+VPNQYtXtTDAzd2OEftsZb1oV0IrJrzChSdGE= +go-simpler.org/assert v0.5.0 h1:+5L/lajuQtzmbtEfh69sr5cRf2/xZzyJhFjoOz/PPqs= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.tmz.dev/musttag v0.7.0 h1:QfytzjTWGXZmChoX0L++7uQN+yRCPfyFm+whsM+lfGc= +go.tmz.dev/musttag v0.7.0/go.mod h1:oTFPvgOkJmp5kYL02S8+jrH0eLrBIl57rzWeA26zDEM= go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/goleak v1.1.11 h1:wy28qYRKZgnJTxGxvye5/wgWr1EKjmUDGYox5mGlRlI= go.uber.org/multierr v1.6.0 h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= -go.uber.org/zap v1.17.0 h1:MTjgFu6ZLKvY6Pvaqk97GlxNBuMpV4Hy/3P6tRGlI2U= -go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +go.uber.org/zap v1.24.0 h1:FiJd5l1UOLj0wCgbSE0rwwXHzEdAZS6hiiSnxJN/D60= +go.uber.org/zap v1.24.0/go.mod h1:2kMP+WWQ8aoFoedH3T2sq6iJ2yDWpHbP0f6MQbS9Gkg= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -574,11 +587,12 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e h1:+WEEuIdZHnUeJJmEUjyYC2gfUMj69yZXw17EnHg/otA= -golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e/go.mod h1:Kr81I6Kryrl9sr8s2FK3vxD90NdsKWRuOIl2O4CvYbA= +golang.org/x/exp v0.0.0-20230510235704-dd950f8aeaea h1:vLCWI/yYrdEHyN2JzIzPO3aaQJHQdp89IZBA/+azVC4= +golang.org/x/exp v0.0.0-20230510235704-dd950f8aeaea/go.mod h1:V1LtkGg67GoY2N1AnLN78QLrzxkLyJw7RJb1gzOOz9w= golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= -golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9 h1:6WHiuFL9FNjg8RljAaT7FNUuKDbvMqS1i5cr2OE2sLQ= golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= +golang.org/x/exp/typeparams v0.0.0-20230224173230-c95f2b4c22f2 h1:J74nGeMgeFnYQJN59eFwh06jX/V8g0lB7LWpjSLxtgU= +golang.org/x/exp/typeparams v0.0.0-20230224173230-c95f2b4c22f2/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod 
h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -608,8 +622,9 @@ golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2 golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.6.0/go.mod h1:4mET923SAdbXp2ki8ey+zGs1SLqsuM2Y0uvdZR/fUNI= golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.8.0 h1:LUYupSeNrTNCGzR/hVBk2NHZO4hXcVaW1k4Qx7rjPx8= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= +golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -649,10 +664,9 @@ golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qx golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= -golang.org/x/net v0.3.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= -golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= -golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -675,8 +689,9 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= +golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -733,16 +748,16 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA= +golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= -golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -752,20 +767,17 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4= +golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190307163923-6a08e3108db3/go.mod h1:25r3+/G6/xytQM8iWZKq3Hn0kr0rgFKPUNVEL/dr3z4= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod 
h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190321232350-e250d351ecad/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= @@ -788,7 +800,6 @@ golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117220505-0cba7a3a9ee9/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= @@ -801,18 +812,15 @@ golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjs golang.org/x/tools v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200414032229-332987a829c3/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200624225443-88f3c62a19ff/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200724022722-7017fd6b1305/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200831203904-5a2aa26beb65/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= golang.org/x/tools v0.0.0-20201001104356-43ebab892c4c/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= @@ -832,10 +840,10 @@ golang.org/x/tools v0.1.11/go.mod h1:SgwaegtQh8clINPpECJMqnxLv9I09HLqnW3RMqW0CA4 golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.2.0/go.mod h1:y4OqIKeOV/fWJetJ8bXPU1sEVniLMIyDAZWeHdV+NTA= golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k= -golang.org/x/tools v0.4.0/go.mod h1:UE5sM2OK9E/d67R0ANs2xJizIymRP5gJU295PvKXxjQ= golang.org/x/tools v0.5.0/go.mod 
h1:N+Kgy78s5I24c24dU8OfWNEotWjutIs8SnJvn5IDq+k= -golang.org/x/tools v0.6.0 h1:BOw41kyTf3PuCW1pVQf8+Cyg8pMlkYB1oo9iJ6D/lKM= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/tools v0.11.0 h1:EMCa6U9S2LtZXLAMoWiR/R8dAQFRqbAitmbJ2UKhoi8= +golang.org/x/tools v0.11.0/go.mod h1:anzJrxPjNtfgiYQYirP2CPGzGLxrH2u2QBhn6Bf3qY8= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -944,12 +952,10 @@ gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= @@ -959,10 +965,10 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.4.2 h1:6qXr+R5w+ktL5UkwEbPp+fEvfyoMPche6GkOpGHZcLc= -honnef.co/go/tools v0.4.2/go.mod h1:36ZgoUOrqOk1GxwHhyryEkq8FQWkUO2xGuSMhUCcdvA= -mvdan.cc/gofumpt v0.4.0 h1:JVf4NN1mIpHogBj7ABpgOyZc65/UUOkKQFkoURsz4MM= -mvdan.cc/gofumpt v0.4.0/go.mod h1:PljLOHDeZqgS8opHRKLzp2It2VBuSdteAgqUfzMTxlQ= +honnef.co/go/tools v0.4.3 h1:o/n5/K5gXqk8Gozvs2cnL0F2S1/g1vcGCAx2vETjITw= +honnef.co/go/tools v0.4.3/go.mod h1:36ZgoUOrqOk1GxwHhyryEkq8FQWkUO2xGuSMhUCcdvA= +mvdan.cc/gofumpt v0.5.0 h1:0EQ+Z56k8tXjj/6TQD25BFNKQXpCvT0rnansIc7Ug5E= +mvdan.cc/gofumpt v0.5.0/go.mod h1:HBeVDtMKRZpXyxFciAirzdKklDlGu8aAy1wEbH5Y9js= mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I= mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc= mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo= diff --git a/tools/vendor/github.com/4meepo/tagalign/.gitignore b/tools/vendor/github.com/4meepo/tagalign/.gitignore new file mode 100644 index 0000000000..e37bb52e49 --- /dev/null +++ b/tools/vendor/github.com/4meepo/tagalign/.gitignore @@ -0,0 +1,75 @@ +# File created using '.gitignore Generator' for Visual Studio Code: https://bit.ly/vscode-gig +# Created by 
https://www.toptal.com/developers/gitignore/api/visualstudiocode,macos,go +# Edit at https://www.toptal.com/developers/gitignore?templates=visualstudiocode,macos,go + +### Go ### +# If you prefer the allow list template instead of the deny list, see community template: +# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore +# +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +.vscode + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +# Go workspace file +go.work + +### macOS ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### macOS Patch ### +# iCloud generated files +*.icloud + +.history/ + +# Built Visual Studio Code Extensions +*.vsix + +### VisualStudioCode Patch ### +# Ignore all local history of files +.history +.ionide + +# End of https://www.toptal.com/developers/gitignore/api/visualstudiocode,macos,go + +# Custom rules (everything added below won't be overriden by 'Generate .gitignore File' if you use 'Update' option) diff --git a/tools/vendor/github.com/4meepo/tagalign/.golangci.yml b/tools/vendor/github.com/4meepo/tagalign/.golangci.yml new file mode 100644 index 0000000000..99baa8c213 --- /dev/null +++ b/tools/vendor/github.com/4meepo/tagalign/.golangci.yml @@ -0,0 +1,107 @@ +# See https://golangci-lint.run/usage/configuration/ + +linters-settings: + revive: + # see https://github.com/mgechev/revive#available-rules for details. 
+ ignore-generated-header: true + severity: warning + rules: + - name: atomic + - name: blank-imports + - name: bool-literal-in-expr + - name: call-to-gc + - name: confusing-naming + - name: confusing-results + - name: constant-logical-expr + - name: context-as-argument + - name: context-keys-type + - name: deep-exit + - name: defer + - name: dot-imports + - name: duplicated-imports + - name: early-return + - name: empty-block + - name: empty-lines + - name: error-naming + - name: error-return + - name: error-strings + - name: errorf + - name: exported + - name: get-return + - name: identical-branches + - name: if-return + - name: import-shadowing + - name: increment-decrement + - name: indent-error-flow + - name: modifies-parameter + - name: modifies-value-receiver + - name: package-comments + - name: range + - name: range-val-address + - name: range-val-in-closure + - name: receiver-naming + - name: redefines-builtin-id + - name: string-of-int + - name: struct-tag + - name: superfluous-else + - name: time-naming + - name: unconditional-recursion + - name: unexported-naming + - name: unexported-return + - name: unnecessary-stmt + - name: unreachable-code + - name: unused-parameter + - name: var-declaration + - name: var-naming + - name: waitgroup-by-value + +linters: + disable-all: true + enable: + - asciicheck + - bodyclose + - dogsled + - dupl + - durationcheck + - errcheck + - errorlint + - exhaustive + - exportloopref + - forcetypeassert + - gochecknoinits + - gocognit + - goconst + - gocritic + - gocyclo + - godot + - godox + - goimports + - gomoddirectives + - gomodguard + - goprintffuncname + - gosec + - gosimple + - govet + - importas + - ineffassign + - makezero + - misspell + - nakedret + - nestif + - nilerr + - noctx + - nolintlint + - prealloc + - predeclared + - revive + - rowserrcheck + - sqlclosecheck + - staticcheck + - stylecheck + - thelper + - tparallel + - typecheck + - unconvert + - unparam + - unused + - whitespace diff --git a/tools/vendor/github.com/4meepo/tagalign/.goreleaser.yml b/tools/vendor/github.com/4meepo/tagalign/.goreleaser.yml new file mode 100644 index 0000000000..e7b6f6800e --- /dev/null +++ b/tools/vendor/github.com/4meepo/tagalign/.goreleaser.yml @@ -0,0 +1,32 @@ +--- +project_name: tagalign + +release: + github: + owner: 4meepo + name: tagalign + +builds: + - binary: tagalign + goos: + - darwin + - windows + - linux + - freebsd + goarch: + - amd64 + - arm64 + - arm + goarm: + - 6 + - 7 + gomips: + - hardfloat + env: + - CGO_ENABLED=0 + ignore: + - goos: darwin + goarch: 386 + - goos: freebsd + goarch: arm64 + main: ./cmd/tagalign/ \ No newline at end of file diff --git a/tools/vendor/github.com/junk1tm/musttag/LICENSE b/tools/vendor/github.com/4meepo/tagalign/LICENSE similarity index 97% rename from tools/vendor/github.com/junk1tm/musttag/LICENSE rename to tools/vendor/github.com/4meepo/tagalign/LICENSE index 38baef8d41..da3ae82706 100644 --- a/tools/vendor/github.com/junk1tm/musttag/LICENSE +++ b/tools/vendor/github.com/4meepo/tagalign/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2022 junk1tm +Copyright (c) 2023 Yifei Liu Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/tools/vendor/github.com/4meepo/tagalign/Makefile b/tools/vendor/github.com/4meepo/tagalign/Makefile new file mode 100644 index 0000000000..614e7773c3 --- /dev/null +++ b/tools/vendor/github.com/4meepo/tagalign/Makefile @@ -0,0 +1,7 @@ +.PHONY: lint +lint: + 
golangci-lint run ./... + +.PHONY: build +build: + go build -o tagalign cmd/tagalign/tagalign.go \ No newline at end of file diff --git a/tools/vendor/github.com/4meepo/tagalign/README.md b/tools/vendor/github.com/4meepo/tagalign/README.md new file mode 100644 index 0000000000..9181e7a190 --- /dev/null +++ b/tools/vendor/github.com/4meepo/tagalign/README.md @@ -0,0 +1,99 @@ +# Go Tag Align + +![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/4meepo/tagalign?style=flat-square) +[![codecov](https://codecov.io/github/4meepo/tagalign/branch/main/graph/badge.svg?token=1R1T61UNBQ)](https://codecov.io/github/4meepo/tagalign) +[![GoDoc](https://godoc.org/github.com/4meepo/tagalign?status.svg)](https://pkg.go.dev/github.com/4meepo/tagalign) +[![Go Report Card](https://goreportcard.com/badge/github.com/4meepo/tagalign)](https://goreportcard.com/report/github.com/4meepo/tagalign) + +TagAlign is used to align and sort tags in Go struct. It can make the struct more readable and easier to maintain. + +For example, this struct + +```go +type FooBar struct { + Foo int `json:"foo" validate:"required"` + Bar string `json:"bar" validate:"required"` + FooFoo int8 `json:"foo_foo" validate:"required"` + BarBar int `json:"bar_bar" validate:"required"` + FooBar struct { + Foo int `json:"foo" yaml:"foo" validate:"required"` + Bar222 string `json:"bar222" validate:"required" yaml:"bar"` + } `json:"foo_bar" validate:"required"` + BarFoo string `json:"bar_foo" validate:"required"` + BarFooBar string `json:"bar_foo_bar" validate:"required"` +} +``` + +can be aligned to: + +```go +type FooBar struct { + Foo int `json:"foo" validate:"required"` + Bar string `json:"bar" validate:"required"` + FooFoo int8 `json:"foo_foo" validate:"required"` + BarBar int `json:"bar_bar" validate:"required"` + FooBar struct { + Foo int `json:"foo" yaml:"foo" validate:"required"` + Bar222 string `json:"bar222" validate:"required" yaml:"bar"` + } `json:"foo_bar" validate:"required"` + BarFoo string `json:"bar_foo" validate:"required"` + BarFooBar string `json:"bar_foo_bar" validate:"required"` +} +``` + +In addition to alignment, it can also sort tags with fixed order. If we enable sort with fixed order `json,xml`, the following code + +```go +type SortExample struct { + Foo int `json:"foo,omitempty" yaml:"bar" xml:"baz" binding:"required" gorm:"column:foo" zip:"foo" validate:"required"` + Bar int `validate:"required" yaml:"foo" xml:"bar" binding:"required" json:"bar,omitempty" gorm:"column:bar" zip:"bar" ` + FooBar int `gorm:"column:bar" validate:"required" xml:"bar" binding:"required" json:"bar,omitempty" zip:"bar" yaml:"foo"` +} +``` + +will be sorted and aligned to: + +```go +type SortExample struct { + Foo int `json:"foo,omitempty" xml:"baz" binding:"required" gorm:"column:foo" validate:"required" yaml:"bar" zip:"foo"` + Bar int `json:"bar,omitempty" xml:"bar" binding:"required" gorm:"column:bar" validate:"required" yaml:"foo" zip:"bar"` + FooBar int `json:"bar,omitempty" xml:"bar" binding:"required" gorm:"column:bar" validate:"required" yaml:"foo" zip:"bar"` +} +``` + +The fixed order is `json,xml`, so the tags `json` and `xml` will be sorted and aligned first, and the rest tags will be sorted and aligned in the dictionary order. + +## Install + +```bash +go install github.com/4meepo/tagalign/cmd/tagalign +``` + +## Usage + +By default tagalign will only align tags, but not sort them. But alignment and sort can work together or separately. 
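The same options are also exposed programmatically through the analyzer API added in this change (`NewAnalyzer`, `WithSort`, `WithAlign` in `options.go`). A minimal sketch of driving it with the `singlechecker` helper from `golang.org/x/tools` (the driver choice here is an assumption, not something this module prescribes):

```go
package main

import (
	"github.com/4meepo/tagalign"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	// Align tags (the default) and sort them with json first, then xml;
	// tags outside the fixed order fall back to alphabetical order.
	singlechecker.Main(tagalign.NewAnalyzer(tagalign.WithSort("json", "xml")))
}
```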
+ +If you don't want to align tags, you can use `-noalign` to disable alignment. + +You can use `-sort` to enable sort and `-order` to set the fixed order of tags. + +```bash +# Only align tags. +tagalign -fix {package path} +# Only sort tags with fixed order. +tagalign -fix -noalign -sort -order "json,xml" {package path} +# Align and sort together. +tagalign -fix -sort -order "json,xml" {package path} +``` + +TODO: integrate with golangci-lint + +## Reference + +[Golang AST Visualizer](http://goast.yuroyoro.net/) + +[Create New Golang CI Linter](https://golangci-lint.run/contributing/new-linters/) + +[Autofix Example](https://github.com/golangci/golangci-lint/pull/2450/files) + +[Integrating](https://disaev.me/p/writing-useful-go-analysis-linter/#integrating) diff --git a/tools/vendor/github.com/4meepo/tagalign/options.go b/tools/vendor/github.com/4meepo/tagalign/options.go new file mode 100644 index 0000000000..4deaf8cbc4 --- /dev/null +++ b/tools/vendor/github.com/4meepo/tagalign/options.go @@ -0,0 +1,28 @@ +package tagalign + +type Option func(*Helper) + +// WithMode specify the mode of tagalign. +func WithMode(mode Mode) Option { + return func(h *Helper) { + h.mode = mode + } +} + +// WithSort enable tags sort. +// fixedOrder specify the order of tags, the other tags will be sorted by name. +// Sory is disabled by default. +func WithSort(fixedOrder ...string) Option { + return func(h *Helper) { + h.sort = true + h.fixedTagOrder = fixedOrder + } +} + +// WithAlign configure whether enable tags align. +// Align is enabled by default. +func WithAlign(enabled bool) Option { + return func(h *Helper) { + h.align = enabled + } +} diff --git a/tools/vendor/github.com/4meepo/tagalign/tagalign.go b/tools/vendor/github.com/4meepo/tagalign/tagalign.go new file mode 100644 index 0000000000..3dae96bc3e --- /dev/null +++ b/tools/vendor/github.com/4meepo/tagalign/tagalign.go @@ -0,0 +1,350 @@ +package tagalign + +import ( + "fmt" + "go/ast" + "go/token" + "log" + "reflect" + "sort" + "strconv" + "strings" + + "github.com/fatih/structtag" + + "golang.org/x/tools/go/analysis" +) + +type Mode int + +const ( + StandaloneMode Mode = iota + GolangciLintMode +) + +func NewAnalyzer(options ...Option) *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "tagalign", + Doc: "check that struct tags are well aligned", + Run: func(p *analysis.Pass) (any, error) { + Run(p, options...) + return nil, nil + }, + } +} + +func Run(pass *analysis.Pass, options ...Option) []Issue { + var issues []Issue + for _, f := range pass.Files { + h := &Helper{ + mode: StandaloneMode, + align: true, + } + for _, opt := range options { + opt(h) + } + + if !h.align && !h.sort { + // do nothing + return nil + } + + ast.Inspect(f, func(n ast.Node) bool { + h.find(pass, n) + return true + }) + h.Process(pass) + issues = append(issues, h.issues...) + } + return issues +} + +type Helper struct { + mode Mode + + align bool // whether enable tags align. + sort bool // whether enable tags sort. + fixedTagOrder []string // the order of tags, the other tags will be sorted by name. + + singleFields []*ast.Field + consecutiveFieldsGroups [][]*ast.Field // fields in this group, must be consecutive in struct. + issues []Issue +} + +// Issue is used to integrate with golangci-lint's inline auto fix. 
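// In GolangciLintMode the helper collects Issues instead of calling
// pass.Report; InlineFix carries the zero-based start column, the length of
// the original tag literal, and the full replacement string for the tag.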
+type Issue struct { + Pos token.Position + Message string + InlineFix InlineFix +} +type InlineFix struct { + StartCol int // zero-based + Length int + NewString string +} + +func (w *Helper) find(pass *analysis.Pass, n ast.Node) { + v, ok := n.(*ast.StructType) + if !ok { + return + } + + fields := v.Fields.List + if len(fields) == 0 { + return + } + + fs := make([]*ast.Field, 0) + split := func() { + n := len(fs) + if n > 1 { + w.consecutiveFieldsGroups = append(w.consecutiveFieldsGroups, fs) + } else if n == 1 { + w.singleFields = append(w.singleFields, fs[0]) + } + + fs = nil + } + + for i, field := range fields { + if field.Tag == nil { + // field without tags + split() + continue + } + + if i > 0 { + if fields[i-1].Tag == nil { + // if previous filed do not have a tag + fs = append(fs, field) + continue + } + preLineNum := pass.Fset.Position(fields[i-1].Tag.Pos()).Line + lineNum := pass.Fset.Position(field.Tag.Pos()).Line + if lineNum-preLineNum > 1 { + // fields with tags are not consecutive, including two case: + // 1. splited by lines + // 2. splited by a struct + split() + + // check if the field is a struct + if _, ok := field.Type.(*ast.StructType); ok { + continue + } + } + } + + fs = append(fs, field) + } + + split() +} + +func (w *Helper) report(pass *analysis.Pass, field *ast.Field, startCol int, msg, replaceStr string) { + if w.mode == GolangciLintMode { + iss := Issue{ + Pos: pass.Fset.Position(field.Tag.Pos()), + Message: msg, + InlineFix: InlineFix{ + StartCol: startCol, + Length: len(field.Tag.Value), + NewString: replaceStr, + }, + } + w.issues = append(w.issues, iss) + } + + if w.mode == StandaloneMode { + pass.Report(analysis.Diagnostic{ + Pos: field.Tag.Pos(), + End: field.Tag.End(), + Message: msg, + SuggestedFixes: []analysis.SuggestedFix{ + { + Message: msg, + TextEdits: []analysis.TextEdit{ + { + Pos: field.Tag.Pos(), + End: field.Tag.End(), + NewText: []byte(replaceStr), + }, + }, + }, + }, + }) + } +} + +func (w *Helper) Process(pass *analysis.Pass) { //nolint:gocognit + // process grouped fields + for _, fields := range w.consecutiveFieldsGroups { + offsets := make([]int, len(fields)) + + var maxTagNum int + var tagsGroup, notSortedTagsGroup [][]*structtag.Tag + for i, field := range fields { + offsets[i] = pass.Fset.Position(field.Tag.Pos()).Column + tag, err := strconv.Unquote(field.Tag.Value) + if err != nil { + break + } + + tags, err := structtag.Parse(tag) + if err != nil { + break + } + + maxTagNum = max(maxTagNum, tags.Len()) + + if w.sort { + cp := make([]*structtag.Tag, tags.Len()) + for i, tag := range tags.Tags() { + cp[i] = tag + } + notSortedTagsGroup = append(notSortedTagsGroup, cp) + sortBy(w.fixedTagOrder, tags) + } + + tagsGroup = append(tagsGroup, tags.Tags()) + } + + // if w.align{ + // record the max length of each column tag + tagMaxLens := make([]int, maxTagNum) + + for j := 0; j < maxTagNum; j++ { + var maxLength int + for i := 0; i < len(tagsGroup); i++ { + if len(tagsGroup[i]) <= j { + // in case of index out of range + continue + } + maxLength = max(maxLength, len(tagsGroup[i][j].String())) + } + tagMaxLens[j] = maxLength + } + + for i, field := range fields { + tags := tagsGroup[i] + + var newTagStr string + if w.align { + // if align enabled, align tags. 
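// alignFormat(n) produces a left-justified verb such as "%-12s", so each
// tag is padded to the widest tag seen in its column plus one extra space,
// which lines the following column up across the whole field group.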
+ newTagBuilder := strings.Builder{} + for i, tag := range tags { + format := alignFormat(tagMaxLens[i] + 1) // with an extra space + newTagBuilder.WriteString(fmt.Sprintf(format, tag.String())) + } + newTagStr = newTagBuilder.String() + } else { + // otherwise check if tags order changed + if w.sort && reflect.DeepEqual(notSortedTagsGroup[i], tags) { + // if tags order not changed, do nothing + continue + } + tagsStr := make([]string, len(tags)) + for i, tag := range tags { + tagsStr[i] = tag.String() + } + newTagStr = strings.Join(tagsStr, " ") + } + + unquoteTag := strings.TrimSpace(newTagStr) + newTagValue := fmt.Sprintf("`%s`", unquoteTag) + if field.Tag.Value == newTagValue { + // nothing changed + continue + } + + msg := "tag is not aligned, should be: " + unquoteTag + + w.report(pass, field, offsets[i]-1, msg, newTagValue) + } + } + + // process single fields + for _, field := range w.singleFields { + tag, err := strconv.Unquote(field.Tag.Value) + if err != nil { + continue + } + + tags, err := structtag.Parse(tag) + if err != nil { + continue + } + originalTags := append([]*structtag.Tag(nil), tags.Tags()...) + if w.sort { + sortBy(w.fixedTagOrder, tags) + } + + if reflect.DeepEqual(originalTags, tags.Tags()) { + // if tags order not changed, do nothing + continue + } + + newTagValue := fmt.Sprintf("`%s`", tags.String()) + if field.Tag.Value == newTagValue { + // nothing changed + continue + } + + msg := "tag is not aligned , should be: " + tags.String() + + w.report(pass, field, pass.Fset.Position(field.Tag.Pos()).Column-1, msg, newTagValue) + } +} + +// Issues returns all issues found by the analyzer. +// It is used to integrate with golangci-lint. +func (w *Helper) Issues() []Issue { + log.Println("tagalign 's Issues() should only be called in golangci-lint mode") + return w.issues +} + +// sortBy sorts tags by fixed order. +// If a tag is not in the fixed order, it will be sorted by name. 
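// For example, with fixedOrder = []string{"json", "xml"} the tags
// `yaml:"a" xml:"b" json:"c"` become `json:"c" xml:"b" yaml:"a"`: keys in
// fixedOrder keep that order, and the remaining keys are sorted by name
// after them.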
+func sortBy(fixedOrder []string, tags *structtag.Tags) { + // sort by fixed order + sort.Slice(tags.Tags(), func(i, j int) bool { + ti := tags.Tags()[i] + tj := tags.Tags()[j] + + oi := findIndex(fixedOrder, ti.Key) + oj := findIndex(fixedOrder, tj.Key) + + if oi == -1 && oj == -1 { + return ti.Key < tj.Key + } + + if oi == -1 { + return false + } + + if oj == -1 { + return true + } + + return oi < oj + }) +} + +func findIndex(s []string, e string) int { + for i, a := range s { + if a == e { + return i + } + } + return -1 +} + +func alignFormat(length int) string { + return "%" + fmt.Sprintf("-%ds", length) +} + +func max(a, b int) int { + if a > b { + return a + } + return b +} diff --git a/tools/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go b/tools/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go index 71a9ddf402..6bed7696a9 100644 --- a/tools/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go +++ b/tools/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go @@ -50,15 +50,17 @@ var ( } ) -type typeSpecByName map[string]*ast.TypeSpec +type typeSpecByName map[string]typer func (n *nilNil) run(pass *analysis.Pass) (interface{}, error) { insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - typeSpecs := typeSpecByName{} + typeSpecs := typeSpecByName{ + "any": newTyper(new(ast.InterfaceType)), + } insp.Preorder(types, func(node ast.Node) { t := node.(*ast.TypeSpec) - typeSpecs[t.Name.Name] = t + typeSpecs[t.Name.Name] = newTyper(t.Type) }) var fs funcTypeStack @@ -125,7 +127,7 @@ func (n *nilNil) isDangerNilType(t ast.Expr, typeSpecs typeSpecByName) bool { case *ast.Ident: if t, ok := typeSpecs[v.Name]; ok { - return n.isDangerNilType(t.Type, nil) + return n.isDangerNilType(t.Type(), typeSpecs) } } return false @@ -146,3 +148,11 @@ func isIdent(n ast.Node, name string) bool { } return i.Name == name } + +type typer interface { + Type() ast.Expr +} + +func newTyper(t ast.Expr) typer { return typerImpl{t: t} } // +type typerImpl struct{ t ast.Expr } // +func (ti typerImpl) Type() ast.Expr { return ti.t } diff --git a/tools/vendor/github.com/BurntSushi/toml/decode.go b/tools/vendor/github.com/BurntSushi/toml/decode.go index 0ca1dc4fee..4d38f3bfce 100644 --- a/tools/vendor/github.com/BurntSushi/toml/decode.go +++ b/tools/vendor/github.com/BurntSushi/toml/decode.go @@ -91,7 +91,7 @@ const ( // UnmarshalText method. See the Unmarshaler example for a demonstration with // email addresses. // -// ### Key mapping +// # Key mapping // // TOML keys can map to either keys in a Go map or field names in a Go struct. // The special `toml` struct tag can be used to map TOML keys to struct fields @@ -248,7 +248,7 @@ func (md *MetaData) unify(data interface{}, rv reflect.Value) error { case reflect.Bool: return md.unifyBool(data, rv) case reflect.Interface: - if rv.NumMethod() > 0 { // Only support empty interfaces are supported. + if rv.NumMethod() > 0 { /// Only empty interfaces are supported. return md.e("unsupported type %s", rv.Type()) } return md.unifyAnything(data, rv) diff --git a/tools/vendor/github.com/BurntSushi/toml/deprecated.go b/tools/vendor/github.com/BurntSushi/toml/deprecated.go index c6af3f239d..b9e309717e 100644 --- a/tools/vendor/github.com/BurntSushi/toml/deprecated.go +++ b/tools/vendor/github.com/BurntSushi/toml/deprecated.go @@ -5,17 +5,25 @@ import ( "io" ) +// TextMarshaler is an alias for encoding.TextMarshaler. 
+// // Deprecated: use encoding.TextMarshaler type TextMarshaler encoding.TextMarshaler +// TextUnmarshaler is an alias for encoding.TextUnmarshaler. +// // Deprecated: use encoding.TextUnmarshaler type TextUnmarshaler encoding.TextUnmarshaler +// PrimitiveDecode is an alias for MetaData.PrimitiveDecode(). +// // Deprecated: use MetaData.PrimitiveDecode. func PrimitiveDecode(primValue Primitive, v interface{}) error { md := MetaData{decoded: make(map[string]struct{})} return md.unify(primValue.undecoded, rvalue(v)) } +// DecodeReader is an alias for NewDecoder(r).Decode(v). +// // Deprecated: use NewDecoder(reader).Decode(&value). func DecodeReader(r io.Reader, v interface{}) (MetaData, error) { return NewDecoder(r).Decode(v) } diff --git a/tools/vendor/github.com/BurntSushi/toml/encode.go b/tools/vendor/github.com/BurntSushi/toml/encode.go index 930e1d521a..9cd25d7571 100644 --- a/tools/vendor/github.com/BurntSushi/toml/encode.go +++ b/tools/vendor/github.com/BurntSushi/toml/encode.go @@ -136,7 +136,8 @@ func NewEncoder(w io.Writer) *Encoder { // document. func (enc *Encoder) Encode(v interface{}) error { rv := eindirect(reflect.ValueOf(v)) - if err := enc.safeEncode(Key([]string{}), rv); err != nil { + err := enc.safeEncode(Key([]string{}), rv) + if err != nil { return err } return enc.w.Flush() @@ -457,6 +458,16 @@ func (enc *Encoder) eStruct(key Key, rv reflect.Value, inline bool) { frv := eindirect(rv.Field(i)) + if is32Bit { + // Copy so it works correct on 32bit archs; not clear why this + // is needed. See #314, and https://www.reddit.com/r/golang/comments/pnx8v4 + // This also works fine on 64bit, but 32bit archs are somewhat + // rare and this is a wee bit faster. + copyStart := make([]int, len(start)) + copy(copyStart, start) + start = copyStart + } + // Treat anonymous struct fields with tag names as though they are // not anonymous, like encoding/json does. // @@ -471,17 +482,7 @@ func (enc *Encoder) eStruct(key Key, rv reflect.Value, inline bool) { if typeIsTable(tomlTypeOfGo(frv)) { fieldsSub = append(fieldsSub, append(start, f.Index...)) } else { - // Copy so it works correct on 32bit archs; not clear why this - // is needed. See #314, and https://www.reddit.com/r/golang/comments/pnx8v4 - // This also works fine on 64bit, but 32bit archs are somewhat - // rare and this is a wee bit faster. - if is32Bit { - copyStart := make([]int, len(start)) - copy(copyStart, start) - fieldsDirect = append(fieldsDirect, append(copyStart, f.Index...)) - } else { - fieldsDirect = append(fieldsDirect, append(start, f.Index...)) - } + fieldsDirect = append(fieldsDirect, append(start, f.Index...)) } } } @@ -490,24 +491,27 @@ func (enc *Encoder) eStruct(key Key, rv reflect.Value, inline bool) { writeFields := func(fields [][]int) { for _, fieldIndex := range fields { fieldType := rt.FieldByIndex(fieldIndex) - fieldVal := eindirect(rv.FieldByIndex(fieldIndex)) + fieldVal := rv.FieldByIndex(fieldIndex) - if isNil(fieldVal) { /// Don't write anything for nil fields. + opts := getOptions(fieldType.Tag) + if opts.skip { + continue + } + if opts.omitempty && isEmpty(fieldVal) { continue } - opts := getOptions(fieldType.Tag) - if opts.skip { + fieldVal = eindirect(fieldVal) + + if isNil(fieldVal) { /// Don't write anything for nil fields. 
continue } + keyName := fieldType.Name if opts.name != "" { keyName = opts.name } - if opts.omitempty && enc.isEmpty(fieldVal) { - continue - } if opts.omitzero && isZero(fieldVal) { continue } @@ -649,7 +653,7 @@ func isZero(rv reflect.Value) bool { return false } -func (enc *Encoder) isEmpty(rv reflect.Value) bool { +func isEmpty(rv reflect.Value) bool { switch rv.Kind() { case reflect.Array, reflect.Slice, reflect.Map, reflect.String: return rv.Len() == 0 @@ -664,13 +668,15 @@ func (enc *Encoder) isEmpty(rv reflect.Value) bool { // type b struct{ s []string } // s := a{field: b{s: []string{"AAA"}}} for i := 0; i < rv.NumField(); i++ { - if !enc.isEmpty(rv.Field(i)) { + if !isEmpty(rv.Field(i)) { return false } } return true case reflect.Bool: return !rv.Bool() + case reflect.Ptr: + return rv.IsNil() } return false } @@ -693,8 +699,11 @@ func (enc *Encoder) newline() { // v v v v vv // key = {k = 1, k2 = 2} func (enc *Encoder) writeKeyValue(key Key, val reflect.Value, inline bool) { + /// Marshaler used on top-level document; call eElement() to just call + /// Marshal{TOML,Text}. if len(key) == 0 { - encPanic(errNoKey) + enc.eElement(val) + return } enc.wf("%s%s = ", enc.indentStr(key), key.maybeQuoted(len(key)-1)) enc.eElement(val) diff --git a/tools/vendor/github.com/BurntSushi/toml/error.go b/tools/vendor/github.com/BurntSushi/toml/error.go index f4f390e647..efd68865bb 100644 --- a/tools/vendor/github.com/BurntSushi/toml/error.go +++ b/tools/vendor/github.com/BurntSushi/toml/error.go @@ -84,7 +84,7 @@ func (pe ParseError) Error() string { pe.Position.Line, pe.LastKey, msg) } -// ErrorWithUsage() returns the error with detailed location context. +// ErrorWithPosition returns the error with detailed location context. // // See the documentation on [ParseError]. func (pe ParseError) ErrorWithPosition() string { @@ -124,7 +124,7 @@ func (pe ParseError) ErrorWithPosition() string { return b.String() } -// ErrorWithUsage() returns the error with detailed location context and usage +// ErrorWithUsage returns the error with detailed location context and usage // guidance. // // See the documentation on [ParseError]. diff --git a/tools/vendor/github.com/BurntSushi/toml/lex.go b/tools/vendor/github.com/BurntSushi/toml/lex.go index d4d70871d8..3545a6ad66 100644 --- a/tools/vendor/github.com/BurntSushi/toml/lex.go +++ b/tools/vendor/github.com/BurntSushi/toml/lex.go @@ -46,12 +46,13 @@ func (p Position) String() string { } type lexer struct { - input string - start int - pos int - line int - state stateFn - items chan item + input string + start int + pos int + line int + state stateFn + items chan item + tomlNext bool // Allow for backing up up to 4 runes. This is necessary because TOML // contains 3-rune tokens (""" and '''). @@ -87,13 +88,14 @@ func (lx *lexer) nextItem() item { } } -func lex(input string) *lexer { +func lex(input string, tomlNext bool) *lexer { lx := &lexer{ - input: input, - state: lexTop, - items: make(chan item, 10), - stack: make([]stateFn, 0, 10), - line: 1, + input: input, + state: lexTop, + items: make(chan item, 10), + stack: make([]stateFn, 0, 10), + line: 1, + tomlNext: tomlNext, } return lx } @@ -408,7 +410,7 @@ func lexTableNameEnd(lx *lexer) stateFn { // Lexes only one part, e.g. only 'a' inside 'a.b'. 
func lexBareName(lx *lexer) stateFn { r := lx.next() - if isBareKeyChar(r) { + if isBareKeyChar(r, lx.tomlNext) { return lexBareName } lx.backup() @@ -618,6 +620,9 @@ func lexInlineTableValue(lx *lexer) stateFn { case isWhitespace(r): return lexSkip(lx, lexInlineTableValue) case isNL(r): + if lx.tomlNext { + return lexSkip(lx, lexInlineTableValue) + } return lx.errorPrevLine(errLexInlineTableNL{}) case r == '#': lx.push(lexInlineTableValue) @@ -640,6 +645,9 @@ func lexInlineTableValueEnd(lx *lexer) stateFn { case isWhitespace(r): return lexSkip(lx, lexInlineTableValueEnd) case isNL(r): + if lx.tomlNext { + return lexSkip(lx, lexInlineTableValueEnd) + } return lx.errorPrevLine(errLexInlineTableNL{}) case r == '#': lx.push(lexInlineTableValueEnd) @@ -648,6 +656,9 @@ func lexInlineTableValueEnd(lx *lexer) stateFn { lx.ignore() lx.skip(isWhitespace) if lx.peek() == '}' { + if lx.tomlNext { + return lexInlineTableValueEnd + } return lx.errorf("trailing comma not allowed in inline tables") } return lexInlineTableValue @@ -770,8 +781,8 @@ func lexRawString(lx *lexer) stateFn { } } -// lexMultilineRawString consumes a raw string. Nothing can be escaped in such -// a string. It assumes that the beginning ''' has already been consumed and +// lexMultilineRawString consumes a raw string. Nothing can be escaped in such a +// string. It assumes that the beginning triple-' has already been consumed and // ignored. func lexMultilineRawString(lx *lexer) stateFn { r := lx.next() @@ -828,6 +839,11 @@ func lexMultilineStringEscape(lx *lexer) stateFn { func lexStringEscape(lx *lexer) stateFn { r := lx.next() switch r { + case 'e': + if !lx.tomlNext { + return lx.error(errLexEscape{r}) + } + fallthrough case 'b': fallthrough case 't': @@ -846,6 +862,11 @@ func lexStringEscape(lx *lexer) stateFn { fallthrough case '\\': return lx.pop() + case 'x': + if !lx.tomlNext { + return lx.error(errLexEscape{r}) + } + return lexHexEscape case 'u': return lexShortUnicodeEscape case 'U': @@ -854,6 +875,19 @@ func lexStringEscape(lx *lexer) stateFn { return lx.error(errLexEscape{r}) } +func lexHexEscape(lx *lexer) stateFn { + var r rune + for i := 0; i < 2; i++ { + r = lx.next() + if !isHexadecimal(r) { + return lx.errorf( + `expected two hexadecimal digits after '\x', but got %q instead`, + lx.current()) + } + } + return lx.pop() +} + func lexShortUnicodeEscape(lx *lexer) stateFn { var r rune for i := 0; i < 4; i++ { @@ -1225,7 +1259,23 @@ func isOctal(r rune) bool { return r >= '0' && r <= '7' } func isHexadecimal(r rune) bool { return (r >= '0' && r <= '9') || (r >= 'a' && r <= 'f') || (r >= 'A' && r <= 'F') } -func isBareKeyChar(r rune) bool { + +func isBareKeyChar(r rune, tomlNext bool) bool { + if tomlNext { + return (r >= 'A' && r <= 'Z') || + (r >= 'a' && r <= 'z') || + (r >= '0' && r <= '9') || + r == '_' || r == '-' || + r == 0xb2 || r == 0xb3 || r == 0xb9 || (r >= 0xbc && r <= 0xbe) || + (r >= 0xc0 && r <= 0xd6) || (r >= 0xd8 && r <= 0xf6) || (r >= 0xf8 && r <= 0x037d) || + (r >= 0x037f && r <= 0x1fff) || + (r >= 0x200c && r <= 0x200d) || (r >= 0x203f && r <= 0x2040) || + (r >= 0x2070 && r <= 0x218f) || (r >= 0x2460 && r <= 0x24ff) || + (r >= 0x2c00 && r <= 0x2fef) || (r >= 0x3001 && r <= 0xd7ff) || + (r >= 0xf900 && r <= 0xfdcf) || (r >= 0xfdf0 && r <= 0xfffd) || + (r >= 0x10000 && r <= 0xeffff) + } + return (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || diff --git a/tools/vendor/github.com/BurntSushi/toml/meta.go b/tools/vendor/github.com/BurntSushi/toml/meta.go index 
71847a0415..2e78b24e95 100644 --- a/tools/vendor/github.com/BurntSushi/toml/meta.go +++ b/tools/vendor/github.com/BurntSushi/toml/meta.go @@ -106,7 +106,7 @@ func (k Key) maybeQuoted(i int) string { return `""` } for _, c := range k[i] { - if !isBareKeyChar(c) { + if !isBareKeyChar(c, false) { return `"` + dblQuotedReplacer.Replace(k[i]) + `"` } } diff --git a/tools/vendor/github.com/BurntSushi/toml/parse.go b/tools/vendor/github.com/BurntSushi/toml/parse.go index d2542d6f92..9c19153698 100644 --- a/tools/vendor/github.com/BurntSushi/toml/parse.go +++ b/tools/vendor/github.com/BurntSushi/toml/parse.go @@ -2,6 +2,7 @@ package toml import ( "fmt" + "os" "strconv" "strings" "time" @@ -15,6 +16,7 @@ type parser struct { context Key // Full key for the current hash in scope. currentKey string // Base key name for everything except hashes. pos Position // Current position in the TOML file. + tomlNext bool ordered []Key // List of keys in the order that they appear in the TOML data. @@ -29,6 +31,8 @@ type keyInfo struct { } func parse(data string) (p *parser, err error) { + _, tomlNext := os.LookupEnv("BURNTSUSHI_TOML_110") + defer func() { if r := recover(); r != nil { if pErr, ok := r.(ParseError); ok { @@ -41,9 +45,12 @@ func parse(data string) (p *parser, err error) { }() // Read over BOM; do this here as the lexer calls utf8.DecodeRuneInString() - // which mangles stuff. - if strings.HasPrefix(data, "\xff\xfe") || strings.HasPrefix(data, "\xfe\xff") { + // which mangles stuff. UTF-16 BOM isn't strictly valid, but some tools add + // it anyway. + if strings.HasPrefix(data, "\xff\xfe") || strings.HasPrefix(data, "\xfe\xff") { // UTF-16 data = data[2:] + } else if strings.HasPrefix(data, "\xef\xbb\xbf") { // UTF-8 + data = data[3:] } // Examine first few bytes for NULL bytes; this probably means it's a UTF-16 @@ -65,9 +72,10 @@ func parse(data string) (p *parser, err error) { p = &parser{ keyInfo: make(map[string]keyInfo), mapping: make(map[string]interface{}), - lx: lex(data), + lx: lex(data, tomlNext), ordered: make([]Key, 0), implicits: make(map[string]struct{}), + tomlNext: tomlNext, } for { item := p.next() @@ -194,12 +202,12 @@ func (p *parser) topLevel(item item) { for i := range context { p.addImplicitContext(append(p.context, context[i:i+1]...)) } + p.ordered = append(p.ordered, p.context.add(p.currentKey)) /// Set value. vItem := p.next() val, typ := p.value(vItem, false) p.set(p.currentKey, val, typ, vItem.pos) - p.ordered = append(p.ordered, p.context.add(p.currentKey)) /// Remove the context we added (preserving any context from [tbl] lines). 
p.context = outerContext @@ -236,7 +244,7 @@ func (p *parser) value(it item, parentIsArray bool) (interface{}, tomlType) { case itemString: return p.replaceEscapes(it, it.val), p.typeOfPrimitive(it) case itemMultilineString: - return p.replaceEscapes(it, stripFirstNewline(p.stripEscapedNewlines(it.val))), p.typeOfPrimitive(it) + return p.replaceEscapes(it, p.stripEscapedNewlines(stripFirstNewline(it.val))), p.typeOfPrimitive(it) case itemRawString: return it.val, p.typeOfPrimitive(it) case itemRawMultilineString: @@ -331,11 +339,17 @@ func (p *parser) valueFloat(it item) (interface{}, tomlType) { var dtTypes = []struct { fmt string zone *time.Location + next bool }{ - {time.RFC3339Nano, time.Local}, - {"2006-01-02T15:04:05.999999999", internal.LocalDatetime}, - {"2006-01-02", internal.LocalDate}, - {"15:04:05.999999999", internal.LocalTime}, + {time.RFC3339Nano, time.Local, false}, + {"2006-01-02T15:04:05.999999999", internal.LocalDatetime, false}, + {"2006-01-02", internal.LocalDate, false}, + {"15:04:05.999999999", internal.LocalTime, false}, + + // tomlNext + {"2006-01-02T15:04Z07:00", time.Local, true}, + {"2006-01-02T15:04", internal.LocalDatetime, true}, + {"15:04", internal.LocalTime, true}, } func (p *parser) valueDatetime(it item) (interface{}, tomlType) { @@ -346,6 +360,9 @@ func (p *parser) valueDatetime(it item) (interface{}, tomlType) { err error ) for _, dt := range dtTypes { + if dt.next && !p.tomlNext { + continue + } t, err = time.ParseInLocation(dt.fmt, it.val, dt.zone) if err == nil { ok = true @@ -384,6 +401,7 @@ func (p *parser) valueArray(it item) (interface{}, tomlType) { // // Not entirely sure how to best store this; could use "key[0]", // "key[1]" notation, or maybe store it on the Array type? + _ = types } return array, tomlArray } @@ -426,11 +444,11 @@ func (p *parser) valueInlineTable(it item, parentIsArray bool) (interface{}, tom for i := range context { p.addImplicitContext(append(p.context, context[i:i+1]...)) } + p.ordered = append(p.ordered, p.context.add(p.currentKey)) /// Set the value. val, typ := p.value(p.next(), false) p.set(p.currentKey, val, typ, it.pos) - p.ordered = append(p.ordered, p.context.add(p.currentKey)) hash[p.currentKey] = val /// Restore context. @@ -551,7 +569,6 @@ func (p *parser) addContext(key Key, array bool) { func (p *parser) set(key string, val interface{}, typ tomlType, pos Position) { p.setValue(key, val) p.setType(key, typ, pos) - } // setValue sets the given key to the given value in the current context. @@ -632,14 +649,11 @@ func (p *parser) setType(key string, typ tomlType, pos Position) { // Implicit keys need to be created when tables are implied in "a.b.c.d = 1" and // "[a.b.c]" (the "a", "b", and "c" hashes are never created explicitly). 
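// addImplicitContext records a key as implicit and immediately enters it as
// the current context; it is shorthand for addImplicit followed by
// addContext(key, false).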
-func (p *parser) addImplicit(key Key) { p.implicits[key.String()] = struct{}{} } -func (p *parser) removeImplicit(key Key) { delete(p.implicits, key.String()) } -func (p *parser) isImplicit(key Key) bool { _, ok := p.implicits[key.String()]; return ok } -func (p *parser) isArray(key Key) bool { return p.keyInfo[key.String()].tomlType == tomlArray } -func (p *parser) addImplicitContext(key Key) { - p.addImplicit(key) - p.addContext(key, false) -} +func (p *parser) addImplicit(key Key) { p.implicits[key.String()] = struct{}{} } +func (p *parser) removeImplicit(key Key) { delete(p.implicits, key.String()) } +func (p *parser) isImplicit(key Key) bool { _, ok := p.implicits[key.String()]; return ok } +func (p *parser) isArray(key Key) bool { return p.keyInfo[key.String()].tomlType == tomlArray } +func (p *parser) addImplicitContext(key Key) { p.addImplicit(key); p.addContext(key, false) } // current returns the full key name of the current context. func (p *parser) current() string { @@ -662,49 +676,54 @@ func stripFirstNewline(s string) string { return s } -// Remove newlines inside triple-quoted strings if a line ends with "\". +// stripEscapedNewlines removes whitespace after line-ending backslashes in +// multiline strings. +// +// A line-ending backslash is an unescaped \ followed only by whitespace until +// the next newline. After a line-ending backslash, all whitespace is removed +// until the next non-whitespace character. func (p *parser) stripEscapedNewlines(s string) string { - split := strings.Split(s, "\n") - if len(split) < 1 { - return s - } - - escNL := false // Keep track of the last non-blank line was escaped. - for i, line := range split { - line = strings.TrimRight(line, " \t\r") - - if len(line) == 0 || line[len(line)-1] != '\\' { - split[i] = strings.TrimRight(split[i], "\r") - if !escNL && i != len(split)-1 { - split[i] += "\n" - } - continue + var b strings.Builder + var i int + for { + ix := strings.Index(s[i:], `\`) + if ix < 0 { + b.WriteString(s) + return b.String() } + i += ix - escBS := true - for j := len(line) - 1; j >= 0 && line[j] == '\\'; j-- { - escBS = !escBS + if len(s) > i+1 && s[i+1] == '\\' { + // Escaped backslash. + i += 2 + continue } - if escNL { - line = strings.TrimLeft(line, " \t\r") + // Scan until the next non-whitespace. + j := i + 1 + whitespaceLoop: + for ; j < len(s); j++ { + switch s[j] { + case ' ', '\t', '\r', '\n': + default: + break whitespaceLoop + } } - escNL = !escBS - - if escBS { - split[i] += "\n" + if j == i+1 { + // Not a whitespace escape. + i++ continue } - - if i == len(split)-1 { - p.panicf("invalid escape: '\\ '") - } - - split[i] = line[:len(line)-1] // Remove \ - if len(split)-1 > i { - split[i+1] = strings.TrimLeft(split[i+1], " \t\r") + if !strings.Contains(s[i:j], "\n") { + // This is not a line-ending backslash. + // (It's a bad escape sequence, but we can let + // replaceEscapes catch it.) 
+ i++ + continue } + b.WriteString(s[:i]) + s = s[j:] + i = 0 } - return strings.Join(split, "") } func (p *parser) replaceEscapes(it item, str string) string { @@ -743,12 +762,23 @@ func (p *parser) replaceEscapes(it item, str string) string { case 'r': replaced = append(replaced, rune(0x000D)) r += 1 + case 'e': + if p.tomlNext { + replaced = append(replaced, rune(0x001B)) + r += 1 + } case '"': replaced = append(replaced, rune(0x0022)) r += 1 case '\\': replaced = append(replaced, rune(0x005C)) r += 1 + case 'x': + if p.tomlNext { + escaped := p.asciiEscapeToUnicode(it, s[r+1:r+3]) + replaced = append(replaced, escaped) + r += 3 + } case 'u': // At this point, we know we have a Unicode escape of the form // `uXXXX` at [r, r+5). (Because the lexer guarantees this diff --git a/tools/vendor/github.com/OpenPeeDeeP/depguard/README.md b/tools/vendor/github.com/OpenPeeDeeP/depguard/README.md deleted file mode 100644 index 07e9f915df..0000000000 --- a/tools/vendor/github.com/OpenPeeDeeP/depguard/README.md +++ /dev/null @@ -1,111 +0,0 @@ -# Depguard - -Go linter that checks package imports are in a list of acceptable packages. It -can also deny a list of packages and can do prefix or glob matching. -This allows you to allow imports from a whole organization or only -allow specific packages within a repository. It is recommended to use prefix -matching as it is faster than glob matching. The fewer glob matches the better. - -> If a pattern is matched by prefix it does not try to match via glob. - -## Install - -```bash -go get -u github.com/OpenPeeDeeP/depguard -``` - -## Config - -By default, Depguard looks for a file named `.depguard.json` in the current -current working directory. If it is somewhere else, pass in the `-c` flag with -the location of your configuration file. - -The following is an example configuration file. - -```json -{ - "type": "allowlist", - "packages": ["github.com/OpenPeeDeeP/depguard"], - "packageErrorMessages": { - "github.com/OpenPeeDeeP/depguards": "Please use \"github.com/OpenPeeDeeP/depguard\"," - }, - "inTests": ["github.com/stretchr/testify"], - "includeGoStdLib": true -} -``` - -- `type` can be either `allowlist` or `denylist`. This check is case insensitive. - If not specified the default is `denylist`. The values `whitelist` and `blacklist` - are also accepted for backwards compatibility. -- `packages` is a list of packages for the list type specified. -- `packageErrorMessages` is a mapping from packages to the error message to display -- `inTests` is a list of packages allowed/disallowed only in test files. -- Set `includeGoStdLib` (`includeGoRoot` for backwards compatibility) to true if you want to check the list against standard lib. - If not specified the default is false. - -### Ignore File Rules - -The configuration also allows us to specify rules to ignore certain files considered by the linter. This means that we need not apply package import checks across our entire code base. 
- -For example, consider the following configuration to block a test package: -```json -{ - "type": "denylist", - "packages": ["github.com/stretchr/testify"], - "inTests": ["github.com/stretchr/testify"] -} -``` - -We can use a `ignoreFileRules` field to write a configuration that only considers test files: -```json -{ - "type": "denylist", - "packages": ["github.com/stretchr/testify"], - "ignoreFileRules": ["!**/*_test.go"] -} -``` - -Or if we wanted to consider only non-test files: -```json -{ - "type": "denylist", - "packages": ["github.com/stretchr/testify"], - "ignoreFileRules": ["**/*_test.go"] -} -``` - -Like the `packages` field, the `ignoreFileRules` field can accept both string prefixes and string glob patterns. Note in the first example above, the use of the `!` character in front of the rule. This is a special character which signals that the linter should negate the rule. This allows for more precise control, but it is only available for glob patterns. - -## Gometalinter - -The binary installation of this linter can be used with -[Gometalinter](https://github.com/alecthomas/gometalinter). - -If you use a configuration file for Gometalinter then the following will need to -be added to your configuration file. - -```json -{ - "linters": { - "depguard": { - "command": "depguard -c path/to/config.json", - "pattern": "PATH:LINE:COL:MESSAGE", - "installFrom": "github.com/OpenPeeDeeP/depguard", - "isFast": true, - "partitionStrategy": "packages" - } - } -} -``` - -If you prefer the command line way the following will work for you as well. - -```bash -gometalinter --linter='depguard:depguard -c path/to/config.json:PATH:LINE:COL:MESSAGE' -``` - -## Golangci-lint - -This linter was built with -[Golangci-lint](https://github.com/golangci/golangci-lint) in mind. It is compatible -and read their docs to see how to implement all their linters, including this one. diff --git a/tools/vendor/github.com/OpenPeeDeeP/depguard/depguard.go b/tools/vendor/github.com/OpenPeeDeeP/depguard/depguard.go deleted file mode 100644 index d7011cd9f6..0000000000 --- a/tools/vendor/github.com/OpenPeeDeeP/depguard/depguard.go +++ /dev/null @@ -1,313 +0,0 @@ -package depguard - -import ( - "go/build" - "go/token" - "io/ioutil" - "path" - "sort" - "strings" - "sync" - - "github.com/gobwas/glob" - "golang.org/x/tools/go/loader" -) - -// ListType states what kind of list is passed in. -type ListType int - -const ( - // LTBlacklist states the list given is a blacklist. (default) - LTBlacklist ListType = iota - // LTWhitelist states the list given is a whitelist. - LTWhitelist -) - -// StringToListType makes it easier to turn a string into a ListType. -// It assumes that the string representation is lower case. -var StringToListType = map[string]ListType{ - "allowlist": LTWhitelist, - "denylist": LTBlacklist, - "whitelist": LTWhitelist, - "blacklist": LTBlacklist, -} - -// Issue with the package with PackageName at the Position. -type Issue struct { - PackageName string - Position token.Position -} - -// Wrapper for glob patterns that allows for custom negation -type negatableGlob struct { - g glob.Glob - negate bool -} - -// Depguard checks imports to make sure they follow the given list and constraints. 
-type Depguard struct { - ListType ListType - IncludeGoRoot bool - - Packages []string - prefixPackages []string - globPackages []glob.Glob - - TestPackages []string - prefixTestPackages []string - globTestPackages []glob.Glob - - IgnoreFileRules []string - prefixIgnoreFileRules []string - globIgnoreFileRules []negatableGlob - - prefixRoot []string - - isInitialized bool - isInitializedMutex sync.Mutex -} - -// Run checks for dependencies given the program and validates them against -// Packages. -func (dg *Depguard) Run(config *loader.Config, prog *loader.Program) ([]*Issue, error) { - // Shortcut execution on an empty blacklist as that means every package is allowed - if dg.ListType == LTBlacklist && len(dg.Packages) == 0 { - return nil, nil - } - - if err := dg.initialize(config, prog); err != nil { - return nil, err - } - directImports, err := dg.createImportMap(prog) - if err != nil { - return nil, err - } - var issues []*Issue - for pkg, positions := range directImports { - for _, pos := range positions { - if ignoreFile(pos.Filename, dg.prefixIgnoreFileRules, dg.globIgnoreFileRules) { - continue - } - - prefixList, globList := dg.prefixPackages, dg.globPackages - if len(dg.TestPackages) > 0 && strings.Index(pos.Filename, "_test.go") != -1 { - prefixList, globList = dg.prefixTestPackages, dg.globTestPackages - } - - if dg.flagIt(pkg, prefixList, globList) { - issues = append(issues, &Issue{ - PackageName: pkg, - Position: pos, - }) - } - } - } - return issues, nil -} - -func (dg *Depguard) initialize(config *loader.Config, prog *loader.Program) error { - dg.isInitializedMutex.Lock() - defer dg.isInitializedMutex.Unlock() - - if dg.isInitialized { - return nil - } - - // parse ordinary guarded packages - for _, pkg := range dg.Packages { - if strings.ContainsAny(pkg, "!?*[]{}") { - g, err := glob.Compile(pkg, '/') - if err != nil { - return err - } - dg.globPackages = append(dg.globPackages, g) - } else { - dg.prefixPackages = append(dg.prefixPackages, pkg) - } - } - - // Sort the packages so we can have a faster search in the array - sort.Strings(dg.prefixPackages) - - // parse guarded tests packages - for _, pkg := range dg.TestPackages { - if strings.ContainsAny(pkg, "!?*[]{}") { - g, err := glob.Compile(pkg, '/') - if err != nil { - return err - } - dg.globTestPackages = append(dg.globTestPackages, g) - } else { - dg.prefixTestPackages = append(dg.prefixTestPackages, pkg) - } - } - - // Sort the test packages so we can have a faster search in the array - sort.Strings(dg.prefixTestPackages) - - // parse ignore file rules - for _, rule := range dg.IgnoreFileRules { - if strings.ContainsAny(rule, "!?*[]{}") { - ng := negatableGlob{} - if strings.HasPrefix(rule, "!") { - ng.negate = true - rule = rule[1:] // Strip out the leading '!' 
- } else { - ng.negate = false - } - - g, err := glob.Compile(rule, '/') - if err != nil { - return err - } - ng.g = g - - dg.globIgnoreFileRules = append(dg.globIgnoreFileRules, ng) - } else { - dg.prefixIgnoreFileRules = append(dg.prefixIgnoreFileRules, rule) - } - } - - // Sort the rules so we can have a faster search in the array - sort.Strings(dg.prefixIgnoreFileRules) - - if !dg.IncludeGoRoot { - var err error - dg.prefixRoot, err = listRootPrefixs(config.Build) - if err != nil { - return err - } - } - - dg.isInitialized = true - return nil -} - -func (dg *Depguard) createImportMap(prog *loader.Program) (map[string][]token.Position, error) { - importMap := make(map[string][]token.Position) - // For the directly imported packages - for _, imported := range prog.InitialPackages() { - // Go through their files - for _, file := range imported.Files { - // And populate a map of all direct imports and their positions - // This will filter out GoRoot depending on the Depguard.IncludeGoRoot - for _, fileImport := range file.Imports { - fileImportPath := cleanBasicLitString(fileImport.Path.Value) - if !dg.IncludeGoRoot && dg.isRoot(fileImportPath) { - continue - } - position := prog.Fset.Position(fileImport.Pos()) - positions, found := importMap[fileImportPath] - if !found { - importMap[fileImportPath] = []token.Position{ - position, - } - continue - } - importMap[fileImportPath] = append(positions, position) - } - } - } - return importMap, nil -} - -func ignoreFile(filename string, prefixList []string, negatableGlobList []negatableGlob) bool { - if strInPrefixList(filename, prefixList) { - return true - } - return strInNegatableGlobList(filename, negatableGlobList) -} - -func pkgInList(pkg string, prefixList []string, globList []glob.Glob) bool { - if strInPrefixList(pkg, prefixList) { - return true - } - return strInGlobList(pkg, globList) -} - -func strInPrefixList(str string, prefixList []string) bool { - // Idx represents where in the prefix slice the passed in string would go - // when sorted. -1 Just means that it would be at the very front of the slice. 
- idx := sort.Search(len(prefixList), func(i int) bool { - return prefixList[i] > str - }) - 1 - // This means that the string passed in has no way to be prefixed by anything - // in the prefix list as it is already smaller then everything - if idx == -1 { - return false - } - return strings.HasPrefix(str, prefixList[idx]) -} - -func strInGlobList(str string, globList []glob.Glob) bool { - for _, g := range globList { - if g.Match(str) { - return true - } - } - return false -} - -func strInNegatableGlobList(str string, negatableGlobList []negatableGlob) bool { - for _, ng := range negatableGlobList { - // Return true when: - // - Match is true and negate is off - // - Match is false and negate is on - if ng.g.Match(str) != ng.negate { - return true - } - } - return false -} - -// InList | WhiteList | BlackList -// y | | x -// n | x | -func (dg *Depguard) flagIt(pkg string, prefixList []string, globList []glob.Glob) bool { - return pkgInList(pkg, prefixList, globList) == (dg.ListType == LTBlacklist) -} - -func cleanBasicLitString(value string) string { - return strings.Trim(value, "\"\\") -} - -// We can do this as all imports that are not root are either prefixed with a domain -// or prefixed with `./` or `/` to dictate it is a local file reference -func listRootPrefixs(buildCtx *build.Context) ([]string, error) { - if buildCtx == nil { - buildCtx = &build.Default - } - root := path.Join(buildCtx.GOROOT, "src") - fs, err := ioutil.ReadDir(root) - if err != nil { - return nil, err - } - var pkgPrefix []string - for _, f := range fs { - if !f.IsDir() { - continue - } - pkgPrefix = append(pkgPrefix, f.Name()) - } - return pkgPrefix, nil -} - -func (dg *Depguard) isRoot(importPath string) bool { - // Idx represents where in the package slice the passed in package would go - // when sorted. -1 Just means that it would be at the very front of the slice. - idx := sort.Search(len(dg.prefixRoot), func(i int) bool { - return dg.prefixRoot[i] > importPath - }) - 1 - // This means that the package passed in has no way to be prefixed by anything - // in the package list as it is already smaller then everything - if idx == -1 { - return false - } - // if it is prefixed by a root prefix we need to check if it is an exact match - // or prefix with `/` as this could return false posative if the domain was - // `archive.com` for example as `archive` is a go root package. - if strings.HasPrefix(importPath, dg.prefixRoot[idx]) { - return strings.HasPrefix(importPath, dg.prefixRoot[idx]+"/") || importPath == dg.prefixRoot[idx] - } - return false -} diff --git a/tools/vendor/github.com/OpenPeeDeeP/depguard/.gitignore b/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/.gitignore similarity index 100% rename from tools/vendor/github.com/OpenPeeDeeP/depguard/.gitignore rename to tools/vendor/github.com/OpenPeeDeeP/depguard/v2/.gitignore diff --git a/tools/vendor/github.com/OpenPeeDeeP/depguard/LICENSE b/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/LICENSE similarity index 100% rename from tools/vendor/github.com/OpenPeeDeeP/depguard/LICENSE rename to tools/vendor/github.com/OpenPeeDeeP/depguard/v2/LICENSE diff --git a/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/README.md b/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/README.md new file mode 100644 index 0000000000..3de3f63177 --- /dev/null +++ b/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/README.md @@ -0,0 +1,146 @@ +# Depguard + +A Go linter that checks package imports are in a list of acceptable packages. 
+This allows you to allow imports from a whole organization or only +allow specific packages within a repository. + +## Install + +```bash +go get github.com/OpenPeeDeeP/depguard/v2 +``` + +## Config + +The Depguard binary looks for a file named `^\.?depguard\.(yaml|yml|json|toml)$` in the current +current working directory. Examples include (`.depguard.yml` or `depguard.toml`). + +The following is an example configuration file. + +```json +{ + "main": { + "files": [ + "$all", + "!$test" + ], + "allow": [ + "$gostd", + "github.com/OpenPeeDeeP" + ], + "deny": { + "reflect": "Who needs reflection", + } + }, + "tests": { + "files": [ + "$test" + ], + "deny": { + "github.com/stretchr/testify": "Please use standard library for tests" + } + } +} +``` + +- The top level is a map of lists. The key of the map is a name that shows up in +the linter's output. +- `files` - list of file globs that will match this list of settings to compare against +- `allow` - list of allowed packages +- `deny` - map of packages that are not allowed where the value is a suggestion + +Files are matched using [Globs](https://github.com/gobwas/glob). If the files +list is empty, then all files will match that list. Prefixing a file +with an exclamation mark `!` will put that glob in a "don't match" list. A file +will match a list if it is allowed and not denied. + +> Should always prefix a file glob with `**/` as files are matched against absolute paths. + +Allow is a prefix of packages to allow. A dollar sign `$` can be used at the end +of a package to specify it must be exact match only. + +Deny is a map where the key is a prefix of the package to deny, and the value +is a suggestion on what to use instead. A dollar sign `$` can be used at the end +of a package to specify it must be exact match only. + +A Prefix List just means that a package will match a value, if the value is a +prefix of the package. Example `github.com/OpenPeeDeeP/depguard` package will match +a value of `github.com/OpenPeeDeeP` but won't match `github.com/OpenPeeDeeP/depguard/v2`. + +### Variables + +There are variable replacements for each type of list (file or package). This is +to reduce repetition and tedious behaviors. + +#### File Variables + +> you can still use and exclamation mark `!` in front of a variable to say not to +use it. Example `!$test` will match any file that is not a go test file. + +- `$all` - matches all go files +- `$test` - matches all go test files + +#### Package Variables + +- `$gostd` - matches all of go's standard library (Pulled from GOROOT) + +### Example Configs + +Below: + +- non-test go files will match `Main` and test go files will match `Test`. +- both allow all of go standard library except for the `reflect` package which will +tell the user "Please don't use reflect package". +- go test files are also allowed to use https://github.com/stretchr/testify package +and any sub-package of it. + +```yaml +Main: + files: + - $all + - "!$test" + allow: + - $gostd + deny: + reflect: Please don't use reflect package +Test: + files: + - $test + allow: + - $gostd + - github.com/stretchr/testify + deny: + reflect: Please don't use reflect package +``` + +Below: + +- All go files will match `Main` +- Go files in internal will match both `Main` and `Internal` + +```yaml +Main: + files: + - $all +Internal: + files: + - "**/internal/**/*.go" +``` + +Below: + +- All packages are allowed except for `github.com/OpenPeeDeeP/depguard`. 
Though +`github.com/OpenPeeDeeP/depguard/v2` and `github.com/OpenPeeDeeP/depguard/somepackage` +would be allowed. + +```yaml +Main: + deny: + - github.com/OpenPeeDeeP/depguard$ +``` + +## Golangci-lint + +This linter was built with +[Golangci-lint](https://github.com/golangci/golangci-lint) in mind. It is compatible +and read their docs to see how to implement all their linters, including this one. diff --git a/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/depguard.go b/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/depguard.go new file mode 100644 index 0000000000..2729091e8a --- /dev/null +++ b/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/depguard.go @@ -0,0 +1,95 @@ +package depguard + +import ( + "fmt" + "go/ast" + "path/filepath" + "strings" + + "golang.org/x/tools/go/analysis" +) + +// NewAnalyzer creates a new analyzer from the settings passed in. +// This can fail if the passed in LinterSettings does not compile. +// Use NewUncompiledAnalyzer if you need control when the compile happens. +func NewAnalyzer(settings *LinterSettings) (*analysis.Analyzer, error) { + s, err := settings.compile() + if err != nil { + return nil, err + } + analyzer := newAnalyzer(s.run) + return analyzer, nil +} + +type UncompiledAnalyzer struct { + Analyzer *analysis.Analyzer + settings *LinterSettings +} + +// NewUncompiledAnalyzer creates a new analyzer from the settings passed in. +// This can never error unlike NewAnalyzer. +// It is advised to call the Compile method on the returned Analyzer before running. +func NewUncompiledAnalyzer(settings *LinterSettings) *UncompiledAnalyzer { + return &UncompiledAnalyzer{ + Analyzer: newAnalyzer(settings.run), + settings: settings, + } +} + +// Compile the settings ahead of time so each subsuquent run of the analyzer doesn't +// need to do this work. 
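+//
+// Illustrative usage sketch (the list values below are example data, not a
+// required configuration; the LinterSettings and List types are defined in
+// settings.go of this package):
+//
+//	settings := &LinterSettings{
+//		"main": {
+//			Files: []string{"$all", "!$test"},
+//			Allow: []string{"$gostd", "github.com/OpenPeeDeeP"},
+//			Deny:  map[string]string{"reflect": "Who needs reflection"},
+//		},
+//	}
+//	ua := NewUncompiledAnalyzer(settings)
+//	if err := ua.Compile(); err != nil {
+//		// settings failed to compile (bad glob, or a list with neither Allow nor Deny)
+//	}
+//	// ua.Analyzer is now a *analysis.Analyzer ready to hand to an analysis driver.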
+func (ua *UncompiledAnalyzer) Compile() error { + s, err := ua.settings.compile() + if err != nil { + return err + } + ua.Analyzer.Run = s.run + return nil +} + +func (settings LinterSettings) run(pass *analysis.Pass) (interface{}, error) { + s, err := settings.compile() + if err != nil { + return nil, err + } + return s.run(pass) +} + +func newAnalyzer(run func(*analysis.Pass) (interface{}, error)) *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "depguard", + Doc: "Go linter that checks if package imports are in a list of acceptable packages", + URL: "https://github.com/OpenPeeDeeP/depguard", + Run: run, + RunDespiteErrors: false, + } +} + +func (s linterSettings) run(pass *analysis.Pass) (interface{}, error) { + for _, file := range pass.Files { + // For Windows need to replace separator with '/' + fileName := filepath.ToSlash(pass.Fset.Position(file.Pos()).Filename) + lists := s.whichLists(fileName) + for _, imp := range file.Imports { + for _, l := range lists { + if allowed, sugg := l.importAllowed(rawBasicLit(imp.Path)); !allowed { + diag := analysis.Diagnostic{ + Pos: imp.Pos(), + End: imp.End(), + Message: fmt.Sprintf("import '%s' is not allowed from list '%s'", rawBasicLit(imp.Path), l.name), + } + if sugg != "" { + diag.Message = fmt.Sprintf("%s: %s", diag.Message, sugg) + diag.SuggestedFixes = append(diag.SuggestedFixes, analysis.SuggestedFix{Message: sugg}) + } + pass.Report(diag) + } + } + } + } + return nil, nil +} + +func rawBasicLit(lit *ast.BasicLit) string { + return strings.Trim(lit.Value, "\"") +} diff --git a/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/internal/utils/errors.go b/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/internal/utils/errors.go new file mode 100644 index 0000000000..65325f6128 --- /dev/null +++ b/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/internal/utils/errors.go @@ -0,0 +1,18 @@ +package utils + +import ( + "strings" +) + +type MultiError []error + +func (me MultiError) Error() string { + b := strings.Builder{} + for i, e := range me { + b.WriteString(e.Error()) + if i < len(me)-1 { + b.WriteByte('\n') + } + } + return b.String() +} diff --git a/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/internal/utils/variables.go b/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/internal/utils/variables.go new file mode 100644 index 0000000000..3363bd8400 --- /dev/null +++ b/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/internal/utils/variables.go @@ -0,0 +1,131 @@ +package utils + +import ( + "fmt" + "os" + "os/exec" + "path" + "path/filepath" + "runtime" + "strings" +) + +type Expander interface { + Expand() ([]string, error) +} + +type ExpanderMap map[string]Expander + +var ( + PathExpandable = ExpanderMap{ + "$all": &allExpander{}, + "$test": &testExpander{}, + } + PackageExpandable = ExpanderMap{ + "$gostd": &gostdExpander{}, + } +) + +type allExpander struct{} + +func (*allExpander) Expand() ([]string, error) { + return []string{"**/*.go"}, nil +} + +type testExpander struct{} + +func (*testExpander) Expand() ([]string, error) { + return []string{"**/*_test.go"}, nil +} + +type gostdExpander struct { + cache []string +} + +// We can do this as all imports that are not root are either prefixed with a domain +// or prefixed with `./` or `/` to dictate it is a local file reference +func (e *gostdExpander) Expand() ([]string, error) { + if len(e.cache) != 0 { + return e.cache, nil + } + root := path.Join(findGOROOT(), "src") + fs, err := os.ReadDir(root) + if err != nil { + return nil, fmt.Errorf("could not read GOROOT directory: 
%w", err) + } + var pkgPrefix []string + for _, f := range fs { + if !f.IsDir() { + continue + } + pkgPrefix = append(pkgPrefix, f.Name()) + } + e.cache = pkgPrefix + return pkgPrefix, nil +} + +func findGOROOT() string { + // code borrowed from https://github.com/golang/tools/blob/86c93e8732cce300d0270bce23117456ce92bb17/cmd/godoc/goroot.go#L15-L30 + if env := os.Getenv("GOROOT"); env != "" { + return filepath.Clean(env) + } + def := filepath.Clean(runtime.GOROOT()) + if runtime.Compiler == "gccgo" { + // gccgo has no real GOROOT, and it certainly doesn't + // depend on the executable's location. + return def + } + out, err := exec.Command("go", "env", "GOROOT").Output() + if err != nil { + return def + } + return strings.TrimSpace(string(out)) +} + +func ExpandSlice(sl []string, exp ExpanderMap) ([]string, error) { + for i, s := range sl { + f, found := exp[s] + if !found { + continue + } + e, err := f.Expand() + if err != nil { + return nil, fmt.Errorf("couldn't expand %s: %w", s, err) + } + sl = insertSlice(sl, i, e...) + } + return sl, nil +} + +func ExpandMap(m map[string]string, exp ExpanderMap) error { + for k, v := range m { + f, found := exp[k] + if !found { + continue + } + e, err := f.Expand() + if err != nil { + return fmt.Errorf("couldn't expand %s: %w", k, err) + } + for _, ex := range e { + m[ex] = v + } + delete(m, k) + } + return nil +} + +func insertSlice(a []string, k int, b ...string) []string { + n := len(a) + len(b) - 1 + if n <= cap(a) { + a2 := a[:n] + copy(a2[k+len(b):], a[k+1:]) + copy(a2[k:], b) + return a2 + } + a2 := make([]string, n) + copy(a2, a[:k]) + copy(a2[k:], b) + copy(a2[k+len(b):], a[k+1:]) + return a2 +} diff --git a/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/settings.go b/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/settings.go new file mode 100644 index 0000000000..440f329850 --- /dev/null +++ b/tools/vendor/github.com/OpenPeeDeeP/depguard/v2/settings.go @@ -0,0 +1,207 @@ +package depguard + +import ( + "errors" + "fmt" + "sort" + "strings" + + "github.com/OpenPeeDeeP/depguard/v2/internal/utils" + "github.com/gobwas/glob" +) + +type List struct { + Files []string `json:"files" yaml:"files" toml:"files" mapstructure:"files"` + Allow []string `json:"allow" yaml:"allow" toml:"allow" mapstructure:"allow"` + Deny map[string]string `json:"deny" yaml:"deny" toml:"deny" mapstructure:"deny"` +} + +type list struct { + name string + files []glob.Glob + negFiles []glob.Glob + allow []string + deny []string + suggestions []string +} + +func (l *List) compile() (*list, error) { + if l == nil { + return nil, nil + } + li := &list{} + var errs utils.MultiError + var err error + + // Compile Files + for _, f := range l.Files { + var negate bool + if len(f) > 0 && f[0] == '!' 
{ + negate = true + f = f[1:] + } + // Expand File if needed + fs, err := utils.ExpandSlice([]string{f}, utils.PathExpandable) + if err != nil { + errs = append(errs, err) + } + for _, exp := range fs { + g, err := glob.Compile(exp, '/') + if err != nil { + errs = append(errs, fmt.Errorf("%s could not be compiled: %w", exp, err)) + continue + } + if negate { + li.negFiles = append(li.negFiles, g) + continue + } + li.files = append(li.files, g) + } + } + + if len(l.Allow) > 0 { + // Expand Allow + l.Allow, err = utils.ExpandSlice(l.Allow, utils.PackageExpandable) + if err != nil { + errs = append(errs, err) + } + + // Sort Allow + li.allow = make([]string, len(l.Allow)) + copy(li.allow, l.Allow) + sort.Strings(li.allow) + } + + if l.Deny != nil { + // Expand Deny Map (to keep suggestions) + err = utils.ExpandMap(l.Deny, utils.PackageExpandable) + if err != nil { + errs = append(errs, err) + } + + // Split Deny Into Package Slice + li.deny = make([]string, 0, len(l.Deny)) + for pkg := range l.Deny { + li.deny = append(li.deny, pkg) + } + + // Sort Deny + sort.Strings(li.deny) + + // Populate Suggestions to match the Deny order + li.suggestions = make([]string, 0, len(li.deny)) + for _, dp := range li.deny { + li.suggestions = append(li.suggestions, strings.TrimSpace(l.Deny[dp])) + } + } + + // Populate the type of this list + if len(li.allow) == 0 && len(li.deny) == 0 { + errs = append(errs, errors.New("must have an Allow and/or Deny package list")) + } + + if len(errs) > 0 { + return nil, errs + } + return li, nil +} + +func (l *list) fileMatch(fileName string) bool { + inAllowed := len(l.files) == 0 || strInGlobList(fileName, l.files) + inDenied := strInGlobList(fileName, l.negFiles) + return inAllowed && !inDenied +} + +func (l *list) importAllowed(imp string) (bool, string) { + inAllowed := len(l.allow) == 0 + if !inAllowed { + inAllowed, _ = strInPrefixList(imp, l.allow) + } + inDenied, suggIdx := strInPrefixList(imp, l.deny) + sugg := "" + if inDenied && suggIdx != -1 { + sugg = l.suggestions[suggIdx] + } + return inAllowed && !inDenied, sugg +} + +type LinterSettings map[string]*List + +type linterSettings []*list + +func (l LinterSettings) compile() (linterSettings, error) { + if len(l) == 0 { + // Only allow $gostd in all files + set := &List{ + Files: []string{"$all"}, + Allow: []string{"$gostd"}, + } + li, err := set.compile() + if err != nil { + return nil, err + } + li.name = "Main" + return linterSettings{li}, nil + } + names := make([]string, 0, len(l)) + for name := range l { + names = append(names, name) + } + sort.Strings(names) + li := make(linterSettings, 0, len(l)) + var errs utils.MultiError + for _, name := range names { + c, err := l[name].compile() + if err != nil { + errs = append(errs, err) + continue + } + if c == nil { + continue + } + c.name = name + li = append(li, c) + } + if len(errs) > 0 { + return nil, errs + } + + return li, nil +} + +func (ls linterSettings) whichLists(fileName string) []*list { + var matches []*list + for _, l := range ls { + if l.fileMatch(fileName) { + matches = append(matches, l) + } + } + return matches +} + +func strInGlobList(str string, globList []glob.Glob) bool { + for _, g := range globList { + if g.Match(str) { + return true + } + } + return false +} + +func strInPrefixList(str string, prefixList []string) (bool, int) { + // Idx represents where in the prefix slice the passed in string would go + // when sorted. -1 Just means that it would be at the very front of the slice. 
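+	// Worked example (illustrative values): with prefixList =
+	// ["github.com/OpenPeeDeeP/depguard$"], a lookup for
+	// "github.com/OpenPeeDeeP/depguard" returns true (exact match after the
+	// trailing '$' is stripped), while "github.com/OpenPeeDeeP/depguard/v2"
+	// returns false, because a '$'-terminated entry never matches by prefix.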
+ idx := sort.Search(len(prefixList), func(i int) bool { + return strings.TrimRight(prefixList[i], "$") > str + }) - 1 + // This means that the string passed in has no way to be prefixed by anything + // in the prefix list as it is already smaller then everything + if idx == -1 { + return false, idx + } + ioc := prefixList[idx] + if ioc[len(ioc)-1] == '$' { + return str == ioc[:len(ioc)-1], idx + } + return strings.HasPrefix(str, prefixList[idx]), idx +} diff --git a/tools/vendor/github.com/alexkohler/nakedret/v2/.gitignore b/tools/vendor/github.com/alexkohler/nakedret/v2/.gitignore new file mode 100644 index 0000000000..b4822913a0 --- /dev/null +++ b/tools/vendor/github.com/alexkohler/nakedret/v2/.gitignore @@ -0,0 +1,8 @@ +# editor specific +.vscode + +# binary +/nakedret + +# usage video for docs +.github/images diff --git a/tools/vendor/github.com/alexkohler/nakedret/v2/LICENSE b/tools/vendor/github.com/alexkohler/nakedret/v2/LICENSE new file mode 100644 index 0000000000..9310fbcffb --- /dev/null +++ b/tools/vendor/github.com/alexkohler/nakedret/v2/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Alex Kohler + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/tools/vendor/github.com/alexkohler/nakedret/v2/README.md b/tools/vendor/github.com/alexkohler/nakedret/v2/README.md new file mode 100644 index 0000000000..e30a0cde76 --- /dev/null +++ b/tools/vendor/github.com/alexkohler/nakedret/v2/README.md @@ -0,0 +1,125 @@ +# nakedret + +nakedret is a Go static analysis tool to find naked returns in functions greater than a specified function length. + +## Installation +Install Nakedret via go install: + +```cmd +go install github.com/alexkohler/nakedret/cmd/nakedret@latest +``` + +If you have not already added your `GOPATH/bin` directory to your `PATH` environment variable then you will need to do so. + +Windows (cmd): +```cmd +set PATH=%PATH%;C:\your\GOPATH\bin +``` + +Bash (you can verify a path has been set): +```Bash +# Check if nakedret is on PATH +which nakedret +export PATH=$PATH:/your/GOPATH/bin #to set path if it does not exist +``` + +## Usage + +Similar to other Go static anaylsis tools (such as `golint`, `go vet`), nakedret can be invoked with one or more filenames, directories, or packages named by its import path. Nakedret also supports the `...` wildcard. + + nakedret [flags] files/directories/packages + +Currently, the only flag supported is -l, which is an optional numeric flag to specify the maximum length a function can be (in terms of line length). 
If not specified, it defaults to 5. + +It can also be run using `go vet`: + +```shell +go vet -vettool=$(which nakedret) ./... +``` + +## Purpose + +As noted in Go's [Code Review comments](https://github.com/golang/go/wiki/CodeReviewComments#named-result-parameters): + +> Naked returns are okay if the function is a handful of lines. Once it's a medium sized function, be explicit with your return +> values. Corollary: it's not worth it to name result parameters just because it enables you to use naked returns. Clarity of docs is always more important than saving a line or two in your function. + +This tool aims to catch naked returns on non-trivial functions. + +## Example + +Let's take the `types` package in the Go source as an example: + +```Bash +$ nakedret -l 25 types/ +types/check.go:245 checkFiles naked returns on 26 line function +types/typexpr.go:443 collectParams naked returns on 53 line function +types/stmt.go:275 caseTypes naked returns on 27 line function +types/lookup.go:275 MissingMethod naked returns on 39 line function +``` + +Below is one of the not so intuitive uses of naked returns in `types/lookup.go` found by nakedret (nakedret will return the line number of the last naked return in the function): + + +```Go +func MissingMethod(V Type, T *Interface, static bool) (method *Func, wrongType bool) { + // fast path for common case + if T.Empty() { + return + } + + // TODO(gri) Consider using method sets here. Might be more efficient. + + if ityp, _ := V.Underlying().(*Interface); ityp != nil { + // TODO(gri) allMethods is sorted - can do this more efficiently + for _, m := range T.allMethods { + _, obj := lookupMethod(ityp.allMethods, m.pkg, m.name) + switch { + case obj == nil: + if static { + return m, false + } + case !Identical(obj.Type(), m.typ): + return m, true + } + } + return + } + + // A concrete type implements T if it implements all methods of T. + for _, m := range T.allMethods { + obj, _, _ := lookupFieldOrMethod(V, false, m.pkg, m.name) + + f, _ := obj.(*Func) + if f == nil { + return m, false + } + + if !Identical(f.typ, m.typ) { + return m, true + } + } + + return +} +``` + +## TODO + +- Unit tests (may require some refactoring to do correctly) +- supporting toggling of `build.Context.UseAllFiles` may be useful for some. +- Configuration on whether or not to run on test files +- Vim quickfix format? + + +## Contributing + +Pull requests welcome! + + +## Other static analysis tools + +If you've enjoyed nakedret, take a look at my other static anaylsis tools! + +- [unimport](https://github.com/alexkohler/unimport) - Finds unnecessary import aliases +- [prealloc](https://github.com/alexkohler/prealloc) - Finds slice declarations that could potentially be preallocated. diff --git a/tools/vendor/github.com/alexkohler/nakedret/v2/import.go b/tools/vendor/github.com/alexkohler/nakedret/v2/import.go new file mode 100644 index 0000000000..dea8423336 --- /dev/null +++ b/tools/vendor/github.com/alexkohler/nakedret/v2/import.go @@ -0,0 +1,310 @@ +package nakedret + +/* + +This file holds a direct copy of the import path matching code of +https://github.com/golang/go/blob/master/src/cmd/go/main.go. It can be +replaced when https://golang.org/issue/8768 is resolved. + +It has been updated to follow upstream changes in a few ways. 
+ +*/ + +import ( + "fmt" + "go/build" + "log" + "os" + "path" + "path/filepath" + "regexp" + "runtime" + "strings" +) + +var buildContext = build.Default + +var ( + goroot = filepath.Clean(runtime.GOROOT()) + gorootSrc = filepath.Join(goroot, "src") +) + +// importPathsNoDotExpansion returns the import paths to use for the given +// command line, but it does no ... expansion. +func importPathsNoDotExpansion(args []string) []string { + if len(args) == 0 { + return []string{"."} + } + var out []string + for _, a := range args { + // Arguments are supposed to be import paths, but + // as a courtesy to Windows developers, rewrite \ to / + // in command-line arguments. Handles .\... and so on. + if filepath.Separator == '\\' { + a = strings.Replace(a, `\`, `/`, -1) + } + + // Put argument in canonical form, but preserve leading ./. + if strings.HasPrefix(a, "./") { + a = "./" + path.Clean(a) + if a == "./." { + a = "." + } + } else { + a = path.Clean(a) + } + if a == "all" || a == "std" { + out = append(out, allPackages(a)...) + continue + } + out = append(out, a) + } + return out +} + +// importPaths returns the import paths to use for the given command line. +func importPaths(args []string) []string { + args = importPathsNoDotExpansion(args) + var out []string + for _, a := range args { + if strings.Contains(a, "...") { + if build.IsLocalImport(a) { + out = append(out, allPackagesInFS(a)...) + } else { + out = append(out, allPackages(a)...) + } + continue + } + out = append(out, a) + } + return out +} + +// matchPattern(pattern)(name) reports whether +// name matches pattern. Pattern is a limited glob +// pattern in which '...' means 'any string' and there +// is no other special syntax. +func matchPattern(pattern string) func(name string) bool { + re := regexp.QuoteMeta(pattern) + re = strings.Replace(re, `\.\.\.`, `.*`, -1) + // Special case: foo/... matches foo too. + if strings.HasSuffix(re, `/.*`) { + re = re[:len(re)-len(`/.*`)] + `(/.*)?` + } + reg := regexp.MustCompile(`^` + re + `$`) + return func(name string) bool { + return reg.MatchString(name) + } +} + +// hasPathPrefix reports whether the path s begins with the +// elements in prefix. +func hasPathPrefix(s, prefix string) bool { + switch { + default: + return false + case len(s) == len(prefix): + return s == prefix + case len(s) > len(prefix): + if prefix != "" && prefix[len(prefix)-1] == '/' { + return strings.HasPrefix(s, prefix) + } + return s[len(prefix)] == '/' && s[:len(prefix)] == prefix + } +} + +// treeCanMatchPattern(pattern)(name) reports whether +// name or children of name can possibly match pattern. +// Pattern is the same limited glob accepted by matchPattern. +func treeCanMatchPattern(pattern string) func(name string) bool { + wildCard := false + if i := strings.Index(pattern, "..."); i >= 0 { + wildCard = true + pattern = pattern[:i] + } + return func(name string) bool { + return len(name) <= len(pattern) && hasPathPrefix(pattern, name) || + wildCard && strings.HasPrefix(name, pattern) + } +} + +// allPackages returns all the packages that can be found +// under the $GOPATH directories and $GOROOT matching pattern. +// The pattern is either "all" (all packages), "std" (standard packages) +// or a path including "...". 
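+//
+// Illustrative examples (package names here are only examples):
+//
+//	allPackages("std")       // every standard-library package
+//	allPackages("net/...")   // "net" itself plus "net/http", "net/http/httptest", ...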
+func allPackages(pattern string) []string { + pkgs := matchPackages(pattern) + if len(pkgs) == 0 { + fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) + } + return pkgs +} + +func matchPackages(pattern string) []string { + match := func(string) bool { return true } + treeCanMatch := func(string) bool { return true } + if pattern != "all" && pattern != "std" { + match = matchPattern(pattern) + treeCanMatch = treeCanMatchPattern(pattern) + } + + have := map[string]bool{ + "builtin": true, // ignore pseudo-package that exists only for documentation + } + if !buildContext.CgoEnabled { + have["runtime/cgo"] = true // ignore during walk + } + var pkgs []string + + // Commands + cmd := filepath.Join(goroot, "src/cmd") + string(filepath.Separator) + filepath.Walk(cmd, func(path string, fi os.FileInfo, err error) error { + if err != nil || !fi.IsDir() || path == cmd { + return nil + } + name := path[len(cmd):] + if !treeCanMatch(name) { + return filepath.SkipDir + } + // Commands are all in cmd/, not in subdirectories. + if strings.Contains(name, string(filepath.Separator)) { + return filepath.SkipDir + } + + // We use, e.g., cmd/gofmt as the pseudo import path for gofmt. + name = "cmd/" + name + if have[name] { + return nil + } + have[name] = true + if !match(name) { + return nil + } + _, err = buildContext.ImportDir(path, 0) + if err != nil { + if _, noGo := err.(*build.NoGoError); !noGo { + log.Print(err) + } + return nil + } + pkgs = append(pkgs, name) + return nil + }) + + for _, src := range buildContext.SrcDirs() { + if (pattern == "std" || pattern == "cmd") && src != gorootSrc { + continue + } + src = filepath.Clean(src) + string(filepath.Separator) + root := src + if pattern == "cmd" { + root += "cmd" + string(filepath.Separator) + } + filepath.Walk(root, func(path string, fi os.FileInfo, err error) error { + if err != nil || !fi.IsDir() || path == src { + return nil + } + + // Avoid .foo, _foo, testdata and vendor directory trees. + _, elem := filepath.Split(path) + if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" || elem == "vendor" { + return filepath.SkipDir + } + + name := filepath.ToSlash(path[len(src):]) + if pattern == "std" && (strings.Contains(name, ".") || name == "cmd") { + // The name "std" is only the standard library. + // If the name is cmd, it's the root of the command tree. + return filepath.SkipDir + } + if !treeCanMatch(name) { + return filepath.SkipDir + } + if have[name] { + return nil + } + have[name] = true + if !match(name) { + return nil + } + _, err = buildContext.ImportDir(path, 0) + if err != nil { + if _, noGo := err.(*build.NoGoError); noGo { + return nil + } + } + pkgs = append(pkgs, name) + return nil + }) + } + return pkgs +} + +// allPackagesInFS is like allPackages but is passed a pattern +// beginning ./ or ../, meaning it should scan the tree rooted +// at the given directory. There are ... in the pattern too. +func allPackagesInFS(pattern string) []string { + pkgs := matchPackagesInFS(pattern) + if len(pkgs) == 0 { + fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) + } + return pkgs +} + +func matchPackagesInFS(pattern string) []string { + // Find directory to begin the scan. + // Could be smarter but this one optimization + // is enough for now, since ... is usually at the + // end of a path. + i := strings.Index(pattern, "...") + dir, _ := path.Split(pattern[:i]) + + // pattern begins with ./ or ../. + // path.Clean will discard the ./ but not the ../. 
+ // We need to preserve the ./ for pattern matching + // and in the returned import paths. + prefix := "" + if strings.HasPrefix(pattern, "./") { + prefix = "./" + } + match := matchPattern(pattern) + + var pkgs []string + filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { + if err != nil || !fi.IsDir() { + return nil + } + if path == dir { + // filepath.Walk starts at dir and recurses. For the recursive case, + // the path is the result of filepath.Join, which calls filepath.Clean. + // The initial case is not Cleaned, though, so we do this explicitly. + // + // This converts a path like "./io/" to "io". Without this step, running + // "cd $GOROOT/src/pkg; go list ./io/..." would incorrectly skip the io + // package, because prepending the prefix "./" to the unclean path would + // result in "././io", and match("././io") returns false. + path = filepath.Clean(path) + } + + // Avoid .foo, _foo, testdata and vendor directory trees, but do not avoid "." or "..". + _, elem := filepath.Split(path) + dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".." + if dot || strings.HasPrefix(elem, "_") || elem == "testdata" || elem == "vendor" { + return filepath.SkipDir + } + + name := prefix + filepath.ToSlash(path) + if !match(name) { + return nil + } + if _, err = build.ImportDir(path, 0); err != nil { + if _, noGo := err.(*build.NoGoError); !noGo { + log.Print(err) + } + return nil + } + pkgs = append(pkgs, name) + return nil + }) + return pkgs +} diff --git a/tools/vendor/github.com/alexkohler/nakedret/v2/nakedret.go b/tools/vendor/github.com/alexkohler/nakedret/v2/nakedret.go new file mode 100644 index 0000000000..f78bb8cb6c --- /dev/null +++ b/tools/vendor/github.com/alexkohler/nakedret/v2/nakedret.go @@ -0,0 +1,309 @@ +package nakedret + +import ( + "bytes" + "errors" + "flag" + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/printer" + "go/token" + "log" + "os" + "path/filepath" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const pwd = "./" + +func NakedReturnAnalyzer(defaultLines uint) *analysis.Analyzer { + nakedRet := &NakedReturnRunner{} + flags := flag.NewFlagSet("nakedret", flag.ExitOnError) + flags.UintVar(&nakedRet.MaxLength, "l", defaultLines, "maximum number of lines for a naked return function") + var analyzer = &analysis.Analyzer{ + Name: "nakedret", + Doc: "Checks that functions with naked returns are not longer than a maximum size (can be zero).", + Run: nakedRet.run, + Flags: *flags, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + } + return analyzer +} + +type NakedReturnRunner struct { + MaxLength uint +} + +func (n *NakedReturnRunner) run(pass *analysis.Pass) (any, error) { + inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ // filter needed nodes: visit only them + (*ast.FuncDecl)(nil), + (*ast.FuncLit)(nil), + (*ast.ReturnStmt)(nil), + } + retVis := &returnsVisitor{ + pass: pass, + f: pass.Fset, + maxLength: n.MaxLength, + } + inspector.Nodes(nodeFilter, retVis.NodesVisit) + return nil, nil +} + +type returnsVisitor struct { + pass *analysis.Pass + f *token.FileSet + maxLength uint + + // functions contains funcInfo for each nested function definition encountered while visiting the AST. 
+ functions []funcInfo +} + +type funcInfo struct { + // Details of the function we're currently dealing with + funcType *ast.FuncType + funcName string + funcLength int + reportNaked bool +} + +func checkNakedReturns(args []string, maxLength *uint, setExitStatus bool) error { + + fset := token.NewFileSet() + + files, err := parseInput(args, fset) + if err != nil { + return fmt.Errorf("could not parse input: %v", err) + } + + if maxLength == nil { + return errors.New("max length nil") + } + + analyzer := NakedReturnAnalyzer(*maxLength) + pass := &analysis.Pass{ + Analyzer: analyzer, + Fset: fset, + Files: files, + Report: func(d analysis.Diagnostic) { + log.Printf("%s:%d: %s", fset.Position(d.Pos).Filename, fset.Position(d.Pos).Line, d.Message) + }, + ResultOf: map[*analysis.Analyzer]any{}, + } + result, err := inspect.Analyzer.Run(pass) + if err != nil { + return err + } + pass.ResultOf[inspect.Analyzer] = result + + _, err = analyzer.Run(pass) + if err != nil { + return err + } + + return nil +} + +func parseInput(args []string, fset *token.FileSet) ([]*ast.File, error) { + var directoryList []string + var fileMode bool + files := make([]*ast.File, 0) + + if len(args) == 0 { + directoryList = append(directoryList, pwd) + } else { + for _, arg := range args { + if strings.HasSuffix(arg, "/...") && isDir(arg[:len(arg)-len("/...")]) { + + for _, dirname := range allPackagesInFS(arg) { + directoryList = append(directoryList, dirname) + } + + } else if isDir(arg) { + directoryList = append(directoryList, arg) + + } else if exists(arg) { + if strings.HasSuffix(arg, ".go") { + fileMode = true + f, err := parser.ParseFile(fset, arg, nil, 0) + if err != nil { + return nil, err + } + files = append(files, f) + } else { + return nil, fmt.Errorf("invalid file %v specified", arg) + } + } else { + + // TODO clean this up a bit + imPaths := importPaths([]string{arg}) + for _, importPath := range imPaths { + pkg, err := build.Import(importPath, ".", 0) + if err != nil { + return nil, err + } + var stringFiles []string + stringFiles = append(stringFiles, pkg.GoFiles...) + // files = append(files, pkg.CgoFiles...) + stringFiles = append(stringFiles, pkg.TestGoFiles...) + if pkg.Dir != "." 
{ + for i, f := range stringFiles { + stringFiles[i] = filepath.Join(pkg.Dir, f) + } + } + + fileMode = true + for _, stringFile := range stringFiles { + f, err := parser.ParseFile(fset, stringFile, nil, 0) + if err != nil { + return nil, err + } + files = append(files, f) + } + + } + } + } + } + + // if we're not in file mode, then we need to grab each and every package in each directory + // we can to grab all the files + if !fileMode { + for _, fpath := range directoryList { + pkgs, err := parser.ParseDir(fset, fpath, nil, 0) + if err != nil { + return nil, err + } + + for _, pkg := range pkgs { + for _, f := range pkg.Files { + files = append(files, f) + } + } + } + } + + return files, nil +} + +func isDir(filename string) bool { + fi, err := os.Stat(filename) + return err == nil && fi.IsDir() +} + +func exists(filename string) bool { + _, err := os.Stat(filename) + return err == nil +} + +func hasNamedReturns(funcType *ast.FuncType) bool { + if funcType == nil || funcType.Results == nil { + return false + } + for _, field := range funcType.Results.List { + for _, ident := range field.Names { + if ident != nil { + return true + } + } + } + return false +} + +func nestedFuncName(functions []funcInfo) string { + var names []string + for _, f := range functions { + names = append(names, f.funcName) + } + return strings.Join(names, ".") +} + +func nakedReturnFix(s *ast.ReturnStmt, funcType *ast.FuncType) *ast.ReturnStmt { + var nameExprs []ast.Expr + for _, result := range funcType.Results.List { + for _, ident := range result.Names { + if ident != nil { + nameExprs = append(nameExprs, ident) + } + } + } + var sFix = *s + sFix.Results = nameExprs + return &sFix +} + +func (v *returnsVisitor) NodesVisit(node ast.Node, push bool) bool { + var ( + funcType *ast.FuncType + funcName string + ) + switch s := node.(type) { + case *ast.FuncDecl: + // We've found a function + funcType = s.Type + funcName = s.Name.Name + case *ast.FuncLit: + // We've found a function literal + funcType = s.Type + file := v.f.File(s.Pos()) + funcName = fmt.Sprintf("", file.Position(s.Pos()).Line) + case *ast.ReturnStmt: + // We've found a possibly naked return statement + fun := v.functions[len(v.functions)-1] + funName := nestedFuncName(v.functions) + if fun.reportNaked && len(s.Results) == 0 && push { + sFix := nakedReturnFix(s, fun.funcType) + b := &bytes.Buffer{} + err := printer.Fprint(b, v.f, sFix) + if err != nil { + log.Printf("failed to format named return fix: %s", err) + } + v.pass.Report(analysis.Diagnostic{ + Pos: s.Pos(), + End: s.End(), + Message: fmt.Sprintf("naked return in func `%s` with %d lines of code", funName, fun.funcLength), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "explicit return statement", + TextEdits: []analysis.TextEdit{{ + Pos: s.Pos(), + End: s.End(), + NewText: b.Bytes()}}, + }}, + }) + } + } + + if !push { + if funcType == nil { + return false + } + // Pop function info + v.functions = v.functions[:len(v.functions)-1] + return false + } + + if push && funcType != nil { + // Push function info to track returns for this function + file := v.f.File(node.Pos()) + length := file.Position(node.End()).Line - file.Position(node.Pos()).Line + if length == 0 { + // consider functions that finish on the same line as they start as single line functions, not zero lines! 
+ length = 1 + } + v.functions = append(v.functions, funcInfo{ + funcType: funcType, + funcName: funcName, + funcLength: length, + reportNaked: uint(length) > v.maxLength && hasNamedReturns(funcType), + }) + } + + return true +} diff --git a/tools/vendor/github.com/ashanbrown/forbidigo/forbidigo/config_options.go b/tools/vendor/github.com/ashanbrown/forbidigo/forbidigo/config_options.go index a39f754f06..3f0ed6682a 100644 --- a/tools/vendor/github.com/ashanbrown/forbidigo/forbidigo/config_options.go +++ b/tools/vendor/github.com/ashanbrown/forbidigo/forbidigo/config_options.go @@ -2,6 +2,10 @@ package forbidigo // Code generated by github.com/launchdarkly/go-options. DO NOT EDIT. +import "fmt" + +import "github.com/google/go-cmp/cmp" + type ApplyOptionFunc func(c *config) error func (f ApplyOptionFunc) apply(c *config) error { @@ -28,18 +32,98 @@ type Option interface { apply(*config) error } +type optionExcludeGodocExamplesImpl struct { + o bool +} + +func (o optionExcludeGodocExamplesImpl) apply(c *config) error { + c.ExcludeGodocExamples = o.o + return nil +} + +func (o optionExcludeGodocExamplesImpl) Equal(v optionExcludeGodocExamplesImpl) bool { + switch { + case !cmp.Equal(o.o, v.o): + return false + } + return true +} + +func (o optionExcludeGodocExamplesImpl) String() string { + name := "OptionExcludeGodocExamples" + + // hack to avoid go vet error about passing a function to Sprintf + var value interface{} = o.o + return fmt.Sprintf("%s: %+v", name, value) +} + // OptionExcludeGodocExamples don't check inside Godoc examples (see https://blog.golang.org/examples) -func OptionExcludeGodocExamples(o bool) ApplyOptionFunc { - return func(c *config) error { - c.ExcludeGodocExamples = o - return nil +func OptionExcludeGodocExamples(o bool) Option { + return optionExcludeGodocExamplesImpl{ + o: o, + } +} + +type optionIgnorePermitDirectivesImpl struct { + o bool +} + +func (o optionIgnorePermitDirectivesImpl) apply(c *config) error { + c.IgnorePermitDirectives = o.o + return nil +} + +func (o optionIgnorePermitDirectivesImpl) Equal(v optionIgnorePermitDirectivesImpl) bool { + switch { + case !cmp.Equal(o.o, v.o): + return false } + return true +} + +func (o optionIgnorePermitDirectivesImpl) String() string { + name := "OptionIgnorePermitDirectives" + + // hack to avoid go vet error about passing a function to Sprintf + var value interface{} = o.o + return fmt.Sprintf("%s: %+v", name, value) } // OptionIgnorePermitDirectives don't check for `permit` directives(for example, in favor of `nolint`) -func OptionIgnorePermitDirectives(o bool) ApplyOptionFunc { - return func(c *config) error { - c.IgnorePermitDirectives = o - return nil +func OptionIgnorePermitDirectives(o bool) Option { + return optionIgnorePermitDirectivesImpl{ + o: o, + } +} + +type optionAnalyzeTypesImpl struct { + o bool +} + +func (o optionAnalyzeTypesImpl) apply(c *config) error { + c.AnalyzeTypes = o.o + return nil +} + +func (o optionAnalyzeTypesImpl) Equal(v optionAnalyzeTypesImpl) bool { + switch { + case !cmp.Equal(o.o, v.o): + return false + } + return true +} + +func (o optionAnalyzeTypesImpl) String() string { + name := "OptionAnalyzeTypes" + + // hack to avoid go vet error about passing a function to Sprintf + var value interface{} = o.o + return fmt.Sprintf("%s: %+v", name, value) +} + +// OptionAnalyzeTypes enable to match canonical names for types and interfaces using type info +func OptionAnalyzeTypes(o bool) Option { + return optionAnalyzeTypesImpl{ + o: o, } } diff --git 
a/tools/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go b/tools/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go index 9b3765405e..943a69975d 100644 --- a/tools/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go +++ b/tools/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go @@ -1,4 +1,4 @@ -// forbidigo provides a linter for forbidding the use of specific identifiers +// Package forbidigo provides a linter for forbidding the use of specific identifiers package forbidigo import ( @@ -11,8 +11,6 @@ import ( "log" "regexp" "strings" - - "github.com/pkg/errors" ) type Issue interface { @@ -66,12 +64,13 @@ type config struct { // don't check inside Godoc examples (see https://blog.golang.org/examples) ExcludeGodocExamples bool `options:",true"` IgnorePermitDirectives bool // don't check for `permit` directives(for example, in favor of `nolint`) + AnalyzeTypes bool // enable to match canonical names for types and interfaces using type info } func NewLinter(patterns []string, options ...Option) (*Linter, error) { cfg, err := newConfig(options...) if err != nil { - return nil, errors.Wrapf(err, "failed to process options") + return nil, fmt.Errorf("failed to process options: %w", err) } if len(patterns) == 0 { @@ -215,6 +214,14 @@ func (v *visitor) Visit(node ast.Node) ast.Visitor { }) } } + + // descend into the left-side of selectors + if selector, isSelector := node.(*ast.SelectorExpr); isSelector { + if _, leftSideIsIdentifier := selector.X.(*ast.Ident); !leftSideIsIdentifier { + return v + } + } + return nil } @@ -239,33 +246,36 @@ func (v *visitor) textFor(node ast.Node) string { func (v *visitor) expandMatchText(node ast.Node, srcText string) (matchTexts []string, pkgText string) { // The text to match against is the literal source code if we cannot // come up with something different. - matchText := srcText + matchTexts = []string{srcText} - if v.runConfig.TypesInfo == nil { - return []string{matchText}, pkgText + if !v.cfg.AnalyzeTypes || v.runConfig.TypesInfo == nil { + return matchTexts, pkgText } location := v.runConfig.Fset.Position(node.Pos()) switch node := node.(type) { case *ast.Ident: - object, ok := v.runConfig.TypesInfo.Uses[node] - if !ok { + if object, ok := v.runConfig.TypesInfo.Uses[node]; !ok { // No information about the identifier. Should // not happen, but perhaps there were compile // errors? v.runConfig.DebugLog("%s: unknown identifier %q", location, srcText) - return []string{matchText}, pkgText - } - if pkg := object.Pkg(); pkg != nil { + } else if pkg := object.Pkg(); pkg != nil { pkgText = pkg.Path() - v.runConfig.DebugLog("%s: identifier: %q -> %q in package %q", location, srcText, matchText, pkgText) + // if this is a method, don't include the package name + isMethod := false + if signature, ok := object.Type().(*types.Signature); ok && signature.Recv() != nil { + isMethod = true + } + v.runConfig.DebugLog("%s: identifier: %q -> %q in package %q", location, srcText, matchTexts, pkgText) // match either with or without package name - return []string{pkg.Name() + "." + srcText, srcText}, pkgText + if !isMethod { + matchTexts = []string{pkg.Name() + "." 
+ srcText, srcText} + } } else { - v.runConfig.DebugLog("%s: identifier: %q -> %q without package", location, srcText, matchText) + v.runConfig.DebugLog("%s: identifier: %q -> %q without package", location, srcText, matchTexts) } - return []string{matchText}, pkgText case *ast.SelectorExpr: selector := node.X field := node.Sel.Name @@ -274,58 +284,54 @@ func (v *visitor) expandMatchText(node ast.Node, srcText string) (matchTexts []s // type. We don't care about the value. selectorText := v.textFor(node) if typeAndValue, ok := v.runConfig.TypesInfo.Types[selector]; ok { - m, p, ok := pkgFromType(typeAndValue.Type) + m, p, ok := typeNameWithPackage(typeAndValue.Type) if !ok { v.runConfig.DebugLog("%s: selector %q with supported type %T", location, selectorText, typeAndValue.Type) } - matchText = m + "." + field + matchTexts = []string{m + "." + field} pkgText = p - v.runConfig.DebugLog("%s: selector %q with supported type %q: %q -> %q, package %q", location, selectorText, typeAndValue.Type.String(), srcText, matchText, pkgText) - return []string{matchText}, pkgText + v.runConfig.DebugLog("%s: selector %q with supported type %q: %q -> %q, package %q", location, selectorText, typeAndValue.Type.String(), srcText, matchTexts, pkgText) } // Some expressions need special treatment. switch selector := selector.(type) { case *ast.Ident: - object, ok := v.runConfig.TypesInfo.Uses[selector] - if !ok { + if object, hasUses := v.runConfig.TypesInfo.Uses[selector]; hasUses { + switch object := object.(type) { + case *types.PkgName: + pkgText = object.Imported().Path() + matchTexts = []string{object.Imported().Name() + "." + field} + v.runConfig.DebugLog("%s: selector %q is package: %q -> %q, package %q", location, selectorText, srcText, matchTexts, pkgText) + case *types.Var: + if typeName, packageName, ok := typeNameWithPackage(object.Type()); ok { + matchTexts = []string{typeName + "." + field} + pkgText = packageName + v.runConfig.DebugLog("%s: selector %q is variable of type %q: %q -> %q, package %q", location, selectorText, object.Type().String(), srcText, matchTexts, pkgText) + } else { + v.runConfig.DebugLog("%s: selector %q is variable with unsupported type %T", location, selectorText, object.Type()) + } + default: + // Something else? + v.runConfig.DebugLog("%s: selector %q is identifier with unsupported type %T", location, selectorText, object) + } + } else { // No information about the identifier. Should // not happen, but perhaps there were compile // errors? v.runConfig.DebugLog("%s: unknown selector identifier %q", location, selectorText) - return []string{matchText}, pkgText - } - switch object := object.(type) { - case *types.PkgName: - pkgText = object.Imported().Path() - matchText = object.Imported().Name() + "." + field - v.runConfig.DebugLog("%s: selector %q is package: %q -> %q, package %q", location, selectorText, srcText, matchText, pkgText) - return []string{matchText}, pkgText - case *types.Var: - m, p, ok := pkgFromType(object.Type()) - if !ok { - v.runConfig.DebugLog("%s: selector %q is variable with unsupported type %T", location, selectorText, object.Type()) - } - matchText = m + "." + field - pkgText = p - v.runConfig.DebugLog("%s: selector %q is variable of type %q: %q -> %q, package %q", location, selectorText, object.Type().String(), srcText, matchText, pkgText) - default: - // Something else? 
- v.runConfig.DebugLog("%s: selector %q is identifier with unsupported type %T", location, selectorText, object) } default: v.runConfig.DebugLog("%s: selector %q of unsupported type %T", location, selectorText, selector) } - return []string{matchText}, pkgText default: v.runConfig.DebugLog("%s: unsupported type %T", location, node) - return []string{matchText}, pkgText } + return matchTexts, pkgText } -// pkgFromType tries to determine `.` and the full +// typeNameWithPackage tries to determine `.` and the full // package path. This only needs to work for types of a selector in a selector // expression. -func pkgFromType(t types.Type) (typeStr, pkgStr string, ok bool) { +func typeNameWithPackage(t types.Type) (typeName, packagePath string, ok bool) { if ptr, ok := t.(*types.Pointer); ok { t = ptr.Elem() } diff --git a/tools/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go b/tools/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go index 9b2801352d..eaf408d6f3 100644 --- a/tools/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go +++ b/tools/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go @@ -9,13 +9,22 @@ import ( "golang.org/x/tools/go/analysis" ) +//nolint:gochecknoglobals +var flagSet flag.FlagSet + +//nolint:gochecknoglobals var ( - flagSet flag.FlagSet + maxComplexity int + packageAverage float64 + skipTests bool ) -var maxComplexity int -var packageAverage float64 -var skipTests bool +//nolint:gochecknoinits +func init() { + flagSet.IntVar(&maxComplexity, "maxComplexity", 10, "max complexity the function can have") + flagSet.Float64Var(&packageAverage, "packageAverage", 0, "max average complexity in package") + flagSet.BoolVar(&skipTests, "skipTests", false, "should the linter execute on test files as well") +} func NewAnalyzer() *analysis.Analyzer { return &analysis.Analyzer{ @@ -26,12 +35,6 @@ func NewAnalyzer() *analysis.Analyzer { } } -func init() { - flagSet.IntVar(&maxComplexity, "maxComplexity", 10, "max complexity the function can have") - flagSet.Float64Var(&packageAverage, "packageAverage", 0, "max avarage complexity in package") - flagSet.BoolVar(&skipTests, "skipTests", false, "should the linter execute on test files as well") -} - func run(pass *analysis.Pass) (interface{}, error) { var sum, count float64 var pkgName string @@ -70,7 +73,7 @@ func run(pass *analysis.Pass) (interface{}, error) { if packageAverage > 0 { avg := sum / count if avg > packageAverage { - pass.Reportf(pkgPos, "the avarage complexity for the package %s is %f, max is %f", pkgName, avg, packageAverage) + pass.Reportf(pkgPos, "the average complexity for the package %s is %f, max is %f", pkgName, avg, packageAverage) } } diff --git a/tools/vendor/github.com/butuzov/ireturn/analyzer/analyzer.go b/tools/vendor/github.com/butuzov/ireturn/analyzer/analyzer.go index f4fdaaed66..3a0bf7402d 100644 --- a/tools/vendor/github.com/butuzov/ireturn/analyzer/analyzer.go +++ b/tools/vendor/github.com/butuzov/ireturn/analyzer/analyzer.go @@ -2,14 +2,13 @@ package analyzer import ( "flag" - "fmt" "go/ast" gotypes "go/types" "strings" "sync" - "github.com/butuzov/ireturn/config" - "github.com/butuzov/ireturn/types" + "github.com/butuzov/ireturn/analyzer/internal/config" + "github.com/butuzov/ireturn/analyzer/internal/types" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" @@ -39,8 +38,18 @@ func (a *analyzer) run(pass *analysis.Pass) (interface{}, error) { return nil, a.err } - // 01. Running Inspection. 
ins, _ := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + // 00. does file have dot-imported standard packages? + dotImportedStd := make(map[string]struct{}) + ins.Preorder([]ast.Node{(*ast.ImportSpec)(nil)}, func(node ast.Node) { + i, _ := node.(*ast.ImportSpec) + if i.Name != nil && i.Name.Name == "." { + dotImportedStd[strings.Trim(i.Path.Value, `"`)] = struct{}{} + } + }) + + // 01. Running Inspection. ins.Preorder([]ast.Node{(*ast.FuncDecl)(nil)}, func(node ast.Node) { // 001. Casting to funcdecl f, _ := node.(*ast.FuncDecl) @@ -56,17 +65,25 @@ func (a *analyzer) run(pass *analysis.Pass) (interface{}, error) { return } + seen := make(map[string]bool, 4) + // 004. Filtering Results. - for _, i := range filterInterfaces(pass, f.Type.Results) { + for _, issue := range filterInterfaces(pass, f.Type, dotImportedStd) { + + if a.handler.IsValid(issue) { + continue + } + + issue.Enrich(f) + + key := issue.HashString() - if a.handler.IsValid(i) { + if ok := seen[key]; ok { continue } + seen[key] = true - a.found = append(a.found, analysis.Diagnostic{ //nolint: exhaustivestruct - Pos: f.Pos(), - Message: fmt.Sprintf("%s returns interface (%s)", f.Name.Name, i.Name), - }) + a.found = append(a.found, issue.ExportDiagnostic()) } }) @@ -112,25 +129,31 @@ func flags() flag.FlagSet { return *set } -func filterInterfaces(pass *analysis.Pass, fl *ast.FieldList) []types.IFace { +func filterInterfaces(p *analysis.Pass, ft *ast.FuncType, di map[string]struct{}) []types.IFace { var results []types.IFace - for pos, el := range fl.List { + if ft.Results == nil { // this can't happen, but double checking. + return results + } + + tp := newTypeParams(ft.TypeParams) + + for _, el := range ft.Results.List { switch v := el.Type.(type) { // ----- empty or anonymous interfaces case *ast.InterfaceType: if len(v.Methods.List) == 0 { - results = append(results, issue("interface{}", pos, types.EmptyInterface)) + results = append(results, types.NewIssue("interface{}", types.EmptyInterface)) continue } - results = append(results, issue("anonymous interface", pos, types.AnonInterface)) + results = append(results, types.NewIssue("anonymous interface", types.AnonInterface)) // ------ Errors and interfaces from same package case *ast.Ident: - t1 := pass.TypesInfo.TypeOf(el.Type) + t1 := p.TypesInfo.TypeOf(el.Type) if !gotypes.IsInterface(t1.Underlying()) { continue } @@ -138,56 +161,72 @@ func filterInterfaces(pass *analysis.Pass, fl *ast.FieldList) []types.IFace { word := t1.String() // only build in interface is error if obj := gotypes.Universe.Lookup(word); obj != nil { - results = append(results, issue(obj.Name(), pos, types.ErrorInterface)) + results = append(results, types.NewIssue(obj.Name(), types.ErrorInterface)) + continue + } + // found in type params + if tp.In(word) { + results = append(results, types.NewIssue(word, types.Generic)) continue } - results = append(results, issue(word, pos, types.NamedInterface)) + // is it dot-imported package? + // handling cases when stdlib package imported via "." 
dot-import + if len(di) > 0 { + name := stdPkgInterface(word) + if _, ok := di[name]; ok { + results = append(results, types.NewIssue(word, types.NamedStdInterface)) + + continue + } + } + + results = append(results, types.NewIssue(word, types.NamedInterface)) // ------- standard library and 3rd party interfaces case *ast.SelectorExpr: - t1 := pass.TypesInfo.TypeOf(el.Type) + t1 := p.TypesInfo.TypeOf(el.Type) if !gotypes.IsInterface(t1.Underlying()) { continue } word := t1.String() - if isStdLib(word) { - results = append(results, issue(word, pos, types.NamedStdInterface)) - + if isStdPkgInterface(word) { + results = append(results, types.NewIssue(word, types.NamedStdInterface)) continue } - results = append(results, issue(word, pos, types.NamedInterface)) + results = append(results, types.NewIssue(word, types.NamedInterface)) } } return results } -// isStdLib will run small checks against pkg to find out if named interface -// we lookling on comes from a standard library or not. -func isStdLib(named string) bool { - // find last dot index. +// stdPkgInterface will return package name if tis std lib package +// or empty string on fail. +func stdPkgInterface(named string) string { + // find last "." index. idx := strings.LastIndex(named, ".") if idx == -1 { - return false + return "" } - if _, ok := std[named[0:idx]]; ok { - return true - } + return stdPkg(named[0:idx]) +} - return false +// isStdPkgInterface will run small checks against pkg to find out if named +// interface we looking on - comes from a standard library or not. +func isStdPkgInterface(namedInterface string) bool { + return stdPkgInterface(namedInterface) != "" } -// issue is shortcut that creates issue for next filtering. -func issue(name string, pos int, interfaceType types.IType) types.IFace { - return types.IFace{ - Name: name, - Pos: pos, - Type: interfaceType, +func stdPkg(pkg string) string { + if _, ok := std[pkg]; ok { + return pkg } + + return "" } diff --git a/tools/vendor/github.com/butuzov/ireturn/config/allow.go b/tools/vendor/github.com/butuzov/ireturn/analyzer/internal/config/allow.go similarity index 87% rename from tools/vendor/github.com/butuzov/ireturn/config/allow.go rename to tools/vendor/github.com/butuzov/ireturn/analyzer/internal/config/allow.go index c171a255dd..6a294ca35f 100644 --- a/tools/vendor/github.com/butuzov/ireturn/config/allow.go +++ b/tools/vendor/github.com/butuzov/ireturn/analyzer/internal/config/allow.go @@ -1,6 +1,6 @@ package config -import "github.com/butuzov/ireturn/types" +import "github.com/butuzov/ireturn/analyzer/internal/types" // allowConfig specifies a list of interfaces (keywords, patters and regular expressions) // that are allowed by ireturn as valid to return, any non listed interface are rejected. diff --git a/tools/vendor/github.com/butuzov/ireturn/config/config.go b/tools/vendor/github.com/butuzov/ireturn/analyzer/internal/config/config.go similarity index 92% rename from tools/vendor/github.com/butuzov/ireturn/config/config.go rename to tools/vendor/github.com/butuzov/ireturn/analyzer/internal/config/config.go index 7307ab3eac..e2f1aef6e9 100644 --- a/tools/vendor/github.com/butuzov/ireturn/config/config.go +++ b/tools/vendor/github.com/butuzov/ireturn/analyzer/internal/config/config.go @@ -3,7 +3,7 @@ package config import ( "regexp" - "github.com/butuzov/ireturn/types" + "github.com/butuzov/ireturn/analyzer/internal/types" ) // defaultConfig is core of the validation, ... 
@@ -55,6 +55,8 @@ func (config *defaultConfig) compileList() { config.quick |= uint8(types.AnonInterface) case types.NameStdLib: config.quick |= uint8(types.NamedStdInterface) + case types.NameGeneric: + config.quick |= uint8(types.Generic) } // allow to parse regular expressions diff --git a/tools/vendor/github.com/butuzov/ireturn/config/new.go b/tools/vendor/github.com/butuzov/ireturn/analyzer/internal/config/new.go similarity index 92% rename from tools/vendor/github.com/butuzov/ireturn/config/new.go rename to tools/vendor/github.com/butuzov/ireturn/analyzer/internal/config/new.go index cfaa274a1e..6aa04e52e8 100644 --- a/tools/vendor/github.com/butuzov/ireturn/config/new.go +++ b/tools/vendor/github.com/butuzov/ireturn/analyzer/internal/config/new.go @@ -5,12 +5,12 @@ import ( "flag" "strings" - "github.com/butuzov/ireturn/types" + "github.com/butuzov/ireturn/analyzer/internal/types" ) var ErrCollisionOfInterests = errors.New("can't have both `-accept` and `-reject` specified at same time") -//nolint: exhaustivestruct +// nolint: exhaustivestruct func DefaultValidatorConfig() *allowConfig { return allowAll([]string{ types.NameEmpty, // "empty": empty interfaces (interface{}) @@ -40,7 +40,7 @@ func New(fs *flag.FlagSet) (interface{}, error) { return rejectAll(rejectList), nil } - // can have none at same time. + // can have none (defaults are used) at same time. return nil, nil } diff --git a/tools/vendor/github.com/butuzov/ireturn/config/reject.go b/tools/vendor/github.com/butuzov/ireturn/analyzer/internal/config/reject.go similarity index 87% rename from tools/vendor/github.com/butuzov/ireturn/config/reject.go rename to tools/vendor/github.com/butuzov/ireturn/analyzer/internal/config/reject.go index 21e50114bc..bef6913bb8 100644 --- a/tools/vendor/github.com/butuzov/ireturn/config/reject.go +++ b/tools/vendor/github.com/butuzov/ireturn/analyzer/internal/config/reject.go @@ -1,6 +1,6 @@ package config -import "github.com/butuzov/ireturn/types" +import "github.com/butuzov/ireturn/analyzer/internal/types" // rejectConfig specifies a list of interfaces (keywords, patters and regular expressions) // that are rejected by ireturn as valid to return, any non listed interface are allowed. 
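Context for the `NameGeneric` case added above: the updated ireturn analyzer classifies a return value as `types.Generic` when a function returns one of its own type parameters, and the config hunk maps the new `generic` keyword onto that kind. A minimal sketch of the two situations (function names here are illustrative, not taken from the diff):

```go
package example

import "io"

// NewReader returns a named stdlib interface; ireturn reports this
// under the existing "stdlib" keyword.
func NewReader() io.Reader { return nil }

// First returns its own type parameter T; with this update ireturn
// classifies the return under the new "generic" keyword instead of
// treating it as an ordinary named interface.
func First[T any](values []T) T {
	var zero T
	if len(values) == 0 {
		return zero
	}
	return values[0]
}
```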
diff --git a/tools/vendor/github.com/butuzov/ireturn/analyzer/internal/types/iface.go b/tools/vendor/github.com/butuzov/ireturn/analyzer/internal/types/iface.go new file mode 100644 index 0000000000..13f19a3e25 --- /dev/null +++ b/tools/vendor/github.com/butuzov/ireturn/analyzer/internal/types/iface.go @@ -0,0 +1,49 @@ +package types + +import ( + "fmt" + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" +) + +type IFace struct { + Name string // Interface name + Type IType // Type of the interface + + Pos token.Pos // Token Position + FuncName string // +} + +func NewIssue(name string, interfaceType IType) IFace { + return IFace{ + Name: name, + // Pos: pos, + Type: interfaceType, + } +} + +func (i *IFace) Enrich(f *ast.FuncDecl) { + i.FuncName = f.Name.Name + i.Pos = f.Pos() +} + +func (i IFace) String() string { + if i.Type == Generic { + return fmt.Sprintf("%s returns generic interface (%s)", i.FuncName, i.Name) + } + + return fmt.Sprintf("%s returns interface (%s)", i.FuncName, i.Name) +} + +func (i IFace) HashString() string { + return fmt.Sprintf("%v-%s", i.Pos, i.String()) +} + +func (i IFace) ExportDiagnostic() analysis.Diagnostic { + return analysis.Diagnostic{ //nolint: exhaustivestruct + Pos: i.Pos, + Message: i.String(), + } +} diff --git a/tools/vendor/github.com/butuzov/ireturn/analyzer/internal/types/names.go b/tools/vendor/github.com/butuzov/ireturn/analyzer/internal/types/names.go new file mode 100644 index 0000000000..1092c9667c --- /dev/null +++ b/tools/vendor/github.com/butuzov/ireturn/analyzer/internal/types/names.go @@ -0,0 +1,9 @@ +package types + +const ( + NameEmpty = "empty" + NameAnon = "anon" + NameError = "error" + NameStdLib = "stdlib" + NameGeneric = "generic" +) diff --git a/tools/vendor/github.com/butuzov/ireturn/types/types.go b/tools/vendor/github.com/butuzov/ireturn/analyzer/internal/types/types.go similarity index 81% rename from tools/vendor/github.com/butuzov/ireturn/types/types.go rename to tools/vendor/github.com/butuzov/ireturn/analyzer/internal/types/types.go index 837570db41..5c0bd74077 100644 --- a/tools/vendor/github.com/butuzov/ireturn/types/types.go +++ b/tools/vendor/github.com/butuzov/ireturn/analyzer/internal/types/types.go @@ -8,4 +8,5 @@ const ( ErrorInterface // ref as error NamedInterface // ref as named NamedStdInterface // ref as named stdlib + Generic // ref as generic type parameter ) diff --git a/tools/vendor/github.com/butuzov/ireturn/analyzer/std.go b/tools/vendor/github.com/butuzov/ireturn/analyzer/std.go index 2af5284a3b..ec361cd442 100644 --- a/tools/vendor/github.com/butuzov/ireturn/analyzer/std.go +++ b/tools/vendor/github.com/butuzov/ireturn/analyzer/std.go @@ -183,4 +183,12 @@ var std = map[string]struct{}{ "runtime/metrics": {}, "testing/fstest": {}, // added in Go v1.17 in compare to v1.16 (docker image) + // added in Go v1.18 in compare to v1.17 (docker image) + "debug/buildinfo": {}, + "net/netip": {}, + // added in Go v1.19 in compare to v1.18 (docker image) + "go/doc/comment": {}, + // added in Go v1.20 in compare to v1.19 (docker image) + "crypto/ecdh": {}, + "runtime/coverage": {}, } diff --git a/tools/vendor/github.com/butuzov/ireturn/analyzer/typeparams.go b/tools/vendor/github.com/butuzov/ireturn/analyzer/typeparams.go new file mode 100644 index 0000000000..14193c355b --- /dev/null +++ b/tools/vendor/github.com/butuzov/ireturn/analyzer/typeparams.go @@ -0,0 +1,38 @@ +package analyzer + +import ( + "go/ast" +) + +type typeParams struct { + found []string +} + +func newTypeParams(fl *ast.FieldList) 
typeParams { + tp := typeParams{} + + if fl == nil { + return tp + } + + for _, el := range fl.List { + if el == nil { + continue + } + + for _, name := range el.Names { + tp.found = append(tp.found, name.Name) + } + } + + return tp +} + +func (tp typeParams) In(t string) bool { + for _, i := range tp.found { + if i == t { + return true + } + } + return false +} diff --git a/tools/vendor/github.com/butuzov/ireturn/types/iface.go b/tools/vendor/github.com/butuzov/ireturn/types/iface.go deleted file mode 100644 index e9baa37c07..0000000000 --- a/tools/vendor/github.com/butuzov/ireturn/types/iface.go +++ /dev/null @@ -1,7 +0,0 @@ -package types - -type IFace struct { - Name string // Preserved for named interfaces - Pos int // Position in return tuple - Type IType // Type of the interface -} diff --git a/tools/vendor/github.com/butuzov/ireturn/types/names.go b/tools/vendor/github.com/butuzov/ireturn/types/names.go deleted file mode 100644 index 0b286c4c8f..0000000000 --- a/tools/vendor/github.com/butuzov/ireturn/types/names.go +++ /dev/null @@ -1,8 +0,0 @@ -package types - -const ( - NameEmpty = "empty" - NameAnon = "anon" - NameError = "error" - NameStdLib = "stdlib" -) diff --git a/tools/vendor/github.com/butuzov/mirror/.act b/tools/vendor/github.com/butuzov/mirror/.act new file mode 100644 index 0000000000..8182d703ae --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/.act @@ -0,0 +1,2 @@ +--platform ubuntu-latest=butuzov/act-go:latest +--env DRY_RUN=1 diff --git a/tools/vendor/github.com/butuzov/mirror/.editorconfig b/tools/vendor/github.com/butuzov/mirror/.editorconfig new file mode 100644 index 0000000000..4d9c20d8d9 --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/.editorconfig @@ -0,0 +1,28 @@ +# top-most EditorConfig file +root = true + + +[*] +end_of_line = lf # Unix-style newlines +charset = utf-8 + +indent_style = space # default identation - spaces +indent_size = 4 # default identation - size + +insert_final_newline = true # new line at the end of file +trim_trailing_whitespace = true # no extra sapces at the end of lines + +[*.{go,gohtml,gotpl}] # Go +indent_style = tab +indent_size = 2 + +[{Makefile,makefile}] # CMake +indent_style = tab + +[*.md] # Markdown +trim_trailing_whitespace = true +max_line_length = 100 +insert_final_newline = true +indent_size = 2 + + diff --git a/tools/vendor/github.com/butuzov/mirror/.gitignore b/tools/vendor/github.com/butuzov/mirror/.gitignore new file mode 100644 index 0000000000..109f33b98e --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/.gitignore @@ -0,0 +1,11 @@ +# artifacts +coverage.cov +bin/* +dist/* +tmp/* +out* +sandbox* +demo* +.task* +.ipynb* +.jupyter* diff --git a/tools/vendor/github.com/butuzov/mirror/.goreleaser.yaml b/tools/vendor/github.com/butuzov/mirror/.goreleaser.yaml new file mode 100644 index 0000000000..11749ed2b3 --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/.goreleaser.yaml @@ -0,0 +1,61 @@ +--- +project_name: mirror + +builds: + - binary: mirror + env: + - CGO_ENABLED=0 + goos: + - darwin + - linux + - windows + goarch: + - amd64 + - 386 + - arm64 + - arm + goarm: + - 6 + ignore: + - goos: windows + goarm: 6 + - goos: windows + goarch: arm64 + - goos: linux + goarm: 6 + - goos: darwin + goarch: 386 + main: ./cmd/mirror/ + flags: + - -trimpath + ldflags: -s -w + +checksum: + name_template: 'checksums.txt' + +changelog: + sort: asc + filters: + exclude: + - '(?i)^docs?:' + - '(?i)^docs\([^:]+\):' + - '(?i)^docs\[[^:]+\]:' + - '^tests?:' + - '(?i)^dev:' + - Merge pull request + - 
Merge branch + +archives: + - name_template: '{{ .ProjectName }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}{{ if .Mips }}_{{ .Mips }}{{ end }}' + replacements: + darwin: darwin + linux: linux + windows: windows + 386: i386 + amd64: x86_64 + format_overrides: + - goos: windows + format: zip + files: + - LICENSE + - readme.md diff --git a/tools/vendor/github.com/butuzov/mirror/LICENSE b/tools/vendor/github.com/butuzov/mirror/LICENSE new file mode 100644 index 0000000000..a9752e9726 --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Oleg Butuzov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/tools/vendor/github.com/butuzov/mirror/MIRROR_FUNCS.md b/tools/vendor/github.com/butuzov/mirror/MIRROR_FUNCS.md new file mode 100644 index 0000000000..776816e514 --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/MIRROR_FUNCS.md @@ -0,0 +1,150 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| accepts `string` | `[]byte` / `rune` mirror |
+| --- | --- |
+| `func (b *bufio.Writer) WriteString(s string) (int, error)` | `func (b *bufio.Writer) Write(p []byte) (int, error)`<br>`func (b *bufio.Writer) WriteRune(r rune) (int, error)` |
+| `func (b *bytes.Buffer) WriteString(s string) (int, error)` | `func (b *bytes.Buffer) Write(p []byte) (int, error)`<br>`func (b *bytes.Buffer) WriteRune(r rune) (int, error)` |
+| `func strings.Compare(a, b string) int` | `func bytes.Compare(a, b []byte) int` |
+| `func strings.Contains(s, substr string) bool` | `func bytes.Contains(b, subslice []byte) bool` |
+| `func strings.ContainsAny(s, chars string) bool` | `func bytes.ContainsAny(b []byte, chars string) bool` |
+| `func strings.ContainsRune(s string, r rune) bool` | `func bytes.ContainsRune(b []byte, r rune) bool` |
+| `func strings.Count(s, substr string) int` | `func bytes.Count(s, sep []byte) int` |
+| `func strings.EqualFold(s, t string) bool` | `func bytes.EqualFold(s, t []byte) bool` |
+| `func strings.HasPrefix(s, prefix string) bool` | `func bytes.HasPrefix(s, prefix []byte) bool` |
+| `func strings.HasSuffix(s, suffix string) bool` | `func bytes.HasSuffix(s, suffix []byte) bool` |
+| `func strings.Index(s, substr string) int` | `func bytes.Index(s, sep []byte) int` |
+| `func strings.IndexAny(s, chars string) int` | `func bytes.IndexAny(s []byte, chars string) int` |
+| `func strings.IndexByte(s string, c byte) int` | `func bytes.IndexByte(b []byte, c byte) int` |
+| `func strings.IndexFunc(s string, f func(rune) bool) int` | `func bytes.IndexFunc(s []byte, f func(r rune) bool) int` |
+| `func strings.IndexRune(s string, r rune) int` | `func bytes.IndexRune(s []byte, r rune) int` |
+| `func strings.LastIndex(s, sep string) int` | `func bytes.LastIndex(s, sep []byte) int` |
+| `func strings.LastIndexAny(s, chars string) int` | `func bytes.LastIndexAny(s []byte, chars string) int` |
+| `func strings.LastIndexByte(s string, c byte) int` | `func bytes.LastIndexByte(s []byte, c byte) int` |
+| `func strings.LastIndexFunc(s string, f func(rune) bool) int` | `func bytes.LastIndexFunc(s []byte, f func(r rune) bool) int` |
+| `func bytes.NewBufferString(s string) *bytes.Buffer` | `func bytes.NewBuffer(buf []byte) *bytes.Buffer` |
+| `func (h *hash/maphash.Hash) WriteString(s string) (int, error)` | `func (h *hash/maphash.Hash) Write(b []byte) (int, error)` |
+| `func (rw *net/http/httptest.ResponseRecorder) WriteString(str string) (int, error)` | `func (rw *net/http/httptest.ResponseRecorder) Write(buf []byte) (int, error)` |
+| `func (f *os.File) WriteString(s string) (n int, err error)` | `func (f *os.File) Write(b []byte) (n int, err error)` |
+| `func regexp.MatchString(pattern string, s string) (bool, error)` | `func regexp.Match(pattern string, b []byte) (bool, error)` |
+| `func (re *regexp.Regexp) FindAllStringIndex(s string, n int) [][]int` | `func (re *regexp.Regexp) FindAllIndex(b []byte, n int) [][]int` |
+| `func (re *regexp.Regexp) FindAllStringSubmatch(s string, n int) [][]string` | `func (re *regexp.Regexp) FindAllSubmatch(b []byte, n int) [][][]byte` |
+| `func (re *regexp.Regexp) FindStringIndex(s string) (loc []int)` | `func (re *regexp.Regexp) FindIndex(b []byte) (loc []int)` |
+| `func (re *regexp.Regexp) FindStringSubmatchIndex(s string) []int` | `func (re *regexp.Regexp) FindSubmatchIndex(b []byte) []int` |
+| `func (re *regexp.Regexp) MatchString(s string) bool` | `func (re *regexp.Regexp) Match(b []byte) bool` |
+| `func (b *strings.Builder) WriteString(s string) (int, error)` | `func (b *strings.Builder) Write(p []byte) (int, error)`<br>`func (b *strings.Builder) WriteRune(r rune) (int, error)` |
+| `func utf8.ValidString(s string) bool` | `func utf8.Valid(p []byte) bool` |
+| `func utf8.FullRuneInString(s string) bool` | `func utf8.FullRune(p []byte) bool` |
+| `func utf8.RuneCountInString(s string) (n int)` | `func utf8.RuneCount(p []byte) int` |
+| `func utf8.DecodeLastRuneInString(s string) (rune, int)` | `func utf8.DecodeLastRune(p []byte) (rune, int)` |
+| `func utf8.DecodeRuneInString(s string) (rune, int)` | `func utf8.DecodeRune(p []byte) (rune, int)` |
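The table above pairs each string-accepting API with its `[]byte` (or `rune`) mirror. As a rough illustration of the pattern the `mirror` linter targets (the function names below are made up for the example), converting a value only to reach the other variant costs needless allocations:

```go
package example

import (
	"bytes"
	"strings"
)

// hasBefore converts s and substr to []byte only to call the bytes variant,
// paying for two allocations on every call.
func hasBefore(s, substr string) bool {
	return bytes.Contains([]byte(s), []byte(substr))
}

// hasAfter uses the string-accepting mirror directly and avoids both conversions.
func hasAfter(s, substr string) bool {
	return strings.Contains(s, substr)
}
```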
diff --git a/tools/vendor/github.com/butuzov/mirror/Makefile b/tools/vendor/github.com/butuzov/mirror/Makefile new file mode 100644 index 0000000000..b4b952b012 --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/Makefile @@ -0,0 +1,58 @@ +# --- Required ---------------------------------------------------------------- +export PATH := $(PWD)/bin:$(PATH) # ./bin to $PATH +export SHELL := bash # Default Shell + +GOPKGS := $(shell go list ./... | grep -vE "(cmd|sandbox|testdata)" | tr -s '\n' ',' | sed 's/.\{1\}$$//' ) + + +build: + @ go build -trimpath -ldflags="-w -s" \ + -o bin/mirror ./cmd/mirror/ + +build-race: + @ go build -race -trimpath -ldflags="-w -s" \ + -o bin/mirror ./cmd/mirror/ + +tests: + go test -v -count=1 -race \ + -failfast \ + -parallel=2 \ + -timeout=1m \ + -covermode=atomic \ + -coverpkg=$(GOPKGS) -coverprofile=coverage.cov ./... + +tests-summary: + go test -v -count=1 -race \ + -failfast \ + -parallel=2 \ + -timeout=1m \ + -covermode=atomic \ + -coverpkg=$(GOPKGS) -coverprofile=coverage.cov --json ./... | tparse -all + +test-generate: + go run ./cmd/internal/generate-tests/ "$(PWD)/testdata" + +lints: + golangci-lint run --no-config ./... -D deadcode --skip-dirs "^(cmd|sandbox|testdata)" + + +cover: + go tool cover -html=coverage.cov + +install: + go install -trimpath -v -ldflags="-w -s" \ + ./cmd/mirror + +funcs: + echo "" > "out/results.txt" + go list std | grep -v "vendor" | grep -v "internal" | \ + xargs -I {} sh -c 'go doc -all {} > out/$(basename {}).txt' + +bin/goreleaser: + @curl -Ls https://github.com/goreleaser/goreleaser/releases/download/v1.17.2/goreleaser_Darwin_all.tar.gz | tar -zOxf - goreleaser > ./bin/goreleaser + chmod 0755 ./bin/goreleaser + +test-release: bin/goreleaser + goreleaser release --help + goreleaser release -f .goreleaser.yaml \ + --skip-validate --skip-publish --clean diff --git a/tools/vendor/github.com/butuzov/mirror/Taskfile.yml b/tools/vendor/github.com/butuzov/mirror/Taskfile.yml new file mode 100644 index 0000000000..26c9ba2571 --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/Taskfile.yml @@ -0,0 +1,28 @@ +version: '3' + +tasks: + default: + sources: + - "./**/*.go" + method: timestamp + cmds: + - clear + - make build + - make build-race + - task: lints + # - make test-generate + - task: tests + - cmd: go run ./cmd/mirror/ --with-tests --with-debug ./sandbox + ignore_error: true + + testcase: go test -v -failfast -count=1 -run "TestAll/{{ .Case }}" ./... 
+ + tests: + cmds: + - cmd: make tests + ignore_error: true + + lints: + cmds: + - cmd: make lints + ignore_error: true diff --git a/tools/vendor/github.com/butuzov/mirror/analyzer.go b/tools/vendor/github.com/butuzov/mirror/analyzer.go new file mode 100644 index 0000000000..13ded46c6d --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/analyzer.go @@ -0,0 +1,144 @@ +package mirror + +import ( + "flag" + "go/ast" + "strings" + + "github.com/butuzov/mirror/internal/checker" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +func NewAnalyzer() *analysis.Analyzer { + flags := flags() + + return &analysis.Analyzer{ + Name: "mirror", + Doc: "reports wrong mirror patterns of bytes/strings usage", + Run: run, + Requires: []*analysis.Analyzer{ + inspect.Analyzer, + }, + Flags: flags, + } +} + +func run(pass *analysis.Pass) (interface{}, error) { + withTests := pass.Analyzer.Flags.Lookup("with-tests").Value.String() == "true" + // --- Reporting violations via issues --------------------------------------- + for _, violation := range Run(pass, withTests) { + pass.Report(violation.Diagnostic(pass.Fset)) + } + + return nil, nil +} + +func Run(pass *analysis.Pass, withTests bool) []*checker.Violation { + violations := []*checker.Violation{} + // --- Setup ----------------------------------------------------------------- + + check := checker.New( + BytesFunctions, BytesBufferMethods, + RegexpFunctions, RegexpRegexpMethods, + StringFunctions, StringsBuilderMethods, + BufioMethods, HTTPTestMethods, + OsFileMethods, MaphashMethods, + UTF8Functions, + ) + + check.Type = checker.WrapType(pass.TypesInfo) + check.Print = checker.WrapPrint(pass.Fset) + + ins, _ := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + imports := checker.Load(pass.Fset, ins) + + // --- Preorder Checker ------------------------------------------------------ + ins.Preorder([]ast.Node{(*ast.CallExpr)(nil)}, func(n ast.Node) { + callExpr := n.(*ast.CallExpr) + fileName := pass.Fset.Position(callExpr.Pos()).Filename + + if !withTests && strings.HasSuffix(fileName, "_test.go") { + return + } + + // ------------------------------------------------------------------------- + switch expr := callExpr.Fun.(type) { + // NOTE(butuzov): Regular calls (`*ast.SelectorExpr`) like strings.HasPrefix + // or re.Match are handled by this check + case *ast.SelectorExpr: + + x, ok := expr.X.(*ast.Ident) + if !ok { + return + } + + // TODO(butuzov): Add check for the ast.ParenExpr in e.Fun so we can + // target the constructions like this (and other calls) + // ----------------------------------------------------------------------- + // Example: + // (&maphash.Hash{}).Write([]byte("foobar")) + // ----------------------------------------------------------------------- + + // Case 1: Is this is a function call? + pkgName, name := x.Name, expr.Sel.Name + if pkg, ok := imports.Lookup(fileName, pkgName); ok { + if v := check.Match(pkg, name); v != nil { + if args, found := check.Handle(v, callExpr); found { + violations = append(violations, v.With(check.Print(expr.X), callExpr, args)) + } + return + } + } + + // Case 2: Is this is a method call? 
+ tv := pass.TypesInfo.Types[expr.X] + if !tv.IsValue() || tv.Type == nil { + return + } + + pkgStruct, name := cleanAsterisk(tv.Type.String()), expr.Sel.Name + for _, v := range check.Matches(pkgStruct, name) { + if v == nil { + continue + } + + if args, found := check.Handle(v, callExpr); found { + violations = append(violations, v.With(check.Print(expr.X), callExpr, args)) + return + } + } + + case *ast.Ident: + // NOTE(butuzov): Special case of "." imported packages, only functions. + + if pkg, ok := imports.Lookup(fileName, "."); ok { + if v := check.Match(pkg, expr.Name); v != nil { + if args, found := check.Handle(v, callExpr); found { + violations = append(violations, v.With(nil, callExpr, args)) + } + return + } + } + } + }) + + return violations +} + +func flags() flag.FlagSet { + set := flag.NewFlagSet("", flag.PanicOnError) + set.Bool("with-tests", false, "do not skip tests in reports") + set.Bool("with-debug", false, "debug linter run (development only)") + return *set +} + +func cleanAsterisk(s string) string { + if strings.HasPrefix(s, "*") { + return s[1:] + } + + return s +} diff --git a/tools/vendor/github.com/butuzov/mirror/checkers_bufio.go b/tools/vendor/github.com/butuzov/mirror/checkers_bufio.go new file mode 100644 index 0000000000..292ed269aa --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/checkers_bufio.go @@ -0,0 +1,56 @@ +package mirror + +import "github.com/butuzov/mirror/internal/checker" + +var BufioMethods = []checker.Violation{ + { // (*bufio.Writer).Write + Targets: checker.Bytes, + Type: checker.Method, + Package: "bufio", + Struct: "Writer", + Caller: "Write", + Args: []int{0}, + AltCaller: "WriteString", + + Generate: &checker.Generate{ + PreCondition: `b := bufio.Writer{}`, + Pattern: `Write($0)`, + Returns: 2, + }, + }, + { // (*bufio.Writer).WriteString + Type: checker.Method, + Targets: checker.Strings, + Package: "bufio", + Struct: "Writer", + Caller: "WriteString", + Args: []int{0}, + AltCaller: "Write", + + Generate: &checker.Generate{ + PreCondition: `b := bufio.Writer{}`, + Pattern: `WriteString($0)`, + Returns: 2, + }, + }, + { // (*bufio.Writer).WriteString -> (*bufio.Writer).WriteRune + Targets: checker.Strings, + Type: checker.Method, + Package: "bufio", + Struct: "Writer", + Caller: "WriteString", + Args: []int{0}, + ArgsType: checker.Rune, + AltCaller: "WriteRune", + }, + // { // (*bufio.Writer).WriteString -> (*bufio.Writer).WriteByte + // Targets: checker.Strings, + // Type: checker.Method, + // Package: "strings", + // Struct: "Builder", + // Caller: "WriteString", + // Args: []int{0}, + // ArgsType: checker.Byte, + // AltCaller: "WriteByte", // byte + // }, +} diff --git a/tools/vendor/github.com/butuzov/mirror/checkers_bytes.go b/tools/vendor/github.com/butuzov/mirror/checkers_bytes.go new file mode 100644 index 0000000000..c490a3784e --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/checkers_bytes.go @@ -0,0 +1,326 @@ +package mirror + +import "github.com/butuzov/mirror/internal/checker" + +var ( + BytesFunctions = []checker.Violation{ + { // bytes.NewBuffer + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "NewBuffer", + Args: []int{0}, + AltCaller: "NewBufferString", + + Generate: &checker.Generate{ + Pattern: `NewBuffer($0)`, + Returns: 1, + }, + }, + { // bytes.NewBufferString + Targets: checker.Strings, + Type: checker.Function, + Package: "bytes", + Caller: "NewBufferString", + Args: []int{0}, + AltCaller: "NewBuffer", + + Generate: &checker.Generate{ + Pattern: 
`NewBufferString($0)`, + Returns: 1, + }, + }, + { // bytes.Compare: + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "Compare", + Args: []int{0, 1}, + AltPackage: "strings", + AltCaller: "Compare", + + Generate: &checker.Generate{ + Pattern: `Compare($0, $1)`, + Returns: 1, + }, + }, + { // bytes.Contains: + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "Contains", + Args: []int{0, 1}, + AltPackage: "strings", + AltCaller: "Contains", + + Generate: &checker.Generate{ + Pattern: `Contains($0, $1)`, + Returns: 1, + }, + }, + { // bytes.ContainsAny + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "ContainsAny", + Args: []int{0}, + AltPackage: "strings", + AltCaller: "ContainsAny", + + Generate: &checker.Generate{ + Pattern: `ContainsAny($0, "f")`, + Returns: 1, + }, + }, + { // bytes.ContainsRune + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "ContainsRune", + Args: []int{0}, + AltPackage: "strings", + AltCaller: "ContainsRune", + + Generate: &checker.Generate{ + Pattern: `ContainsRune($0, 'ф')`, + Returns: 1, + }, + }, + { // bytes.Count + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "Count", + Args: []int{0, 1}, + AltPackage: "strings", + AltCaller: "Count", + + Generate: &checker.Generate{ + Pattern: `Count($0, $1)`, + Returns: 1, + }, + }, + { // bytes.EqualFold + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "EqualFold", + Args: []int{0, 1}, + AltPackage: "strings", + AltCaller: "EqualFold", + + Generate: &checker.Generate{ + Pattern: `EqualFold($0, $1)`, + Returns: 1, + }, + }, + + { // bytes.HasPrefix + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "HasPrefix", + Args: []int{0, 1}, + AltPackage: "strings", + AltCaller: "HasPrefix", + + Generate: &checker.Generate{ + Pattern: `HasPrefix($0, $1)`, + Returns: 1, + }, + }, + { // bytes.HasSuffix + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "HasSuffix", + Args: []int{0, 1}, + AltPackage: "strings", + AltCaller: "HasSuffix", + + Generate: &checker.Generate{ + Pattern: `HasSuffix($0, $1)`, + Returns: 1, + }, + }, + { // bytes.Index + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "Index", + Args: []int{0, 1}, + AltPackage: "strings", + AltCaller: "Index", + + Generate: &checker.Generate{ + Pattern: `Index($0, $1)`, + Returns: 1, + }, + }, + { // bytes.IndexAny + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "IndexAny", + Args: []int{0}, + AltPackage: "strings", + AltCaller: "IndexAny", + + Generate: &checker.Generate{ + Pattern: `IndexAny($0, "f")`, + Returns: 1, + }, + }, + { // bytes.IndexByte + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "IndexByte", + Args: []int{0}, + AltPackage: "strings", + AltCaller: "IndexByte", + + Generate: &checker.Generate{ + Pattern: `IndexByte($0, 'f')`, + Returns: 1, + }, + }, + { // bytes.IndexFunc + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "IndexFunc", + Args: []int{0}, + AltPackage: "strings", + AltCaller: "IndexFunc", + + Generate: &checker.Generate{ + Pattern: `IndexFunc($0, func(rune) bool {return true })`, + Returns: 1, + }, + }, + { // bytes.IndexRune + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "IndexRune", + Args: []int{0}, + AltPackage: "strings", + 
AltCaller: "IndexRune", + + Generate: &checker.Generate{ + Pattern: `IndexRune($0, rune('ф'))`, + Returns: 1, + }, + }, + { // bytes.LastIndex + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "LastIndex", + Args: []int{0, 1}, + AltPackage: "strings", + AltCaller: "LastIndex", + + Generate: &checker.Generate{ + Pattern: `LastIndex($0, $1)`, + Returns: 1, + }, + }, + { // bytes.LastIndexAny + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "LastIndexAny", + Args: []int{0}, + AltPackage: "strings", + AltCaller: "LastIndexAny", + + Generate: &checker.Generate{ + Pattern: `LastIndexAny($0, "ф")`, + Returns: 1, + }, + }, + { // bytes.LastIndexByte + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "LastIndexByte", + Args: []int{0}, + AltPackage: "strings", + AltCaller: "LastIndexByte", + + Generate: &checker.Generate{ + Pattern: `LastIndexByte($0, 'f')`, + Returns: 1, + }, + }, + { // bytes.LastIndexFunc + Targets: checker.Bytes, + Type: checker.Function, + Package: "bytes", + Caller: "LastIndexFunc", + Args: []int{0}, + AltPackage: "strings", + AltCaller: "LastIndexFunc", + + Generate: &checker.Generate{ + Pattern: `LastIndexFunc($0, func(rune) bool {return true })`, + Returns: 1, + }, + }, + } + + BytesBufferMethods = []checker.Violation{ + { // (*bytes.Buffer).Write + Targets: checker.Bytes, + Type: checker.Method, + Package: "bytes", + Struct: "Buffer", + Caller: "Write", + Args: []int{0}, + AltCaller: "WriteString", + + Generate: &checker.Generate{ + PreCondition: `bb := bytes.Buffer{}`, + Pattern: `Write($0)`, + Returns: 2, + }, + }, + { // (*bytes.Buffer).WriteString + Targets: checker.Strings, + Type: checker.Method, + Package: "bytes", + Struct: "Buffer", + Caller: "WriteString", + Args: []int{0}, + AltCaller: "Write", + + Generate: &checker.Generate{ + PreCondition: `bb := bytes.Buffer{}`, + Pattern: `WriteString($0)`, + Returns: 2, + }, + }, + { // (*bytes.Buffer).WriteString -> (*bytes.Buffer).WriteRune + Targets: checker.Strings, + Type: checker.Method, + Package: "bytes", + Struct: "Buffer", + Caller: "WriteString", + Args: []int{0}, + ArgsType: checker.Rune, + AltCaller: "WriteRune", + }, + // { // (*bytes.Buffer).WriteString -> (*bytes.Buffer).WriteByte + // Targets: checker.Strings, + // Type: checker.Method, + // Package: "bytes", + // Struct: "Buffer", + // Caller: "WriteString", + // Args: []int{0}, + // ArgsType: checker.Byte, + // AltCaller: "WriteByte", + // }, + } +) diff --git a/tools/vendor/github.com/butuzov/mirror/checkers_httptest.go b/tools/vendor/github.com/butuzov/mirror/checkers_httptest.go new file mode 100644 index 0000000000..ae67509300 --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/checkers_httptest.go @@ -0,0 +1,36 @@ +package mirror + +import "github.com/butuzov/mirror/internal/checker" + +var HTTPTestMethods = []checker.Violation{ + { // (*net/http/httptest.ResponseRecorder).Write + Targets: checker.Bytes, + Type: checker.Method, + Package: "net/http/httptest", + Struct: "ResponseRecorder", + Caller: "Write", + Args: []int{0}, + AltCaller: "WriteString", + + Generate: &checker.Generate{ + PreCondition: `h := httptest.ResponseRecorder{}`, + Pattern: `Write($0)`, + Returns: 2, + }, + }, + { // (*net/http/httptest.ResponseRecorder).WriteString + Targets: checker.Strings, + Type: checker.Method, + Package: "net/http/httptest", + Struct: "ResponseRecorder", + Caller: "WriteString", + Args: []int{0}, + AltCaller: "Write", + + Generate: &checker.Generate{ + 
PreCondition: `h := httptest.ResponseRecorder{}`, + Pattern: `WriteString($0)`, + Returns: 2, + }, + }, +} diff --git a/tools/vendor/github.com/butuzov/mirror/checkers_maphash.go b/tools/vendor/github.com/butuzov/mirror/checkers_maphash.go new file mode 100644 index 0000000000..4d184d2a95 --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/checkers_maphash.go @@ -0,0 +1,36 @@ +package mirror + +import "github.com/butuzov/mirror/internal/checker" + +var MaphashMethods = []checker.Violation{ + { // (*hash/maphash).Write + Targets: checker.Bytes, + Type: checker.Method, + Package: "hash/maphash", + Struct: "Hash", + Caller: "Write", + Args: []int{0}, + AltCaller: "WriteString", + + Generate: &checker.Generate{ + PreCondition: `h := maphash.Hash{}`, + Pattern: `Write($0)`, + Returns: 2, + }, + }, + { // (*hash/maphash).WriteString + Targets: checker.Strings, + Type: checker.Method, + Package: "hash/maphash", + Struct: "Hash", + Caller: "WriteString", + Args: []int{0}, + AltCaller: "Write", + + Generate: &checker.Generate{ + PreCondition: `h := maphash.Hash{}`, + Pattern: `WriteString($0)`, + Returns: 2, + }, + }, +} diff --git a/tools/vendor/github.com/butuzov/mirror/checkers_os.go b/tools/vendor/github.com/butuzov/mirror/checkers_os.go new file mode 100644 index 0000000000..09f5a18e58 --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/checkers_os.go @@ -0,0 +1,36 @@ +package mirror + +import "github.com/butuzov/mirror/internal/checker" + +var OsFileMethods = []checker.Violation{ + { // (*os.File).Write + Targets: checker.Bytes, + Type: checker.Method, + Package: "os", + Struct: "File", + Caller: "Write", + Args: []int{0}, + AltCaller: "WriteString", + + Generate: &checker.Generate{ + PreCondition: `f := &os.File{}`, + Pattern: `Write($0)`, + Returns: 2, + }, + }, + { // (*os.File).WriteString + Targets: checker.Strings, + Type: checker.Method, + Package: "os", + Struct: "File", + Caller: "WriteString", + Args: []int{0}, + AltCaller: "Write", + + Generate: &checker.Generate{ + PreCondition: `f := &os.File{}`, + Pattern: `WriteString($0)`, + Returns: 2, + }, + }, +} diff --git a/tools/vendor/github.com/butuzov/mirror/checkers_regexp.go b/tools/vendor/github.com/butuzov/mirror/checkers_regexp.go new file mode 100644 index 0000000000..17175e0286 --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/checkers_regexp.go @@ -0,0 +1,187 @@ +package mirror + +import "github.com/butuzov/mirror/internal/checker" + +var ( + RegexpFunctions = []checker.Violation{ + { // regexp.Match + Targets: checker.Bytes, + Type: checker.Function, + Package: "regexp", + Caller: "Match", + Args: []int{1}, + AltCaller: "MatchString", + + Generate: &checker.Generate{ + Pattern: `Match("foo", $0)`, + Returns: 2, + }, + }, + { // regexp.MatchString + Targets: checker.Strings, + Type: checker.Function, + Package: "regexp", + Caller: "MatchString", + Args: []int{1}, + AltCaller: "Match", + + Generate: &checker.Generate{ + Pattern: `MatchString("foo", $0)`, + Returns: 2, + }, + }, + } + + RegexpRegexpMethods = []checker.Violation{ + { // (*regexp.Regexp).Match + Targets: checker.Bytes, + Type: checker.Method, + Package: "regexp", + Struct: "Regexp", + Caller: "Match", + Args: []int{0}, + AltCaller: "MatchString", + + Generate: &checker.Generate{ + PreCondition: `re := regexp.MustCompile(".*")`, + Pattern: `Match($0)`, + Returns: 1, + }, + }, + { // (*regexp.Regexp).MatchString + Targets: checker.Strings, + Type: checker.Method, + Package: "regexp", + Struct: "Regexp", + Caller: "MatchString", + Args: 
[]int{0}, + AltCaller: "Match", + + Generate: &checker.Generate{ + PreCondition: `re := regexp.MustCompile(".*")`, + Pattern: `MatchString($0)`, + Returns: 1, + }, + }, + { // (*regexp.Regexp).FindAllIndex + Targets: checker.Bytes, + Type: checker.Method, + Package: "regexp", + Struct: "Regexp", + Caller: "FindAllIndex", + Args: []int{0}, + AltCaller: "FindAllStringIndex", + + Generate: &checker.Generate{ + PreCondition: `re := regexp.MustCompile(".*")`, + Pattern: `FindAllIndex($0, 1)`, + Returns: 1, + }, + }, + { // (*regexp.Regexp).FindAllStringIndex + Targets: checker.Strings, + Type: checker.Method, + Package: "regexp", + Struct: "Regexp", + Caller: "FindAllStringIndex", + Args: []int{0}, + AltCaller: "FindAllIndex", + + Generate: &checker.Generate{ + PreCondition: `re := regexp.MustCompile(".*")`, + Pattern: `FindAllStringIndex($0, 1)`, + Returns: 1, + }, + }, + { // (*regexp.Regexp).FindAllSubmatchIndex + Targets: checker.Bytes, + Type: checker.Method, + Package: "regexp", + Struct: "Regexp", + Caller: "FindAllSubmatchIndex", + Args: []int{0}, + AltCaller: "FindAllStringSubmatchIndex", + + Generate: &checker.Generate{ + PreCondition: `re := regexp.MustCompile(".*")`, + Pattern: `FindAllSubmatchIndex($0, 1)`, + Returns: 1, + }, + }, + { // (*regexp.Regexp).FindAllStringSubmatchIndex + Targets: checker.Strings, + Type: checker.Method, + Package: "regexp", + Struct: "Regexp", + Caller: "FindAllStringSubmatchIndex", + Args: []int{0}, + AltCaller: "FindAllSubmatchIndex", + + Generate: &checker.Generate{ + PreCondition: `re := regexp.MustCompile(".*")`, + Pattern: `FindAllStringSubmatchIndex($0, 1)`, + Returns: 1, + }, + }, + { // (*regexp.Regexp).FindIndex + Targets: checker.Bytes, + Type: checker.Method, + Package: "regexp", + Struct: "Regexp", + Caller: "FindIndex", + Args: []int{0}, + AltCaller: "FindStringIndex", + + Generate: &checker.Generate{ + PreCondition: `re := regexp.MustCompile(".*")`, + Pattern: `FindIndex($0)`, + Returns: 1, + }, + }, + { // (*regexp.Regexp).FindStringIndex + Targets: checker.Strings, + Type: checker.Method, + Package: "regexp", + Struct: "Regexp", + Caller: "FindStringIndex", + Args: []int{0}, + AltCaller: "FindIndex", + + Generate: &checker.Generate{ + PreCondition: `re := regexp.MustCompile(".*")`, + Pattern: `FindStringIndex($0)`, + Returns: 1, + }, + }, + { // (*regexp.Regexp).FindSubmatchIndex + Targets: checker.Bytes, + Type: checker.Method, + Package: "regexp", + Struct: "Regexp", + Caller: "FindSubmatchIndex", + Args: []int{0}, + AltCaller: "FindStringSubmatchIndex", + + Generate: &checker.Generate{ + PreCondition: `re := regexp.MustCompile(".*")`, + Pattern: `FindSubmatchIndex($0)`, + Returns: 1, + }, + }, + { // (*regexp.Regexp).FindStringSubmatchIndex + Targets: checker.Strings, + Type: checker.Method, + Package: "regexp", + Struct: "Regexp", + Caller: "FindStringSubmatchIndex", + Args: []int{0}, + AltCaller: "FindSubmatchIndex", + + Generate: &checker.Generate{ + PreCondition: `re := regexp.MustCompile(".*")`, + Pattern: `FindStringSubmatchIndex($0)`, + Returns: 1, + }, + }, + } +) diff --git a/tools/vendor/github.com/butuzov/mirror/checkers_strings.go b/tools/vendor/github.com/butuzov/mirror/checkers_strings.go new file mode 100644 index 0000000000..ead7e9cc7e --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/checkers_strings.go @@ -0,0 +1,299 @@ +package mirror + +import "github.com/butuzov/mirror/internal/checker" + +var ( + StringFunctions = []checker.Violation{ + { // strings.Compare + Targets: checker.Strings, + Type: 
checker.Function, + Package: "strings", + Caller: "Compare", + Args: []int{0, 1}, + AltPackage: "bytes", + AltCaller: "Compare", + + Generate: &checker.Generate{ + Pattern: `Compare($0,$1)`, + Returns: 1, + }, + }, + { // strings.Contains + Targets: checker.Strings, + Type: checker.Function, + Package: "strings", + Caller: "Contains", + Args: []int{0, 1}, + AltPackage: "bytes", + AltCaller: "Contains", + + Generate: &checker.Generate{ + Pattern: `Contains($0,$1)`, + Returns: 1, + }, + }, + { // strings.ContainsAny + Targets: checker.Strings, + Type: checker.Function, + Package: "strings", + Caller: "ContainsAny", + Args: []int{0}, + AltPackage: "bytes", + AltCaller: "ContainsAny", + + Generate: &checker.Generate{ + Pattern: `ContainsAny($0,"foobar")`, + Returns: 1, + }, + }, + { // strings.ContainsRune + Targets: checker.Strings, + Type: checker.Function, + Package: "strings", + Caller: "ContainsRune", + Args: []int{0}, + AltPackage: "bytes", + AltCaller: "ContainsRune", + + Generate: &checker.Generate{ + Pattern: `ContainsRune($0,'ф')`, + Returns: 1, + }, + }, + { // strings.Count + Targets: checker.Strings, + Type: checker.Function, + Package: "strings", + Caller: "Count", + Args: []int{0, 1}, + AltPackage: "bytes", + AltCaller: "Count", + + Generate: &checker.Generate{ + Pattern: `Count($0, $1)`, + Returns: 1, + }, + }, + { // strings.EqualFold + Targets: checker.Strings, + Type: checker.Function, + Package: "strings", + Caller: "EqualFold", + Args: []int{0, 1}, + AltPackage: "bytes", + AltCaller: "EqualFold", + + Generate: &checker.Generate{ + Pattern: `EqualFold($0,$1)`, + Returns: 1, + }, + }, + { // strings.HasPrefix + Targets: checker.Strings, + Type: checker.Function, + Package: "strings", + Caller: "HasPrefix", + Args: []int{0, 1}, + AltPackage: "bytes", + AltCaller: "HasPrefix", + + Generate: &checker.Generate{ + Pattern: `HasPrefix($0,$1)`, + Returns: 1, + }, + }, + { // strings.HasSuffix + Targets: checker.Strings, + Type: checker.Function, + Package: "strings", + Caller: "HasSuffix", + Args: []int{0, 1}, + AltPackage: "bytes", + AltCaller: "HasSuffix", + + Generate: &checker.Generate{ + Pattern: `HasSuffix($0,$1)`, + Returns: 1, + }, + }, + { // strings.Index + Targets: checker.Strings, + Type: checker.Function, + Package: "strings", + Caller: "Index", + Args: []int{0, 1}, + AltPackage: "bytes", + AltCaller: "Index", + + Generate: &checker.Generate{ + Pattern: `Index($0,$1)`, + Returns: 1, + }, + }, + { // strings.IndexAny + Targets: checker.Strings, + Type: checker.Function, + Package: "strings", + Caller: "IndexAny", + Args: []int{0}, + AltPackage: "bytes", + AltCaller: "IndexAny", + + Generate: &checker.Generate{ + Pattern: `IndexAny($0, "f")`, + Returns: 1, + }, + }, + { // strings.IndexByte + Targets: checker.Strings, + Type: checker.Function, + Package: "strings", + Caller: "IndexByte", + Args: []int{0}, + AltPackage: "bytes", + AltCaller: "IndexByte", + + Generate: &checker.Generate{ + Pattern: `IndexByte($0, byte('f'))`, + Returns: 1, + }, + }, + { // strings.IndexFunc + Targets: checker.Strings, + Type: checker.Function, + Package: "strings", + Caller: "IndexFunc", + Args: []int{0}, + AltPackage: "bytes", + AltCaller: "IndexFunc", + + Generate: &checker.Generate{ + Pattern: `IndexFunc($0,func(r rune) bool { return true })`, + Returns: 1, + }, + }, + { // strings.IndexRune + Targets: checker.Strings, + Type: checker.Function, + Package: "strings", + Caller: "IndexRune", + Args: []int{0}, + AltPackage: "bytes", + AltCaller: "IndexRune", + + Generate: &checker.Generate{ 
+ Pattern: `IndexRune($0, rune('ф'))`, + Returns: 1, + }, + }, + { // strings.LastIndex + Targets: checker.Strings, + Type: checker.Function, + Package: "strings", + Caller: "LastIndex", + Args: []int{0, 1}, + AltPackage: "bytes", + AltCaller: "LastIndex", + + Generate: &checker.Generate{ + Pattern: `LastIndex($0,$1)`, + Returns: 1, + }, + }, + { // strings.LastIndexAny + Targets: checker.Strings, + Type: checker.Function, + Package: "strings", + Caller: "LastIndexAny", + Args: []int{0}, + AltPackage: "bytes", + AltCaller: "LastIndexAny", + + Generate: &checker.Generate{ + Pattern: `LastIndexAny($0,"f")`, + Returns: 1, + }, + }, + { // strings.LastIndexByte + Targets: checker.Strings, + Type: checker.Function, + Package: "strings", + Caller: "LastIndexByte", + Args: []int{0}, + AltPackage: "bytes", + AltCaller: "LastIndexByte", + + Generate: &checker.Generate{ + Pattern: `LastIndexByte($0, byte('f'))`, + Returns: 1, + }, + }, + { // strings.LastIndexFunc + Targets: checker.Strings, + Type: checker.Function, + Package: "strings", + Caller: "LastIndexFunc", + Args: []int{0}, + AltPackage: "bytes", + AltCaller: "LastIndexFunc", + + Generate: &checker.Generate{ + Pattern: `LastIndexFunc($0, func(r rune) bool { return true })`, + Returns: 1, + }, + }, + } + + StringsBuilderMethods = []checker.Violation{ + { // (*strings.Builder).Write + Targets: checker.Bytes, + Type: checker.Method, + Package: "strings", + Struct: "Builder", + Caller: "Write", + Args: []int{0}, + AltCaller: "WriteString", + + Generate: &checker.Generate{ + PreCondition: `builder := strings.Builder{}`, + Pattern: `Write($0)`, + Returns: 2, + }, + }, + { // (*strings.Builder).WriteString + Targets: checker.Strings, + Type: checker.Method, + Package: "strings", + Struct: "Builder", + Caller: "WriteString", + Args: []int{0}, + AltCaller: "Write", + + Generate: &checker.Generate{ + PreCondition: `builder := strings.Builder{}`, + Pattern: `WriteString($0)`, + Returns: 2, + }, + }, + { // (*strings.Builder).WriteString -> (*strings.Builder).WriteRune + Targets: checker.Strings, + Type: checker.Method, + Package: "strings", + Struct: "Builder", + Caller: "WriteString", + Args: []int{0}, + ArgsType: checker.Rune, + AltCaller: "WriteRune", + }, + // { // (*strings.Builder).WriteString -> (*strings.Builder).WriteByte + // Targets: checker.Strings, + // Type: checker.Method, + // Package: "strings", + // Struct: "Builder", + // Caller: "WriteString", + // Args: []int{0}, + // ArgsType: checker.Byte, + // AltCaller: "WriteByte", // byte + // }, + } +) diff --git a/tools/vendor/github.com/butuzov/mirror/checkers_utf8.go b/tools/vendor/github.com/butuzov/mirror/checkers_utf8.go new file mode 100644 index 0000000000..e7c4d5ba4d --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/checkers_utf8.go @@ -0,0 +1,138 @@ +package mirror + +import "github.com/butuzov/mirror/internal/checker" + +var UTF8Functions = []checker.Violation{ + { // utf8.Valid + Type: checker.Function, + Targets: checker.Bytes, + Package: "unicode/utf8", + Caller: "Valid", + Args: []int{0}, + AltCaller: "ValidString", + + Generate: &checker.Generate{ + Pattern: `Valid($0)`, + Returns: 1, + }, + }, + { // utf8.ValidString + Targets: checker.Strings, + Type: checker.Function, + Package: "unicode/utf8", + Caller: "ValidString", + Args: []int{0}, + AltCaller: "Valid", + + Generate: &checker.Generate{ + Pattern: `ValidString($0)`, + Returns: 1, + }, + }, + { // utf8.FullRune + Targets: checker.Bytes, + Type: checker.Function, + Package: "unicode/utf8", + Caller: "FullRune", 
+ Args: []int{0}, + AltCaller: "FullRuneInString", + + Generate: &checker.Generate{ + Pattern: `FullRune($0)`, + Returns: 1, + }, + }, + { // utf8.FullRuneInString + Targets: checker.Strings, + Type: checker.Function, + Package: "unicode/utf8", + Caller: "FullRuneInString", + Args: []int{0}, + AltCaller: "FullRune", + + Generate: &checker.Generate{ + Pattern: `FullRuneInString($0)`, + Returns: 1, + }, + }, + + { // bytes.RuneCount + Targets: checker.Bytes, + Type: checker.Function, + Package: "unicode/utf8", + Caller: "RuneCount", + Args: []int{0}, + AltCaller: "RuneCountInString", + + Generate: &checker.Generate{ + Pattern: `RuneCount($0)`, + Returns: 1, + }, + }, + { // bytes.RuneCountInString + Targets: checker.Strings, + Type: checker.Function, + Package: "unicode/utf8", + Caller: "RuneCountInString", + Args: []int{0}, + AltCaller: "RuneCount", + + Generate: &checker.Generate{ + Pattern: `RuneCountInString($0)`, + Returns: 1, + }, + }, + + { // bytes.DecodeLastRune + Targets: checker.Bytes, + Type: checker.Function, + Package: "unicode/utf8", + Caller: "DecodeLastRune", + Args: []int{0}, + AltCaller: "DecodeLastRuneInString", + + Generate: &checker.Generate{ + Pattern: `DecodeLastRune($0)`, + Returns: 2, + }, + }, + { // utf8.DecodeLastRuneInString + Targets: checker.Strings, + Type: checker.Function, + Package: "unicode/utf8", + Caller: "DecodeLastRuneInString", + Args: []int{0}, + AltCaller: "DecodeLastRune", + + Generate: &checker.Generate{ + Pattern: `DecodeLastRuneInString($0)`, + Returns: 2, + }, + }, + { // utf8.DecodeRune + Targets: checker.Bytes, + Type: checker.Function, + Package: "unicode/utf8", + Caller: "DecodeRune", + Args: []int{0}, + AltCaller: "DecodeRuneInString", + + Generate: &checker.Generate{ + Pattern: `DecodeRune($0)`, + Returns: 2, + }, + }, + { // utf8.DecodeRuneInString + Targets: checker.Strings, + Type: checker.Function, + Package: "unicode/utf8", + Args: []int{0}, + Caller: "DecodeRuneInString", + AltCaller: "DecodeRune", + + Generate: &checker.Generate{ + Pattern: `DecodeRuneInString($0)`, + Returns: 2, + }, + }, +} diff --git a/tools/vendor/github.com/butuzov/mirror/internal/checker/checker.go b/tools/vendor/github.com/butuzov/mirror/internal/checker/checker.go new file mode 100644 index 0000000000..c1a9416314 --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/internal/checker/checker.go @@ -0,0 +1,147 @@ +package checker + +import ( + "bytes" + "go/ast" + "go/printer" + "go/token" + "go/types" + "strings" +) + +// Checker will perform standart check on package and its methods. +type Checker struct { + Violations []Violation // List of available violations + Packages map[string][]int // Storing indexes of Violations per pkg/kg.Struct + Type func(ast.Expr) string // Type Checker closure. + Print func(ast.Node) []byte // String representation of the expresion. +} + +func New(violations ...[]Violation) Checker { + c := Checker{ + Packages: make(map[string][]int), + } + + for i := range violations { + c.register(violations[i]) + } + + return c +} + +// Match will check the available violations we got from checks against +// the `name` caller from package `pkgName`. +func (c *Checker) Match(pkgName, name string) *Violation { + for _, v := range c.Matches(pkgName, name) { + return v + } + + return nil +} + +// Matches do same thing as Match but return a slice of violations +// as only things that require this are bytes.Buffer and strings.Builder +// it only be used in matching methods in analyzer. 
+func (c *Checker) Matches(pkgName, name string) []*Violation { + var matches []*Violation + checkStruct := strings.Contains(pkgName, ".") + + for _, idx := range c.Packages[pkgName] { + if c.Violations[idx].Caller == name { + if checkStruct == (len(c.Violations[idx].Struct) == 0) { + continue + } + + // copy violation + v := c.Violations[idx] + matches = append(matches, &v) + } + } + + return matches +} + +func (c *Checker) Handle(v *Violation, ce *ast.CallExpr) (map[int]ast.Expr, bool) { + m := map[int]ast.Expr{} + + // We going to check each of elements we mark for checking, in order to find, + // a call that violates our rules. + for _, i := range v.Args { + if i >= len(ce.Args) { + continue + } + + call, ok := ce.Args[i].(*ast.CallExpr) + if !ok { + continue + } + + // is it convertsion call + if !c.callConverts(call) { + continue + } + + // somehow no argument of call + if len(call.Args) == 0 { + continue + } + + // wrong argument type + if normalType(c.Type(call.Args[0])) != v.getArgType() { + continue + } + + m[i] = call.Args[0] + } + + return m, len(m) == len(v.Args) +} + +func (c *Checker) callConverts(ce *ast.CallExpr) bool { + switch ce.Fun.(type) { + case *ast.ArrayType, *ast.Ident: + res := c.Type(ce.Fun) + return res == "[]byte" || res == "string" + } + + return false +} + +// register violations. +func (c *Checker) register(violations []Violation) { + for _, v := range violations { // nolint: gocritic + c.Violations = append(c.Violations, v) + if len(v.Struct) > 0 { + c.registerIdxPer(v.Package + "." + v.Struct) + } + c.registerIdxPer(v.Package) + } +} + +// registerIdxPer will register last added violation element +// under pkg string. +func (c *Checker) registerIdxPer(pkg string) { + c.Packages[pkg] = append(c.Packages[pkg], len(c.Violations)-1) +} + +func WrapType(info *types.Info) func(node ast.Expr) string { + return func(node ast.Expr) string { + if t := info.TypeOf(node); t != nil { + return t.String() + } + + if tv, ok := info.Types[node]; ok { + return tv.Type.Underlying().String() + } + + return "" + } +} + +func WrapPrint(fSet *token.FileSet) func(ast.Node) []byte { + return func(node ast.Node) []byte { + var buf bytes.Buffer + printer.Fprint(&buf, fSet, node) + return buf.Bytes() + } +} diff --git a/tools/vendor/github.com/butuzov/mirror/internal/checker/imports.go b/tools/vendor/github.com/butuzov/mirror/internal/checker/imports.go new file mode 100644 index 0000000000..4015de5970 --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/internal/checker/imports.go @@ -0,0 +1,89 @@ +package checker + +import ( + "go/ast" + "go/token" + "path" + "sort" + "strings" + "sync" + + "golang.org/x/tools/go/ast/inspector" +) + +// Imports represents an imported package in a nice for lookup way... +// +// examples: +// import . "bytes" -> checker.Import{Pkg:"bytes", Val:"."} +// import name "bytes" -> checker.Import{Pkg:"bytes", Val:"name"} +type Import struct { + Pkg string // package name + Name string // alias +} + +type Imports map[string][]Import + +// we are going to have Imports entries to be sorted, but if it has less then +// `sortLowerLimit` elements we are skipping this step as its not going to +// be worth of effort. 
+const sortLowerLimit int = 13 + +// Package level lock is to prevent import map corruption +var lock sync.RWMutex + +func Load(fs *token.FileSet, ins *inspector.Inspector) Imports { + lock.Lock() + defer lock.Unlock() + + imports := make(Imports) + + // Populate imports map + ins.Preorder([]ast.Node{(*ast.ImportSpec)(nil)}, func(node ast.Node) { + importSpec, _ := node.(*ast.ImportSpec) + + var ( + key = fs.Position(node.Pos()).Filename + pkg = strings.Trim(importSpec.Path.Value, `"`) + name = importSpec.Name.String() + ) + + if importSpec.Name == nil { + name = path.Base(pkg) // note: we need only basename of the package + } + + imports[key] = append(imports[key], Import{ + Pkg: pkg, + Name: name, + }) + }) + + imports.sort() + + return imports +} + +// sort will sort imports for each of the checking files. +func (i *Imports) sort() { + for k := range *i { + if len((*i)[k]) < sortLowerLimit { + continue + } + + k := k + sort.Slice((*i)[k], func(left, right int) bool { + return (*i)[k][left].Name < (*i)[k][right].Name + }) + } +} + +func (i Imports) Lookup(file, pkg string) (string, bool) { + if _, ok := i[file]; ok { + for idx := range i[file] { + if i[file][idx].Name == pkg { + return i[file][idx].Pkg, true + } + } + } + + return "", false +} diff --git a/tools/vendor/github.com/butuzov/mirror/internal/checker/violation.go b/tools/vendor/github.com/butuzov/mirror/internal/checker/violation.go new file mode 100644 index 0000000000..375d3c8e65 --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/internal/checker/violation.go @@ -0,0 +1,208 @@ +package checker + +import ( + "bytes" + "fmt" + "go/ast" + "go/printer" + "go/token" + "path" + "strings" + + "golang.org/x/tools/go/analysis" +) + +// Type of violation: can be method or function +type ViolationType int + +const ( + Function ViolationType = iota + 1 + Method +) + +const ( + Strings string = "string" + Bytes string = "[]byte" + Byte string = "byte" + Rune string = "rune" + UntypedRune string = "untyped rune" +) + +// Violation describs what message we going to give to a particular code violation +type Violation struct { + Type ViolationType // + Args []int // Indexes of the arguments needs to be checked + ArgsType string + + Targets string + Package string + AltPackage string + Struct string + Caller string + AltCaller string + + // --- tests generation information + Generate *Generate + + // --- suggestions related info about violation of rules. + base []byte // receiver of the method or pkg name + callExpr *ast.CallExpr // actual call expression, to extract arguments + arguments map[int]ast.Expr // fixed arguments +} + +// Tests (generation) related struct. 
+type Generate struct { + PreCondition string // Precondition we want to be generated + Pattern string // Generate pattern (for the `want` message) + Returns int // Expected to return n elements +} + +func (v *Violation) With(base []byte, e *ast.CallExpr, args map[int]ast.Expr) *Violation { + v.base = base + v.callExpr = e + v.arguments = args + + return v +} + +func (v *Violation) getArgType() string { + if v.ArgsType != "" { + return v.ArgsType + } + + if v.Targets == Strings { + return Bytes + } + + return Strings +} + +func (v *Violation) Message() string { + if v.Type == Method { + return fmt.Sprintf("avoid allocations with (*%s.%s).%s", + path.Base(v.Package), v.Struct, v.AltCaller) + } + + pkg := v.Package + if len(v.AltPackage) > 0 { + pkg = v.AltPackage + } + + return fmt.Sprintf("avoid allocations with %s.%s", path.Base(pkg), v.AltCaller) +} + +func (v *Violation) suggest(fSet *token.FileSet) []byte { + var buf bytes.Buffer + + if len(v.base) > 0 { + buf.Write(v.base) + buf.WriteString(".") + } + + buf.WriteString(v.AltCaller) + buf.WriteByte('(') + for idx := range v.callExpr.Args { + if arg, ok := v.arguments[idx]; ok { + printer.Fprint(&buf, fSet, arg) + } else { + printer.Fprint(&buf, fSet, v.callExpr.Args[idx]) + } + + if idx != len(v.callExpr.Args)-1 { + buf.WriteString(", ") + } + } + buf.WriteByte(')') + + return buf.Bytes() +} + +func (v *Violation) Diagnostic(fSet *token.FileSet) analysis.Diagnostic { + diagnostic := analysis.Diagnostic{ + Pos: v.callExpr.Pos(), + End: v.callExpr.Pos(), + Message: v.Message(), + } + + var buf bytes.Buffer + printer.Fprint(&buf, fSet, v.callExpr) + noNl := bytes.IndexByte(buf.Bytes(), '\n') < 0 + + // Struct-based fix. + if v.Type == Method && noNl { + diagnostic.SuggestedFixes = []analysis.SuggestedFix{{ + Message: "Fix Issue With", + TextEdits: []analysis.TextEdit{{ + Pos: v.callExpr.Pos(), End: v.callExpr.End(), NewText: v.suggest(fSet), + }}, + }} + } + + if v.AltPackage == "" { + v.AltPackage = v.Package + } + + // Hooray! we don't need to change the package and redo imports. + if v.Type == Function && v.AltPackage == v.Package && noNl { + diagnostic.SuggestedFixes = []analysis.SuggestedFix{{ + Message: "Fix Issue With", + TextEdits: []analysis.TextEdit{{ + Pos: v.callExpr.Pos(), End: v.callExpr.End(), NewText: v.suggest(fSet), + }}, + }} + } + + // do not change + + return diagnostic +} + +type GolangIssue struct { + Start token.Position + End token.Position + Message string + InlineFix string + Original string +} + +// Issue is intended to be used only with golangci-lint, but you can use it +// alongside Diagnostic if you wish. +func (v *Violation) Issue(fSet *token.FileSet) GolangIssue { + issue := GolangIssue{ + Start: fSet.Position(v.callExpr.Pos()), + End: fSet.Position(v.callExpr.End()), + Message: v.Message(), + } + + // original expression (useful for debugging & required for replace) + var buf bytes.Buffer + printer.Fprint(&buf, fSet, v.callExpr) + issue.Original = buf.String() + + noNl := strings.IndexByte(issue.Original, '\n') < 0 + + if v.Type == Method && noNl { + fix := v.suggest(fSet) + issue.InlineFix = string(fix) + } + + if v.AltPackage == "" { + v.AltPackage = v.Package + } + + // Hooray! we don't need to change the package and redo imports. + if v.Type == Function && v.AltPackage == v.Package && noNl { + fix := v.suggest(fSet) + issue.InlineFix = string(fix) + } + + return issue +} + +// normalType normalizes input types (mostly typed and untyped runes).
+func normalType(s string) string { + if s == UntypedRune { + return Rune + } + return s +} diff --git a/tools/vendor/github.com/butuzov/mirror/readme.md b/tools/vendor/github.com/butuzov/mirror/readme.md new file mode 100644 index 0000000000..fcfd1de11a --- /dev/null +++ b/tools/vendor/github.com/butuzov/mirror/readme.md @@ -0,0 +1,60 @@ +# `mirror` [![Code Coverage](https://coveralls.io/repos/github/butuzov/mirror/badge.svg?branch=main)](https://coveralls.io/github/butuzov/mirror?branch=main) [![build status](https://github.com/butuzov/mirror/actions/workflows/main.yaml/badge.svg?branch=main)]() + +`mirror` suggests the use of alternative functions/methods in order to gain performance boosts by avoiding unnecessary `[]byte/string` conversion calls. See [MIRROR_FUNCS.md](MIRROR_FUNCS.md) for the list of mirror functions you can use in Go's stdlib. + +## 🇺🇦 PLEASE HELP ME 🇺🇦 +Fundraiser for a scout drone **DJI Matrice 30T** for my squad (Ukrainian Forces). See more details at [butuzov/README.md](https://github.com/butuzov/butuzov/) + +## Linter Use Cases + +### `github.com/argoproj/argo-cd` + +```go +// Before +func IsValidHostname(hostname string, fqdn bool) bool { + if !fqdn { + return validHostNameRegexp.Match([]byte(hostname)) || validIPv6Regexp.Match([]byte(hostname)) + } else { + return validFQDNRegexp.Match([]byte(hostname)) + } +} + +// After: with the alternative method (and the now-redundant `else` dropped) +func IsValidHostname(hostname string, fqdn bool) bool { + if !fqdn { + return validHostNameRegexp.MatchString(hostname) || validIPv6Regexp.MatchString(hostname) + } + + return validFQDNRegexp.MatchString(hostname) +} +``` + +## Install + +``` +go install github.com/butuzov/mirror/cmd/mirror@latest +``` + +## How to use + +Run `mirror` with [`go vet`](https://pkg.go.dev/cmd/vet): + +``` +go vet -vettool=$(which mirror) ./... +# github.com/jcmoraisjr/haproxy-ingress/pkg/common/net/ssl +pkg/common/net/ssl/ssl.go:64:11: avoid allocations with (*os.File).WriteString +pkg/common/net/ssl/ssl.go:161:12: avoid allocations with (*os.File).WriteString +pkg/common/net/ssl/ssl.go:166:3: avoid allocations with (*os.File).WriteString +``` + +It can also be called directly: +``` +mirror ./... +# https://github.com/cosmtrek/air +/air/runner/util.go:149:6: avoid allocations with (*regexp.Regexp).MatchString +/air/runner/util.go:173:14: avoid allocations with (*os.File).WriteString +``` + +## Command line + +- You can add checks for `_test.go` files with the CLI option `--with-tests` diff --git a/tools/vendor/github.com/daixiang0/gci/pkg/gci/gci.go b/tools/vendor/github.com/daixiang0/gci/pkg/gci/gci.go index 7418db209b..0fd7a0ec66 100644 --- a/tools/vendor/github.com/daixiang0/gci/pkg/gci/gci.go +++ b/tools/vendor/github.com/daixiang0/gci/pkg/gci/gci.go @@ -146,7 +146,7 @@ func LoadFormatGoFile(file io.FileObj, cfg config.Config) (src, dist []byte, err // order by section list for _, s := range cfg.Sections { if len(result[s.String()]) > 0 { - if body != nil && len(body) > 0 { + if len(body) > 0 { body = append(body, utils.Linebreak) } for _, d := range result[s.String()] { @@ -161,7 +161,6 @@ func LoadFormatGoFile(file io.FileObj, cfg config.Config) (src, dist []byte, err tail := make([]byte, len(src)-tailStart) copy(tail, src[tailStart:]) - head = append(head, utils.Linebreak) // ensure C if cStart != 0 { head = append(head, src[cStart:cEnd]...)
diff --git a/tools/vendor/github.com/daixiang0/gci/pkg/section/prefix.go b/tools/vendor/github.com/daixiang0/gci/pkg/section/prefix.go index 92ef963777..a274347cdd 100644 --- a/tools/vendor/github.com/daixiang0/gci/pkg/section/prefix.go +++ b/tools/vendor/github.com/daixiang0/gci/pkg/section/prefix.go @@ -12,12 +12,19 @@ type Custom struct { Prefix string } +// CustomSeparator allows you to group multiple custom prefix together in the same section +// gci diff -s standard -s default -s prefix(github.com/company,gitlab.com/company,companysuffix) +const CustomSeparator = "," + const CustomType = "custom" func (c Custom) MatchSpecificity(spec *parse.GciImports) specificity.MatchSpecificity { - if strings.HasPrefix(spec.Path, c.Prefix) { - return specificity.Match{Length: len(c.Prefix)} + for _, prefix := range strings.Split(c.Prefix, CustomSeparator) { + if strings.HasPrefix(spec.Path, prefix) { + return specificity.Match{Length: len(prefix)} + } } + return specificity.MisMatch{} } diff --git a/tools/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go b/tools/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go index 62decfe1c7..f0e904d4d1 100644 --- a/tools/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go +++ b/tools/vendor/github.com/daixiang0/gci/pkg/section/standard_list.go @@ -1,6 +1,6 @@ package section -// Code generated based on go1.19.2. DO NOT EDIT. +// Code generated based on go1.20.1. DO NOT EDIT. var standardPackages = map[string]struct{}{ "archive/tar": {}, @@ -21,6 +21,7 @@ var standardPackages = map[string]struct{}{ "crypto/cipher": {}, "crypto/des": {}, "crypto/dsa": {}, + "crypto/ecdh": {}, "crypto/ecdsa": {}, "crypto/ed25519": {}, "crypto/elliptic": {}, @@ -132,6 +133,7 @@ var standardPackages = map[string]struct{}{ "regexp/syntax": {}, "runtime": {}, "runtime/cgo": {}, + "runtime/coverage": {}, "runtime/debug": {}, "runtime/metrics": {}, "runtime/pprof": {}, diff --git a/tools/vendor/github.com/fatih/color/color_windows.go b/tools/vendor/github.com/fatih/color/color_windows.go new file mode 100644 index 0000000000..be01c558e5 --- /dev/null +++ b/tools/vendor/github.com/fatih/color/color_windows.go @@ -0,0 +1,19 @@ +package color + +import ( + "os" + + "golang.org/x/sys/windows" +) + +func init() { + // Opt-in for ansi color support for current process. 
+ // https://learn.microsoft.com/en-us/windows/console/console-virtual-terminal-sequences#output-sequences + var outMode uint32 + out := windows.Handle(os.Stdout.Fd()) + if err := windows.GetConsoleMode(out, &outMode); err != nil { + return + } + outMode |= windows.ENABLE_PROCESSED_OUTPUT | windows.ENABLE_VIRTUAL_TERMINAL_PROCESSING + _ = windows.SetConsoleMode(out, outMode) +} diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/appendAssign_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/appendAssign_checker.go index a9324dd02e..2a67dccec8 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/appendAssign_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/appendAssign_checker.go @@ -6,7 +6,8 @@ import ( "go/types" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/astp" "golang.org/x/tools/go/ast/astutil" @@ -15,7 +16,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "appendAssign" - info.Tags = []string{"diagnostic"} + info.Tags = []string{linter.DiagnosticTag} info.Summary = "Detects suspicious append result assignments" info.Before = ` p.positives = append(p.negatives, x) diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/appendCombine_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/appendCombine_checker.go index 3c81449e9c..81a7aa30b3 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/appendCombine_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/appendCombine_checker.go @@ -5,7 +5,8 @@ import ( "go/token" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astequal" ) @@ -13,7 +14,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "appendCombine" - info.Tags = []string{"performance"} + info.Tags = []string{linter.PerformanceTag} info.Summary = "Detects `append` chains to the same slice that can be done in a single `append` call" info.Before = ` xs = append(xs, 1) diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go index d3d139a085..9be45ccc78 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go @@ -7,7 +7,8 @@ import ( "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/astequal" @@ -18,7 +19,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "badCond" - info.Tags = []string{"diagnostic"} + info.Tags = []string{linter.DiagnosticTag} info.Summary = "Detects suspicious condition expressions" info.Before = ` for i := 0; i > n; i++ { diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go index 8a359000a2..6c6845053d 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go +++ 
b/tools/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go @@ -9,14 +9,15 @@ import ( "unicode/utf8" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/quasilyte/regex/syntax" ) func init() { var info linter.CheckerInfo info.Name = "badRegexp" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} info.Summary = "Detects suspicious regexp patterns" info.Before = "regexp.MustCompile(`(?:^aa|bb|cc)foo[aba]`)" info.After = "regexp.MustCompile(`^(?:aa|bb|cc)foo[ab]`)" diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go index b4000a8ce7..a1c69cb7ab 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go @@ -5,22 +5,22 @@ import ( "go/token" "strconv" + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/astp" "github.com/go-toolsmith/typep" "golang.org/x/tools/go/ast/astutil" - - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/framework/linter" ) func init() { var info linter.CheckerInfo info.Name = "boolExprSimplify" - info.Tags = []string{"style", "experimental"} + info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} info.Summary = "Detects bool expressions that can be simplified" info.Before = ` a := !(elapsed >= expectElapsedMin) diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/builtinShadowDecl_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/builtinShadowDecl_checker.go index 94d51a996a..d8be10ce9c 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/builtinShadowDecl_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/builtinShadowDecl_checker.go @@ -4,13 +4,13 @@ import ( "go/ast" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "builtinShadowDecl" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} info.Summary = "Detects top-level declarations that shadow the predeclared identifiers" info.Before = `type int struct {}` info.After = `type myInt struct {}` diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/builtinShadow_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/builtinShadow_checker.go index 1e1661deb2..0b4b7bafb8 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/builtinShadow_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/builtinShadow_checker.go @@ -4,13 +4,13 @@ import ( "go/ast" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "builtinShadow" - info.Tags = 
[]string{"style", "opinionated"} + info.Tags = []string{linter.StyleTag, linter.OpinionatedTag} info.Summary = "Detects when predeclared identifiers are shadowed in assignments" info.Before = `len := 10` info.After = `length := 10` diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/captLocal_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/captLocal_checker.go index d9b4b7e75d..b31a6f7fd3 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/captLocal_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/captLocal_checker.go @@ -4,13 +4,13 @@ import ( "go/ast" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "captLocal" - info.Tags = []string{"style"} + info.Tags = []string{linter.StyleTag} info.Params = linter.CheckerParams{ "paramsOnly": { Value: true, diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go index 047ea4fee0..306756834b 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go @@ -5,13 +5,13 @@ import ( "go/types" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "caseOrder" - info.Tags = []string{"diagnostic"} + info.Tags = []string{linter.DiagnosticTag} info.Summary = "Detects erroneous case order inside switch statements" info.Before = ` switch x.(type) { diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/checkers.go b/tools/vendor/github.com/go-critic/go-critic/checkers/checkers.go index 0c2ebc00ca..5797dafdf4 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/checkers.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/checkers.go @@ -4,7 +4,7 @@ package checkers import ( "os" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) var collection = &linter.CheckerCollection{ diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/codegenComment_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/codegenComment_checker.go index 52a72d28c8..6eeb0bb5db 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/codegenComment_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/codegenComment_checker.go @@ -6,13 +6,13 @@ import ( "strings" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "codegenComment" - info.Tags = []string{"diagnostic"} + info.Tags = []string{linter.DiagnosticTag} info.Summary = "Detects malformed 'code generated' file comments" info.Before = `// This file was automatically generated by foogen` info.After = `// Code generated by foogen. 
DO NOT EDIT.` diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go index f330b723a0..5a9564a0f9 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go @@ -8,13 +8,13 @@ import ( "unicode/utf8" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "commentFormatting" - info.Tags = []string{"style"} + info.Tags = []string{linter.StyleTag} info.Summary = "Detects comments with non-idiomatic formatting" info.Before = `//This is a comment` info.After = `// This is a comment` diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go index 554e0621fd..402ba33066 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go @@ -8,14 +8,15 @@ import ( "strings" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/strparse" ) func init() { var info linter.CheckerInfo info.Name = "commentedOutCode" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} info.Summary = "Detects commented-out code inside function bodies" info.Before = ` // fmt.Println("Debugging hard") diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/commentedOutImport_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/commentedOutImport_checker.go index 3c086569b1..e0855da812 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/commentedOutImport_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/commentedOutImport_checker.go @@ -6,13 +6,13 @@ import ( "regexp" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "commentedOutImport" - info.Tags = []string{"style", "experimental"} + info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} info.Summary = "Detects commented-out imports" info.Before = ` import ( diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/defaultCaseOrder_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/defaultCaseOrder_checker.go index e06944d624..cdebaef987 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/defaultCaseOrder_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/defaultCaseOrder_checker.go @@ -4,13 +4,13 @@ import ( "go/ast" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "defaultCaseOrder" - info.Tags = []string{"style"} + info.Tags = []string{linter.StyleTag} info.Summary = "Detects when default case in switch isn't on 1st or last position" info.Before = ` switch { diff --git 
a/tools/vendor/github.com/go-critic/go-critic/checkers/deferInLoop_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/deferInLoop_checker.go index da90fe67a2..37c80c864a 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/deferInLoop_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/deferInLoop_checker.go @@ -4,13 +4,13 @@ import ( "go/ast" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "deferInLoop" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} info.Summary = "Detects loops inside functions that use defer" info.Before = ` for _, filename := range []string{"foo", "bar"} { diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go index 0eb5072375..c61d773da6 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go @@ -5,13 +5,13 @@ import ( "strings" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "deprecatedComment" - info.Tags = []string{"diagnostic"} + info.Tags = []string{linter.DiagnosticTag} info.Summary = "Detects malformed 'deprecated' doc-comments" info.Before = ` // deprecated, use FuncNew instead diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/docStub_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/docStub_checker.go index d8aaaf7437..aa23de42c4 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/docStub_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/docStub_checker.go @@ -7,13 +7,13 @@ import ( "strings" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "docStub" - info.Tags = []string{"style", "experimental"} + info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} info.Summary = "Detects comments that silence go lint complaints about doc-comment" info.Before = ` // Foo ... 
diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go index ad16e3b3f2..c4f0183878 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go @@ -4,14 +4,15 @@ import ( "go/ast" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astequal" ) func init() { var info linter.CheckerInfo info.Name = "dupBranchBody" - info.Tags = []string{"diagnostic"} + info.Tags = []string{linter.DiagnosticTag} info.Summary = "Detects duplicated branch bodies inside conditional statements" info.Before = ` if cond { diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/dupCase_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/dupCase_checker.go index a565007601..381bad68b8 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/dupCase_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/dupCase_checker.go @@ -5,13 +5,13 @@ import ( "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "dupCase" - info.Tags = []string{"diagnostic"} + info.Tags = []string{linter.DiagnosticTag} info.Summary = "Detects duplicated case clauses inside switch or select statements" info.Before = ` switch x { diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go index 54658eb9f4..19079871f7 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go @@ -4,13 +4,13 @@ import ( "fmt" "go/ast" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "dupImport" - info.Tags = []string{"style", "experimental"} + info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} info.Summary = "Detects multiple imports of the same package under different aliases" info.Before = ` import ( diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/dupSubExpr_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/dupSubExpr_checker.go index 00f8fd0eb5..9ab75945cd 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/dupSubExpr_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/dupSubExpr_checker.go @@ -6,7 +6,8 @@ import ( "go/types" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/typep" ) @@ -14,7 +15,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "dupSubExpr" - info.Tags = []string{"diagnostic"} + info.Tags = []string{linter.DiagnosticTag} info.Summary = "Detects suspicious duplicated sub-expressions" info.Before = ` sort.Slice(xs, func(i, j int) bool { diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/elseif_checker.go 
b/tools/vendor/github.com/go-critic/go-critic/checkers/elseif_checker.go index dcc964846f..857d09fa0e 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/elseif_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/elseif_checker.go @@ -4,14 +4,15 @@ import ( "go/ast" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astp" ) func init() { var info linter.CheckerInfo info.Name = "elseif" - info.Tags = []string{"style"} + info.Tags = []string{linter.StyleTag} info.Params = linter.CheckerParams{ "skipBalanced": { Value: true, diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/embedded_rules.go b/tools/vendor/github.com/go-critic/go-critic/checkers/embedded_rules.go index b17178e09a..ad507425e6 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/embedded_rules.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/embedded_rules.go @@ -7,10 +7,10 @@ import ( "go/token" "os" - "github.com/quasilyte/go-ruleguard/ruleguard" - "github.com/go-critic/go-critic/checkers/rulesdata" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + + "github.com/quasilyte/go-ruleguard/ruleguard" ) //go:generate go run ./rules/precompile.go -rules ./rules/rules.go -o ./rulesdata/rulesdata.go @@ -101,6 +101,7 @@ func (c *embeddedRuleguardChecker) WalkFile(f *ast.File) { Pkg: c.ctx.Pkg, Types: c.ctx.TypesInfo, Sizes: c.ctx.SizesInfo, + GoVersion: ruleguard.GoVersion(c.ctx.GoVersion), Fset: c.ctx.FileSet, TruncateLen: 100, }) diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/emptyFallthrough_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/emptyFallthrough_checker.go index ebb8dad455..a008c61870 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/emptyFallthrough_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/emptyFallthrough_checker.go @@ -5,13 +5,13 @@ import ( "go/token" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "emptyFallthrough" - info.Tags = []string{"style", "experimental"} + info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} info.Summary = "Detects fallthrough that can be avoided by using multi case values" info.Before = `switch kind { case reflect.Int: diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/evalOrder_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/evalOrder_checker.go index 6ba07fe869..f8c5ae5423 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/evalOrder_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/evalOrder_checker.go @@ -7,7 +7,8 @@ import ( "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/typep" @@ -16,7 +17,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "evalOrder" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} info.Summary = "Detects unwanted dependencies on the evaluation order" info.Before = 
`return x, f(&x)` info.After = ` diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go index 63e0049f2c..9889f48e8e 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go @@ -4,7 +4,8 @@ import ( "go/ast" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astfmt" "github.com/go-toolsmith/astp" "golang.org/x/tools/go/ast/astutil" @@ -13,7 +14,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "exitAfterDefer" - info.Tags = []string{"diagnostic"} + info.Tags = []string{linter.DiagnosticTag} info.Summary = "Detects calls to exit/fatal inside functions that use defer" info.Before = ` defer os.Remove(filename) diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/filepathJoin_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/filepathJoin_checker.go index 698f5366d6..17ab0ea83f 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/filepathJoin_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/filepathJoin_checker.go @@ -5,14 +5,15 @@ import ( "strings" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" ) func init() { var info linter.CheckerInfo info.Name = "filepathJoin" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} info.Summary = "Detects problems in filepath.Join() function calls" info.Before = `filepath.Join("dir/", filename)` info.After = `filepath.Join("dir", filename)` diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go index 7f6ce3c01f..98b76e2618 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go @@ -7,14 +7,15 @@ import ( "strings" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" ) func init() { var info linter.CheckerInfo info.Name = "flagName" - info.Tags = []string{"diagnostic"} + info.Tags = []string{linter.DiagnosticTag} info.Summary = "Detects suspicious flag names" info.Before = `b := flag.Bool(" foo ", false, "description")` info.After = `b := flag.Bool("foo", false, "description")` diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/hexLiteral_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/hexLiteral_checker.go index ae61a1125e..7301bd325a 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/hexLiteral_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/hexLiteral_checker.go @@ -6,14 +6,15 @@ import ( "strings" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" ) func init() { var info linter.CheckerInfo info.Name = "hexLiteral" - info.Tags = []string{"style", 
"experimental"} + info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} info.Summary = "Detects hex literals that have mixed case letter digits" info.Before = ` x := 0X12 diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go index 742652a12d..3b7f1d12b3 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go @@ -4,13 +4,13 @@ import ( "go/ast" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "hugeParam" - info.Tags = []string{"performance"} + info.Tags = []string{linter.PerformanceTag} info.Params = linter.CheckerParams{ "sizeThreshold": { Value: 80, diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go index c3d127c567..e73c609d5c 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go @@ -4,13 +4,13 @@ import ( "go/ast" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "ifElseChain" - info.Tags = []string{"style"} + info.Tags = []string{linter.StyleTag} info.Params = linter.CheckerParams{ "minThreshold": { Value: 2, diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/importShadow_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/importShadow_checker.go index 5ac711fc1e..b690487b7b 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/importShadow_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/importShadow_checker.go @@ -5,13 +5,13 @@ import ( "go/types" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "importShadow" - info.Tags = []string{"style", "opinionated"} + info.Tags = []string{linter.StyleTag, linter.OpinionatedTag} info.Summary = "Detects when imported package names shadowed in the assignments" info.Before = ` // "path/filepath" is imported. 
diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/initClause_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/initClause_checker.go index a1b6b2a8a8..8612717b27 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/initClause_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/initClause_checker.go @@ -4,14 +4,15 @@ import ( "go/ast" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astp" ) func init() { var info linter.CheckerInfo info.Name = "initClause" - info.Tags = []string{"style", "opinionated", "experimental"} + info.Tags = []string{linter.StyleTag, linter.OpinionatedTag, linter.ExperimentalTag} info.Summary = "Detects non-assignment statements inside if/switch init clause" info.Before = `if sideEffect(); cond { }` diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go b/tools/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go index 5fcce6a2a7..0c9c14955e 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go @@ -18,20 +18,18 @@ type LocalDefVisitor interface { VisitLocalDef(Name, ast.Expr) } -type ( - // NameKind describes what kind of name Name object holds. - NameKind int +// NameKind describes what kind of name Name object holds. +type NameKind int - // Name holds ver/const/param definition symbol info. - Name struct { - ID *ast.Ident - Kind NameKind +// Name holds ver/const/param definition symbol info. +type Name struct { + ID *ast.Ident + Kind NameKind - // Index is NameVar-specific field that is used to - // specify nth tuple element being assigned to the name. - Index int - } -) + // Index is NameVar-specific field that is used to + // specify nth tuple element being assigned to the name. + Index int +} // NOTE: set of name kinds is not stable and may change over time. // diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/visitor.go b/tools/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/visitor.go index e5031a909f..3486a8e622 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/visitor.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/visitor.go @@ -4,67 +4,64 @@ import ( "go/ast" ) -// Visitor interfaces. -type ( - // DocCommentVisitor visits every doc-comment. - // Does not visit doc-comments for function-local definitions (types, etc). - // Also does not visit package doc-comment (file-level doc-comments). - DocCommentVisitor interface { - VisitDocComment(*ast.CommentGroup) - } +// DocCommentVisitor visits every doc-comment. +// Does not visit doc-comments for function-local definitions (types, etc). +// Also does not visit package doc-comment (file-level doc-comments). +type DocCommentVisitor interface { + VisitDocComment(*ast.CommentGroup) +} - // FuncDeclVisitor visits every top-level function declaration. - FuncDeclVisitor interface { - walkerEvents - VisitFuncDecl(*ast.FuncDecl) - } +// FuncDeclVisitor visits every top-level function declaration. +type FuncDeclVisitor interface { + walkerEvents + VisitFuncDecl(*ast.FuncDecl) +} - // ExprVisitor visits every expression inside AST file. 
- ExprVisitor interface { - walkerEvents - VisitExpr(ast.Expr) - } +// ExprVisitor visits every expression inside AST file. +type ExprVisitor interface { + walkerEvents + VisitExpr(ast.Expr) +} - // LocalExprVisitor visits every expression inside function body. - LocalExprVisitor interface { - walkerEvents - VisitLocalExpr(ast.Expr) - } +// LocalExprVisitor visits every expression inside function body. +type LocalExprVisitor interface { + walkerEvents + VisitLocalExpr(ast.Expr) +} - // StmtListVisitor visits every statement list inside function body. - // This includes block statement bodies as well as implicit blocks - // introduced by case clauses and alike. - StmtListVisitor interface { - walkerEvents - VisitStmtList(ast.Node, []ast.Stmt) - } +// StmtListVisitor visits every statement list inside function body. +// This includes block statement bodies as well as implicit blocks +// introduced by case clauses and alike. +type StmtListVisitor interface { + walkerEvents + VisitStmtList(ast.Node, []ast.Stmt) +} - // StmtVisitor visits every statement inside function body. - StmtVisitor interface { - walkerEvents - VisitStmt(ast.Stmt) - } +// StmtVisitor visits every statement inside function body. +type StmtVisitor interface { + walkerEvents + VisitStmt(ast.Stmt) +} - // TypeExprVisitor visits every type describing expression. - // It also traverses struct types and interface types to run - // checker over their fields/method signatures. - TypeExprVisitor interface { - walkerEvents - VisitTypeExpr(ast.Expr) - } +// TypeExprVisitor visits every type describing expression. +// It also traverses struct types and interface types to run +// checker over their fields/method signatures. +type TypeExprVisitor interface { + walkerEvents + VisitTypeExpr(ast.Expr) +} - // LocalCommentVisitor visits every comment inside function body. - LocalCommentVisitor interface { - walkerEvents - VisitLocalComment(*ast.CommentGroup) - } +// LocalCommentVisitor visits every comment inside function body. +type LocalCommentVisitor interface { + walkerEvents + VisitLocalComment(*ast.CommentGroup) +} - // CommentVisitor visits every comment. - CommentVisitor interface { - walkerEvents - VisitComment(*ast.CommentGroup) - } -) +// CommentVisitor visits every comment. +type CommentVisitor interface { + walkerEvents + VisitComment(*ast.CommentGroup) +} // walkerEvents describes common hooks available for most visitor types. type walkerEvents interface { diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walker.go index cd5e1c9793..f838a64c15 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walker.go @@ -3,7 +3,7 @@ package astwalk import ( "go/types" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) // WalkerForFuncDecl returns file walker implementation for FuncDeclVisitor. 
diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go index 64c2821dd1..ebc61c12a4 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go @@ -7,7 +7,8 @@ import ( "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astp" "github.com/go-toolsmith/typep" @@ -16,7 +17,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "mapKey" - info.Tags = []string{"diagnostic"} + info.Tags = []string{linter.DiagnosticTag} info.Summary = "Detects suspicious map literal keys" info.Before = ` _ = map[string]int{ diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/methodExprCall_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/methodExprCall_checker.go index 2553def14f..755d3b4722 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/methodExprCall_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/methodExprCall_checker.go @@ -5,7 +5,8 @@ import ( "go/token" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/typep" @@ -14,7 +15,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "methodExprCall" - info.Tags = []string{"style", "experimental"} + info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} info.Summary = "Detects method expression call that can be replaced with a method call" info.Before = `f := foo{} foo.bar(f)` diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/nestingReduce_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/nestingReduce_checker.go index a68acecca5..dfe73018c8 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/nestingReduce_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/nestingReduce_checker.go @@ -4,13 +4,13 @@ import ( "go/ast" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "nestingReduce" - info.Tags = []string{"style", "opinionated", "experimental"} + info.Tags = []string{linter.StyleTag, linter.OpinionatedTag, linter.ExperimentalTag} info.Params = linter.CheckerParams{ "bodyWidth": { Value: 5, diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go index 04a3474f95..1a1b05e0df 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go @@ -2,19 +2,20 @@ package checkers import ( "go/ast" + "go/types" "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" - "golang.org/x/exp/typeparams" "golang.org/x/tools/go/ast/astutil" ) func init() { 
var info linter.CheckerInfo info.Name = "newDeref" - info.Tags = []string{"style"} + info.Tags = []string{linter.StyleTag} info.Summary = "Detects immediate dereferencing of `new` expressions" info.Before = `x := *new(bool)` info.After = `x := false` @@ -35,7 +36,7 @@ func (c *newDerefChecker) VisitExpr(expr ast.Expr) { if astcast.ToIdent(call.Fun).Name == "new" { typ := c.ctx.TypeOf(call.Args[0]) // allow *new(T) if T is a type parameter, see #1272 for details - if typeparams.IsTypeParam(typ) { + if _, ok := typ.(*types.TypeParam); ok { return } zv := lintutil.ZeroValueOf(astutil.Unparen(call.Args[0]), typ) diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/nilValReturn_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/nilValReturn_checker.go index 0a8e793eea..9a1213f5c2 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/nilValReturn_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/nilValReturn_checker.go @@ -5,7 +5,8 @@ import ( "go/token" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/typep" ) @@ -13,7 +14,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "nilValReturn" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} info.Summary = "Detects return statements those results evaluate to nil" info.Before = ` if err == nil { diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/octalLiteral_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/octalLiteral_checker.go index bed227ac3d..a25fac85cc 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/octalLiteral_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/octalLiteral_checker.go @@ -7,14 +7,15 @@ import ( "unicode" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" ) func init() { var info linter.CheckerInfo info.Name = "octalLiteral" - info.Tags = []string{"style", "experimental", "opinionated"} + info.Tags = []string{linter.StyleTag, linter.ExperimentalTag, linter.OpinionatedTag} info.Summary = "Detects old-style octal literals" info.Before = `foo(02)` info.After = `foo(0o2)` @@ -30,6 +31,9 @@ type octalLiteralChecker struct { } func (c *octalLiteralChecker) VisitExpr(expr ast.Expr) { + if !c.ctx.GoVersion.GreaterOrEqual(linter.GoVersion{Major: 1, Minor: 13}) { + return + } lit := astcast.ToBasicLit(expr) if lit.Kind != token.INT { return diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/paramTypeCombine_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/paramTypeCombine_checker.go index c80e6f8bcd..c777fec9e6 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/paramTypeCombine_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/paramTypeCombine_checker.go @@ -4,7 +4,8 @@ import ( "go/ast" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/astequal" ) @@ -12,7 +13,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "paramTypeCombine" - info.Tags = []string{"style", "opinionated"} + 
info.Tags = []string{linter.StyleTag, linter.OpinionatedTag} info.Summary = "Detects if function parameters could be combined by type and suggest the way to do it" info.Before = `func foo(a, b int, c, d int, e, f int, g int) {}` info.After = `func foo(a, b, c, d, e, f, g int) {}` @@ -46,6 +47,7 @@ func (c *paramTypeCombineChecker) optimizeFuncType(f *ast.FuncType) *ast.FuncTyp return optimizedParamFunc } + func (c *paramTypeCombineChecker) optimizeParams(params *ast.FieldList) *ast.FieldList { // To avoid false positives, skip unnamed param lists. // @@ -71,8 +73,7 @@ func (c *paramTypeCombineChecker) optimizeParams(params *ast.FieldList) *ast.Fie names = make([]*ast.Ident, len(p.Names)) copy(names, p.Names) if astequal.Expr(p.Type, params.List[i].Type) { - list[len(list)-1].Names = - append(list[len(list)-1].Names, names...) + list[len(list)-1].Names = append(list[len(list)-1].Names, names...) } else { list = append(list, &ast.Field{ Names: names, diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/ptrToRefParam_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/ptrToRefParam_checker.go index 88c8f4cb36..172a4acb58 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/ptrToRefParam_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/ptrToRefParam_checker.go @@ -5,13 +5,13 @@ import ( "go/types" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "ptrToRefParam" - info.Tags = []string{"style", "opinionated", "experimental"} + info.Tags = []string{linter.StyleTag, linter.OpinionatedTag, linter.ExperimentalTag} info.Summary = "Detects input and output parameters that have a type of pointer to referential type" info.Before = `func f(m *map[string]int) (*chan *int)` info.After = `func f(m map[string]int) (chan *int)` diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/rangeExprCopy_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/rangeExprCopy_checker.go index 813fff36a4..3f61ee0bda 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/rangeExprCopy_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/rangeExprCopy_checker.go @@ -5,13 +5,13 @@ import ( "go/types" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "rangeExprCopy" - info.Tags = []string{"performance"} + info.Tags = []string{linter.PerformanceTag} info.Params = linter.CheckerParams{ "sizeThreshold": { Value: 512, diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go index 37f469657b..6d15c30cd1 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go @@ -4,13 +4,13 @@ import ( "go/ast" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "rangeValCopy" - info.Tags = []string{"performance"} + info.Tags = []string{linter.PerformanceTag} info.Params = linter.CheckerParams{ "sizeThreshold": { Value: 128, diff --git 
a/tools/vendor/github.com/go-critic/go-critic/checkers/regexpPattern_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/regexpPattern_checker.go index 31dc4aad3e..45aba261ba 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/regexpPattern_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/regexpPattern_checker.go @@ -7,13 +7,13 @@ import ( "strings" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "regexpPattern" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} info.Summary = "Detects suspicious regexp patterns" info.Before = "regexp.MustCompile(`google.com|yandex.ru`)" info.After = "regexp.MustCompile(`google\\.com|yandex\\.ru`)" diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go index 5b15e05ed2..f500f43500 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go @@ -8,16 +8,16 @@ import ( "strings" "unicode/utf8" - "github.com/quasilyte/regex/syntax" - "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + + "github.com/quasilyte/regex/syntax" ) func init() { var info linter.CheckerInfo info.Name = "regexpSimplify" - info.Tags = []string{"style", "experimental", "opinionated"} + info.Tags = []string{linter.StyleTag, linter.ExperimentalTag, linter.OpinionatedTag} info.Summary = "Detects regexp patterns that can be simplified" info.Before = "regexp.MustCompile(`(?:a|b|c) [a-z][a-z]*`)" info.After = "regexp.MustCompile(`[abc] {3}[a-z]+`)" diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go index 000007a314..29723a69a9 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go @@ -12,15 +12,15 @@ import ( "sort" "strings" - "github.com/quasilyte/go-ruleguard/ruleguard" + "github.com/go-critic/go-critic/linter" - "github.com/go-critic/go-critic/framework/linter" + "github.com/quasilyte/go-ruleguard/ruleguard" ) func init() { var info linter.CheckerInfo info.Name = "ruleguard" - info.Tags = []string{"style", "experimental"} + info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} info.Params = linter.CheckerParams{ "rules": { Value: "", @@ -84,7 +84,7 @@ func newErrorHandler(failOnErrorFlag string) (*parseErrorHandler, error) { h := parseErrorHandler{ failureConditions: make(map[string]func(err error) bool), } - var failOnErrorPredicates = map[string]func(error) bool{ + failOnErrorPredicates := map[string]func(error) bool{ "dsl": func(err error) bool { var e *ruleguard.ImportError; return !errors.As(err, &e) }, "import": func(err error) bool { var e *ruleguard.ImportError; return errors.As(err, &e) }, "all": func(err error) bool { return true }, @@ -160,8 +160,8 @@ func newRuleguardChecker(info *linter.CheckerInfo, ctx *linter.CheckerContext) ( } } - if !enabledTags["experimental"] { - disabledTags["experimental"] = true + if 
!enabledTags[linter.ExperimentalTag] { + disabledTags[linter.ExperimentalTag] = true } ruleguardDebug := os.Getenv("GOCRITIC_RULEGUARD_DEBUG") != "" diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go b/tools/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go index f0b147a682..503118c7ec 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go @@ -61,37 +61,20 @@ var PrecompiledRules = &ir.File{ {Line: 17, Value: "fmt.Sprintf(\"%s\", $x)"}, {Line: 17, Value: "fmt.Sprintf(\"%v\", $x)"}, }, - ReportTemplate: "use $x.Error() instead", - SuggestTemplate: "$x.Error()", - WhereExpr: ir.FilterExpr{ - Line: 18, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"x\"].Type.Implements(`error`)", - Value: "x", - Args: []ir.FilterExpr{{Line: 18, Op: ir.FilterStringOp, Src: "`error`", Value: "error"}}, - }, - }, - { - Line: 22, - SyntaxPatterns: []ir.PatternString{ - {Line: 22, Value: "fmt.Sprint($x)"}, - {Line: 22, Value: "fmt.Sprintf(\"%s\", $x)"}, - {Line: 22, Value: "fmt.Sprintf(\"%v\", $x)"}, - }, ReportTemplate: "$x is already string", SuggestTemplate: "$x", WhereExpr: ir.FilterExpr{ - Line: 23, + Line: 18, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`string`)", Value: "x", - Args: []ir.FilterExpr{{Line: 23, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, + Args: []ir.FilterExpr{{Line: 18, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, }, }, }, }, { - Line: 32, + Line: 27, Name: "deferUnlambda", MatcherName: "m", DocTags: []string{"style", "experimental"}, @@ -100,55 +83,55 @@ var PrecompiledRules = &ir.File{ DocAfter: "defer f()", Rules: []ir.Rule{ { - Line: 33, - SyntaxPatterns: []ir.PatternString{{Line: 33, Value: "defer func() { $f($*args) }()"}}, + Line: 28, + SyntaxPatterns: []ir.PatternString{{Line: 28, Value: "defer func() { $f($*args) }()"}}, ReportTemplate: "can rewrite as `defer $f($args)`", WhereExpr: ir.FilterExpr{ - Line: 34, + Line: 29, Op: ir.FilterAndOp, Src: "m[\"f\"].Node.Is(`Ident`) && m[\"f\"].Text != \"panic\" && m[\"f\"].Text != \"recover\" && m[\"args\"].Const", Args: []ir.FilterExpr{ { - Line: 34, + Line: 29, Op: ir.FilterAndOp, Src: "m[\"f\"].Node.Is(`Ident`) && m[\"f\"].Text != \"panic\" && m[\"f\"].Text != \"recover\"", Args: []ir.FilterExpr{ { - Line: 34, + Line: 29, Op: ir.FilterAndOp, Src: "m[\"f\"].Node.Is(`Ident`) && m[\"f\"].Text != \"panic\"", Args: []ir.FilterExpr{ { - Line: 34, + Line: 29, Op: ir.FilterVarNodeIsOp, Src: "m[\"f\"].Node.Is(`Ident`)", Value: "f", - Args: []ir.FilterExpr{{Line: 34, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}}, + Args: []ir.FilterExpr{{Line: 29, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}}, }, { - Line: 34, + Line: 29, Op: ir.FilterNeqOp, Src: "m[\"f\"].Text != \"panic\"", Args: []ir.FilterExpr{ - {Line: 34, Op: ir.FilterVarTextOp, Src: "m[\"f\"].Text", Value: "f"}, - {Line: 34, Op: ir.FilterStringOp, Src: "\"panic\"", Value: "panic"}, + {Line: 29, Op: ir.FilterVarTextOp, Src: "m[\"f\"].Text", Value: "f"}, + {Line: 29, Op: ir.FilterStringOp, Src: "\"panic\"", Value: "panic"}, }, }, }, }, { - Line: 34, + Line: 29, Op: ir.FilterNeqOp, Src: "m[\"f\"].Text != \"recover\"", Args: []ir.FilterExpr{ - {Line: 34, Op: ir.FilterVarTextOp, Src: "m[\"f\"].Text", Value: "f"}, - {Line: 34, Op: ir.FilterStringOp, Src: "\"recover\"", Value: "recover"}, + {Line: 29, Op: ir.FilterVarTextOp, Src: "m[\"f\"].Text", Value: "f"}, + {Line: 29, 
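Illustrative aside, not part of the patch: the deferUnlambda rule shown above rewrites a deferred closure that only forwards constant arguments to a named function. A minimal sketch, with cleanup and run being hypothetical names:

package main

import "fmt"

func cleanup(code int) { fmt.Println("cleanup", code) }

func run() {
	// deferUnlambda would flag this wrapper: the closure merely forwards a
	// constant argument to a plain identifier (and it is not panic/recover).
	defer func() { cleanup(0) }()

	// Suggested form: defer the call directly.
	defer cleanup(0)

	fmt.Println("working")
}

func main() { run() }
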
Op: ir.FilterStringOp, Src: "\"recover\"", Value: "recover"}, }, }, }, }, { - Line: 34, + Line: 29, Op: ir.FilterVarConstOp, Src: "m[\"args\"].Const", Value: "args", @@ -157,28 +140,28 @@ var PrecompiledRules = &ir.File{ }, }, { - Line: 37, - SyntaxPatterns: []ir.PatternString{{Line: 37, Value: "defer func() { $pkg.$f($*args) }()"}}, + Line: 32, + SyntaxPatterns: []ir.PatternString{{Line: 32, Value: "defer func() { $pkg.$f($*args) }()"}}, ReportTemplate: "can rewrite as `defer $pkg.$f($args)`", WhereExpr: ir.FilterExpr{ - Line: 38, + Line: 33, Op: ir.FilterAndOp, Src: "m[\"f\"].Node.Is(`Ident`) && m[\"args\"].Const && m[\"pkg\"].Object.Is(`PkgName`)", Args: []ir.FilterExpr{ { - Line: 38, + Line: 33, Op: ir.FilterAndOp, Src: "m[\"f\"].Node.Is(`Ident`) && m[\"args\"].Const", Args: []ir.FilterExpr{ { - Line: 38, + Line: 33, Op: ir.FilterVarNodeIsOp, Src: "m[\"f\"].Node.Is(`Ident`)", Value: "f", - Args: []ir.FilterExpr{{Line: 38, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}}, + Args: []ir.FilterExpr{{Line: 33, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}}, }, { - Line: 38, + Line: 33, Op: ir.FilterVarConstOp, Src: "m[\"args\"].Const", Value: "args", @@ -186,11 +169,11 @@ var PrecompiledRules = &ir.File{ }, }, { - Line: 38, + Line: 33, Op: ir.FilterVarObjectIsOp, Src: "m[\"pkg\"].Object.Is(`PkgName`)", Value: "pkg", - Args: []ir.FilterExpr{{Line: 38, Op: ir.FilterStringOp, Src: "`PkgName`", Value: "PkgName"}}, + Args: []ir.FilterExpr{{Line: 33, Op: ir.FilterStringOp, Src: "`PkgName`", Value: "PkgName"}}, }, }, }, @@ -198,84 +181,7 @@ var PrecompiledRules = &ir.File{ }, }, { - Line: 46, - Name: "ioutilDeprecated", - MatcherName: "m", - DocTags: []string{"style", "experimental"}, - DocSummary: "Detects deprecated io/ioutil package usages", - DocBefore: "ioutil.ReadAll(r)", - DocAfter: "io.ReadAll(r)", - Rules: []ir.Rule{ - { - Line: 47, - SyntaxPatterns: []ir.PatternString{{Line: 47, Value: "ioutil.ReadAll($_)"}}, - ReportTemplate: "ioutil.ReadAll is deprecated, use io.ReadAll instead", - WhereExpr: ir.FilterExpr{ - Line: 48, - Op: ir.FilterGoVersionGreaterEqThanOp, - Src: "m.GoVersion().GreaterEqThan(\"1.16\")", - Value: "1.16", - }, - }, - { - Line: 51, - SyntaxPatterns: []ir.PatternString{{Line: 51, Value: "ioutil.ReadFile($_)"}}, - ReportTemplate: "ioutil.ReadFile is deprecated, use os.ReadFile instead", - WhereExpr: ir.FilterExpr{ - Line: 52, - Op: ir.FilterGoVersionGreaterEqThanOp, - Src: "m.GoVersion().GreaterEqThan(\"1.16\")", - Value: "1.16", - }, - }, - { - Line: 55, - SyntaxPatterns: []ir.PatternString{{Line: 55, Value: "ioutil.WriteFile($_, $_, $_)"}}, - ReportTemplate: "ioutil.WriteFile is deprecated, use os.WriteFile instead", - WhereExpr: ir.FilterExpr{ - Line: 56, - Op: ir.FilterGoVersionGreaterEqThanOp, - Src: "m.GoVersion().GreaterEqThan(\"1.16\")", - Value: "1.16", - }, - }, - { - Line: 59, - SyntaxPatterns: []ir.PatternString{{Line: 59, Value: "ioutil.ReadDir($_)"}}, - ReportTemplate: "ioutil.ReadDir is deprecated, use os.ReadDir instead", - WhereExpr: ir.FilterExpr{ - Line: 60, - Op: ir.FilterGoVersionGreaterEqThanOp, - Src: "m.GoVersion().GreaterEqThan(\"1.16\")", - Value: "1.16", - }, - }, - { - Line: 63, - SyntaxPatterns: []ir.PatternString{{Line: 63, Value: "ioutil.NopCloser($_)"}}, - ReportTemplate: "ioutil.NopCloser is deprecated, use io.NopCloser instead", - WhereExpr: ir.FilterExpr{ - Line: 64, - Op: ir.FilterGoVersionGreaterEqThanOp, - Src: "m.GoVersion().GreaterEqThan(\"1.16\")", - Value: "1.16", - }, - }, - { - Line: 67, - SyntaxPatterns: 
[]ir.PatternString{{Line: 67, Value: "ioutil.Discard"}}, - ReportTemplate: "ioutil.Discard is deprecated, use io.Discard instead", - WhereExpr: ir.FilterExpr{ - Line: 68, - Op: ir.FilterGoVersionGreaterEqThanOp, - Src: "m.GoVersion().GreaterEqThan(\"1.16\")", - Value: "1.16", - }, - }, - }, - }, - { - Line: 76, + Line: 41, Name: "badLock", MatcherName: "m", DocTags: []string{"diagnostic", "experimental"}, @@ -284,91 +190,91 @@ var PrecompiledRules = &ir.File{ DocAfter: "mu.Lock(); defer mu.Unlock()", Rules: []ir.Rule{ { - Line: 80, - SyntaxPatterns: []ir.PatternString{{Line: 80, Value: "$mu1.Lock(); $mu2.Unlock()"}}, + Line: 45, + SyntaxPatterns: []ir.PatternString{{Line: 45, Value: "$mu1.Lock(); $mu2.Unlock()"}}, ReportTemplate: "defer is missing, mutex is unlocked immediately", WhereExpr: ir.FilterExpr{ - Line: 81, + Line: 46, Op: ir.FilterEqOp, Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", Args: []ir.FilterExpr{ - {Line: 81, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - {Line: 81, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, + {Line: 46, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, + {Line: 46, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, }, }, LocationVar: "mu2", }, { - Line: 85, - SyntaxPatterns: []ir.PatternString{{Line: 85, Value: "$mu1.RLock(); $mu2.RUnlock()"}}, + Line: 50, + SyntaxPatterns: []ir.PatternString{{Line: 50, Value: "$mu1.RLock(); $mu2.RUnlock()"}}, ReportTemplate: "defer is missing, mutex is unlocked immediately", WhereExpr: ir.FilterExpr{ - Line: 86, + Line: 51, Op: ir.FilterEqOp, Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", Args: []ir.FilterExpr{ - {Line: 86, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - {Line: 86, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, + {Line: 51, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, + {Line: 51, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, }, }, LocationVar: "mu2", }, { - Line: 91, - SyntaxPatterns: []ir.PatternString{{Line: 91, Value: "$mu1.Lock(); defer $mu2.RUnlock()"}}, + Line: 56, + SyntaxPatterns: []ir.PatternString{{Line: 56, Value: "$mu1.Lock(); defer $mu2.RUnlock()"}}, ReportTemplate: "suspicious unlock, maybe Unlock was intended?", WhereExpr: ir.FilterExpr{ - Line: 92, + Line: 57, Op: ir.FilterEqOp, Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", Args: []ir.FilterExpr{ - {Line: 92, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - {Line: 92, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, + {Line: 57, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, + {Line: 57, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, }, }, LocationVar: "mu2", }, { - Line: 96, - SyntaxPatterns: []ir.PatternString{{Line: 96, Value: "$mu1.RLock(); defer $mu2.Unlock()"}}, + Line: 61, + SyntaxPatterns: []ir.PatternString{{Line: 61, Value: "$mu1.RLock(); defer $mu2.Unlock()"}}, ReportTemplate: "suspicious unlock, maybe RUnlock was intended?", WhereExpr: ir.FilterExpr{ - Line: 97, + Line: 62, Op: ir.FilterEqOp, Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", Args: []ir.FilterExpr{ - {Line: 97, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - {Line: 97, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, + {Line: 62, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, + {Line: 62, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, }, }, LocationVar: "mu2", }, { - Line: 102, - SyntaxPatterns: []ir.PatternString{{Line: 
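Illustrative aside, not part of the patch: the badLock rules above catch a Lock immediately followed by Unlock, where a deferred unlock was almost certainly intended. A small sketch with a hypothetical counter type:

package main

import "sync"

type counter struct {
	mu sync.Mutex
	n  int
}

// badLock flags the immediate unlock: the critical section is empty and the
// increment below runs unprotected.
func (c *counter) incBroken() {
	c.mu.Lock()
	c.mu.Unlock()
	c.n++
}

// The intended pattern defers the unlock.
func (c *counter) inc() {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.n++
}

func main() {
	c := &counter{}
	c.inc()
	c.incBroken()
}
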
102, Value: "$mu1.Lock(); defer $mu2.Lock()"}}, + Line: 67, + SyntaxPatterns: []ir.PatternString{{Line: 67, Value: "$mu1.Lock(); defer $mu2.Lock()"}}, ReportTemplate: "maybe defer $mu1.Unlock() was intended?", WhereExpr: ir.FilterExpr{ - Line: 103, + Line: 68, Op: ir.FilterEqOp, Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", Args: []ir.FilterExpr{ - {Line: 103, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - {Line: 103, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, + {Line: 68, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, + {Line: 68, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, }, }, LocationVar: "mu2", }, { - Line: 107, - SyntaxPatterns: []ir.PatternString{{Line: 107, Value: "$mu1.RLock(); defer $mu2.RLock()"}}, + Line: 72, + SyntaxPatterns: []ir.PatternString{{Line: 72, Value: "$mu1.RLock(); defer $mu2.RLock()"}}, ReportTemplate: "maybe defer $mu1.RUnlock() was intended?", WhereExpr: ir.FilterExpr{ - Line: 108, + Line: 73, Op: ir.FilterEqOp, Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", Args: []ir.FilterExpr{ - {Line: 108, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - {Line: 108, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, + {Line: 73, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, + {Line: 73, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, }, }, LocationVar: "mu2", @@ -376,7 +282,7 @@ var PrecompiledRules = &ir.File{ }, }, { - Line: 117, + Line: 82, Name: "httpNoBody", MatcherName: "m", DocTags: []string{"style", "experimental"}, @@ -385,39 +291,54 @@ var PrecompiledRules = &ir.File{ DocAfter: "http.NewRequest(\"GET\", url, http.NoBody)", Rules: []ir.Rule{ { - Line: 118, - SyntaxPatterns: []ir.PatternString{{Line: 118, Value: "http.NewRequest($method, $url, $nil)"}}, + Line: 83, + SyntaxPatterns: []ir.PatternString{{Line: 83, Value: "http.NewRequest($method, $url, $nil)"}}, ReportTemplate: "http.NoBody should be preferred to the nil request body", SuggestTemplate: "http.NewRequest($method, $url, http.NoBody)", WhereExpr: ir.FilterExpr{ - Line: 119, + Line: 84, Op: ir.FilterEqOp, Src: "m[\"nil\"].Text == \"nil\"", Args: []ir.FilterExpr{ - {Line: 119, Op: ir.FilterVarTextOp, Src: "m[\"nil\"].Text", Value: "nil"}, - {Line: 119, Op: ir.FilterStringOp, Src: "\"nil\"", Value: "nil"}, + {Line: 84, Op: ir.FilterVarTextOp, Src: "m[\"nil\"].Text", Value: "nil"}, + {Line: 84, Op: ir.FilterStringOp, Src: "\"nil\"", Value: "nil"}, }, }, }, { - Line: 123, - SyntaxPatterns: []ir.PatternString{{Line: 123, Value: "http.NewRequestWithContext($ctx, $method, $url, $nil)"}}, + Line: 88, + SyntaxPatterns: []ir.PatternString{{Line: 88, Value: "http.NewRequestWithContext($ctx, $method, $url, $nil)"}}, ReportTemplate: "http.NoBody should be preferred to the nil request body", SuggestTemplate: "http.NewRequestWithContext($ctx, $method, $url, http.NoBody)", WhereExpr: ir.FilterExpr{ - Line: 124, + Line: 89, Op: ir.FilterEqOp, Src: "m[\"nil\"].Text == \"nil\"", Args: []ir.FilterExpr{ - {Line: 124, Op: ir.FilterVarTextOp, Src: "m[\"nil\"].Text", Value: "nil"}, - {Line: 124, Op: ir.FilterStringOp, Src: "\"nil\"", Value: "nil"}, + {Line: 89, Op: ir.FilterVarTextOp, Src: "m[\"nil\"].Text", Value: "nil"}, + {Line: 89, Op: ir.FilterStringOp, Src: "\"nil\"", Value: "nil"}, + }, + }, + }, + { + Line: 93, + SyntaxPatterns: []ir.PatternString{{Line: 93, Value: "httptest.NewRequest($method, $url, $nil)"}}, + ReportTemplate: "http.NoBody should be preferred to the nil request body", + 
SuggestTemplate: "httptest.NewRequest($method, $url, http.NoBody)", + WhereExpr: ir.FilterExpr{ + Line: 94, + Op: ir.FilterEqOp, + Src: "m[\"nil\"].Text == \"nil\"", + Args: []ir.FilterExpr{ + {Line: 94, Op: ir.FilterVarTextOp, Src: "m[\"nil\"].Text", Value: "nil"}, + {Line: 94, Op: ir.FilterStringOp, Src: "\"nil\"", Value: "nil"}, }, }, }, }, }, { - Line: 134, + Line: 104, Name: "preferDecodeRune", MatcherName: "m", DocTags: []string{"performance", "experimental"}, @@ -426,20 +347,20 @@ var PrecompiledRules = &ir.File{ DocAfter: "r, _ := utf8.DecodeRuneInString(s)", DocNote: "See Go issue for details: https://github.com/golang/go/issues/45260", Rules: []ir.Rule{{ - Line: 135, - SyntaxPatterns: []ir.PatternString{{Line: 135, Value: "[]rune($s)[0]"}}, + Line: 105, + SyntaxPatterns: []ir.PatternString{{Line: 105, Value: "[]rune($s)[0]"}}, ReportTemplate: "consider replacing $$ with utf8.DecodeRuneInString($s)", WhereExpr: ir.FilterExpr{ - Line: 136, + Line: 106, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", - Args: []ir.FilterExpr{{Line: 136, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, + Args: []ir.FilterExpr{{Line: 106, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, }, }}, }, { - Line: 144, + Line: 114, Name: "sloppyLen", MatcherName: "m", DocTags: []string{"style"}, @@ -448,24 +369,24 @@ var PrecompiledRules = &ir.File{ DocAfter: "len(arr) == 0", Rules: []ir.Rule{ { - Line: 145, - SyntaxPatterns: []ir.PatternString{{Line: 145, Value: "len($_) >= 0"}}, + Line: 115, + SyntaxPatterns: []ir.PatternString{{Line: 115, Value: "len($_) >= 0"}}, ReportTemplate: "$$ is always true", }, { - Line: 146, - SyntaxPatterns: []ir.PatternString{{Line: 146, Value: "len($_) < 0"}}, + Line: 116, + SyntaxPatterns: []ir.PatternString{{Line: 116, Value: "len($_) < 0"}}, ReportTemplate: "$$ is always false", }, { - Line: 147, - SyntaxPatterns: []ir.PatternString{{Line: 147, Value: "len($x) <= 0"}}, + Line: 117, + SyntaxPatterns: []ir.PatternString{{Line: 117, Value: "len($x) <= 0"}}, ReportTemplate: "$$ can be len($x) == 0", }, }, }, { - Line: 154, + Line: 124, Name: "valSwap", MatcherName: "m", DocTags: []string{"style"}, @@ -473,13 +394,13 @@ var PrecompiledRules = &ir.File{ DocBefore: "*tmp = *x; *x = *y; *y = *tmp", DocAfter: "*x, *y = *y, *x", Rules: []ir.Rule{{ - Line: 155, - SyntaxPatterns: []ir.PatternString{{Line: 155, Value: "$tmp := $y; $y = $x; $x = $tmp"}}, + Line: 125, + SyntaxPatterns: []ir.PatternString{{Line: 125, Value: "$tmp := $y; $y = $x; $x = $tmp"}}, ReportTemplate: "can re-write as `$y, $x = $x, $y`", }}, }, { - Line: 163, + Line: 133, Name: "switchTrue", MatcherName: "m", DocTags: []string{"style"}, @@ -488,19 +409,19 @@ var PrecompiledRules = &ir.File{ DocAfter: "switch {...}", Rules: []ir.Rule{ { - Line: 164, - SyntaxPatterns: []ir.PatternString{{Line: 164, Value: "switch true { $*_ }"}}, + Line: 134, + SyntaxPatterns: []ir.PatternString{{Line: 134, Value: "switch true { $*_ }"}}, ReportTemplate: "replace 'switch true {}' with 'switch {}'", }, { - Line: 166, - SyntaxPatterns: []ir.PatternString{{Line: 166, Value: "switch $x; true { $*_ }"}}, + Line: 136, + SyntaxPatterns: []ir.PatternString{{Line: 136, Value: "switch $x; true { $*_ }"}}, ReportTemplate: "replace 'switch $x; true {}' with 'switch $x; {}'", }, }, }, { - Line: 174, + Line: 144, Name: "flagDeref", MatcherName: "m", DocTags: []string{"diagnostic"}, @@ -509,49 +430,49 @@ var PrecompiledRules = &ir.File{ DocAfter: "var b bool; flag.BoolVar(&b, \"b\", false, \"b docs\")", 
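Illustrative aside, not part of the patch: the httpNoBody rules above, including the newly added httptest.NewRequest pattern, prefer http.NoBody over a nil request body. A minimal sketch (the URL and variable names are hypothetical):

package main

import (
	"net/http"
	"net/http/httptest"
)

func requests() {
	// httpNoBody flags nil request bodies...
	r1, _ := http.NewRequest(http.MethodGet, "http://example.com", nil)

	// ...and, with the rule added above, httptest.NewRequest as well.
	r2 := httptest.NewRequest(http.MethodGet, "http://example.com", nil)

	// Suggested replacements pass http.NoBody instead.
	r3, _ := http.NewRequest(http.MethodGet, "http://example.com", http.NoBody)
	r4 := httptest.NewRequest(http.MethodGet, "http://example.com", http.NoBody)

	_, _, _, _ = r1, r2, r3, r4
}

func main() { requests() }
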
Rules: []ir.Rule{ { - Line: 175, - SyntaxPatterns: []ir.PatternString{{Line: 175, Value: "*flag.Bool($*_)"}}, + Line: 145, + SyntaxPatterns: []ir.PatternString{{Line: 145, Value: "*flag.Bool($*_)"}}, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.BoolVar", }, { - Line: 176, - SyntaxPatterns: []ir.PatternString{{Line: 176, Value: "*flag.Duration($*_)"}}, + Line: 146, + SyntaxPatterns: []ir.PatternString{{Line: 146, Value: "*flag.Duration($*_)"}}, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.DurationVar", }, { - Line: 177, - SyntaxPatterns: []ir.PatternString{{Line: 177, Value: "*flag.Float64($*_)"}}, + Line: 147, + SyntaxPatterns: []ir.PatternString{{Line: 147, Value: "*flag.Float64($*_)"}}, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.Float64Var", }, { - Line: 178, - SyntaxPatterns: []ir.PatternString{{Line: 178, Value: "*flag.Int($*_)"}}, + Line: 148, + SyntaxPatterns: []ir.PatternString{{Line: 148, Value: "*flag.Int($*_)"}}, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.IntVar", }, { - Line: 179, - SyntaxPatterns: []ir.PatternString{{Line: 179, Value: "*flag.Int64($*_)"}}, + Line: 149, + SyntaxPatterns: []ir.PatternString{{Line: 149, Value: "*flag.Int64($*_)"}}, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.Int64Var", }, { - Line: 180, - SyntaxPatterns: []ir.PatternString{{Line: 180, Value: "*flag.String($*_)"}}, + Line: 150, + SyntaxPatterns: []ir.PatternString{{Line: 150, Value: "*flag.String($*_)"}}, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.StringVar", }, { - Line: 181, - SyntaxPatterns: []ir.PatternString{{Line: 181, Value: "*flag.Uint($*_)"}}, + Line: 151, + SyntaxPatterns: []ir.PatternString{{Line: 151, Value: "*flag.Uint($*_)"}}, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.UintVar", }, { - Line: 182, - SyntaxPatterns: []ir.PatternString{{Line: 182, Value: "*flag.Uint64($*_)"}}, + Line: 152, + SyntaxPatterns: []ir.PatternString{{Line: 152, Value: "*flag.Uint64($*_)"}}, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.Uint64Var", }, }, }, { - Line: 189, + Line: 159, Name: "emptyStringTest", MatcherName: "m", DocTags: []string{"style", "experimental"}, @@ -560,33 +481,33 @@ var PrecompiledRules = &ir.File{ DocAfter: "s == \"\"", Rules: []ir.Rule{ { - Line: 190, - SyntaxPatterns: []ir.PatternString{{Line: 190, Value: "len($s) != 0"}}, + Line: 160, + SyntaxPatterns: []ir.PatternString{{Line: 160, Value: "len($s) != 0"}}, ReportTemplate: "replace `$$` with `$s != \"\"`", WhereExpr: ir.FilterExpr{ - Line: 191, + Line: 161, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", - Args: []ir.FilterExpr{{Line: 191, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, + Args: []ir.FilterExpr{{Line: 161, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, }, }, { - Line: 194, - SyntaxPatterns: []ir.PatternString{{Line: 194, Value: "len($s) == 0"}}, + Line: 164, + SyntaxPatterns: []ir.PatternString{{Line: 164, Value: "len($s) == 0"}}, ReportTemplate: "replace `$$` with `$s == \"\"`", WhereExpr: ir.FilterExpr{ - Line: 195, + Line: 165, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", - Args: []ir.FilterExpr{{Line: 195, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, + Args: []ir.FilterExpr{{Line: 165, Op: ir.FilterStringOp, 
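Illustrative aside, not part of the patch: the flagDeref rules above flag an immediate dereference of the pointer returned by flag.Bool and friends, which silently captures the default value. A minimal sketch with hypothetical flag names:

package main

import (
	"flag"
	"fmt"
)

func main() {
	// flagDeref flags this: verbose is fixed to the default value because
	// the pointer is dereferenced before flag.Parse runs.
	verbose := *flag.Bool("verbose", false, "enable verbose output")

	// Suggested form: bind the flag to a variable via flag.BoolVar and read
	// it only after Parse.
	var debug bool
	flag.BoolVar(&debug, "debug", false, "enable debug output")

	flag.Parse()
	fmt.Println(verbose, debug)
}
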
Src: "`string`", Value: "string"}}, }, }, }, }, { - Line: 203, + Line: 173, Name: "stringXbytes", MatcherName: "m", DocTags: []string{"performance"}, @@ -595,180 +516,180 @@ var PrecompiledRules = &ir.File{ DocAfter: "copy(b, s)", Rules: []ir.Rule{ { - Line: 204, - SyntaxPatterns: []ir.PatternString{{Line: 204, Value: "copy($_, []byte($s))"}}, + Line: 174, + SyntaxPatterns: []ir.PatternString{{Line: 174, Value: "copy($_, []byte($s))"}}, ReportTemplate: "can simplify `[]byte($s)` to `$s`", }, { - Line: 206, - SyntaxPatterns: []ir.PatternString{{Line: 206, Value: "string($b) == \"\""}}, + Line: 176, + SyntaxPatterns: []ir.PatternString{{Line: 176, Value: "string($b) == \"\""}}, ReportTemplate: "suggestion: len($b) == 0", SuggestTemplate: "len($b) == 0", WhereExpr: ir.FilterExpr{ - Line: 206, + Line: 176, Op: ir.FilterVarTypeIsOp, Src: "m[\"b\"].Type.Is(`[]byte`)", Value: "b", - Args: []ir.FilterExpr{{Line: 206, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, + Args: []ir.FilterExpr{{Line: 176, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, }, }, { - Line: 207, - SyntaxPatterns: []ir.PatternString{{Line: 207, Value: "string($b) != \"\""}}, + Line: 177, + SyntaxPatterns: []ir.PatternString{{Line: 177, Value: "string($b) != \"\""}}, ReportTemplate: "suggestion: len($b) != 0", SuggestTemplate: "len($b) != 0", WhereExpr: ir.FilterExpr{ - Line: 207, + Line: 177, Op: ir.FilterVarTypeIsOp, Src: "m[\"b\"].Type.Is(`[]byte`)", Value: "b", - Args: []ir.FilterExpr{{Line: 207, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, + Args: []ir.FilterExpr{{Line: 177, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, }, }, { - Line: 209, - SyntaxPatterns: []ir.PatternString{{Line: 209, Value: "len(string($b))"}}, + Line: 179, + SyntaxPatterns: []ir.PatternString{{Line: 179, Value: "len(string($b))"}}, ReportTemplate: "suggestion: len($b)", SuggestTemplate: "len($b)", WhereExpr: ir.FilterExpr{ - Line: 209, + Line: 179, Op: ir.FilterVarTypeIsOp, Src: "m[\"b\"].Type.Is(`[]byte`)", Value: "b", - Args: []ir.FilterExpr{{Line: 209, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, + Args: []ir.FilterExpr{{Line: 179, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, }, }, { - Line: 211, - SyntaxPatterns: []ir.PatternString{{Line: 211, Value: "string($x) == string($y)"}}, + Line: 181, + SyntaxPatterns: []ir.PatternString{{Line: 181, Value: "string($x) == string($y)"}}, ReportTemplate: "suggestion: bytes.Equal($x, $y)", SuggestTemplate: "bytes.Equal($x, $y)", WhereExpr: ir.FilterExpr{ - Line: 212, + Line: 182, Op: ir.FilterAndOp, Src: "m[\"x\"].Type.Is(`[]byte`) && m[\"y\"].Type.Is(`[]byte`)", Args: []ir.FilterExpr{ { - Line: 212, + Line: 182, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]byte`)", Value: "x", - Args: []ir.FilterExpr{{Line: 212, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, + Args: []ir.FilterExpr{{Line: 182, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, }, { - Line: 212, + Line: 182, Op: ir.FilterVarTypeIsOp, Src: "m[\"y\"].Type.Is(`[]byte`)", Value: "y", - Args: []ir.FilterExpr{{Line: 212, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, + Args: []ir.FilterExpr{{Line: 182, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, }, }, }, }, { - Line: 215, - SyntaxPatterns: []ir.PatternString{{Line: 215, Value: "string($x) != string($y)"}}, + Line: 185, + SyntaxPatterns: []ir.PatternString{{Line: 185, Value: "string($x) != string($y)"}}, ReportTemplate: "suggestion: !bytes.Equal($x, $y)", 
SuggestTemplate: "!bytes.Equal($x, $y)", WhereExpr: ir.FilterExpr{ - Line: 216, + Line: 186, Op: ir.FilterAndOp, Src: "m[\"x\"].Type.Is(`[]byte`) && m[\"y\"].Type.Is(`[]byte`)", Args: []ir.FilterExpr{ { - Line: 216, + Line: 186, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]byte`)", Value: "x", - Args: []ir.FilterExpr{{Line: 216, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, + Args: []ir.FilterExpr{{Line: 186, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, }, { - Line: 216, + Line: 186, Op: ir.FilterVarTypeIsOp, Src: "m[\"y\"].Type.Is(`[]byte`)", Value: "y", - Args: []ir.FilterExpr{{Line: 216, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, + Args: []ir.FilterExpr{{Line: 186, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, }, }, }, }, { - Line: 219, - SyntaxPatterns: []ir.PatternString{{Line: 219, Value: "$re.Match([]byte($s))"}}, + Line: 189, + SyntaxPatterns: []ir.PatternString{{Line: 189, Value: "$re.Match([]byte($s))"}}, ReportTemplate: "suggestion: $re.MatchString($s)", SuggestTemplate: "$re.MatchString($s)", WhereExpr: ir.FilterExpr{ - Line: 220, + Line: 190, Op: ir.FilterAndOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`) && m[\"s\"].Type.Is(`string`)", Args: []ir.FilterExpr{ { - Line: 220, + Line: 190, Op: ir.FilterVarTypeIsOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`)", Value: "re", - Args: []ir.FilterExpr{{Line: 220, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}}, + Args: []ir.FilterExpr{{Line: 190, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}}, }, { - Line: 220, + Line: 190, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", - Args: []ir.FilterExpr{{Line: 220, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, + Args: []ir.FilterExpr{{Line: 190, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, }, }, }, }, { - Line: 223, - SyntaxPatterns: []ir.PatternString{{Line: 223, Value: "$re.FindIndex([]byte($s))"}}, + Line: 193, + SyntaxPatterns: []ir.PatternString{{Line: 193, Value: "$re.FindIndex([]byte($s))"}}, ReportTemplate: "suggestion: $re.FindStringIndex($s)", SuggestTemplate: "$re.FindStringIndex($s)", WhereExpr: ir.FilterExpr{ - Line: 224, + Line: 194, Op: ir.FilterAndOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`) && m[\"s\"].Type.Is(`string`)", Args: []ir.FilterExpr{ { - Line: 224, + Line: 194, Op: ir.FilterVarTypeIsOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`)", Value: "re", - Args: []ir.FilterExpr{{Line: 224, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}}, + Args: []ir.FilterExpr{{Line: 194, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}}, }, { - Line: 224, + Line: 194, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", - Args: []ir.FilterExpr{{Line: 224, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, + Args: []ir.FilterExpr{{Line: 194, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, }, }, }, }, { - Line: 227, - SyntaxPatterns: []ir.PatternString{{Line: 227, Value: "$re.FindAllIndex([]byte($s), $n)"}}, + Line: 197, + SyntaxPatterns: []ir.PatternString{{Line: 197, Value: "$re.FindAllIndex([]byte($s), $n)"}}, ReportTemplate: "suggestion: $re.FindAllStringIndex($s, $n)", SuggestTemplate: "$re.FindAllStringIndex($s, $n)", WhereExpr: ir.FilterExpr{ - Line: 228, + Line: 198, Op: ir.FilterAndOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`) && m[\"s\"].Type.Is(`string`)", Args: []ir.FilterExpr{ { - Line: 228, + Line: 198, Op: ir.FilterVarTypeIsOp, 
Src: "m[\"re\"].Type.Is(`*regexp.Regexp`)", Value: "re", - Args: []ir.FilterExpr{{Line: 228, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}}, + Args: []ir.FilterExpr{{Line: 198, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}}, }, { - Line: 228, + Line: 198, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", - Args: []ir.FilterExpr{{Line: 228, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, + Args: []ir.FilterExpr{{Line: 198, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, }, }, }, @@ -776,7 +697,7 @@ var PrecompiledRules = &ir.File{ }, }, { - Line: 237, + Line: 207, Name: "indexAlloc", MatcherName: "m", DocTags: []string{"performance"}, @@ -785,22 +706,22 @@ var PrecompiledRules = &ir.File{ DocAfter: "bytes.Index(x, []byte(y))", DocNote: "See Go issue for details: https://github.com/golang/go/issues/25864", Rules: []ir.Rule{{ - Line: 238, - SyntaxPatterns: []ir.PatternString{{Line: 238, Value: "strings.Index(string($x), $y)"}}, + Line: 208, + SyntaxPatterns: []ir.PatternString{{Line: 208, Value: "strings.Index(string($x), $y)"}}, ReportTemplate: "consider replacing $$ with bytes.Index($x, []byte($y))", WhereExpr: ir.FilterExpr{ - Line: 239, + Line: 209, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure", Args: []ir.FilterExpr{ - {Line: 239, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - {Line: 239, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, + {Line: 209, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + {Line: 209, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, }, }, }}, }, { - Line: 247, + Line: 217, Name: "wrapperFunc", MatcherName: "m", DocTags: []string{"style"}, @@ -809,83 +730,169 @@ var PrecompiledRules = &ir.File{ DocAfter: "wg.Done()", Rules: []ir.Rule{ { - Line: 248, - SyntaxPatterns: []ir.PatternString{{Line: 248, Value: "$wg.Add(-1)"}}, + Line: 218, + SyntaxPatterns: []ir.PatternString{{Line: 218, Value: "$wg.Add(-1)"}}, ReportTemplate: "use WaitGroup.Done method in `$$`", WhereExpr: ir.FilterExpr{ - Line: 249, + Line: 219, Op: ir.FilterVarTypeIsOp, Src: "m[\"wg\"].Type.Is(`sync.WaitGroup`)", Value: "wg", - Args: []ir.FilterExpr{{Line: 249, Op: ir.FilterStringOp, Src: "`sync.WaitGroup`", Value: "sync.WaitGroup"}}, + Args: []ir.FilterExpr{{Line: 219, Op: ir.FilterStringOp, Src: "`sync.WaitGroup`", Value: "sync.WaitGroup"}}, }, }, { - Line: 252, - SyntaxPatterns: []ir.PatternString{{Line: 252, Value: "$buf.Truncate(0)"}}, + Line: 222, + SyntaxPatterns: []ir.PatternString{{Line: 222, Value: "$buf.Truncate(0)"}}, ReportTemplate: "use Buffer.Reset method in `$$`", WhereExpr: ir.FilterExpr{ - Line: 253, + Line: 223, Op: ir.FilterVarTypeIsOp, Src: "m[\"buf\"].Type.Is(`bytes.Buffer`)", Value: "buf", - Args: []ir.FilterExpr{{Line: 253, Op: ir.FilterStringOp, Src: "`bytes.Buffer`", Value: "bytes.Buffer"}}, + Args: []ir.FilterExpr{{Line: 223, Op: ir.FilterStringOp, Src: "`bytes.Buffer`", Value: "bytes.Buffer"}}, }, }, { - Line: 256, - SyntaxPatterns: []ir.PatternString{{Line: 256, Value: "http.HandlerFunc(http.NotFound)"}}, + Line: 226, + SyntaxPatterns: []ir.PatternString{{Line: 226, Value: "http.HandlerFunc(http.NotFound)"}}, ReportTemplate: "use http.NotFoundHandler method in `$$`", }, { - Line: 258, - SyntaxPatterns: []ir.PatternString{{Line: 258, Value: "strings.SplitN($_, $_, -1)"}}, + Line: 228, + SyntaxPatterns: []ir.PatternString{{Line: 228, Value: "strings.SplitN($_, $_, -1)"}}, ReportTemplate: "use strings.Split method in 
`$$`", }, { - Line: 259, - SyntaxPatterns: []ir.PatternString{{Line: 259, Value: "strings.Replace($_, $_, $_, -1)"}}, + Line: 229, + SyntaxPatterns: []ir.PatternString{{Line: 229, Value: "strings.Replace($_, $_, $_, -1)"}}, ReportTemplate: "use strings.ReplaceAll method in `$$`", }, { - Line: 260, - SyntaxPatterns: []ir.PatternString{{Line: 260, Value: "strings.Map(unicode.ToTitle, $_)"}}, + Line: 230, + SyntaxPatterns: []ir.PatternString{{Line: 230, Value: "strings.Map(unicode.ToTitle, $_)"}}, ReportTemplate: "use strings.ToTitle method in `$$`", }, { - Line: 262, - SyntaxPatterns: []ir.PatternString{{Line: 262, Value: "bytes.SplitN(b, []byte(\".\"), -1)"}}, + Line: 231, + SyntaxPatterns: []ir.PatternString{ + {Line: 231, Value: "strings.Index($s1, $s2) >= 0"}, + {Line: 231, Value: "strings.Index($s1, $s2) != -1"}, + }, + ReportTemplate: "suggestion: strings.Contains($s1, $s2)", + SuggestTemplate: "strings.Contains($s1, $s2)", + }, + { + Line: 232, + SyntaxPatterns: []ir.PatternString{ + {Line: 232, Value: "strings.IndexAny($s1, $s2) >= 0"}, + {Line: 232, Value: "strings.IndexAny($s1, $s2) != -1"}, + }, + ReportTemplate: "suggestion: strings.ContainsAny($s1, $s2)", + SuggestTemplate: "strings.ContainsAny($s1, $s2)", + }, + { + Line: 233, + SyntaxPatterns: []ir.PatternString{ + {Line: 233, Value: "strings.IndexRune($s1, $s2) >= 0"}, + {Line: 233, Value: "strings.IndexRune($s1, $s2) != -1"}, + }, + ReportTemplate: "suggestion: strings.ContainsRune($s1, $s2)", + SuggestTemplate: "strings.ContainsRune($s1, $s2)", + }, + { + Line: 235, + SyntaxPatterns: []ir.PatternString{ + {Line: 235, Value: "$i := strings.Index($s, $sep); $*_; $x, $y = $s[:$i], $s[$i+1:]"}, + {Line: 236, Value: "$i := strings.Index($s, $sep); $*_; $x = $s[:$i]; $*_; $y = $s[$i+1:]"}, + }, + ReportTemplate: "suggestion: $x, $y, _ = strings.Cut($s, $sep)", + SuggestTemplate: "$x, $y, _ = strings.Cut($s, $sep)", + WhereExpr: ir.FilterExpr{ + Line: 237, + Op: ir.FilterGoVersionGreaterEqThanOp, + Src: "m.GoVersion().GreaterEqThan(\"1.18\")", + Value: "1.18", + }, + }, + { + Line: 240, + SyntaxPatterns: []ir.PatternString{ + {Line: 241, Value: "if $i := strings.Index($s, $sep); $i != -1 { $*_; $x, $y = $s[:$i], $s[$i+1:]; $*_ }"}, + {Line: 242, Value: "if $i := strings.Index($s, $sep); $i != -1 { $*_; $x = $s[:$i]; $*_; $y = $s[$i+1:]; $*_ }"}, + {Line: 243, Value: "if $i := strings.Index($s, $sep); $i >= 0 { $*_; $x, $y = $s[:$i], $s[$i+1:]; $*_ }"}, + {Line: 244, Value: "if $i := strings.Index($s, $sep); $i >= 0 { $*_; $x = $s[:$i]; $*_; $y = $s[$i+1:]; $*_ }"}, + }, + ReportTemplate: "suggestion: if $x, $y, ok = strings.Cut($s, $sep); ok { ... }", + SuggestTemplate: "if $x, $y, ok = strings.Cut($s, $sep); ok { ... 
}", + WhereExpr: ir.FilterExpr{ + Line: 245, + Op: ir.FilterGoVersionGreaterEqThanOp, + Src: "m.GoVersion().GreaterEqThan(\"1.18\")", + Value: "1.18", + }, + }, + { + Line: 248, + SyntaxPatterns: []ir.PatternString{{Line: 248, Value: "bytes.SplitN(b, []byte(\".\"), -1)"}}, ReportTemplate: "use bytes.Split method in `$$`", }, { - Line: 263, - SyntaxPatterns: []ir.PatternString{{Line: 263, Value: "bytes.Replace($_, $_, $_, -1)"}}, + Line: 249, + SyntaxPatterns: []ir.PatternString{{Line: 249, Value: "bytes.Replace($_, $_, $_, -1)"}}, ReportTemplate: "use bytes.ReplaceAll method in `$$`", }, { - Line: 264, - SyntaxPatterns: []ir.PatternString{{Line: 264, Value: "bytes.Map(unicode.ToUpper, $_)"}}, + Line: 250, + SyntaxPatterns: []ir.PatternString{{Line: 250, Value: "bytes.Map(unicode.ToUpper, $_)"}}, ReportTemplate: "use bytes.ToUpper method in `$$`", }, { - Line: 265, - SyntaxPatterns: []ir.PatternString{{Line: 265, Value: "bytes.Map(unicode.ToLower, $_)"}}, + Line: 251, + SyntaxPatterns: []ir.PatternString{{Line: 251, Value: "bytes.Map(unicode.ToLower, $_)"}}, ReportTemplate: "use bytes.ToLower method in `$$`", }, { - Line: 266, - SyntaxPatterns: []ir.PatternString{{Line: 266, Value: "bytes.Map(unicode.ToTitle, $_)"}}, + Line: 252, + SyntaxPatterns: []ir.PatternString{{Line: 252, Value: "bytes.Map(unicode.ToTitle, $_)"}}, ReportTemplate: "use bytes.ToTitle method in `$$`", }, { - Line: 268, - SyntaxPatterns: []ir.PatternString{{Line: 268, Value: "draw.DrawMask($_, $_, $_, $_, nil, image.Point{}, $_)"}}, + Line: 253, + SyntaxPatterns: []ir.PatternString{ + {Line: 253, Value: "bytes.Index($b1, $b2) >= 0"}, + {Line: 253, Value: "bytes.Index($b1, $b2) != -1"}, + }, + ReportTemplate: "suggestion: bytes.Contains($b1, $b2)", + SuggestTemplate: "bytes.Contains($b1, $b2)", + }, + { + Line: 254, + SyntaxPatterns: []ir.PatternString{ + {Line: 254, Value: "bytes.IndexAny($b1, $b2) >= 0"}, + {Line: 254, Value: "bytes.IndexAny($b1, $b2) != -1"}, + }, + ReportTemplate: "suggestion: bytes.ContainsAny($b1, $b2)", + SuggestTemplate: "bytes.ContainsAny($b1, $b2)", + }, + { + Line: 255, + SyntaxPatterns: []ir.PatternString{ + {Line: 255, Value: "bytes.IndexRune($b1, $b2) >= 0"}, + {Line: 255, Value: "bytes.IndexRune($b1, $b2) != -1"}, + }, + ReportTemplate: "suggestion: bytes.ContainsRune($b1, $b2)", + SuggestTemplate: "bytes.ContainsRune($b1, $b2)", + }, + { + Line: 257, + SyntaxPatterns: []ir.PatternString{{Line: 257, Value: "draw.DrawMask($_, $_, $_, $_, nil, image.Point{}, $_)"}}, ReportTemplate: "use draw.Draw method in `$$`", }, }, }, { - Line: 276, + Line: 265, Name: "regexpMust", MatcherName: "m", DocTags: []string{"style"}, @@ -894,22 +901,22 @@ var PrecompiledRules = &ir.File{ DocAfter: "re := regexp.MustCompile(\"const pattern\")", Rules: []ir.Rule{ { - Line: 277, - SyntaxPatterns: []ir.PatternString{{Line: 277, Value: "regexp.Compile($pat)"}}, + Line: 266, + SyntaxPatterns: []ir.PatternString{{Line: 266, Value: "regexp.Compile($pat)"}}, ReportTemplate: "for const patterns like $pat, use regexp.MustCompile", WhereExpr: ir.FilterExpr{ - Line: 278, + Line: 267, Op: ir.FilterVarConstOp, Src: "m[\"pat\"].Const", Value: "pat", }, }, { - Line: 281, - SyntaxPatterns: []ir.PatternString{{Line: 281, Value: "regexp.CompilePOSIX($pat)"}}, + Line: 270, + SyntaxPatterns: []ir.PatternString{{Line: 270, Value: "regexp.CompilePOSIX($pat)"}}, ReportTemplate: "for const patterns like $pat, use regexp.MustCompilePOSIX", WhereExpr: ir.FilterExpr{ - Line: 282, + Line: 271, Op: ir.FilterVarConstOp, Src: "m[\"pat\"].Const", 
Value: "pat", @@ -918,7 +925,7 @@ var PrecompiledRules = &ir.File{ }, }, { - Line: 290, + Line: 279, Name: "badCall", MatcherName: "m", DocTags: []string{"diagnostic"}, @@ -927,22 +934,22 @@ var PrecompiledRules = &ir.File{ DocAfter: "strings.Replace(s, from, to, -1)", Rules: []ir.Rule{ { - Line: 291, - SyntaxPatterns: []ir.PatternString{{Line: 291, Value: "strings.Replace($_, $_, $_, $zero)"}}, + Line: 280, + SyntaxPatterns: []ir.PatternString{{Line: 280, Value: "strings.Replace($_, $_, $_, $zero)"}}, ReportTemplate: "suspicious arg 0, probably meant -1", WhereExpr: ir.FilterExpr{ - Line: 292, + Line: 281, Op: ir.FilterEqOp, Src: "m[\"zero\"].Value.Int() == 0", Args: []ir.FilterExpr{ { - Line: 292, + Line: 281, Op: ir.FilterVarValueIntOp, Src: "m[\"zero\"].Value.Int()", Value: "zero", }, { - Line: 292, + Line: 281, Op: ir.FilterIntOp, Src: "0", Value: int64(0), @@ -952,22 +959,22 @@ var PrecompiledRules = &ir.File{ LocationVar: "zero", }, { - Line: 294, - SyntaxPatterns: []ir.PatternString{{Line: 294, Value: "bytes.Replace($_, $_, $_, $zero)"}}, + Line: 283, + SyntaxPatterns: []ir.PatternString{{Line: 283, Value: "bytes.Replace($_, $_, $_, $zero)"}}, ReportTemplate: "suspicious arg 0, probably meant -1", WhereExpr: ir.FilterExpr{ - Line: 295, + Line: 284, Op: ir.FilterEqOp, Src: "m[\"zero\"].Value.Int() == 0", Args: []ir.FilterExpr{ { - Line: 295, + Line: 284, Op: ir.FilterVarValueIntOp, Src: "m[\"zero\"].Value.Int()", Value: "zero", }, { - Line: 295, + Line: 284, Op: ir.FilterIntOp, Src: "0", Value: int64(0), @@ -977,22 +984,22 @@ var PrecompiledRules = &ir.File{ LocationVar: "zero", }, { - Line: 298, - SyntaxPatterns: []ir.PatternString{{Line: 298, Value: "strings.SplitN($_, $_, $zero)"}}, + Line: 287, + SyntaxPatterns: []ir.PatternString{{Line: 287, Value: "strings.SplitN($_, $_, $zero)"}}, ReportTemplate: "suspicious arg 0, probably meant -1", WhereExpr: ir.FilterExpr{ - Line: 299, + Line: 288, Op: ir.FilterEqOp, Src: "m[\"zero\"].Value.Int() == 0", Args: []ir.FilterExpr{ { - Line: 299, + Line: 288, Op: ir.FilterVarValueIntOp, Src: "m[\"zero\"].Value.Int()", Value: "zero", }, { - Line: 299, + Line: 288, Op: ir.FilterIntOp, Src: "0", Value: int64(0), @@ -1002,22 +1009,22 @@ var PrecompiledRules = &ir.File{ LocationVar: "zero", }, { - Line: 301, - SyntaxPatterns: []ir.PatternString{{Line: 301, Value: "bytes.SplitN($_, $_, $zero)"}}, + Line: 290, + SyntaxPatterns: []ir.PatternString{{Line: 290, Value: "bytes.SplitN($_, $_, $zero)"}}, ReportTemplate: "suspicious arg 0, probably meant -1", WhereExpr: ir.FilterExpr{ - Line: 302, + Line: 291, Op: ir.FilterEqOp, Src: "m[\"zero\"].Value.Int() == 0", Args: []ir.FilterExpr{ { - Line: 302, + Line: 291, Op: ir.FilterVarValueIntOp, Src: "m[\"zero\"].Value.Int()", Value: "zero", }, { - Line: 302, + Line: 291, Op: ir.FilterIntOp, Src: "0", Value: int64(0), @@ -1027,19 +1034,19 @@ var PrecompiledRules = &ir.File{ LocationVar: "zero", }, { - Line: 305, - SyntaxPatterns: []ir.PatternString{{Line: 305, Value: "append($_)"}}, + Line: 294, + SyntaxPatterns: []ir.PatternString{{Line: 294, Value: "append($_)"}}, ReportTemplate: "no-op append call, probably missing arguments", }, { - Line: 307, - SyntaxPatterns: []ir.PatternString{{Line: 307, Value: "filepath.Join($_)"}}, + Line: 296, + SyntaxPatterns: []ir.PatternString{{Line: 296, Value: "filepath.Join($_)"}}, ReportTemplate: "suspicious Join on 1 argument", }, }, }, { - Line: 314, + Line: 303, Name: "assignOp", MatcherName: "m", DocTags: []string{"style"}, @@ -1048,87 +1055,87 @@ var PrecompiledRules = 
&ir.File{ DocAfter: "x *= 2", Rules: []ir.Rule{ { - Line: 315, - SyntaxPatterns: []ir.PatternString{{Line: 315, Value: "$x = $x + 1"}}, + Line: 304, + SyntaxPatterns: []ir.PatternString{{Line: 304, Value: "$x = $x + 1"}}, ReportTemplate: "replace `$$` with `$x++`", - WhereExpr: ir.FilterExpr{Line: 315, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 304, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, { - Line: 316, - SyntaxPatterns: []ir.PatternString{{Line: 316, Value: "$x = $x - 1"}}, + Line: 305, + SyntaxPatterns: []ir.PatternString{{Line: 305, Value: "$x = $x - 1"}}, ReportTemplate: "replace `$$` with `$x--`", - WhereExpr: ir.FilterExpr{Line: 316, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 305, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, { - Line: 318, - SyntaxPatterns: []ir.PatternString{{Line: 318, Value: "$x = $x + $y"}}, + Line: 307, + SyntaxPatterns: []ir.PatternString{{Line: 307, Value: "$x = $x + $y"}}, ReportTemplate: "replace `$$` with `$x += $y`", - WhereExpr: ir.FilterExpr{Line: 318, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 307, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, { - Line: 319, - SyntaxPatterns: []ir.PatternString{{Line: 319, Value: "$x = $x - $y"}}, + Line: 308, + SyntaxPatterns: []ir.PatternString{{Line: 308, Value: "$x = $x - $y"}}, ReportTemplate: "replace `$$` with `$x -= $y`", - WhereExpr: ir.FilterExpr{Line: 319, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 308, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, { - Line: 321, - SyntaxPatterns: []ir.PatternString{{Line: 321, Value: "$x = $x * $y"}}, + Line: 310, + SyntaxPatterns: []ir.PatternString{{Line: 310, Value: "$x = $x * $y"}}, ReportTemplate: "replace `$$` with `$x *= $y`", - WhereExpr: ir.FilterExpr{Line: 321, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 310, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, { - Line: 322, - SyntaxPatterns: []ir.PatternString{{Line: 322, Value: "$x = $x / $y"}}, + Line: 311, + SyntaxPatterns: []ir.PatternString{{Line: 311, Value: "$x = $x / $y"}}, ReportTemplate: "replace `$$` with `$x /= $y`", - WhereExpr: ir.FilterExpr{Line: 322, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 311, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, { - Line: 323, - SyntaxPatterns: []ir.PatternString{{Line: 323, Value: "$x = $x % $y"}}, + Line: 312, + SyntaxPatterns: []ir.PatternString{{Line: 312, Value: "$x = $x % $y"}}, ReportTemplate: "replace `$$` with `$x %= $y`", - WhereExpr: ir.FilterExpr{Line: 323, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 312, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, { - Line: 324, - SyntaxPatterns: []ir.PatternString{{Line: 324, Value: "$x = $x & $y"}}, + Line: 313, + SyntaxPatterns: []ir.PatternString{{Line: 313, Value: "$x = $x & $y"}}, ReportTemplate: "replace `$$` with `$x &= $y`", - WhereExpr: ir.FilterExpr{Line: 324, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 313, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, { - Line: 325, - SyntaxPatterns: []ir.PatternString{{Line: 325, Value: "$x = $x | $y"}}, + Line: 314, + SyntaxPatterns: []ir.PatternString{{Line: 314, 
Value: "$x = $x | $y"}}, ReportTemplate: "replace `$$` with `$x |= $y`", - WhereExpr: ir.FilterExpr{Line: 325, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 314, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, { - Line: 326, - SyntaxPatterns: []ir.PatternString{{Line: 326, Value: "$x = $x ^ $y"}}, + Line: 315, + SyntaxPatterns: []ir.PatternString{{Line: 315, Value: "$x = $x ^ $y"}}, ReportTemplate: "replace `$$` with `$x ^= $y`", - WhereExpr: ir.FilterExpr{Line: 326, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 315, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, { - Line: 327, - SyntaxPatterns: []ir.PatternString{{Line: 327, Value: "$x = $x << $y"}}, + Line: 316, + SyntaxPatterns: []ir.PatternString{{Line: 316, Value: "$x = $x << $y"}}, ReportTemplate: "replace `$$` with `$x <<= $y`", - WhereExpr: ir.FilterExpr{Line: 327, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 316, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, { - Line: 328, - SyntaxPatterns: []ir.PatternString{{Line: 328, Value: "$x = $x >> $y"}}, + Line: 317, + SyntaxPatterns: []ir.PatternString{{Line: 317, Value: "$x = $x >> $y"}}, ReportTemplate: "replace `$$` with `$x >>= $y`", - WhereExpr: ir.FilterExpr{Line: 328, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 317, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, { - Line: 329, - SyntaxPatterns: []ir.PatternString{{Line: 329, Value: "$x = $x &^ $y"}}, + Line: 318, + SyntaxPatterns: []ir.PatternString{{Line: 318, Value: "$x = $x &^ $y"}}, ReportTemplate: "replace `$$` with `$x &^= $y`", - WhereExpr: ir.FilterExpr{Line: 329, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 318, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, }, }, { - Line: 336, + Line: 325, Name: "preferWriteByte", MatcherName: "m", DocTags: []string{"performance", "experimental", "opinionated"}, @@ -1136,45 +1143,45 @@ var PrecompiledRules = &ir.File{ DocBefore: "w.WriteRune('\\n')", DocAfter: "w.WriteByte('\\n')", Rules: []ir.Rule{{ - Line: 340, - SyntaxPatterns: []ir.PatternString{{Line: 340, Value: "$w.WriteRune($c)"}}, + Line: 329, + SyntaxPatterns: []ir.PatternString{{Line: 329, Value: "$w.WriteRune($c)"}}, ReportTemplate: "consider writing single byte rune $c with $w.WriteByte($c)", WhereExpr: ir.FilterExpr{ - Line: 341, + Line: 330, Op: ir.FilterAndOp, Src: "m[\"w\"].Type.Implements(\"io.ByteWriter\") && (m[\"c\"].Const && m[\"c\"].Value.Int() < runeSelf)", Args: []ir.FilterExpr{ { - Line: 341, + Line: 330, Op: ir.FilterVarTypeImplementsOp, Src: "m[\"w\"].Type.Implements(\"io.ByteWriter\")", Value: "w", - Args: []ir.FilterExpr{{Line: 341, Op: ir.FilterStringOp, Src: "\"io.ByteWriter\"", Value: "io.ByteWriter"}}, + Args: []ir.FilterExpr{{Line: 330, Op: ir.FilterStringOp, Src: "\"io.ByteWriter\"", Value: "io.ByteWriter"}}, }, { - Line: 341, + Line: 330, Op: ir.FilterAndOp, Src: "(m[\"c\"].Const && m[\"c\"].Value.Int() < runeSelf)", Args: []ir.FilterExpr{ { - Line: 341, + Line: 330, Op: ir.FilterVarConstOp, Src: "m[\"c\"].Const", Value: "c", }, { - Line: 341, + Line: 330, Op: ir.FilterLtOp, Src: "m[\"c\"].Value.Int() < runeSelf", Args: []ir.FilterExpr{ { - Line: 341, + Line: 330, Op: ir.FilterVarValueIntOp, Src: "m[\"c\"].Value.Int()", Value: "c", }, { - Line: 341, + Line: 330, Op: ir.FilterIntOp, Src: 
"runeSelf", Value: int64(128), @@ -1188,7 +1195,7 @@ var PrecompiledRules = &ir.File{ }}, }, { - Line: 349, + Line: 338, Name: "preferFprint", MatcherName: "m", DocTags: []string{"performance", "experimental"}, @@ -1197,66 +1204,147 @@ var PrecompiledRules = &ir.File{ DocAfter: "fmt.Fprintf(w, \"%x\", 10)", Rules: []ir.Rule{ { - Line: 350, - SyntaxPatterns: []ir.PatternString{{Line: 350, Value: "$w.Write([]byte(fmt.Sprint($*args)))"}}, + Line: 339, + SyntaxPatterns: []ir.PatternString{{Line: 339, Value: "$w.Write([]byte(fmt.Sprint($*args)))"}}, ReportTemplate: "fmt.Fprint($w, $args) should be preferred to the $$", SuggestTemplate: "fmt.Fprint($w, $args)", WhereExpr: ir.FilterExpr{ - Line: 351, + Line: 340, Op: ir.FilterVarTypeImplementsOp, Src: "m[\"w\"].Type.Implements(\"io.Writer\")", Value: "w", - Args: []ir.FilterExpr{{Line: 351, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, + Args: []ir.FilterExpr{{Line: 340, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, }, }, { - Line: 355, - SyntaxPatterns: []ir.PatternString{{Line: 355, Value: "$w.Write([]byte(fmt.Sprintf($*args)))"}}, + Line: 344, + SyntaxPatterns: []ir.PatternString{{Line: 344, Value: "$w.Write([]byte(fmt.Sprintf($*args)))"}}, ReportTemplate: "fmt.Fprintf($w, $args) should be preferred to the $$", SuggestTemplate: "fmt.Fprintf($w, $args)", WhereExpr: ir.FilterExpr{ - Line: 356, + Line: 345, Op: ir.FilterVarTypeImplementsOp, Src: "m[\"w\"].Type.Implements(\"io.Writer\")", Value: "w", - Args: []ir.FilterExpr{{Line: 356, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, + Args: []ir.FilterExpr{{Line: 345, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, }, }, { - Line: 360, - SyntaxPatterns: []ir.PatternString{{Line: 360, Value: "$w.Write([]byte(fmt.Sprintln($*args)))"}}, + Line: 349, + SyntaxPatterns: []ir.PatternString{{Line: 349, Value: "$w.Write([]byte(fmt.Sprintln($*args)))"}}, ReportTemplate: "fmt.Fprintln($w, $args) should be preferred to the $$", SuggestTemplate: "fmt.Fprintln($w, $args)", WhereExpr: ir.FilterExpr{ - Line: 361, + Line: 350, Op: ir.FilterVarTypeImplementsOp, Src: "m[\"w\"].Type.Implements(\"io.Writer\")", Value: "w", - Args: []ir.FilterExpr{{Line: 361, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, + Args: []ir.FilterExpr{{Line: 350, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, }, }, { - Line: 365, - SyntaxPatterns: []ir.PatternString{{Line: 365, Value: "io.WriteString($w, fmt.Sprint($*args))"}}, + Line: 354, + SyntaxPatterns: []ir.PatternString{{Line: 354, Value: "io.WriteString($w, fmt.Sprint($*args))"}}, ReportTemplate: "suggestion: fmt.Fprint($w, $args)", SuggestTemplate: "fmt.Fprint($w, $args)", }, { - Line: 366, - SyntaxPatterns: []ir.PatternString{{Line: 366, Value: "io.WriteString($w, fmt.Sprintf($*args))"}}, + Line: 355, + SyntaxPatterns: []ir.PatternString{{Line: 355, Value: "io.WriteString($w, fmt.Sprintf($*args))"}}, ReportTemplate: "suggestion: fmt.Fprintf($w, $args)", SuggestTemplate: "fmt.Fprintf($w, $args)", }, { - Line: 367, - SyntaxPatterns: []ir.PatternString{{Line: 367, Value: "io.WriteString($w, fmt.Sprintln($*args))"}}, + Line: 356, + SyntaxPatterns: []ir.PatternString{{Line: 356, Value: "io.WriteString($w, fmt.Sprintln($*args))"}}, ReportTemplate: "suggestion: fmt.Fprintln($w, $args)", SuggestTemplate: "fmt.Fprintln($w, $args)", }, + { + Line: 358, + SyntaxPatterns: []ir.PatternString{{Line: 358, Value: "$w.WriteString(fmt.Sprint($*args))"}}, + ReportTemplate: 
"suggestion: fmt.Fprint($w, $args)", + SuggestTemplate: "fmt.Fprint($w, $args)", + WhereExpr: ir.FilterExpr{ + Line: 359, + Op: ir.FilterAndOp, + Src: "m[\"w\"].Type.Implements(\"io.Writer\") && m[\"w\"].Type.Implements(\"io.StringWriter\")", + Args: []ir.FilterExpr{ + { + Line: 359, + Op: ir.FilterVarTypeImplementsOp, + Src: "m[\"w\"].Type.Implements(\"io.Writer\")", + Value: "w", + Args: []ir.FilterExpr{{Line: 359, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, + }, + { + Line: 359, + Op: ir.FilterVarTypeImplementsOp, + Src: "m[\"w\"].Type.Implements(\"io.StringWriter\")", + Value: "w", + Args: []ir.FilterExpr{{Line: 359, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}}, + }, + }, + }, + }, + { + Line: 361, + SyntaxPatterns: []ir.PatternString{{Line: 361, Value: "$w.WriteString(fmt.Sprintf($*args))"}}, + ReportTemplate: "suggestion: fmt.Fprintf($w, $args)", + SuggestTemplate: "fmt.Fprintf($w, $args)", + WhereExpr: ir.FilterExpr{ + Line: 362, + Op: ir.FilterAndOp, + Src: "m[\"w\"].Type.Implements(\"io.Writer\") && m[\"w\"].Type.Implements(\"io.StringWriter\")", + Args: []ir.FilterExpr{ + { + Line: 362, + Op: ir.FilterVarTypeImplementsOp, + Src: "m[\"w\"].Type.Implements(\"io.Writer\")", + Value: "w", + Args: []ir.FilterExpr{{Line: 362, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, + }, + { + Line: 362, + Op: ir.FilterVarTypeImplementsOp, + Src: "m[\"w\"].Type.Implements(\"io.StringWriter\")", + Value: "w", + Args: []ir.FilterExpr{{Line: 362, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}}, + }, + }, + }, + }, + { + Line: 364, + SyntaxPatterns: []ir.PatternString{{Line: 364, Value: "$w.WriteString(fmt.Sprintln($*args))"}}, + ReportTemplate: "suggestion: fmt.Fprintln($w, $args)", + SuggestTemplate: "fmt.Fprintln($w, $args)", + WhereExpr: ir.FilterExpr{ + Line: 365, + Op: ir.FilterAndOp, + Src: "m[\"w\"].Type.Implements(\"io.Writer\") && m[\"w\"].Type.Implements(\"io.StringWriter\")", + Args: []ir.FilterExpr{ + { + Line: 365, + Op: ir.FilterVarTypeImplementsOp, + Src: "m[\"w\"].Type.Implements(\"io.Writer\")", + Value: "w", + Args: []ir.FilterExpr{{Line: 365, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, + }, + { + Line: 365, + Op: ir.FilterVarTypeImplementsOp, + Src: "m[\"w\"].Type.Implements(\"io.StringWriter\")", + Value: "w", + Args: []ir.FilterExpr{{Line: 365, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}}, + }, + }, + }, + }, }, }, { - Line: 374, + Line: 373, Name: "dupArg", MatcherName: "m", DocTags: []string{"diagnostic"}, @@ -1265,62 +1353,62 @@ var PrecompiledRules = &ir.File{ DocAfter: "copy(dst, src)", Rules: []ir.Rule{ { - Line: 375, + Line: 374, SyntaxPatterns: []ir.PatternString{ - {Line: 375, Value: "$x.Equal($x)"}, - {Line: 375, Value: "$x.Equals($x)"}, - {Line: 375, Value: "$x.Compare($x)"}, - {Line: 375, Value: "$x.Cmp($x)"}, + {Line: 374, Value: "$x.Equal($x)"}, + {Line: 374, Value: "$x.Equals($x)"}, + {Line: 374, Value: "$x.Compare($x)"}, + {Line: 374, Value: "$x.Cmp($x)"}, }, ReportTemplate: "suspicious method call with the same argument and receiver", - WhereExpr: ir.FilterExpr{Line: 376, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: ir.FilterExpr{Line: 375, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, { - Line: 379, + Line: 378, SyntaxPatterns: []ir.PatternString{ - {Line: 379, Value: "copy($x, $x)"}, - {Line: 380, Value: "math.Max($x, $x)"}, - {Line: 381, Value: "math.Min($x, 
$x)"}, - {Line: 382, Value: "reflect.Copy($x, $x)"}, - {Line: 383, Value: "reflect.DeepEqual($x, $x)"}, - {Line: 384, Value: "strings.Contains($x, $x)"}, - {Line: 385, Value: "strings.Compare($x, $x)"}, - {Line: 386, Value: "strings.EqualFold($x, $x)"}, - {Line: 387, Value: "strings.HasPrefix($x, $x)"}, - {Line: 388, Value: "strings.HasSuffix($x, $x)"}, - {Line: 389, Value: "strings.Index($x, $x)"}, - {Line: 390, Value: "strings.LastIndex($x, $x)"}, - {Line: 391, Value: "strings.Split($x, $x)"}, - {Line: 392, Value: "strings.SplitAfter($x, $x)"}, - {Line: 393, Value: "strings.SplitAfterN($x, $x, $_)"}, - {Line: 394, Value: "strings.SplitN($x, $x, $_)"}, - {Line: 395, Value: "strings.Replace($_, $x, $x, $_)"}, - {Line: 396, Value: "strings.ReplaceAll($_, $x, $x)"}, - {Line: 397, Value: "bytes.Contains($x, $x)"}, - {Line: 398, Value: "bytes.Compare($x, $x)"}, - {Line: 399, Value: "bytes.Equal($x, $x)"}, - {Line: 400, Value: "bytes.EqualFold($x, $x)"}, - {Line: 401, Value: "bytes.HasPrefix($x, $x)"}, - {Line: 402, Value: "bytes.HasSuffix($x, $x)"}, - {Line: 403, Value: "bytes.Index($x, $x)"}, - {Line: 404, Value: "bytes.LastIndex($x, $x)"}, - {Line: 405, Value: "bytes.Split($x, $x)"}, - {Line: 406, Value: "bytes.SplitAfter($x, $x)"}, - {Line: 407, Value: "bytes.SplitAfterN($x, $x, $_)"}, - {Line: 408, Value: "bytes.SplitN($x, $x, $_)"}, - {Line: 409, Value: "bytes.Replace($_, $x, $x, $_)"}, - {Line: 410, Value: "bytes.ReplaceAll($_, $x, $x)"}, - {Line: 411, Value: "types.Identical($x, $x)"}, - {Line: 412, Value: "types.IdenticalIgnoreTags($x, $x)"}, - {Line: 413, Value: "draw.Draw($x, $_, $x, $_, $_)"}, + {Line: 378, Value: "copy($x, $x)"}, + {Line: 379, Value: "math.Max($x, $x)"}, + {Line: 380, Value: "math.Min($x, $x)"}, + {Line: 381, Value: "reflect.Copy($x, $x)"}, + {Line: 382, Value: "reflect.DeepEqual($x, $x)"}, + {Line: 383, Value: "strings.Contains($x, $x)"}, + {Line: 384, Value: "strings.Compare($x, $x)"}, + {Line: 385, Value: "strings.EqualFold($x, $x)"}, + {Line: 386, Value: "strings.HasPrefix($x, $x)"}, + {Line: 387, Value: "strings.HasSuffix($x, $x)"}, + {Line: 388, Value: "strings.Index($x, $x)"}, + {Line: 389, Value: "strings.LastIndex($x, $x)"}, + {Line: 390, Value: "strings.Split($x, $x)"}, + {Line: 391, Value: "strings.SplitAfter($x, $x)"}, + {Line: 392, Value: "strings.SplitAfterN($x, $x, $_)"}, + {Line: 393, Value: "strings.SplitN($x, $x, $_)"}, + {Line: 394, Value: "strings.Replace($_, $x, $x, $_)"}, + {Line: 395, Value: "strings.ReplaceAll($_, $x, $x)"}, + {Line: 396, Value: "bytes.Contains($x, $x)"}, + {Line: 397, Value: "bytes.Compare($x, $x)"}, + {Line: 398, Value: "bytes.Equal($x, $x)"}, + {Line: 399, Value: "bytes.EqualFold($x, $x)"}, + {Line: 400, Value: "bytes.HasPrefix($x, $x)"}, + {Line: 401, Value: "bytes.HasSuffix($x, $x)"}, + {Line: 402, Value: "bytes.Index($x, $x)"}, + {Line: 403, Value: "bytes.LastIndex($x, $x)"}, + {Line: 404, Value: "bytes.Split($x, $x)"}, + {Line: 405, Value: "bytes.SplitAfter($x, $x)"}, + {Line: 406, Value: "bytes.SplitAfterN($x, $x, $_)"}, + {Line: 407, Value: "bytes.SplitN($x, $x, $_)"}, + {Line: 408, Value: "bytes.Replace($_, $x, $x, $_)"}, + {Line: 409, Value: "bytes.ReplaceAll($_, $x, $x)"}, + {Line: 410, Value: "types.Identical($x, $x)"}, + {Line: 411, Value: "types.IdenticalIgnoreTags($x, $x)"}, + {Line: 412, Value: "draw.Draw($x, $_, $x, $_, $_)"}, }, ReportTemplate: "suspicious duplicated args in $$", - WhereExpr: ir.FilterExpr{Line: 414, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + WhereExpr: 
ir.FilterExpr{Line: 413, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, }, }, { - Line: 422, + Line: 421, Name: "returnAfterHttpError", MatcherName: "m", DocTags: []string{"diagnostic", "experimental"}, @@ -1328,14 +1416,14 @@ var PrecompiledRules = &ir.File{ DocBefore: "if err != nil { http.Error(...); }", DocAfter: "if err != nil { http.Error(...); return; }", Rules: []ir.Rule{{ - Line: 423, - SyntaxPatterns: []ir.PatternString{{Line: 423, Value: "if $_ { $*_; http.Error($w, $err, $code) }"}}, + Line: 422, + SyntaxPatterns: []ir.PatternString{{Line: 422, Value: "if $_ { $*_; http.Error($w, $err, $code) }"}}, ReportTemplate: "Possibly return is missed after the http.Error call", LocationVar: "w", }}, }, { - Line: 432, + Line: 431, Name: "preferFilepathJoin", MatcherName: "m", DocTags: []string{"style", "experimental"}, @@ -1343,35 +1431,35 @@ var PrecompiledRules = &ir.File{ DocBefore: "x + string(os.PathSeparator) + y", DocAfter: "filepath.Join(x, y)", Rules: []ir.Rule{{ - Line: 433, - SyntaxPatterns: []ir.PatternString{{Line: 433, Value: "$x + string(os.PathSeparator) + $y"}}, + Line: 432, + SyntaxPatterns: []ir.PatternString{{Line: 432, Value: "$x + string(os.PathSeparator) + $y"}}, ReportTemplate: "filepath.Join($x, $y) should be preferred to the $$", SuggestTemplate: "filepath.Join($x, $y)", WhereExpr: ir.FilterExpr{ - Line: 434, + Line: 433, Op: ir.FilterAndOp, Src: "m[\"x\"].Type.Is(`string`) && m[\"y\"].Type.Is(`string`)", Args: []ir.FilterExpr{ { - Line: 434, + Line: 433, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`string`)", Value: "x", - Args: []ir.FilterExpr{{Line: 434, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, + Args: []ir.FilterExpr{{Line: 433, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, }, { - Line: 434, + Line: 433, Op: ir.FilterVarTypeIsOp, Src: "m[\"y\"].Type.Is(`string`)", Value: "y", - Args: []ir.FilterExpr{{Line: 434, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, + Args: []ir.FilterExpr{{Line: 433, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, }, }, }, }}, }, { - Line: 443, + Line: 442, Name: "preferStringWriter", MatcherName: "m", DocTags: []string{"performance", "experimental"}, @@ -1380,35 +1468,35 @@ var PrecompiledRules = &ir.File{ DocAfter: "w.WriteString(\"foo\")", Rules: []ir.Rule{ { - Line: 444, - SyntaxPatterns: []ir.PatternString{{Line: 444, Value: "$w.Write([]byte($s))"}}, + Line: 443, + SyntaxPatterns: []ir.PatternString{{Line: 443, Value: "$w.Write([]byte($s))"}}, ReportTemplate: "$w.WriteString($s) should be preferred to the $$", SuggestTemplate: "$w.WriteString($s)", WhereExpr: ir.FilterExpr{ - Line: 445, + Line: 444, Op: ir.FilterVarTypeImplementsOp, Src: "m[\"w\"].Type.Implements(\"io.StringWriter\")", Value: "w", - Args: []ir.FilterExpr{{Line: 445, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}}, + Args: []ir.FilterExpr{{Line: 444, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}}, }, }, { - Line: 449, - SyntaxPatterns: []ir.PatternString{{Line: 449, Value: "io.WriteString($w, $s)"}}, + Line: 448, + SyntaxPatterns: []ir.PatternString{{Line: 448, Value: "io.WriteString($w, $s)"}}, ReportTemplate: "$w.WriteString($s) should be preferred to the $$", SuggestTemplate: "$w.WriteString($s)", WhereExpr: ir.FilterExpr{ - Line: 450, + Line: 449, Op: ir.FilterVarTypeImplementsOp, Src: "m[\"w\"].Type.Implements(\"io.StringWriter\")", Value: "w", - Args: []ir.FilterExpr{{Line: 450, Op: ir.FilterStringOp, Src: 
"\"io.StringWriter\"", Value: "io.StringWriter"}}, + Args: []ir.FilterExpr{{Line: 449, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}}, }, }, }, }, { - Line: 459, + Line: 458, Name: "sliceClear", MatcherName: "m", DocTags: []string{"performance", "experimental"}, @@ -1416,22 +1504,22 @@ var PrecompiledRules = &ir.File{ DocBefore: "for i := 0; i < len(buf); i++ { buf[i] = 0 }", DocAfter: "for i := range buf { buf[i] = 0 }", Rules: []ir.Rule{{ - Line: 460, - SyntaxPatterns: []ir.PatternString{{Line: 460, Value: "for $i := 0; $i < len($xs); $i++ { $xs[$i] = $zero }"}}, + Line: 459, + SyntaxPatterns: []ir.PatternString{{Line: 459, Value: "for $i := 0; $i < len($xs); $i++ { $xs[$i] = $zero }"}}, ReportTemplate: "rewrite as for-range so compiler can recognize this pattern", WhereExpr: ir.FilterExpr{ - Line: 461, + Line: 460, Op: ir.FilterEqOp, Src: "m[\"zero\"].Value.Int() == 0", Args: []ir.FilterExpr{ { - Line: 461, + Line: 460, Op: ir.FilterVarValueIntOp, Src: "m[\"zero\"].Value.Int()", Value: "zero", }, { - Line: 461, + Line: 460, Op: ir.FilterIntOp, Src: "0", Value: int64(0), @@ -1441,7 +1529,7 @@ var PrecompiledRules = &ir.File{ }}, }, { - Line: 469, + Line: 468, Name: "syncMapLoadAndDelete", MatcherName: "m", DocTags: []string{"diagnostic", "experimental"}, @@ -1449,33 +1537,33 @@ var PrecompiledRules = &ir.File{ DocBefore: "v, ok := m.Load(k); if ok { m.Delete($k); f(v); }", DocAfter: "v, deleted := m.LoadAndDelete(k); if deleted { f(v) }", Rules: []ir.Rule{{ - Line: 470, - SyntaxPatterns: []ir.PatternString{{Line: 470, Value: "$_, $ok := $m.Load($k); if $ok { $m.Delete($k); $*_ }"}}, + Line: 469, + SyntaxPatterns: []ir.PatternString{{Line: 469, Value: "$_, $ok := $m.Load($k); if $ok { $m.Delete($k); $*_ }"}}, ReportTemplate: "use $m.LoadAndDelete to perform load+delete operations atomically", WhereExpr: ir.FilterExpr{ - Line: 471, + Line: 470, Op: ir.FilterAndOp, Src: "m.GoVersion().GreaterEqThan(\"1.15\") &&\n\tm[\"m\"].Type.Is(`*sync.Map`)", Args: []ir.FilterExpr{ { - Line: 471, + Line: 470, Op: ir.FilterGoVersionGreaterEqThanOp, Src: "m.GoVersion().GreaterEqThan(\"1.15\")", Value: "1.15", }, { - Line: 472, + Line: 471, Op: ir.FilterVarTypeIsOp, Src: "m[\"m\"].Type.Is(`*sync.Map`)", Value: "m", - Args: []ir.FilterExpr{{Line: 472, Op: ir.FilterStringOp, Src: "`*sync.Map`", Value: "*sync.Map"}}, + Args: []ir.FilterExpr{{Line: 471, Op: ir.FilterStringOp, Src: "`*sync.Map`", Value: "*sync.Map"}}, }, }, }, }}, }, { - Line: 480, + Line: 479, Name: "sprintfQuotedString", MatcherName: "m", DocTags: []string{"diagnostic", "experimental"}, @@ -1483,34 +1571,34 @@ var PrecompiledRules = &ir.File{ DocBefore: "fmt.Sprintf(`\"%s\"`, s)", DocAfter: "fmt.Sprintf(`%q`, s)", Rules: []ir.Rule{{ - Line: 481, - SyntaxPatterns: []ir.PatternString{{Line: 481, Value: "fmt.Sprintf($s, $*_)"}}, + Line: 480, + SyntaxPatterns: []ir.PatternString{{Line: 480, Value: "fmt.Sprintf($s, $*_)"}}, ReportTemplate: "use %q instead of \"%s\" for quoted strings", WhereExpr: ir.FilterExpr{ - Line: 482, + Line: 481, Op: ir.FilterOrOp, Src: "m[\"s\"].Text.Matches(\"^`.*\\\"%s\\\".*`$\") ||\n\tm[\"s\"].Text.Matches(`^\".*\\\\\"%s\\\\\".*\"$`)", Args: []ir.FilterExpr{ { - Line: 482, + Line: 481, Op: ir.FilterVarTextMatchesOp, Src: "m[\"s\"].Text.Matches(\"^`.*\\\"%s\\\".*`$\")", Value: "s", - Args: []ir.FilterExpr{{Line: 482, Op: ir.FilterStringOp, Src: "\"^`.*\\\"%s\\\".*`$\"", Value: "^`.*\"%s\".*`$"}}, + Args: []ir.FilterExpr{{Line: 481, Op: ir.FilterStringOp, Src: "\"^`.*\\\"%s\\\".*`$\"", Value: 
"^`.*\"%s\".*`$"}}, }, { - Line: 483, + Line: 482, Op: ir.FilterVarTextMatchesOp, Src: "m[\"s\"].Text.Matches(`^\".*\\\\\"%s\\\\\".*\"$`)", Value: "s", - Args: []ir.FilterExpr{{Line: 483, Op: ir.FilterStringOp, Src: "`^\".*\\\\\"%s\\\\\".*\"$`", Value: "^\".*\\\\\"%s\\\\\".*\"$"}}, + Args: []ir.FilterExpr{{Line: 482, Op: ir.FilterStringOp, Src: "`^\".*\\\\\"%s\\\\\".*\"$`", Value: "^\".*\\\\\"%s\\\\\".*\"$"}}, }, }, }, }}, }, { - Line: 491, + Line: 490, Name: "offBy1", MatcherName: "m", DocTags: []string{"diagnostic"}, @@ -1519,80 +1607,80 @@ var PrecompiledRules = &ir.File{ DocAfter: "xs[len(xs)-1]", Rules: []ir.Rule{ { - Line: 492, - SyntaxPatterns: []ir.PatternString{{Line: 492, Value: "$x[len($x)]"}}, + Line: 491, + SyntaxPatterns: []ir.PatternString{{Line: 491, Value: "$x[len($x)]"}}, ReportTemplate: "index expr always panics; maybe you wanted $x[len($x)-1]?", SuggestTemplate: "$x[len($x)-1]", WhereExpr: ir.FilterExpr{ - Line: 493, + Line: 492, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"x\"].Type.Is(`[]$_`)", Args: []ir.FilterExpr{ - {Line: 493, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + {Line: 492, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, { - Line: 493, + Line: 492, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]$_`)", Value: "x", - Args: []ir.FilterExpr{{Line: 493, Op: ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}}, + Args: []ir.FilterExpr{{Line: 492, Op: ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}}, }, }, }, }, { - Line: 500, + Line: 499, SyntaxPatterns: []ir.PatternString{ - {Line: 501, Value: "$i := strings.Index($s, $_); $_ := $slicing[$i:]"}, - {Line: 502, Value: "$i := strings.Index($s, $_); $_ = $slicing[$i:]"}, - {Line: 503, Value: "$i := bytes.Index($s, $_); $_ := $slicing[$i:]"}, - {Line: 504, Value: "$i := bytes.Index($s, $_); $_ = $slicing[$i:]"}, + {Line: 500, Value: "$i := strings.Index($s, $_); $_ := $slicing[$i:]"}, + {Line: 501, Value: "$i := strings.Index($s, $_); $_ = $slicing[$i:]"}, + {Line: 502, Value: "$i := bytes.Index($s, $_); $_ := $slicing[$i:]"}, + {Line: 503, Value: "$i := bytes.Index($s, $_); $_ = $slicing[$i:]"}, }, ReportTemplate: "Index() can return -1; maybe you wanted to do $s[$i+1:]", WhereExpr: ir.FilterExpr{ - Line: 505, + Line: 504, Op: ir.FilterEqOp, Src: "m[\"s\"].Text == m[\"slicing\"].Text", Args: []ir.FilterExpr{ - {Line: 505, Op: ir.FilterVarTextOp, Src: "m[\"s\"].Text", Value: "s"}, - {Line: 505, Op: ir.FilterVarTextOp, Src: "m[\"slicing\"].Text", Value: "slicing"}, + {Line: 504, Op: ir.FilterVarTextOp, Src: "m[\"s\"].Text", Value: "s"}, + {Line: 504, Op: ir.FilterVarTextOp, Src: "m[\"slicing\"].Text", Value: "slicing"}, }, }, LocationVar: "slicing", }, { - Line: 509, + Line: 508, SyntaxPatterns: []ir.PatternString{ - {Line: 510, Value: "$i := strings.Index($s, $_); $_ := $slicing[:$i]"}, - {Line: 511, Value: "$i := strings.Index($s, $_); $_ = $slicing[:$i]"}, - {Line: 512, Value: "$i := bytes.Index($s, $_); $_ := $slicing[:$i]"}, - {Line: 513, Value: "$i := bytes.Index($s, $_); $_ = $slicing[:$i]"}, + {Line: 509, Value: "$i := strings.Index($s, $_); $_ := $slicing[:$i]"}, + {Line: 510, Value: "$i := strings.Index($s, $_); $_ = $slicing[:$i]"}, + {Line: 511, Value: "$i := bytes.Index($s, $_); $_ := $slicing[:$i]"}, + {Line: 512, Value: "$i := bytes.Index($s, $_); $_ = $slicing[:$i]"}, }, ReportTemplate: "Index() can return -1; maybe you wanted to do $s[:$i+1]", WhereExpr: ir.FilterExpr{ - Line: 514, + Line: 513, Op: ir.FilterEqOp, Src: "m[\"s\"].Text == m[\"slicing\"].Text", Args: 
[]ir.FilterExpr{ - {Line: 514, Op: ir.FilterVarTextOp, Src: "m[\"s\"].Text", Value: "s"}, - {Line: 514, Op: ir.FilterVarTextOp, Src: "m[\"slicing\"].Text", Value: "slicing"}, + {Line: 513, Op: ir.FilterVarTextOp, Src: "m[\"s\"].Text", Value: "s"}, + {Line: 513, Op: ir.FilterVarTextOp, Src: "m[\"slicing\"].Text", Value: "slicing"}, }, }, LocationVar: "slicing", }, { - Line: 518, + Line: 517, SyntaxPatterns: []ir.PatternString{ - {Line: 519, Value: "$s[strings.Index($s, $_):]"}, - {Line: 520, Value: "$s[:strings.Index($s, $_)]"}, - {Line: 521, Value: "$s[bytes.Index($s, $_):]"}, - {Line: 522, Value: "$s[:bytes.Index($s, $_)]"}, + {Line: 518, Value: "$s[strings.Index($s, $_):]"}, + {Line: 519, Value: "$s[:strings.Index($s, $_)]"}, + {Line: 520, Value: "$s[bytes.Index($s, $_):]"}, + {Line: 521, Value: "$s[:bytes.Index($s, $_)]"}, }, ReportTemplate: "Index() can return -1; maybe you wanted to do Index()+1", }, }, }, { - Line: 530, + Line: 529, Name: "unslice", MatcherName: "m", DocTags: []string{"style"}, @@ -1600,35 +1688,35 @@ var PrecompiledRules = &ir.File{ DocBefore: "copy(b[:], values...)", DocAfter: "copy(b, values...)", Rules: []ir.Rule{{ - Line: 531, - SyntaxPatterns: []ir.PatternString{{Line: 531, Value: "$s[:]"}}, + Line: 530, + SyntaxPatterns: []ir.PatternString{{Line: 530, Value: "$s[:]"}}, ReportTemplate: "could simplify $$ to $s", SuggestTemplate: "$s", WhereExpr: ir.FilterExpr{ - Line: 532, + Line: 531, Op: ir.FilterOrOp, Src: "m[\"s\"].Type.Is(`string`) || m[\"s\"].Type.Is(`[]$_`)", Args: []ir.FilterExpr{ { - Line: 532, + Line: 531, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", - Args: []ir.FilterExpr{{Line: 532, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, + Args: []ir.FilterExpr{{Line: 531, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, }, { - Line: 532, + Line: 531, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`[]$_`)", Value: "s", - Args: []ir.FilterExpr{{Line: 532, Op: ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}}, + Args: []ir.FilterExpr{{Line: 531, Op: ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}}, }, }, }, }}, }, { - Line: 541, + Line: 540, Name: "yodaStyleExpr", MatcherName: "m", DocTags: []string{"style", "experimental"}, @@ -1637,105 +1725,105 @@ var PrecompiledRules = &ir.File{ DocAfter: "return ptr != nil", Rules: []ir.Rule{ { - Line: 542, - SyntaxPatterns: []ir.PatternString{{Line: 542, Value: "$constval != $x"}}, + Line: 541, + SyntaxPatterns: []ir.PatternString{{Line: 541, Value: "$constval != $x"}}, ReportTemplate: "consider to change order in expression to $x != $constval", WhereExpr: ir.FilterExpr{ - Line: 542, + Line: 541, Op: ir.FilterAndOp, Src: "m[\"constval\"].Node.Is(`BasicLit`) && !m[\"x\"].Node.Is(`BasicLit`)", Args: []ir.FilterExpr{ { - Line: 542, + Line: 541, Op: ir.FilterVarNodeIsOp, Src: "m[\"constval\"].Node.Is(`BasicLit`)", Value: "constval", - Args: []ir.FilterExpr{{Line: 542, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, + Args: []ir.FilterExpr{{Line: 541, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, }, { - Line: 542, + Line: 541, Op: ir.FilterNotOp, Src: "!m[\"x\"].Node.Is(`BasicLit`)", Args: []ir.FilterExpr{{ - Line: 542, + Line: 541, Op: ir.FilterVarNodeIsOp, Src: "m[\"x\"].Node.Is(`BasicLit`)", Value: "x", - Args: []ir.FilterExpr{{Line: 542, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, + Args: []ir.FilterExpr{{Line: 541, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, }}, }, }, }, }, { - Line: 544, - 
SyntaxPatterns: []ir.PatternString{{Line: 544, Value: "$constval == $x"}}, + Line: 543, + SyntaxPatterns: []ir.PatternString{{Line: 543, Value: "$constval == $x"}}, ReportTemplate: "consider to change order in expression to $x == $constval", WhereExpr: ir.FilterExpr{ - Line: 544, + Line: 543, Op: ir.FilterAndOp, Src: "m[\"constval\"].Node.Is(`BasicLit`) && !m[\"x\"].Node.Is(`BasicLit`)", Args: []ir.FilterExpr{ { - Line: 544, + Line: 543, Op: ir.FilterVarNodeIsOp, Src: "m[\"constval\"].Node.Is(`BasicLit`)", Value: "constval", - Args: []ir.FilterExpr{{Line: 544, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, + Args: []ir.FilterExpr{{Line: 543, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, }, { - Line: 544, + Line: 543, Op: ir.FilterNotOp, Src: "!m[\"x\"].Node.Is(`BasicLit`)", Args: []ir.FilterExpr{{ - Line: 544, + Line: 543, Op: ir.FilterVarNodeIsOp, Src: "m[\"x\"].Node.Is(`BasicLit`)", Value: "x", - Args: []ir.FilterExpr{{Line: 544, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, + Args: []ir.FilterExpr{{Line: 543, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, }}, }, }, }, }, { - Line: 547, - SyntaxPatterns: []ir.PatternString{{Line: 547, Value: "nil != $x"}}, + Line: 546, + SyntaxPatterns: []ir.PatternString{{Line: 546, Value: "nil != $x"}}, ReportTemplate: "consider to change order in expression to $x != nil", WhereExpr: ir.FilterExpr{ - Line: 547, + Line: 546, Op: ir.FilterNotOp, Src: "!m[\"x\"].Node.Is(`BasicLit`)", Args: []ir.FilterExpr{{ - Line: 547, + Line: 546, Op: ir.FilterVarNodeIsOp, Src: "m[\"x\"].Node.Is(`BasicLit`)", Value: "x", - Args: []ir.FilterExpr{{Line: 547, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, + Args: []ir.FilterExpr{{Line: 546, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, }}, }, }, { - Line: 549, - SyntaxPatterns: []ir.PatternString{{Line: 549, Value: "nil == $x"}}, + Line: 548, + SyntaxPatterns: []ir.PatternString{{Line: 548, Value: "nil == $x"}}, ReportTemplate: "consider to change order in expression to $x == nil", WhereExpr: ir.FilterExpr{ - Line: 549, + Line: 548, Op: ir.FilterNotOp, Src: "!m[\"x\"].Node.Is(`BasicLit`)", Args: []ir.FilterExpr{{ - Line: 549, + Line: 548, Op: ir.FilterVarNodeIsOp, Src: "m[\"x\"].Node.Is(`BasicLit`)", Value: "x", - Args: []ir.FilterExpr{{Line: 549, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, + Args: []ir.FilterExpr{{Line: 548, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, }}, }, }, }, }, { - Line: 557, + Line: 556, Name: "equalFold", MatcherName: "m", DocTags: []string{"performance", "experimental"}, @@ -1744,114 +1832,114 @@ var PrecompiledRules = &ir.File{ DocAfter: "strings.EqualFold(x, y)", Rules: []ir.Rule{ { - Line: 566, + Line: 565, SyntaxPatterns: []ir.PatternString{ - {Line: 567, Value: "strings.ToLower($x) == $y"}, - {Line: 568, Value: "strings.ToLower($x) == strings.ToLower($y)"}, - {Line: 569, Value: "$x == strings.ToLower($y)"}, - {Line: 570, Value: "strings.ToUpper($x) == $y"}, - {Line: 571, Value: "strings.ToUpper($x) == strings.ToUpper($y)"}, - {Line: 572, Value: "$x == strings.ToUpper($y)"}, + {Line: 566, Value: "strings.ToLower($x) == $y"}, + {Line: 567, Value: "strings.ToLower($x) == strings.ToLower($y)"}, + {Line: 568, Value: "$x == strings.ToLower($y)"}, + {Line: 569, Value: "strings.ToUpper($x) == $y"}, + {Line: 570, Value: "strings.ToUpper($x) == strings.ToUpper($y)"}, + {Line: 571, Value: "$x == strings.ToUpper($y)"}, }, ReportTemplate: "consider replacing with 
strings.EqualFold($x, $y)", SuggestTemplate: "strings.EqualFold($x, $y)", WhereExpr: ir.FilterExpr{ - Line: 573, + Line: 572, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure && m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ { - Line: 573, + Line: 572, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure", Args: []ir.FilterExpr{ - {Line: 573, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - {Line: 573, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, + {Line: 572, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + {Line: 572, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, }, }, { - Line: 573, + Line: 572, Op: ir.FilterNeqOp, Src: "m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ - {Line: 573, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, - {Line: 573, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, + {Line: 572, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, + {Line: 572, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, }, }, }, }, }, { - Line: 578, + Line: 577, SyntaxPatterns: []ir.PatternString{ - {Line: 579, Value: "strings.ToLower($x) != $y"}, - {Line: 580, Value: "strings.ToLower($x) != strings.ToLower($y)"}, - {Line: 581, Value: "$x != strings.ToLower($y)"}, - {Line: 582, Value: "strings.ToUpper($x) != $y"}, - {Line: 583, Value: "strings.ToUpper($x) != strings.ToUpper($y)"}, - {Line: 584, Value: "$x != strings.ToUpper($y)"}, + {Line: 578, Value: "strings.ToLower($x) != $y"}, + {Line: 579, Value: "strings.ToLower($x) != strings.ToLower($y)"}, + {Line: 580, Value: "$x != strings.ToLower($y)"}, + {Line: 581, Value: "strings.ToUpper($x) != $y"}, + {Line: 582, Value: "strings.ToUpper($x) != strings.ToUpper($y)"}, + {Line: 583, Value: "$x != strings.ToUpper($y)"}, }, ReportTemplate: "consider replacing with !strings.EqualFold($x, $y)", SuggestTemplate: "!strings.EqualFold($x, $y)", WhereExpr: ir.FilterExpr{ - Line: 585, + Line: 584, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure && m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ { - Line: 585, + Line: 584, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure", Args: []ir.FilterExpr{ - {Line: 585, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - {Line: 585, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, + {Line: 584, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + {Line: 584, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, }, }, { - Line: 585, + Line: 584, Op: ir.FilterNeqOp, Src: "m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ - {Line: 585, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, - {Line: 585, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, + {Line: 584, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, + {Line: 584, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, }, }, }, }, }, { - Line: 590, + Line: 589, SyntaxPatterns: []ir.PatternString{ - {Line: 591, Value: "bytes.Equal(bytes.ToLower($x), $y)"}, - {Line: 592, Value: "bytes.Equal(bytes.ToLower($x), bytes.ToLower($y))"}, - {Line: 593, Value: "bytes.Equal($x, bytes.ToLower($y))"}, - {Line: 594, Value: "bytes.Equal(bytes.ToUpper($x), $y)"}, - {Line: 595, Value: "bytes.Equal(bytes.ToUpper($x), bytes.ToUpper($y))"}, - {Line: 596, Value: "bytes.Equal($x, bytes.ToUpper($y))"}, + {Line: 590, Value: "bytes.Equal(bytes.ToLower($x), $y)"}, + {Line: 591, Value: "bytes.Equal(bytes.ToLower($x), bytes.ToLower($y))"}, + {Line: 592, Value: "bytes.Equal($x, 
bytes.ToLower($y))"}, + {Line: 593, Value: "bytes.Equal(bytes.ToUpper($x), $y)"}, + {Line: 594, Value: "bytes.Equal(bytes.ToUpper($x), bytes.ToUpper($y))"}, + {Line: 595, Value: "bytes.Equal($x, bytes.ToUpper($y))"}, }, ReportTemplate: "consider replacing with bytes.EqualFold($x, $y)", SuggestTemplate: "bytes.EqualFold($x, $y)", WhereExpr: ir.FilterExpr{ - Line: 597, + Line: 596, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure && m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ { - Line: 597, + Line: 596, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure", Args: []ir.FilterExpr{ - {Line: 597, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - {Line: 597, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, + {Line: 596, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + {Line: 596, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, }, }, { - Line: 597, + Line: 596, Op: ir.FilterNeqOp, Src: "m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ - {Line: 597, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, - {Line: 597, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, + {Line: 596, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, + {Line: 596, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, }, }, }, @@ -1860,7 +1948,7 @@ var PrecompiledRules = &ir.File{ }, }, { - Line: 606, + Line: 605, Name: "argOrder", MatcherName: "m", DocTags: []string{"diagnostic"}, @@ -1868,45 +1956,45 @@ var PrecompiledRules = &ir.File{ DocBefore: "strings.HasPrefix(\"#\", userpass)", DocAfter: "strings.HasPrefix(userpass, \"#\")", Rules: []ir.Rule{{ - Line: 607, + Line: 606, SyntaxPatterns: []ir.PatternString{ - {Line: 608, Value: "strings.HasPrefix($lit, $s)"}, - {Line: 609, Value: "bytes.HasPrefix($lit, $s)"}, - {Line: 610, Value: "strings.HasSuffix($lit, $s)"}, - {Line: 611, Value: "bytes.HasSuffix($lit, $s)"}, - {Line: 612, Value: "strings.Contains($lit, $s)"}, - {Line: 613, Value: "bytes.Contains($lit, $s)"}, - {Line: 614, Value: "strings.TrimPrefix($lit, $s)"}, - {Line: 615, Value: "bytes.TrimPrefix($lit, $s)"}, - {Line: 616, Value: "strings.TrimSuffix($lit, $s)"}, - {Line: 617, Value: "bytes.TrimSuffix($lit, $s)"}, - {Line: 618, Value: "strings.Split($lit, $s)"}, - {Line: 619, Value: "bytes.Split($lit, $s)"}, + {Line: 607, Value: "strings.HasPrefix($lit, $s)"}, + {Line: 608, Value: "bytes.HasPrefix($lit, $s)"}, + {Line: 609, Value: "strings.HasSuffix($lit, $s)"}, + {Line: 610, Value: "bytes.HasSuffix($lit, $s)"}, + {Line: 611, Value: "strings.Contains($lit, $s)"}, + {Line: 612, Value: "bytes.Contains($lit, $s)"}, + {Line: 613, Value: "strings.TrimPrefix($lit, $s)"}, + {Line: 614, Value: "bytes.TrimPrefix($lit, $s)"}, + {Line: 615, Value: "strings.TrimSuffix($lit, $s)"}, + {Line: 616, Value: "bytes.TrimSuffix($lit, $s)"}, + {Line: 617, Value: "strings.Split($lit, $s)"}, + {Line: 618, Value: "bytes.Split($lit, $s)"}, }, ReportTemplate: "$lit and $s arguments order looks reversed", WhereExpr: ir.FilterExpr{ - Line: 620, + Line: 619, Op: ir.FilterAndOp, Src: "(m[\"lit\"].Const || m[\"lit\"].ConstSlice) &&\n\t!(m[\"s\"].Const || m[\"s\"].ConstSlice) &&\n\t!m[\"lit\"].Node.Is(`Ident`)", Args: []ir.FilterExpr{ { - Line: 620, + Line: 619, Op: ir.FilterAndOp, Src: "(m[\"lit\"].Const || m[\"lit\"].ConstSlice) &&\n\t!(m[\"s\"].Const || m[\"s\"].ConstSlice)", Args: []ir.FilterExpr{ { - Line: 620, + Line: 619, Op: ir.FilterOrOp, Src: "(m[\"lit\"].Const || m[\"lit\"].ConstSlice)", Args: []ir.FilterExpr{ { - Line: 620, + 
Line: 619, Op: ir.FilterVarConstOp, Src: "m[\"lit\"].Const", Value: "lit", }, { - Line: 620, + Line: 619, Op: ir.FilterVarConstSliceOp, Src: "m[\"lit\"].ConstSlice", Value: "lit", @@ -1914,22 +2002,22 @@ var PrecompiledRules = &ir.File{ }, }, { - Line: 621, + Line: 620, Op: ir.FilterNotOp, Src: "!(m[\"s\"].Const || m[\"s\"].ConstSlice)", Args: []ir.FilterExpr{{ - Line: 621, + Line: 620, Op: ir.FilterOrOp, Src: "(m[\"s\"].Const || m[\"s\"].ConstSlice)", Args: []ir.FilterExpr{ { - Line: 621, + Line: 620, Op: ir.FilterVarConstOp, Src: "m[\"s\"].Const", Value: "s", }, { - Line: 621, + Line: 620, Op: ir.FilterVarConstSliceOp, Src: "m[\"s\"].ConstSlice", Value: "s", @@ -1940,15 +2028,15 @@ var PrecompiledRules = &ir.File{ }, }, { - Line: 622, + Line: 621, Op: ir.FilterNotOp, Src: "!m[\"lit\"].Node.Is(`Ident`)", Args: []ir.FilterExpr{{ - Line: 622, + Line: 621, Op: ir.FilterVarNodeIsOp, Src: "m[\"lit\"].Node.Is(`Ident`)", Value: "lit", - Args: []ir.FilterExpr{{Line: 622, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}}, + Args: []ir.FilterExpr{{Line: 621, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}}, }}, }, }, @@ -1956,7 +2044,7 @@ var PrecompiledRules = &ir.File{ }}, }, { - Line: 630, + Line: 629, Name: "stringConcatSimplify", MatcherName: "m", DocTags: []string{"style", "experimental"}, @@ -1965,27 +2053,27 @@ var PrecompiledRules = &ir.File{ DocAfter: "x + \"_\" + y", Rules: []ir.Rule{ { - Line: 631, - SyntaxPatterns: []ir.PatternString{{Line: 631, Value: "strings.Join([]string{$x, $y}, \"\")"}}, + Line: 630, + SyntaxPatterns: []ir.PatternString{{Line: 630, Value: "strings.Join([]string{$x, $y}, \"\")"}}, ReportTemplate: "suggestion: $x + $y", SuggestTemplate: "$x + $y", }, { - Line: 632, - SyntaxPatterns: []ir.PatternString{{Line: 632, Value: "strings.Join([]string{$x, $y, $z}, \"\")"}}, + Line: 631, + SyntaxPatterns: []ir.PatternString{{Line: 631, Value: "strings.Join([]string{$x, $y, $z}, \"\")"}}, ReportTemplate: "suggestion: $x + $y + $z", SuggestTemplate: "$x + $y + $z", }, { - Line: 633, - SyntaxPatterns: []ir.PatternString{{Line: 633, Value: "strings.Join([]string{$x, $y}, $glue)"}}, + Line: 632, + SyntaxPatterns: []ir.PatternString{{Line: 632, Value: "strings.Join([]string{$x, $y}, $glue)"}}, ReportTemplate: "suggestion: $x + $glue + $y", SuggestTemplate: "$x + $glue + $y", }, }, }, { - Line: 640, + Line: 639, Name: "timeExprSimplify", MatcherName: "m", DocTags: []string{"style", "experimental"}, @@ -1994,39 +2082,39 @@ var PrecompiledRules = &ir.File{ DocAfter: "t.UnixMilli()", Rules: []ir.Rule{ { - Line: 645, - SyntaxPatterns: []ir.PatternString{{Line: 645, Value: "$t.Unix() / 1000"}}, + Line: 644, + SyntaxPatterns: []ir.PatternString{{Line: 644, Value: "$t.Unix() / 1000"}}, ReportTemplate: "use $t.UnixMilli() instead of $$", SuggestTemplate: "$t.UnixMilli()", WhereExpr: ir.FilterExpr{ - Line: 646, + Line: 645, Op: ir.FilterAndOp, Src: "m.GoVersion().GreaterEqThan(\"1.17\") && isTime(m[\"t\"])", Args: []ir.FilterExpr{ { - Line: 646, + Line: 645, Op: ir.FilterGoVersionGreaterEqThanOp, Src: "m.GoVersion().GreaterEqThan(\"1.17\")", Value: "1.17", }, { - Line: 646, + Line: 645, Op: ir.FilterOrOp, Src: "isTime(m[\"t\"])", Args: []ir.FilterExpr{ { - Line: 646, + Line: 645, Op: ir.FilterVarTypeIsOp, Src: "m[\"t\"].Type.Is(`time.Time`)", Value: "t", - Args: []ir.FilterExpr{{Line: 642, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}}, + Args: []ir.FilterExpr{{Line: 641, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}}, }, { - Line: 646, + Line: 
645, Op: ir.FilterVarTypeIsOp, Src: "m[\"t\"].Type.Is(`*time.Time`)", Value: "t", - Args: []ir.FilterExpr{{Line: 642, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}}, + Args: []ir.FilterExpr{{Line: 641, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}}, }, }, }, @@ -2034,39 +2122,39 @@ var PrecompiledRules = &ir.File{ }, }, { - Line: 650, - SyntaxPatterns: []ir.PatternString{{Line: 650, Value: "$t.UnixNano() * 1000"}}, + Line: 649, + SyntaxPatterns: []ir.PatternString{{Line: 649, Value: "$t.UnixNano() * 1000"}}, ReportTemplate: "use $t.UnixMicro() instead of $$", SuggestTemplate: "$t.UnixMicro()", WhereExpr: ir.FilterExpr{ - Line: 651, + Line: 650, Op: ir.FilterAndOp, Src: "m.GoVersion().GreaterEqThan(\"1.17\") && isTime(m[\"t\"])", Args: []ir.FilterExpr{ { - Line: 651, + Line: 650, Op: ir.FilterGoVersionGreaterEqThanOp, Src: "m.GoVersion().GreaterEqThan(\"1.17\")", Value: "1.17", }, { - Line: 651, + Line: 650, Op: ir.FilterOrOp, Src: "isTime(m[\"t\"])", Args: []ir.FilterExpr{ { - Line: 651, + Line: 650, Op: ir.FilterVarTypeIsOp, Src: "m[\"t\"].Type.Is(`time.Time`)", Value: "t", - Args: []ir.FilterExpr{{Line: 642, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}}, + Args: []ir.FilterExpr{{Line: 641, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}}, }, { - Line: 651, + Line: 650, Op: ir.FilterVarTypeIsOp, Src: "m[\"t\"].Type.Is(`*time.Time`)", Value: "t", - Args: []ir.FilterExpr{{Line: 642, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}}, + Args: []ir.FilterExpr{{Line: 641, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}}, }, }, }, @@ -2076,7 +2164,7 @@ var PrecompiledRules = &ir.File{ }, }, { - Line: 660, + Line: 659, Name: "timeCmpSimplify", MatcherName: "m", DocTags: []string{"style", "experimental"}, @@ -2085,55 +2173,55 @@ var PrecompiledRules = &ir.File{ DocAfter: "t.After(tt)", Rules: []ir.Rule{ { - Line: 665, - SyntaxPatterns: []ir.PatternString{{Line: 665, Value: "!$t.Before($tt)"}}, + Line: 664, + SyntaxPatterns: []ir.PatternString{{Line: 664, Value: "!$t.Before($tt)"}}, ReportTemplate: "suggestion: $t.After($tt)", SuggestTemplate: "$t.After($tt)", WhereExpr: ir.FilterExpr{ - Line: 666, + Line: 665, Op: ir.FilterOrOp, Src: "isTime(m[\"t\"])", Args: []ir.FilterExpr{ { - Line: 666, + Line: 665, Op: ir.FilterVarTypeIsOp, Src: "m[\"t\"].Type.Is(`time.Time`)", Value: "t", - Args: []ir.FilterExpr{{Line: 662, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}}, + Args: []ir.FilterExpr{{Line: 661, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}}, }, { - Line: 666, + Line: 665, Op: ir.FilterVarTypeIsOp, Src: "m[\"t\"].Type.Is(`*time.Time`)", Value: "t", - Args: []ir.FilterExpr{{Line: 662, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}}, + Args: []ir.FilterExpr{{Line: 661, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}}, }, }, }, }, { - Line: 669, - SyntaxPatterns: []ir.PatternString{{Line: 669, Value: "!$t.After($tt)"}}, + Line: 668, + SyntaxPatterns: []ir.PatternString{{Line: 668, Value: "!$t.After($tt)"}}, ReportTemplate: "suggestion: $t.Before($tt)", SuggestTemplate: "$t.Before($tt)", WhereExpr: ir.FilterExpr{ - Line: 670, + Line: 669, Op: ir.FilterOrOp, Src: "isTime(m[\"t\"])", Args: []ir.FilterExpr{ { - Line: 670, + Line: 669, Op: ir.FilterVarTypeIsOp, Src: "m[\"t\"].Type.Is(`time.Time`)", Value: "t", - Args: []ir.FilterExpr{{Line: 662, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}}, + Args: 
[]ir.FilterExpr{{Line: 661, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}}, }, { - Line: 670, + Line: 669, Op: ir.FilterVarTypeIsOp, Src: "m[\"t\"].Type.Is(`*time.Time`)", Value: "t", - Args: []ir.FilterExpr{{Line: 662, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}}, + Args: []ir.FilterExpr{{Line: 661, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}}, }, }, }, @@ -2141,7 +2229,7 @@ var PrecompiledRules = &ir.File{ }, }, { - Line: 678, + Line: 677, Name: "exposedSyncMutex", MatcherName: "m", DocTags: []string{"style", "experimental"}, @@ -2150,57 +2238,57 @@ var PrecompiledRules = &ir.File{ DocAfter: "type Foo struct{ ...; mu sync.Mutex; ... }", Rules: []ir.Rule{ { - Line: 683, - SyntaxPatterns: []ir.PatternString{{Line: 683, Value: "type $x struct { $*_; sync.Mutex; $*_ }"}}, + Line: 682, + SyntaxPatterns: []ir.PatternString{{Line: 682, Value: "type $x struct { $*_; sync.Mutex; $*_ }"}}, ReportTemplate: "don't embed sync.Mutex", WhereExpr: ir.FilterExpr{ - Line: 684, + Line: 683, Op: ir.FilterVarTextMatchesOp, Src: "isExported(m[\"x\"])", Value: "x", - Args: []ir.FilterExpr{{Line: 680, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}}, + Args: []ir.FilterExpr{{Line: 679, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}}, }, }, { - Line: 687, - SyntaxPatterns: []ir.PatternString{{Line: 687, Value: "type $x struct { $*_; *sync.Mutex; $*_ }"}}, + Line: 686, + SyntaxPatterns: []ir.PatternString{{Line: 686, Value: "type $x struct { $*_; *sync.Mutex; $*_ }"}}, ReportTemplate: "don't embed *sync.Mutex", WhereExpr: ir.FilterExpr{ - Line: 688, + Line: 687, Op: ir.FilterVarTextMatchesOp, Src: "isExported(m[\"x\"])", Value: "x", - Args: []ir.FilterExpr{{Line: 680, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}}, + Args: []ir.FilterExpr{{Line: 679, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}}, }, }, { - Line: 691, - SyntaxPatterns: []ir.PatternString{{Line: 691, Value: "type $x struct { $*_; sync.RWMutex; $*_ }"}}, + Line: 690, + SyntaxPatterns: []ir.PatternString{{Line: 690, Value: "type $x struct { $*_; sync.RWMutex; $*_ }"}}, ReportTemplate: "don't embed sync.RWMutex", WhereExpr: ir.FilterExpr{ - Line: 692, + Line: 691, Op: ir.FilterVarTextMatchesOp, Src: "isExported(m[\"x\"])", Value: "x", - Args: []ir.FilterExpr{{Line: 680, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}}, + Args: []ir.FilterExpr{{Line: 679, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}}, }, }, { - Line: 695, - SyntaxPatterns: []ir.PatternString{{Line: 695, Value: "type $x struct { $*_; *sync.RWMutex; $*_ }"}}, + Line: 694, + SyntaxPatterns: []ir.PatternString{{Line: 694, Value: "type $x struct { $*_; *sync.RWMutex; $*_ }"}}, ReportTemplate: "don't embed *sync.RWMutex", WhereExpr: ir.FilterExpr{ - Line: 696, + Line: 695, Op: ir.FilterVarTextMatchesOp, Src: "isExported(m[\"x\"])", Value: "x", - Args: []ir.FilterExpr{{Line: 680, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}}, + Args: []ir.FilterExpr{{Line: 679, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}}, }, }, }, }, { - Line: 704, + Line: 703, Name: "badSorting", MatcherName: "m", DocTags: []string{"diagnostic", "experimental"}, @@ -2209,48 +2297,48 @@ var PrecompiledRules = &ir.File{ DocAfter: "sort.Strings(xs)", Rules: []ir.Rule{ { - Line: 705, - SyntaxPatterns: []ir.PatternString{{Line: 705, Value: "$x = sort.IntSlice($x)"}}, + Line: 704, + SyntaxPatterns: []ir.PatternString{{Line: 704, Value: "$x = 
sort.IntSlice($x)"}}, ReportTemplate: "suspicious sort.IntSlice usage, maybe sort.Ints was intended?", SuggestTemplate: "sort.Ints($x)", WhereExpr: ir.FilterExpr{ - Line: 706, + Line: 705, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]int`)", Value: "x", - Args: []ir.FilterExpr{{Line: 706, Op: ir.FilterStringOp, Src: "`[]int`", Value: "[]int"}}, + Args: []ir.FilterExpr{{Line: 705, Op: ir.FilterStringOp, Src: "`[]int`", Value: "[]int"}}, }, }, { - Line: 710, - SyntaxPatterns: []ir.PatternString{{Line: 710, Value: "$x = sort.Float64Slice($x)"}}, + Line: 709, + SyntaxPatterns: []ir.PatternString{{Line: 709, Value: "$x = sort.Float64Slice($x)"}}, ReportTemplate: "suspicious sort.Float64s usage, maybe sort.Float64s was intended?", SuggestTemplate: "sort.Float64s($x)", WhereExpr: ir.FilterExpr{ - Line: 711, + Line: 710, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]float64`)", Value: "x", - Args: []ir.FilterExpr{{Line: 711, Op: ir.FilterStringOp, Src: "`[]float64`", Value: "[]float64"}}, + Args: []ir.FilterExpr{{Line: 710, Op: ir.FilterStringOp, Src: "`[]float64`", Value: "[]float64"}}, }, }, { - Line: 715, - SyntaxPatterns: []ir.PatternString{{Line: 715, Value: "$x = sort.StringSlice($x)"}}, + Line: 714, + SyntaxPatterns: []ir.PatternString{{Line: 714, Value: "$x = sort.StringSlice($x)"}}, ReportTemplate: "suspicious sort.StringSlice usage, maybe sort.Strings was intended?", SuggestTemplate: "sort.Strings($x)", WhereExpr: ir.FilterExpr{ - Line: 716, + Line: 715, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]string`)", Value: "x", - Args: []ir.FilterExpr{{Line: 716, Op: ir.FilterStringOp, Src: "`[]string`", Value: "[]string"}}, + Args: []ir.FilterExpr{{Line: 715, Op: ir.FilterStringOp, Src: "`[]string`", Value: "[]string"}}, }, }, }, }, { - Line: 725, + Line: 724, Name: "externalErrorReassign", MatcherName: "m", DocTags: []string{"diagnostic", "experimental"}, @@ -2258,34 +2346,34 @@ var PrecompiledRules = &ir.File{ DocBefore: "io.EOF = nil", DocAfter: "/* don't do it */", Rules: []ir.Rule{{ - Line: 726, - SyntaxPatterns: []ir.PatternString{{Line: 726, Value: "$pkg.$err = $x"}}, + Line: 725, + SyntaxPatterns: []ir.PatternString{{Line: 725, Value: "$pkg.$err = $x"}}, ReportTemplate: "suspicious reassigment of error from another package", WhereExpr: ir.FilterExpr{ - Line: 727, + Line: 726, Op: ir.FilterAndOp, Src: "m[\"err\"].Type.Is(`error`) && m[\"pkg\"].Object.Is(`PkgName`)", Args: []ir.FilterExpr{ { - Line: 727, + Line: 726, Op: ir.FilterVarTypeIsOp, Src: "m[\"err\"].Type.Is(`error`)", Value: "err", - Args: []ir.FilterExpr{{Line: 727, Op: ir.FilterStringOp, Src: "`error`", Value: "error"}}, + Args: []ir.FilterExpr{{Line: 726, Op: ir.FilterStringOp, Src: "`error`", Value: "error"}}, }, { - Line: 727, + Line: 726, Op: ir.FilterVarObjectIsOp, Src: "m[\"pkg\"].Object.Is(`PkgName`)", Value: "pkg", - Args: []ir.FilterExpr{{Line: 727, Op: ir.FilterStringOp, Src: "`PkgName`", Value: "PkgName"}}, + Args: []ir.FilterExpr{{Line: 726, Op: ir.FilterStringOp, Src: "`PkgName`", Value: "PkgName"}}, }, }, }, }}, }, { - Line: 735, + Line: 734, Name: "emptyDecl", MatcherName: "m", DocTags: []string{"diagnostic", "experimental"}, @@ -2294,24 +2382,24 @@ var PrecompiledRules = &ir.File{ DocAfter: "/* nothing */", Rules: []ir.Rule{ { - Line: 736, - SyntaxPatterns: []ir.PatternString{{Line: 736, Value: "var()"}}, + Line: 735, + SyntaxPatterns: []ir.PatternString{{Line: 735, Value: "var()"}}, ReportTemplate: "empty var() block", }, { - Line: 737, - SyntaxPatterns: []ir.PatternString{{Line: 737, 
Value: "const()"}}, + Line: 736, + SyntaxPatterns: []ir.PatternString{{Line: 736, Value: "const()"}}, ReportTemplate: "empty const() block", }, { - Line: 738, - SyntaxPatterns: []ir.PatternString{{Line: 738, Value: "type()"}}, + Line: 737, + SyntaxPatterns: []ir.PatternString{{Line: 737, Value: "type()"}}, ReportTemplate: "empty type() block", }, }, }, { - Line: 745, + Line: 744, Name: "dynamicFmtString", MatcherName: "m", DocTags: []string{"diagnostic", "experimental"}, @@ -2320,16 +2408,16 @@ var PrecompiledRules = &ir.File{ DocAfter: "fmt.Errorf(\"%s\", msg)", Rules: []ir.Rule{ { - Line: 746, - SyntaxPatterns: []ir.PatternString{{Line: 746, Value: "fmt.Errorf($f)"}}, + Line: 745, + SyntaxPatterns: []ir.PatternString{{Line: 745, Value: "fmt.Errorf($f)"}}, ReportTemplate: "use errors.New($f) or fmt.Errorf(\"%s\", $f) instead", SuggestTemplate: "errors.New($f)", WhereExpr: ir.FilterExpr{ - Line: 747, + Line: 746, Op: ir.FilterNotOp, Src: "!m[\"f\"].Const", Args: []ir.FilterExpr{{ - Line: 747, + Line: 746, Op: ir.FilterVarConstOp, Src: "m[\"f\"].Const", Value: "f", @@ -2337,15 +2425,15 @@ var PrecompiledRules = &ir.File{ }, }, { - Line: 751, - SyntaxPatterns: []ir.PatternString{{Line: 751, Value: "fmt.Errorf($f($*args))"}}, + Line: 750, + SyntaxPatterns: []ir.PatternString{{Line: 750, Value: "fmt.Errorf($f($*args))"}}, ReportTemplate: "use errors.New($f($*args)) or fmt.Errorf(\"%s\", $f($*args)) instead", SuggestTemplate: "errors.New($f($*args))", }, }, }, { - Line: 760, + Line: 759, Name: "stringsCompare", MatcherName: "m", DocTags: []string{"style", "experimental"}, @@ -2354,25 +2442,25 @@ var PrecompiledRules = &ir.File{ DocAfter: "x < y", Rules: []ir.Rule{ { - Line: 761, - SyntaxPatterns: []ir.PatternString{{Line: 761, Value: "strings.Compare($s1, $s2) == 0"}}, + Line: 760, + SyntaxPatterns: []ir.PatternString{{Line: 760, Value: "strings.Compare($s1, $s2) == 0"}}, ReportTemplate: "suggestion: $s1 == $s2", SuggestTemplate: "$s1 == $s2", }, { - Line: 764, + Line: 763, SyntaxPatterns: []ir.PatternString{ - {Line: 764, Value: "strings.Compare($s1, $s2) == -1"}, - {Line: 765, Value: "strings.Compare($s1, $s2) < 0"}, + {Line: 763, Value: "strings.Compare($s1, $s2) == -1"}, + {Line: 764, Value: "strings.Compare($s1, $s2) < 0"}, }, ReportTemplate: "suggestion: $s1 < $s2", SuggestTemplate: "$s1 < $s2", }, { - Line: 768, + Line: 767, SyntaxPatterns: []ir.PatternString{ - {Line: 768, Value: "strings.Compare($s1, $s2) == 1"}, - {Line: 769, Value: "strings.Compare($s1, $s2) > 0"}, + {Line: 767, Value: "strings.Compare($s1, $s2) == 1"}, + {Line: 768, Value: "strings.Compare($s1, $s2) > 0"}, }, ReportTemplate: "suggestion: $s1 > $s2", SuggestTemplate: "$s1 > $s2", @@ -2380,7 +2468,7 @@ var PrecompiledRules = &ir.File{ }, }, { - Line: 777, + Line: 776, Name: "uncheckedInlineErr", MatcherName: "m", DocTags: []string{"diagnostic", "experimental"}, @@ -2388,47 +2476,47 @@ var PrecompiledRules = &ir.File{ DocBefore: "if err := expr(); err2 != nil { /*...*/ }", DocAfter: "if err := expr(); err != nil { /*...*/ }", Rules: []ir.Rule{{ - Line: 778, + Line: 777, SyntaxPatterns: []ir.PatternString{ - {Line: 779, Value: "if $err := $_($*_); $err2 != nil { $*_ }"}, - {Line: 780, Value: "if $err = $_($*_); $err2 != nil { $*_ }"}, - {Line: 781, Value: "if $*_, $err := $_($*_); $err2 != nil { $*_ }"}, - {Line: 782, Value: "if $*_, $err = $_($*_); $err2 != nil { $*_ }"}, + {Line: 778, Value: "if $err := $_($*_); $err2 != nil { $*_ }"}, + {Line: 779, Value: "if $err = $_($*_); $err2 != nil { $*_ }"}, + {Line: 780, 
Value: "if $*_, $err := $_($*_); $err2 != nil { $*_ }"}, + {Line: 781, Value: "if $*_, $err = $_($*_); $err2 != nil { $*_ }"}, }, ReportTemplate: "$err error is unchecked, maybe intended to check it instead of $err2", WhereExpr: ir.FilterExpr{ - Line: 783, + Line: 782, Op: ir.FilterAndOp, Src: "m[\"err\"].Type.Implements(\"error\") && m[\"err2\"].Type.Implements(\"error\") &&\n\tm[\"err\"].Text != m[\"err2\"].Text", Args: []ir.FilterExpr{ { - Line: 783, + Line: 782, Op: ir.FilterAndOp, Src: "m[\"err\"].Type.Implements(\"error\") && m[\"err2\"].Type.Implements(\"error\")", Args: []ir.FilterExpr{ { - Line: 783, + Line: 782, Op: ir.FilterVarTypeImplementsOp, Src: "m[\"err\"].Type.Implements(\"error\")", Value: "err", - Args: []ir.FilterExpr{{Line: 783, Op: ir.FilterStringOp, Src: "\"error\"", Value: "error"}}, + Args: []ir.FilterExpr{{Line: 782, Op: ir.FilterStringOp, Src: "\"error\"", Value: "error"}}, }, { - Line: 783, + Line: 782, Op: ir.FilterVarTypeImplementsOp, Src: "m[\"err2\"].Type.Implements(\"error\")", Value: "err2", - Args: []ir.FilterExpr{{Line: 783, Op: ir.FilterStringOp, Src: "\"error\"", Value: "error"}}, + Args: []ir.FilterExpr{{Line: 782, Op: ir.FilterStringOp, Src: "\"error\"", Value: "error"}}, }, }, }, { - Line: 784, + Line: 783, Op: ir.FilterNeqOp, Src: "m[\"err\"].Text != m[\"err2\"].Text", Args: []ir.FilterExpr{ - {Line: 784, Op: ir.FilterVarTextOp, Src: "m[\"err\"].Text", Value: "err"}, - {Line: 784, Op: ir.FilterVarTextOp, Src: "m[\"err2\"].Text", Value: "err2"}, + {Line: 783, Op: ir.FilterVarTextOp, Src: "m[\"err\"].Text", Value: "err"}, + {Line: 783, Op: ir.FilterVarTextOp, Src: "m[\"err2\"].Text", Value: "err2"}, }, }, }, @@ -2436,6 +2524,113 @@ var PrecompiledRules = &ir.File{ LocationVar: "err", }}, }, + { + Line: 792, + Name: "sloppyTestFuncName", + MatcherName: "m", + DocTags: []string{"diagnostic", "experimental"}, + DocSummary: "Detects unsupported test and benchmark funcs", + DocBefore: "func TessstUnit(t *testing.T)", + DocAfter: "func TestUnit(t *testing.T)", + Rules: []ir.Rule{ + { + Line: 793, + SyntaxPatterns: []ir.PatternString{{Line: 793, Value: "func $test($_ *testing.T) { $*_ }"}}, + ReportTemplate: "function $test should be of form TestXXX(t *testing.T)", + WhereExpr: ir.FilterExpr{ + Line: 794, + Op: ir.FilterAndOp, + Src: "!m[\"test\"].Text.Matches(\"Test.*\") &&\n\t!m[\"test\"].Text.Matches(\"test.*\")", + Args: []ir.FilterExpr{ + { + Line: 794, + Op: ir.FilterNotOp, + Src: "!m[\"test\"].Text.Matches(\"Test.*\")", + Args: []ir.FilterExpr{{ + Line: 794, + Op: ir.FilterVarTextMatchesOp, + Src: "m[\"test\"].Text.Matches(\"Test.*\")", + Value: "test", + Args: []ir.FilterExpr{{Line: 794, Op: ir.FilterStringOp, Src: "\"Test.*\"", Value: "Test.*"}}, + }}, + }, + { + Line: 795, + Op: ir.FilterNotOp, + Src: "!m[\"test\"].Text.Matches(\"test.*\")", + Args: []ir.FilterExpr{{ + Line: 795, + Op: ir.FilterVarTextMatchesOp, + Src: "m[\"test\"].Text.Matches(\"test.*\")", + Value: "test", + Args: []ir.FilterExpr{{Line: 795, Op: ir.FilterStringOp, Src: "\"test.*\"", Value: "test.*"}}, + }}, + }, + }, + }, + }, + { + Line: 798, + SyntaxPatterns: []ir.PatternString{{Line: 798, Value: "func $bench($_ *testing.B) { $*_ }"}}, + ReportTemplate: "function $bench should be of form BenchmarkXXX(b *testing.B)", + WhereExpr: ir.FilterExpr{ + Line: 799, + Op: ir.FilterAndOp, + Src: "!m[\"bench\"].Text.Matches(\"Benchmark.*\") &&\n\t!m[\"bench\"].Text.Matches(\"bench.*\")", + Args: []ir.FilterExpr{ + { + Line: 799, + Op: ir.FilterNotOp, + Src: 
"!m[\"bench\"].Text.Matches(\"Benchmark.*\")", + Args: []ir.FilterExpr{{ + Line: 799, + Op: ir.FilterVarTextMatchesOp, + Src: "m[\"bench\"].Text.Matches(\"Benchmark.*\")", + Value: "bench", + Args: []ir.FilterExpr{{Line: 799, Op: ir.FilterStringOp, Src: "\"Benchmark.*\"", Value: "Benchmark.*"}}, + }}, + }, + { + Line: 800, + Op: ir.FilterNotOp, + Src: "!m[\"bench\"].Text.Matches(\"bench.*\")", + Args: []ir.FilterExpr{{ + Line: 800, + Op: ir.FilterVarTextMatchesOp, + Src: "m[\"bench\"].Text.Matches(\"bench.*\")", + Value: "bench", + Args: []ir.FilterExpr{{Line: 800, Op: ir.FilterStringOp, Src: "\"bench.*\"", Value: "bench.*"}}, + }}, + }, + }, + }, + }, + { + Line: 803, + SyntaxPatterns: []ir.PatternString{{Line: 803, Value: "func $test($_ *testing.T) { $*_ }"}}, + ReportTemplate: "function $test looks like a test helper, consider to change 1st param to 'tb testing.TB'", + WhereExpr: ir.FilterExpr{ + Line: 804, + Op: ir.FilterVarTextMatchesOp, + Src: "m[\"test\"].Text.Matches(\"^test.*\")", + Value: "test", + Args: []ir.FilterExpr{{Line: 804, Op: ir.FilterStringOp, Src: "\"^test.*\"", Value: "^test.*"}}, + }, + }, + { + Line: 807, + SyntaxPatterns: []ir.PatternString{{Line: 807, Value: "func $bench($_ *testing.B) { $*_ }"}}, + ReportTemplate: "function $bench looks like a benchmark helper, consider to change 1st param to 'tb testing.TB'", + WhereExpr: ir.FilterExpr{ + Line: 808, + Op: ir.FilterVarTextMatchesOp, + Src: "m[\"bench\"].Text.Matches(\"^bench(mark)?.*\")", + Value: "bench", + Args: []ir.FilterExpr{{Line: 808, Op: ir.FilterStringOp, Src: "\"^bench(mark)?.*\"", Value: "^bench(mark)?.*"}}, + }, + }, + }, + }, }, } diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/singleCaseSwitch_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/singleCaseSwitch_checker.go index b369a43447..a1a399fdaa 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/singleCaseSwitch_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/singleCaseSwitch_checker.go @@ -5,14 +5,15 @@ import ( "go/token" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "golang.org/x/tools/go/ast/astutil" ) func init() { var info linter.CheckerInfo info.Name = "singleCaseSwitch" - info.Tags = []string{"style"} + info.Tags = []string{linter.StyleTag} info.Summary = "Detects switch statements that could be better written as if statement" info.Before = ` switch x := x.(type) { diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/sloppyReassign_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/sloppyReassign_checker.go index 2f9ac62e1f..d83d7fd5a1 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/sloppyReassign_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/sloppyReassign_checker.go @@ -5,7 +5,8 @@ import ( "go/token" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/astequal" @@ -14,7 +15,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "sloppyReassign" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} info.Summary = "Detects suspicious/confusing re-assignments" info.Before = `if err = f(); err != nil { return err }` 
info.After = `if err := f(); err != nil { return err }` diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/sloppyTypeAssert_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/sloppyTypeAssert_checker.go index 554197768e..454ab78b19 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/sloppyTypeAssert_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/sloppyTypeAssert_checker.go @@ -5,14 +5,15 @@ import ( "go/types" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" ) func init() { var info linter.CheckerInfo info.Name = "sloppyTypeAssert" - info.Tags = []string{"diagnostic"} + info.Tags = []string{linter.DiagnosticTag} info.Summary = "Detects redundant type assertions" info.Before = ` func f(r io.Reader) interface{} { diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/sortSlice_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/sortSlice_checker.go index 29550da3fb..22ef3b16a7 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/sortSlice_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/sortSlice_checker.go @@ -6,7 +6,8 @@ import ( "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/typep" @@ -16,7 +17,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "sortSlice" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} info.Summary = "Detects suspicious sort.Slice calls" info.Before = `sort.Slice(xs, func(i, j) bool { return keys[i] < keys[j] })` info.After = `sort.Slice(kv, func(i, j) bool { return kv[i].key < kv[j].key })` diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/sqlQuery_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/sqlQuery_checker.go index eb3b49d881..8a132b5860 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/sqlQuery_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/sqlQuery_checker.go @@ -5,14 +5,15 @@ import ( "go/types" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" ) func init() { var info linter.CheckerInfo info.Name = "sqlQuery" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} info.Summary = "Detects issue in Query() and Exec() calls" info.Before = `_, err := db.Query("UPDATE ...")` info.After = `_, err := db.Exec("UPDATE ...")` diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/todoCommentWithoutDetail_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/todoCommentWithoutDetail_checker.go index 5ec2881b4b..f8e4b9b3c0 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/todoCommentWithoutDetail_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/todoCommentWithoutDetail_checker.go @@ -5,13 +5,13 @@ import ( "regexp" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + 
"github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "todoCommentWithoutDetail" - info.Tags = []string{"style", "opinionated", "experimental"} + info.Tags = []string{linter.StyleTag, linter.OpinionatedTag, linter.ExperimentalTag} info.Summary = "Detects TODO comments without detail/assignee" info.Before = ` // TODO diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/tooManyResults_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/tooManyResults_checker.go index 4d4dcc26e7..57411ba249 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/tooManyResults_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/tooManyResults_checker.go @@ -5,13 +5,13 @@ import ( "go/types" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "tooManyResultsChecker" - info.Tags = []string{"style", "opinionated", "experimental"} + info.Tags = []string{linter.StyleTag, linter.OpinionatedTag, linter.ExperimentalTag} info.Params = linter.CheckerParams{ "maxResults": { Value: 5, diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/truncateCmp_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/truncateCmp_checker.go index 9d40c2b631..b369025267 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/truncateCmp_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/truncateCmp_checker.go @@ -6,7 +6,8 @@ import ( "go/types" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astp" ) @@ -14,7 +15,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "truncateCmp" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} info.Params = linter.CheckerParams{ "skipArchDependent": { Value: true, diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/typeAssertChain_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/typeAssertChain_checker.go index d87657c3b9..e0d20fd4c5 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/typeAssertChain_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/typeAssertChain_checker.go @@ -6,7 +6,8 @@ import ( "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/astp" @@ -15,7 +16,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "typeAssertChain" - info.Tags = []string{"style", "experimental"} + info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} info.Summary = "Detects repeated type assertions and suggests to replace them with type switch statement" info.Before = ` if x, ok := v.(T1); ok { diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/typeDefFirst_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/typeDefFirst_checker.go index bc59eef1ca..11381c4014 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/typeDefFirst_checker.go +++ 
b/tools/vendor/github.com/go-critic/go-critic/checkers/typeDefFirst_checker.go @@ -5,14 +5,13 @@ import ( "go/token" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" - "golang.org/x/exp/typeparams" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "typeDefFirst" - info.Tags = []string{"style", "experimental"} + info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} info.Summary = "Detects method declarations preceding the type definition itself" info.Before = ` func (r rec) Method() {} @@ -81,7 +80,7 @@ func (c *typeDefFirstChecker) receiverType(e ast.Expr) string { return e.Name case *ast.IndexExpr: return c.receiverType(e.X) - case *typeparams.IndexListExpr: + case *ast.IndexListExpr: return c.receiverType(e.X) default: panic("unreachable") diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go index 1e11e49372..4b27b17928 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go @@ -5,7 +5,8 @@ import ( "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/astp" ) @@ -13,7 +14,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "typeSwitchVar" - info.Tags = []string{"style"} + info.Tags = []string{linter.StyleTag} info.Summary = "Detects type switches that can benefit from type guard clause with variable" info.Before = ` switch v.(type) { diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go index cd8e04337a..d270268bd0 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go @@ -4,7 +4,8 @@ import ( "go/ast" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcopy" "github.com/go-toolsmith/astequal" ) @@ -12,7 +13,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "typeUnparen" - info.Tags = []string{"style", "opinionated"} + info.Tags = []string{linter.StyleTag, linter.OpinionatedTag} info.Summary = "Detects unneded parenthesis inside type expressions and suggests to remove them" info.Before = `type foo [](func([](func())))` info.After = `type foo []func([]func())` diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/underef_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/underef_checker.go index d0426a9a50..0ce2c89ba7 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/underef_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/underef_checker.go @@ -5,7 +5,8 @@ import ( "go/types" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astp" ) @@ -13,7 +14,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = 
"underef" - info.Tags = []string{"style"} + info.Tags = []string{linter.StyleTag} info.Params = linter.CheckerParams{ "skipRecvDeref": { Value: true, diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go index bcca24d2a8..d0e83f3c2e 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go @@ -6,13 +6,13 @@ import ( "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "unlabelStmt" - info.Tags = []string{"style", "experimental"} + info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} info.Summary = "Detects redundant statement labels" info.Before = ` derp: diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go index cce995d7a2..bcfe5a0c47 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go @@ -7,7 +7,8 @@ import ( "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/typep" @@ -16,7 +17,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "unlambda" - info.Tags = []string{"style"} + info.Tags = []string{linter.StyleTag} info.Summary = "Detects function literals that can be simplified" info.Before = `func(x int) int { return fn(x) }` info.After = `fn` diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/unnamedResult_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/unnamedResult_checker.go index 3149d9e87d..0d40addf75 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/unnamedResult_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/unnamedResult_checker.go @@ -5,13 +5,13 @@ import ( "go/types" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { var info linter.CheckerInfo info.Name = "unnamedResult" - info.Tags = []string{"style", "opinionated", "experimental"} + info.Tags = []string{linter.StyleTag, linter.OpinionatedTag, linter.ExperimentalTag} info.Params = linter.CheckerParams{ "checkExported": { Value: false, diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/unnecessaryBlock_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/unnecessaryBlock_checker.go index 6cbdfdfd0b..b577ff4219 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/unnecessaryBlock_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/unnecessaryBlock_checker.go @@ -5,14 +5,15 @@ import ( "go/token" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astp" ) func init() { var info linter.CheckerInfo info.Name = "unnecessaryBlock" - 
info.Tags = []string{"style", "opinionated", "experimental"} + info.Tags = []string{linter.StyleTag, linter.OpinionatedTag, linter.ExperimentalTag} info.Summary = "Detects unnecessary braced statement blocks" info.Before = ` x := 1 diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go index ef72142a10..4358ab1713 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go @@ -4,14 +4,15 @@ import ( "go/ast" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astfmt" ) func init() { var info linter.CheckerInfo info.Name = "unnecessaryDefer" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} info.Summary = "Detects redundantly deferred calls" info.Before = ` func() { diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/utils.go b/tools/vendor/github.com/go-critic/go-critic/checkers/utils.go index b71f24d749..e9123352d2 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/utils.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/utils.go @@ -5,7 +5,7 @@ import ( "go/types" "strings" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) // goStdlib contains `go list std` command output list. diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/weakCond_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/weakCond_checker.go index 831857c41a..3d7c9c1225 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/weakCond_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/weakCond_checker.go @@ -6,7 +6,8 @@ import ( "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/checkers/internal/lintutil" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" "github.com/go-toolsmith/astequal" "github.com/go-toolsmith/typep" @@ -16,7 +17,7 @@ import ( func init() { var info linter.CheckerInfo info.Name = "weakCond" - info.Tags = []string{"diagnostic", "experimental"} + info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} info.Summary = "Detects conditions that are unsafe due to not being exhaustive" info.Before = `xs != nil && xs[0] != nil` info.After = `len(xs) != 0 && xs[0] != nil` diff --git a/tools/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go b/tools/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go index 6829433ea0..eaa53e5d5b 100644 --- a/tools/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go +++ b/tools/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go @@ -6,17 +6,16 @@ import ( "strings" "github.com/go-critic/go-critic/checkers/internal/astwalk" - "github.com/go-critic/go-critic/framework/linter" + "github.com/go-critic/go-critic/linter" ) func init() { - info := linter.CheckerInfo{ - Name: "whyNoLint", - Tags: []string{"style", "experimental"}, - Summary: "Ensures that `//nolint` comments include an explanation", - Before: `//nolint`, - After: `//nolint // reason`, - } + var info linter.CheckerInfo + info.Name = "whyNoLint" + 
info.Tags = []string{linter.StyleTag, linter.ExperimentalTag} + info.Summary = "Ensures that `//nolint` comments include an explanation" + info.Before = `//nolint` + info.After = `//nolint // reason` collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { return astwalk.WalkerForComment(&whyNoLintChecker{ diff --git a/tools/vendor/github.com/go-critic/go-critic/framework/linter/context.go b/tools/vendor/github.com/go-critic/go-critic/framework/linter/context.go deleted file mode 100644 index 6e108ab6a5..0000000000 --- a/tools/vendor/github.com/go-critic/go-critic/framework/linter/context.go +++ /dev/null @@ -1,35 +0,0 @@ -package linter - -import ( - "go/ast" - "go/types" - "strconv" -) - -func resolvePkgObjects(ctx *Context, f *ast.File) { - ctx.PkgObjects = make(map[*types.PkgName]string, len(f.Imports)) - - for _, spec := range f.Imports { - if spec.Name != nil { - obj := ctx.TypesInfo.ObjectOf(spec.Name) - ctx.PkgObjects[obj.(*types.PkgName)] = spec.Name.Name - } else { - obj := ctx.TypesInfo.Implicits[spec] - ctx.PkgObjects[obj.(*types.PkgName)] = obj.Name() - } - } -} - -func resolvePkgRenames(ctx *Context, f *ast.File) { - ctx.PkgRenames = make(map[string]string) - - for _, spec := range f.Imports { - if spec.Name != nil { - path, err := strconv.Unquote(spec.Path.Value) - if err != nil { - panic(err) - } - ctx.PkgRenames[path] = spec.Name.Name - } - } -} diff --git a/tools/vendor/github.com/go-critic/go-critic/framework/linter/go_version.go b/tools/vendor/github.com/go-critic/go-critic/linter/go_version.go similarity index 91% rename from tools/vendor/github.com/go-critic/go-critic/framework/linter/go_version.go rename to tools/vendor/github.com/go-critic/go-critic/linter/go_version.go index d8091d4535..b5ef2f75ff 100644 --- a/tools/vendor/github.com/go-critic/go-critic/framework/linter/go_version.go +++ b/tools/vendor/github.com/go-critic/go-critic/linter/go_version.go @@ -18,13 +18,14 @@ type GoVersion struct { // // As a special case, Major=0 covers all versions. 
func (v GoVersion) GreaterOrEqual(other GoVersion) bool { - if v.Major == 0 { + switch { + case v.Major == 0: return true - } - if v.Major == other.Major { + case v.Major == other.Major: return v.Minor >= other.Minor + default: + return v.Major >= other.Major } - return v.Major >= other.Major } func ParseGoVersion(version string) (GoVersion, error) { diff --git a/tools/vendor/github.com/go-critic/go-critic/framework/linter/checkers_db.go b/tools/vendor/github.com/go-critic/go-critic/linter/helpers.go similarity index 100% rename from tools/vendor/github.com/go-critic/go-critic/framework/linter/checkers_db.go rename to tools/vendor/github.com/go-critic/go-critic/linter/helpers.go diff --git a/tools/vendor/github.com/go-critic/go-critic/framework/linter/linter.go b/tools/vendor/github.com/go-critic/go-critic/linter/linter.go similarity index 92% rename from tools/vendor/github.com/go-critic/go-critic/framework/linter/linter.go rename to tools/vendor/github.com/go-critic/go-critic/linter/linter.go index 8573ace305..27e9b659fb 100644 --- a/tools/vendor/github.com/go-critic/go-critic/framework/linter/linter.go +++ b/tools/vendor/github.com/go-critic/go-critic/linter/linter.go @@ -4,10 +4,32 @@ import ( "go/ast" "go/token" "go/types" + "strconv" "github.com/go-toolsmith/astfmt" ) +const ( + DiagnosticTag = "diagnostic" + ExperimentalTag = "experimental" + OpinionatedTag = "opinionated" + PerformanceTag = "performance" + SecurityTag = "security" + StyleTag = "style" +) + +// UnknownType is a special sentinel value that is returned from the CheckerContext.TypeOf +// method instead of the nil type. +var UnknownType types.Type = types.Typ[types.Invalid] + +// FileWalker is an interface every checker should implement. +// +// The WalkFile method is executed for every Go file inside the +// package that is being checked. +type FileWalker interface { + WalkFile(*ast.File) +} + // CheckerCollection provides additional information for a group of checkers. type CheckerCollection struct { // URL is a link for a main source of information on the collection. @@ -123,6 +145,14 @@ type Checker struct { fileWalker FileWalker } +// NewChecker returns initialized checker identified by an info. +// info must be non-nil. +// Returns an error if info describes a checker that was not properly registered, +// or if checker fails to initialize. +func NewChecker(ctx *Context, info *CheckerInfo) (*Checker, error) { + return newChecker(ctx, info) +} + // Check runs rule checker over file f. func (c *Checker) Check(f *ast.File) []Warning { c.ctx.warnings = c.ctx.warnings[:0] @@ -159,14 +189,6 @@ func (warn Warning) HasQuickFix() bool { return warn.Suggestion.Replacement != nil } -// NewChecker returns initialized checker identified by an info. -// info must be non-nil. -// Returns an error if info describes a checker that was not properly registered, -// or if checker fails to initialize. -func NewChecker(ctx *Context, info *CheckerInfo) (*Checker, error) { - return newChecker(ctx, info) -} - // Context is a readonly state shared among every checker. type Context struct { // TypesInfo carries parsed packages types information. @@ -301,10 +323,6 @@ func (ctx *CheckerContext) WarnFixableWithPos(pos token.Pos, fix QuickFix, forma }) } -// UnknownType is a special sentinel value that is returned from the CheckerContext.TypeOf -// method instead of the nil type. -var UnknownType types.Type = types.Typ[types.Invalid] - // TypeOf returns the type of expression x. // // Unlike TypesInfo.TypeOf, it never returns nil. 
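As an illustrative sketch of what checker registration looks like after the framework/linter → linter move and the switch to exported tag constants shown above — assuming the code lives inside go-critic's own checkers package (needed for the internal astwalk import and the package-level collection variable), with a hypothetical checker name and logic:

```go
package checkers

import (
	"go/ast"

	"github.com/go-critic/go-critic/checkers/internal/astwalk"
	"github.com/go-critic/go-critic/linter" // previously .../framework/linter
)

func init() {
	var info linter.CheckerInfo
	info.Name = "exampleChecker" // hypothetical, for illustration only
	// Tags now use the exported constants instead of raw strings.
	info.Tags = []string{linter.StyleTag, linter.ExperimentalTag}
	info.Summary = "Illustrates the relocated linter package and tag constants"
	info.Before = `func bad() {}`
	info.After = `func good() {}`

	collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) {
		return astwalk.WalkerForFuncDecl(&exampleChecker{ctx: ctx}), nil
	})
}

type exampleChecker struct {
	astwalk.WalkHandler
	ctx *linter.CheckerContext
}

func (c *exampleChecker) VisitFuncDecl(decl *ast.FuncDecl) {
	// A real checker would inspect decl and report via c.ctx.Warn(...).
}
```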
@@ -335,10 +353,30 @@ func (ctx *CheckerContext) SizeOf(typ types.Type) (int64, bool) { return ctx.SizesInfo.Sizeof(typ), true } -// FileWalker is an interface every checker should implement. -// -// The WalkFile method is executed for every Go file inside the -// package that is being checked. -type FileWalker interface { - WalkFile(*ast.File) +func resolvePkgObjects(ctx *Context, f *ast.File) { + ctx.PkgObjects = make(map[*types.PkgName]string, len(f.Imports)) + + for _, spec := range f.Imports { + if spec.Name != nil { + obj := ctx.TypesInfo.ObjectOf(spec.Name) + ctx.PkgObjects[obj.(*types.PkgName)] = spec.Name.Name + } else { + obj := ctx.TypesInfo.Implicits[spec] + ctx.PkgObjects[obj.(*types.PkgName)] = obj.Name() + } + } +} + +func resolvePkgRenames(ctx *Context, f *ast.File) { + ctx.PkgRenames = make(map[string]string) + + for _, spec := range f.Imports { + if spec.Name != nil { + path, err := strconv.Unquote(spec.Path.Value) + if err != nil { + panic(err) + } + ctx.PkgRenames[path] = spec.Name.Name + } + } } diff --git a/tools/vendor/github.com/go-toolsmith/astcopy/.travis.yml b/tools/vendor/github.com/go-toolsmith/astcopy/.travis.yml deleted file mode 100644 index 8994d395c6..0000000000 --- a/tools/vendor/github.com/go-toolsmith/astcopy/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: go -go: - - 1.x -install: - - # Prevent default install action "go get -t -v ./...". -script: - - go get -t -v ./... - - go tool vet . - - go test -v -race ./... \ No newline at end of file diff --git a/tools/vendor/github.com/go-toolsmith/astcopy/README.md b/tools/vendor/github.com/go-toolsmith/astcopy/README.md index 4dae5c41b2..7adc665250 100644 --- a/tools/vendor/github.com/go-toolsmith/astcopy/README.md +++ b/tools/vendor/github.com/go-toolsmith/astcopy/README.md @@ -1,13 +1,16 @@ -[![Go Report Card](https://goreportcard.com/badge/github.com/go-toolsmith/astcopy)](https://goreportcard.com/report/github.com/go-toolsmith/astcopy) -[![GoDoc](https://godoc.org/github.com/go-toolsmith/astcopy?status.svg)](https://godoc.org/github.com/go-toolsmith/astcopy) -[![Build Status](https://travis-ci.org/go-toolsmith/astcopy.svg?branch=master)](https://travis-ci.org/go-toolsmith/astcopy) - # astcopy -Package astcopy implements Go AST reflection-free deep copy operations. +[![build-img]][build-url] +[![pkg-img]][pkg-url] +[![reportcard-img]][reportcard-url] +[![version-img]][version-url] + +Package `astcopy` implements Go AST reflection-free deep copy operations. ## Installation: +Go version 1.16+ + ```bash go get github.com/go-toolsmith/astcopy ``` @@ -39,3 +42,16 @@ func main() { fmt.Println(astequal.Expr(x, y)) // => false } ``` + +## License + +[MIT License](LICENSE). 
+ +[build-img]: https://github.com/go-toolsmith/astp/workflows/build/badge.svg +[build-url]: https://github.com/go-toolsmith/astp/actions +[pkg-img]: https://pkg.go.dev/badge/go-toolsmith/astp +[pkg-url]: https://pkg.go.dev/github.com/go-toolsmith/astp +[reportcard-img]: https://goreportcard.com/badge/go-toolsmith/astp +[reportcard-url]: https://goreportcard.com/report/go-toolsmith/astp +[version-img]: https://img.shields.io/github/v/release/go-toolsmith/astp +[version-url]: https://github.com/go-toolsmith/astp/releases diff --git a/tools/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go b/tools/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go index 282d794b82..9d1daa81df 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go +++ b/tools/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go @@ -3,12 +3,15 @@ package main import ( "fmt" "os" + "runtime/debug" "github.com/golangci/golangci-lint/pkg/commands" "github.com/golangci/golangci-lint/pkg/exitcodes" ) var ( + goVersion = "unknown" + // Populated by goreleaser during build version = "master" commit = "?" @@ -16,7 +19,24 @@ var ( ) func main() { - e := commands.NewExecutor(version, commit, date) + if buildInfo, available := debug.ReadBuildInfo(); available { + goVersion = buildInfo.GoVersion + + if date == "" { + version = buildInfo.Main.Version + commit = fmt.Sprintf("(unknown, mod sum: %q)", buildInfo.Main.Sum) + date = "(unknown)" + } + } + + info := commands.BuildInfo{ + GoVersion: goVersion, + Version: version, + Commit: commit, + Date: date, + } + + e := commands.NewExecutor(info) if err := e.Execute(); err != nil { fmt.Fprintf(os.Stderr, "failed executing command with error %v\n", err) diff --git a/tools/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/mod_version.go b/tools/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/mod_version.go deleted file mode 100644 index 119a8a60db..0000000000 --- a/tools/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/mod_version.go +++ /dev/null @@ -1,17 +0,0 @@ -package main - -import ( - "fmt" - "runtime/debug" -) - -//nolint:gochecknoinits -func init() { - if info, available := debug.ReadBuildInfo(); available { - if date == "" { - version = info.Main.Version - commit = fmt.Sprintf("(unknown, mod sum: %q)", info.Main.Sum) - date = "(unknown)" - } - } -} diff --git a/tools/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go b/tools/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go index e9638c8e16..299fd52790 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go +++ b/tools/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go @@ -12,6 +12,7 @@ import ( "bytes" "crypto/sha256" "encoding/hex" + "errors" "fmt" "io" "os" @@ -20,8 +21,6 @@ import ( "strings" "time" - "github.com/pkg/errors" - "github.com/golangci/golangci-lint/internal/renameio" "github.com/golangci/golangci-lint/internal/robustio" ) @@ -80,7 +79,7 @@ func (c *Cache) fileName(id [HashSize]byte, key string) string { var errMissing = errors.New("cache entry not found") func IsErrMissing(err error) bool { - return errors.Cause(err) == errMissing + return errors.Is(err, errMissing) } const ( @@ -169,10 +168,10 @@ func (c *Cache) get(id ActionID) (Entry, error) { etime := entry[1 : 1+20] var buf [HashSize]byte if _, err = hex.Decode(buf[:], eid); err != nil || buf != id { - return failed(errors.Wrapf(err, "failed to hex decode eid data in %s", fileName)) + 
return failed(fmt.Errorf("failed to hex decode eid data in %s: %w", fileName, err)) } if _, err = hex.Decode(buf[:], eout); err != nil { - return failed(errors.Wrapf(err, "failed to hex decode eout data in %s", fileName)) + return failed(fmt.Errorf("failed to hex decode eout data in %s: %w", fileName, err)) } i := 0 for i < len(esize) && esize[i] == ' ' { @@ -192,7 +191,7 @@ func (c *Cache) get(id ActionID) (Entry, error) { } if err = c.used(fileName); err != nil { - return failed(errors.Wrapf(err, "failed to mark %s as used", fileName)) + return failed(fmt.Errorf("failed to mark %s as used: %w", fileName, err)) } return Entry{buf, size, time.Unix(0, tm)}, nil @@ -264,7 +263,7 @@ func (c *Cache) used(file string) error { if os.IsNotExist(err) { return errMissing } - return errors.Wrapf(err, "failed to stat file %s", file) + return fmt.Errorf("failed to stat file %s: %w", file, err) } if c.now().Sub(info.ModTime()) < mtimeInterval { @@ -272,7 +271,7 @@ func (c *Cache) used(file string) error { } if err := os.Chtimes(file, c.now(), c.now()); err != nil { - return errors.Wrapf(err, "failed to change time of file %s", file) + return fmt.Errorf("failed to change time of file %s: %w", file, err) } return nil @@ -385,7 +384,7 @@ func (c *Cache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify return err } if err = os.Chtimes(file, c.now(), c.now()); err != nil { // mainly for tests - return errors.Wrapf(err, "failed to change time of file %s", file) + return fmt.Errorf("failed to change time of file %s: %w", file, err) } return nil @@ -443,7 +442,7 @@ func (c *Cache) copyFile(file io.ReadSeeker, out OutputID, size int64) error { if f, openErr := os.Open(name); openErr == nil { h := sha256.New() if _, copyErr := io.Copy(h, f); copyErr != nil { - return errors.Wrap(copyErr, "failed to copy to sha256") + return fmt.Errorf("failed to copy to sha256: %w", copyErr) } f.Close() @@ -519,7 +518,7 @@ func (c *Cache) copyFile(file io.ReadSeeker, out OutputID, size int64) error { return err } if err = os.Chtimes(name, c.now(), c.now()); err != nil { // mainly for tests - return errors.Wrapf(err, "failed to change time of file %s", name) + return fmt.Errorf("failed to change time of file %s: %w", name, err) } return nil diff --git a/tools/vendor/github.com/golangci/golangci-lint/internal/errorutil/errors.go b/tools/vendor/github.com/golangci/golangci-lint/internal/errorutil/errors.go index 5cb86d6698..c8a3a0357e 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/internal/errorutil/errors.go +++ b/tools/vendor/github.com/golangci/golangci-lint/internal/errorutil/errors.go @@ -6,11 +6,11 @@ import ( // PanicError can be used to not print stacktrace twice type PanicError struct { - recovered interface{} + recovered any stack []byte } -func NewPanicError(recovered interface{}, stack []byte) *PanicError { +func NewPanicError(recovered any, stack []byte) *PanicError { return &PanicError{recovered: recovered, stack: stack} } diff --git a/tools/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go b/tools/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go index 83e6073875..3b3422eb7a 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go +++ b/tools/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go @@ -4,12 +4,12 @@ import ( "bytes" "encoding/gob" "encoding/hex" + "errors" "fmt" "runtime" "sort" "sync" - "github.com/pkg/errors" "golang.org/x/tools/go/packages" 
"github.com/golangci/golangci-lint/internal/cache" @@ -54,14 +54,14 @@ func (c *Cache) Trim() { }) } -func (c *Cache) Put(pkg *packages.Package, mode HashMode, key string, data interface{}) error { +func (c *Cache) Put(pkg *packages.Package, mode HashMode, key string, data any) error { var err error buf := &bytes.Buffer{} c.sw.TrackStage("gob", func() { err = gob.NewEncoder(buf).Encode(data) }) if err != nil { - return errors.Wrap(err, "failed to gob encode") + return fmt.Errorf("failed to gob encode: %w", err) } var aID cache.ActionID @@ -71,13 +71,13 @@ func (c *Cache) Put(pkg *packages.Package, mode HashMode, key string, data inter if err == nil { subkey, subkeyErr := cache.Subkey(aID, key) if subkeyErr != nil { - err = errors.Wrap(subkeyErr, "failed to build subkey") + err = fmt.Errorf("failed to build subkey: %w", subkeyErr) } aID = subkey } }) if err != nil { - return errors.Wrapf(err, "failed to calculate package %s action id", pkg.Name) + return fmt.Errorf("failed to calculate package %s action id: %w", pkg.Name, err) } c.ioSem <- struct{}{} c.sw.TrackStage("cache io", func() { @@ -85,7 +85,7 @@ func (c *Cache) Put(pkg *packages.Package, mode HashMode, key string, data inter }) <-c.ioSem if err != nil { - return errors.Wrapf(err, "failed to save data to low-level cache by key %s for package %s", key, pkg.Name) + return fmt.Errorf("failed to save data to low-level cache by key %s for package %s: %w", key, pkg.Name, err) } return nil @@ -93,7 +93,7 @@ func (c *Cache) Put(pkg *packages.Package, mode HashMode, key string, data inter var ErrMissing = errors.New("missing data") -func (c *Cache) Get(pkg *packages.Package, mode HashMode, key string, data interface{}) error { +func (c *Cache) Get(pkg *packages.Package, mode HashMode, key string, data any) error { var aID cache.ActionID var err error c.sw.TrackStage("key build", func() { @@ -101,13 +101,13 @@ func (c *Cache) Get(pkg *packages.Package, mode HashMode, key string, data inter if err == nil { subkey, subkeyErr := cache.Subkey(aID, key) if subkeyErr != nil { - err = errors.Wrap(subkeyErr, "failed to build subkey") + err = fmt.Errorf("failed to build subkey: %w", subkeyErr) } aID = subkey } }) if err != nil { - return errors.Wrapf(err, "failed to calculate package %s action id", pkg.Name) + return fmt.Errorf("failed to calculate package %s action id: %w", pkg.Name, err) } var b []byte @@ -120,14 +120,14 @@ func (c *Cache) Get(pkg *packages.Package, mode HashMode, key string, data inter if cache.IsErrMissing(err) { return ErrMissing } - return errors.Wrapf(err, "failed to get data from low-level cache by key %s for package %s", key, pkg.Name) + return fmt.Errorf("failed to get data from low-level cache by key %s for package %s: %w", key, pkg.Name, err) } c.sw.TrackStage("gob", func() { err = gob.NewDecoder(bytes.NewReader(b)).Decode(data) }) if err != nil { - return errors.Wrap(err, "failed to gob decode") + return fmt.Errorf("failed to gob decode: %w", err) } return nil @@ -136,12 +136,12 @@ func (c *Cache) Get(pkg *packages.Package, mode HashMode, key string, data inter func (c *Cache) pkgActionID(pkg *packages.Package, mode HashMode) (cache.ActionID, error) { hash, err := c.packageHash(pkg, mode) if err != nil { - return cache.ActionID{}, errors.Wrap(err, "failed to get package hash") + return cache.ActionID{}, fmt.Errorf("failed to get package hash: %w", err) } key, err := cache.NewHash("action ID") if err != nil { - return cache.ActionID{}, errors.Wrap(err, "failed to make a hash") + return cache.ActionID{}, fmt.Errorf("failed 
to make a hash: %w", err) } fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) fmt.Fprintf(key, "pkghash %s\n", hash) @@ -167,7 +167,7 @@ func (c *Cache) packageHash(pkg *packages.Package, mode HashMode) (string, error key, err := cache.NewHash("package hash") if err != nil { - return "", errors.Wrap(err, "failed to make a hash") + return "", fmt.Errorf("failed to make a hash: %w", err) } fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) @@ -176,7 +176,7 @@ func (c *Cache) packageHash(pkg *packages.Package, mode HashMode) (string, error h, fErr := cache.FileHash(f) <-c.ioSem if fErr != nil { - return "", errors.Wrapf(fErr, "failed to calculate file %s hash", f) + return "", fmt.Errorf("failed to calculate file %s hash: %w", f, fErr) } fmt.Fprintf(key, "file %s %x\n", f, h) } @@ -199,7 +199,7 @@ func (c *Cache) packageHash(pkg *packages.Package, mode HashMode) (string, error depHash, depErr := c.packageHash(dep, depMode) if depErr != nil { - return errors.Wrapf(depErr, "failed to calculate hash for dependency %s with mode %d", dep.Name, depMode) + return fmt.Errorf("failed to calculate hash for dependency %s with mode %d: %w", dep.Name, depMode, depErr) } fmt.Fprintf(key, "import %s %s\n", dep.PkgPath, depHash) diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go index 6dc3b74b60..109edcb90c 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go @@ -4,6 +4,7 @@ import ( "bytes" "context" "crypto/sha256" + "fmt" "io" "os" "path/filepath" @@ -12,7 +13,6 @@ import ( "github.com/fatih/color" "github.com/gofrs/flock" - "github.com/pkg/errors" "github.com/spf13/cobra" "github.com/spf13/pflag" "gopkg.in/yaml.v3" @@ -30,13 +30,20 @@ import ( "github.com/golangci/golangci-lint/pkg/timeutils" ) +type BuildInfo struct { + GoVersion string `json:"goVersion"` + Version string `json:"version"` + Commit string `json:"commit"` + Date string `json:"date"` +} + type Executor struct { rootCmd *cobra.Command runCmd *cobra.Command lintersCmd *cobra.Command - exitCode int - version, commit, date string + exitCode int + buildInfo BuildInfo cfg *config.Config // cfg is the unmarshaled data from the golangci config file. log logutils.Log @@ -56,13 +63,11 @@ type Executor struct { } // NewExecutor creates and initializes a new command executor. 
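The cache, errorutil, and pkgcache hunks around this point (and the executor changes that follow) all replace github.com/pkg/errors with the standard library: errors.Wrap/Wrapf become fmt.Errorf with %w, and errors.Cause comparisons become errors.Is / errors.As. A small self-contained sketch of the pattern, using a hypothetical readEntry helper:

```go
package main

import (
	"errors"
	"fmt"
	"os"
)

var errMissing = errors.New("cache entry not found")

// readEntry is a stand-in for the kind of cache lookup shown above.
func readEntry(path string) ([]byte, error) {
	b, err := os.ReadFile(path)
	if err != nil {
		if os.IsNotExist(err) {
			return nil, errMissing
		}
		// Before: errors.Wrapf(err, "failed to read %s", path)
		return nil, fmt.Errorf("failed to read %s: %w", path, err)
	}
	return b, nil
}

func main() {
	if _, err := readEntry("does-not-exist"); errors.Is(err, errMissing) {
		// Before: errors.Cause(err) == errMissing
		fmt.Println("cache entry not found")
	}
}
```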
-func NewExecutor(version, commit, date string) *Executor { +func NewExecutor(buildInfo BuildInfo) *Executor { startedAt := time.Now() e := &Executor{ cfg: config.NewDefault(), - version: version, - commit: commit, - date: date, + buildInfo: buildInfo, DBManager: lintersdb.NewManager(nil, nil), debugf: logutils.Debug(logutils.DebugKeyExec), } @@ -135,7 +140,7 @@ func NewExecutor(version, commit, date string) *Executor { e.loadGuard = load.NewGuard() e.contextLoader = lint.NewContextLoader(e.cfg, e.log.Child(logutils.DebugKeyLoader), e.goenv, e.lineCache, e.fileCache, e.pkgCache, e.loadGuard) - if err = e.initHashSalt(version); err != nil { + if err = e.initHashSalt(buildInfo.Version); err != nil { e.log.Fatalf("Failed to init hash salt: %s", err) } e.debugf("Initialized executor in %s", time.Since(startedAt)) @@ -149,16 +154,15 @@ func (e *Executor) Execute() error { func (e *Executor) initHashSalt(version string) error { binSalt, err := computeBinarySalt(version) if err != nil { - return errors.Wrap(err, "failed to calculate binary salt") + return fmt.Errorf("failed to calculate binary salt: %w", err) } configSalt, err := computeConfigSalt(e.cfg) if err != nil { - return errors.Wrap(err, "failed to calculate config salt") + return fmt.Errorf("failed to calculate config salt: %w", err) } - var b bytes.Buffer - b.Write(binSalt) + b := bytes.NewBuffer(binSalt) b.Write(configSalt) cache.SetSalt(b.Bytes()) return nil @@ -195,11 +199,10 @@ func computeConfigSalt(cfg *config.Config) ([]byte, error) { lintersSettingsBytes, err := yaml.Marshal(cfg.LintersSettings) if err != nil { - return nil, errors.Wrap(err, "failed to json marshal config linter settings") + return nil, fmt.Errorf("failed to json marshal config linter settings: %w", err) } - var configData bytes.Buffer - configData.WriteString("linters-settings=") + configData := bytes.NewBufferString("linters-settings=") configData.Write(lintersSettingsBytes) configData.WriteString("\nbuild-tags=%s" + strings.Join(cfg.Run.BuildTags, ",")) diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go index 0fdcccff7d..5fe4c784d6 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go @@ -24,7 +24,7 @@ const ( func (e *Executor) persistentPreRun(_ *cobra.Command, _ []string) error { if e.cfg.Run.PrintVersion { - _, _ = fmt.Fprintf(logutils.StdOut, "golangci-lint has version %s built from %s on %s\n", e.version, e.commit, e.date) + _ = printVersion(logutils.StdOut, e.buildInfo) os.Exit(exitcodes.Success) // a return nil is not enough to stop the process because we are inside the `preRun`. 
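With this change, NewExecutor takes a single commands.BuildInfo value instead of separate version/commit/date strings. A condensed sketch of the new wiring, mirroring the cmd/golangci-lint/main.go hunk earlier in this patch (the build metadata values are placeholders normally injected by goreleaser):

```go
package main

import (
	"fmt"
	"os"
	"runtime/debug"

	"github.com/golangci/golangci-lint/pkg/commands"
	"github.com/golangci/golangci-lint/pkg/exitcodes"
)

func main() {
	info := commands.BuildInfo{
		GoVersion: "unknown",
		Version:   "master",
		Commit:    "?",
		Date:      "",
	}
	if bi, ok := debug.ReadBuildInfo(); ok {
		// The former mod_version.go init() now lives here.
		info.GoVersion = bi.GoVersion
	}

	e := commands.NewExecutor(info)
	if err := e.Execute(); err != nil {
		fmt.Fprintf(os.Stderr, "failed executing command with error %v\n", err)
		os.Exit(exitcodes.Failure)
	}
}
```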
} @@ -145,7 +145,7 @@ func (e *Executor) initRoot() { } func (e *Executor) needVersionOption() bool { - return e.date != "" + return e.buildInfo.Date != "" } func initRootFlagSet(fs *pflag.FlagSet, cfg *config.Config, needVersionOption bool) { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go index 8106dbdbd9..9149b177bc 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go @@ -2,6 +2,7 @@ package commands import ( "context" + "errors" "fmt" "io" "log" @@ -11,7 +12,6 @@ import ( "time" "github.com/fatih/color" - "github.com/pkg/errors" "github.com/spf13/cobra" "github.com/spf13/pflag" @@ -23,7 +23,6 @@ import ( "github.com/golangci/golangci-lint/pkg/packages" "github.com/golangci/golangci-lint/pkg/printers" "github.com/golangci/golangci-lint/pkg/result" - "github.com/golangci/golangci-lint/pkg/result/processors" ) const defaultFileMode = 0644 @@ -192,15 +191,6 @@ func initFlagSet(fs *pflag.FlagSet, cfg *config.Config, m *lintersdb.Manager, is true, "Goconst: ignore when constant is not used as function argument") hideFlag("goconst.ignore-calls") - // (@dixonwille) These flag is only used for testing purposes. - fs.StringSliceVar(&lsc.Depguard.Packages, "depguard.packages", nil, - "Depguard: packages to add to the list") - hideFlag("depguard.packages") - - fs.BoolVar(&lsc.Depguard.IncludeGoRoot, "depguard.include-go-root", false, - "Depguard: check list against standard lib") - hideFlag("depguard.include-go-root") - fs.IntVar(&lsc.Lll.TabWidth, "lll.tab-width", 1, "Lll: tab width in spaces") hideFlag("lll.tab-width") @@ -355,23 +345,17 @@ func (e *Executor) runAnalysis(ctx context.Context, args []string) ([]result.Iss lintCtx, err := e.contextLoader.Load(ctx, lintersToRun) if err != nil { - return nil, errors.Wrap(err, "context loading failed") + return nil, fmt.Errorf("context loading failed: %w", err) } lintCtx.Log = e.log.Child(logutils.DebugKeyLintersContext) runner, err := lint.NewRunner(e.cfg, e.log.Child(logutils.DebugKeyRunner), - e.goenv, e.EnabledLintersSet, e.lineCache, e.DBManager, lintCtx.Packages) - if err != nil { - return nil, err - } - - issues, err := runner.Run(ctx, lintersToRun, lintCtx) + e.goenv, e.EnabledLintersSet, e.lineCache, e.fileCache, e.DBManager, lintCtx.Packages) if err != nil { return nil, err } - fixer := processors.NewFixer(e.cfg, e.log, e.fileCache) - return fixer.Process(issues), nil + return runner.Run(ctx, lintersToRun, lintCtx) } func (e *Executor) setOutputToDevNull() (savedStdout, savedStderr *os.File) { @@ -418,7 +402,7 @@ func (e *Executor) runAndPrint(ctx context.Context, args []string) error { out = append(out, "") } - err := e.printReports(ctx, issues, out[1], out[0]) + err := e.printReports(issues, out[1], out[0]) if err != nil { return err } @@ -431,7 +415,7 @@ func (e *Executor) runAndPrint(ctx context.Context, args []string) error { return nil } -func (e *Executor) printReports(ctx context.Context, issues []result.Issue, path, format string) error { +func (e *Executor) printReports(issues []result.Issue, path, format string) error { w, shouldClose, err := e.createWriter(path) if err != nil { return fmt.Errorf("can't create output for %s: %w", path, err) @@ -445,7 +429,7 @@ func (e *Executor) printReports(ctx context.Context, issues []result.Issue, path return err } - if err = p.Print(ctx, issues); err != nil { + if err = p.Print(issues); err != 
nil { if file, ok := w.(io.Closer); shouldClose && ok { _ = file.Close() } @@ -482,8 +466,10 @@ func (e *Executor) createPrinter(format string, w io.Writer) (printers.Printer, p = printers.NewText(e.cfg.Output.PrintIssuedLine, format == config.OutFormatColoredLineNumber, e.cfg.Output.PrintLinterName, e.log.Child(logutils.DebugKeyTextPrinter), w) - case config.OutFormatTab: - p = printers.NewTab(e.cfg.Output.PrintLinterName, e.log.Child(logutils.DebugKeyTabPrinter), w) + case config.OutFormatTab, config.OutFormatColoredTab: + p = printers.NewTab(e.cfg.Output.PrintLinterName, + format == config.OutFormatColoredTab, + e.log.Child(logutils.DebugKeyTabPrinter), w) case config.OutFormatCheckstyle: p = printers.NewCheckstyle(w) case config.OutFormatCodeClimate: @@ -494,6 +480,8 @@ func (e *Executor) createPrinter(format string, w io.Writer) (printers.Printer, p = printers.NewJunitXML(w) case config.OutFormatGithubActions: p = printers.NewGithub(w) + case config.OutFormatTeamCity: + p = printers.NewTeamCity(w) default: return nil, fmt.Errorf("unknown output format %s", format) } @@ -522,7 +510,8 @@ func (e *Executor) executeRun(_ *cobra.Command, args []string) { if err := e.runAndPrint(ctx, args); err != nil { e.log.Errorf("Running error: %s", err) if e.exitCode == exitcodes.Success { - if exitErr, ok := errors.Cause(err).(*exitcodes.ExitError); ok { + var exitErr *exitcodes.ExitError + if errors.As(err, &exitErr) { e.exitCode = exitErr.Code } else { e.exitCode = exitcodes.Failure diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go index 93e4a8ed90..bb7732250f 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go @@ -3,7 +3,9 @@ package commands import ( "encoding/json" "fmt" + "io" "os" + "runtime/debug" "strings" "github.com/spf13/cobra" @@ -12,10 +14,9 @@ import ( "github.com/golangci/golangci-lint/pkg/config" ) -type jsonVersion struct { - Version string `json:"version"` - Commit string `json:"commit"` - Date string `json:"date"` +type versionInfo struct { + Info BuildInfo + BuildInfo *debug.BuildInfo } func (e *Executor) initVersionConfiguration(cmd *cobra.Command) { @@ -28,6 +29,7 @@ func initVersionFlagSet(fs *pflag.FlagSet, cfg *config.Config) { // Version config vc := &cfg.Version fs.StringVar(&vc.Format, "format", "", wh("The version's format can be: 'short', 'json'")) + fs.BoolVar(&vc.Debug, "debug", false, wh("Add build information")) } func (e *Executor) initVersion() { @@ -37,22 +39,35 @@ func (e *Executor) initVersion() { Args: cobra.NoArgs, ValidArgsFunction: cobra.NoFileCompletions, RunE: func(cmd *cobra.Command, _ []string) error { + if e.cfg.Version.Debug { + info, ok := debug.ReadBuildInfo() + if !ok { + return nil + } + + switch strings.ToLower(e.cfg.Version.Format) { + case "json": + return json.NewEncoder(os.Stdout).Encode(versionInfo{ + Info: e.buildInfo, + BuildInfo: info, + }) + + default: + fmt.Println(info.String()) + return printVersion(os.Stdout, e.buildInfo) + } + } + switch strings.ToLower(e.cfg.Version.Format) { case "short": - fmt.Println(e.version) + fmt.Println(e.buildInfo.Version) return nil case "json": - ver := jsonVersion{ - Version: e.version, - Commit: e.commit, - Date: e.date, - } - return json.NewEncoder(os.Stdout).Encode(&ver) + return json.NewEncoder(os.Stdout).Encode(e.buildInfo) default: - fmt.Printf("golangci-lint has version %s built from 
%s on %s\n", e.version, e.commit, e.date) - return nil + return printVersion(os.Stdout, e.buildInfo) } }, } @@ -60,3 +75,9 @@ func (e *Executor) initVersion() { e.rootCmd.AddCommand(versionCmd) e.initVersionConfiguration(versionCmd) } + +func printVersion(w io.Writer, buildInfo BuildInfo) error { + _, err := fmt.Fprintf(w, "golangci-lint has version %s built with %s from %s on %s\n", + buildInfo.Version, buildInfo.GoVersion, buildInfo.Commit, buildInfo.Date) + return err +} diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/config/config.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/config/config.go index 9536c80cd0..af40c63bdb 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/config/config.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/config/config.go @@ -38,6 +38,7 @@ func NewDefault() *Config { type Version struct { Format string `mapstructure:"format"` + Debug bool `mapstructure:"debug"` } func IsGreaterThanOrEqualGo118(v string) bool { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go index b2437ec974..417b28bdbf 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go @@ -125,21 +125,25 @@ type ExcludeRule struct { BaseRule `mapstructure:",squash"` } -func (e ExcludeRule) Validate() error { +func (e *ExcludeRule) Validate() error { return e.BaseRule.Validate(excludeRuleMinConditionsCount) } type BaseRule struct { - Linters []string - Path string - Text string - Source string + Linters []string + Path string + PathExcept string `mapstructure:"path-except"` + Text string + Source string } -func (b BaseRule) Validate(minConditionsCount int) error { +func (b *BaseRule) Validate(minConditionsCount int) error { if err := validateOptionalRegex(b.Path); err != nil { return fmt.Errorf("invalid path regex: %v", err) } + if err := validateOptionalRegex(b.PathExcept); err != nil { + return fmt.Errorf("invalid path-except regex: %v", err) + } if err := validateOptionalRegex(b.Text); err != nil { return fmt.Errorf("invalid text regex: %v", err) } @@ -150,7 +154,10 @@ func (b BaseRule) Validate(minConditionsCount int) error { if len(b.Linters) > 0 { nonBlank++ } - if b.Path != "" { + // Filtering by path counts as one condition, regardless how it is done (one or both). + // Otherwise, a rule with Path and PathExcept set would pass validation + // whereas before the introduction of path-except that wouldn't have been precise enough. 
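The new path-except condition can be exercised through the exported config types; a hedged sketch (linter name and pattern invented) of a rule that applies everywhere except test files, which should pass validation since linters plus path-except count as two conditions:

```go
package main

import (
	"fmt"

	"github.com/golangci/golangci-lint/pkg/config"
)

func main() {
	rule := config.ExcludeRule{
		BaseRule: config.BaseRule{
			Linters:    []string{"dupl"},
			PathExcept: `_test\.go`, // exclude dupl findings everywhere except tests
		},
	}

	if err := rule.Validate(); err != nil {
		fmt.Println("invalid exclude rule:", err)
		return
	}
	fmt.Println("exclude rule is valid")
}
```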
+ if b.Path != "" || b.PathExcept != "" { nonBlank++ } if b.Text != "" { @@ -160,7 +167,7 @@ func (b BaseRule) Validate(minConditionsCount int) error { nonBlank++ } if nonBlank < minConditionsCount { - return fmt.Errorf("at least %d of (text, source, path, linters) should be set", minConditionsCount) + return fmt.Errorf("at least %d of (text, source, path[-except], linters) should be set", minConditionsCount) } return nil } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go index 76a16b87d6..b520ea4c6e 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go @@ -1,9 +1,11 @@ package config import ( + "encoding" + "errors" "runtime" - "github.com/pkg/errors" + "gopkg.in/yaml.v3" ) var defaultLintersSettings = LintersSettings{ @@ -20,9 +22,10 @@ var defaultLintersSettings = LintersSettings{ MaxBlankIdentifiers: 2, }, ErrorLint: ErrorLintSettings{ - Errorf: true, - Asserts: true, - Comparison: true, + Errorf: true, + ErrorfMulti: true, + Asserts: true, + Comparison: true, }, Exhaustive: ExhaustiveSettings{ Check: []string{"switch"}, @@ -61,6 +64,12 @@ var defaultLintersSettings = LintersSettings{ Gosec: GoSecSettings{ Concurrency: runtime.NumCPU(), }, + Gosmopolitan: GosmopolitanSettings{ + AllowTimeLocal: false, + EscapeHatches: []string{}, + IgnoreTests: true, + WatchForScripts: []string{"Han"}, + }, Ifshort: IfshortSettings{ MaxDeclLines: 1, MaxDeclChars: 30, @@ -104,6 +113,11 @@ var defaultLintersSettings = LintersSettings{ Ignore: "", Qualified: false, }, + TagAlign: TagAlignSettings{ + Align: true, + Sort: true, + Order: nil, + }, Testpackage: TestpackageSettings{ SkipRegexp: `(export|internal)_test\.go`, AllowPackages: []string{"main"}, @@ -111,6 +125,10 @@ var defaultLintersSettings = LintersSettings{ Unparam: UnparamSettings{ Algo: "cha", }, + UseStdlibVars: UseStdlibVarsSettings{ + HTTPMethod: true, + HTTPStatusCode: true, + }, Varnamelen: VarnamelenSettings{ MaxDistance: 5, MinNameLength: 3, @@ -167,6 +185,7 @@ type LintersSettings struct { Gomodguard GoModGuardSettings Gosec GoSecSettings Gosimple StaticCheckSettings + Gosmopolitan GosmopolitanSettings Govet GovetSettings Grouper GrouperSettings Ifshort IfshortSettings @@ -196,12 +215,12 @@ type LintersSettings struct { Staticcheck StaticCheckSettings Structcheck StructCheckSettings Stylecheck StaticCheckSettings + TagAlign TagAlignSettings Tagliatelle TagliatelleSettings Tenv TenvSettings Testpackage TestpackageSettings Thelper ThelperSettings Unparam UnparamSettings - Unused StaticCheckSettings UseStdlibVars UseStdlibVarsSettings Varcheck VarCheckSettings Varnamelen VarnamelenSettings @@ -237,12 +256,18 @@ type Cyclop struct { } type DepGuardSettings struct { - ListType string `mapstructure:"list-type"` - Packages []string - IncludeGoRoot bool `mapstructure:"include-go-root"` - PackagesWithErrorMessage map[string]string `mapstructure:"packages-with-error-message"` - IgnoreFileRules []string `mapstructure:"ignore-file-rules"` - AdditionalGuards []DepGuardSettings `mapstructure:"additional-guards"` + Rules map[string]*DepGuardList `mapstructure:"rules"` +} + +type DepGuardList struct { + Files []string `mapstructure:"files"` + Allow []string `mapstructure:"allow"` + Deny []DepGuardDeny `mapstructure:"deny"` +} + +type DepGuardDeny struct { + Pkg string `mapstructure:"pkg"` + Desc string 
`mapstructure:"desc"` } type DecorderSettings struct { @@ -281,9 +306,10 @@ type ErrChkJSONSettings struct { } type ErrorLintSettings struct { - Errorf bool `mapstructure:"errorf"` - Asserts bool `mapstructure:"asserts"` - Comparison bool `mapstructure:"comparison"` + Errorf bool `mapstructure:"errorf"` + ErrorfMulti bool `mapstructure:"errorf-multi"` + Asserts bool `mapstructure:"asserts"` + Comparison bool `mapstructure:"comparison"` } type ExhaustiveSettings struct { @@ -307,8 +333,45 @@ type ExhaustructSettings struct { } type ForbidigoSettings struct { - Forbid []string `mapstructure:"forbid"` - ExcludeGodocExamples bool `mapstructure:"exclude-godoc-examples"` + Forbid []ForbidigoPattern `mapstructure:"forbid"` + ExcludeGodocExamples bool `mapstructure:"exclude-godoc-examples"` + AnalyzeTypes bool `mapstructure:"analyze-types"` +} + +var _ encoding.TextUnmarshaler = &ForbidigoPattern{} + +// ForbidigoPattern corresponds to forbidigo.pattern and adds mapstructure support. +// The YAML field names must match what forbidigo expects. +type ForbidigoPattern struct { + // patternString gets populated when the config contains a string as entry in ForbidigoSettings.Forbid[] + // because ForbidigoPattern implements encoding.TextUnmarshaler + // and the reader uses the mapstructure.TextUnmarshallerHookFunc as decoder hook. + // + // If the entry is a map, then the other fields are set as usual by mapstructure. + patternString string + + Pattern string `yaml:"p" mapstructure:"p"` + Package string `yaml:"pkg,omitempty" mapstructure:"pkg,omitempty"` + Msg string `yaml:"msg,omitempty" mapstructure:"msg,omitempty"` +} + +func (p *ForbidigoPattern) UnmarshalText(text []byte) error { + // Validation happens when instantiating forbidigo. + p.patternString = string(text) + return nil +} + +// MarshalString converts the pattern into a string as needed by forbidigo.NewLinter. +// +// MarshalString is intentionally not called MarshalText, +// although it has the same signature +// because implementing encoding.TextMarshaler led to infinite recursion when yaml.Marshal called MarshalText. 
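A short usage sketch for the ForbidigoPattern type introduced here (its MarshalString helper continues just below); the patterns and message are invented, and the marshaled output shape follows the yaml struct tags:

```go
package main

import (
	"fmt"

	"github.com/golangci/golangci-lint/pkg/config"
)

func main() {
	// Plain-string form: populated via UnmarshalText when the config entry
	// is just a string.
	var plain config.ForbidigoPattern
	_ = plain.UnmarshalText([]byte(`^fmt\.Print.*$`))
	s, _ := plain.MarshalString()
	fmt.Println(string(s)) // the original pattern string, unchanged

	// Structured form: populated field by field when the config entry is a map.
	structured := config.ForbidigoPattern{
		Pattern: `^fmt\.Println$`,
		Msg:     "use the project logger instead",
	}
	t, _ := structured.MarshalString()
	fmt.Println(string(t)) // YAML document with p/msg keys
}
```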
+func (p *ForbidigoPattern) MarshalString() ([]byte, error) { + if p.patternString != "" { + return []byte(p.patternString), nil + } + + return yaml.Marshal(p) } type FunlenSettings struct { @@ -324,9 +387,12 @@ type GciSettings struct { } type GinkgoLinterSettings struct { - SuppressLenAssertion bool `mapstructure:"suppress-len-assertion"` - SuppressNilAssertion bool `mapstructure:"suppress-nil-assertion"` - SuppressErrAssertion bool `mapstructure:"suppress-err-assertion"` + SuppressLenAssertion bool `mapstructure:"suppress-len-assertion"` + SuppressNilAssertion bool `mapstructure:"suppress-nil-assertion"` + SuppressErrAssertion bool `mapstructure:"suppress-err-assertion"` + SuppressCompareAssertion bool `mapstructure:"suppress-compare-assertion"` + SuppressAsyncAssertion bool `mapstructure:"suppress-async-assertion"` + AllowHaveLenZero bool `mapstructure:"allow-havelen-zero"` } type GocognitSettings struct { @@ -345,6 +411,7 @@ type GoConstSettings struct { } type GoCriticSettings struct { + Go string `mapstructure:"-"` EnabledChecks []string `mapstructure:"enabled-checks"` DisabledChecks []string `mapstructure:"disabled-checks"` EnabledTags []string `mapstructure:"enabled-tags"` @@ -352,7 +419,7 @@ type GoCriticSettings struct { SettingsPerCheck map[string]GoCriticCheckSettings `mapstructure:"settings"` } -type GoCriticCheckSettings map[string]interface{} +type GoCriticCheckSettings map[string]any type GoCycloSettings struct { MinComplexity int `mapstructure:"min-complexity"` @@ -405,11 +472,11 @@ type GoLintSettings struct { } type GoMndSettings struct { - Settings map[string]map[string]interface{} // Deprecated - Checks []string `mapstructure:"checks"` - IgnoredNumbers []string `mapstructure:"ignored-numbers"` - IgnoredFiles []string `mapstructure:"ignored-files"` - IgnoredFunctions []string `mapstructure:"ignored-functions"` + Settings map[string]map[string]any // Deprecated + Checks []string `mapstructure:"checks"` + IgnoredNumbers []string `mapstructure:"ignored-numbers"` + IgnoredFiles []string `mapstructure:"ignored-files"` + IgnoredFunctions []string `mapstructure:"ignored-functions"` } type GoModDirectivesSettings struct { @@ -438,19 +505,26 @@ type GoModGuardSettings struct { } type GoSecSettings struct { - Includes []string `mapstructure:"includes"` - Excludes []string `mapstructure:"excludes"` - Severity string `mapstructure:"severity"` - Confidence string `mapstructure:"confidence"` - ExcludeGenerated bool `mapstructure:"exclude-generated"` - Config map[string]interface{} `mapstructure:"config"` - Concurrency int `mapstructure:"concurrency"` + Includes []string `mapstructure:"includes"` + Excludes []string `mapstructure:"excludes"` + Severity string `mapstructure:"severity"` + Confidence string `mapstructure:"confidence"` + ExcludeGenerated bool `mapstructure:"exclude-generated"` + Config map[string]any `mapstructure:"config"` + Concurrency int `mapstructure:"concurrency"` +} + +type GosmopolitanSettings struct { + AllowTimeLocal bool `mapstructure:"allow-time-local"` + EscapeHatches []string `mapstructure:"escape-hatches"` + IgnoreTests bool `mapstructure:"ignore-tests"` + WatchForScripts []string `mapstructure:"watch-for-scripts"` } type GovetSettings struct { Go string `mapstructure:"-"` CheckShadowing bool `mapstructure:"check-shadowing"` - Settings map[string]map[string]interface{} + Settings map[string]map[string]any Enable []string Disable []string @@ -605,7 +679,7 @@ type ReviveSettings struct { EnableAllRules bool `mapstructure:"enable-all-rules"` Rules []struct { 
Name string - Arguments []interface{} + Arguments []any Severity string Disabled bool } @@ -639,6 +713,12 @@ type StructCheckSettings struct { CheckExportedFields bool `mapstructure:"exported-fields"` } +type TagAlignSettings struct { + Align bool `mapstructure:"align"` + Sort bool `mapstructure:"sort"` + Order []string `mapstructure:"order"` +} + type TagliatelleSettings struct { Case struct { Rules map[string]string diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/config/output.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/config/output.go index d67f110f67..e872639205 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/config/output.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/config/output.go @@ -5,11 +5,13 @@ const ( OutFormatLineNumber = "line-number" OutFormatColoredLineNumber = "colored-line-number" OutFormatTab = "tab" + OutFormatColoredTab = "colored-tab" OutFormatCheckstyle = "checkstyle" OutFormatCodeClimate = "code-climate" OutFormatHTML = "html" OutFormatJunitXML = "junit-xml" OutFormatGithubActions = "github-actions" + OutFormatTeamCity = "teamcity" ) var OutFormats = []string{ @@ -22,15 +24,18 @@ var OutFormats = []string{ OutFormatHTML, OutFormatJunitXML, OutFormatGithubActions, + OutFormatTeamCity, } type Output struct { Format string - Color string PrintIssuedLine bool `mapstructure:"print-issued-lines"` PrintLinterName bool `mapstructure:"print-linter-name"` UniqByLine bool `mapstructure:"uniq-by-line"` SortResults bool `mapstructure:"sort-results"` PrintWelcomeMessage bool `mapstructure:"print-welcome"` PathPrefix string `mapstructure:"path-prefix"` + + // only work with CLI flags because the setup of logs is done before the config file parsing. + Color string } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go index 2dfd3c06c9..de203876e9 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go @@ -8,12 +8,13 @@ import ( "strings" "github.com/mitchellh/go-homedir" + "github.com/mitchellh/mapstructure" "github.com/spf13/viper" + "golang.org/x/exp/slices" "github.com/golangci/golangci-lint/pkg/exitcodes" "github.com/golangci/golangci-lint/pkg/fsutils" "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/sliceutil" ) type FileReader struct { @@ -72,18 +73,33 @@ func (r *FileReader) parseConfig() error { return nil } - usedConfigFile, err := fsutils.ShortestRelPath(usedConfigFile, "") - if err != nil { - r.log.Warnf("Can't pretty print config file path: %s", err) + if usedConfigFile == os.Stdin.Name() { + usedConfigFile = "" + r.log.Infof("Reading config file stdin") + } else { + var err error + usedConfigFile, err = fsutils.ShortestRelPath(usedConfigFile, "") + if err != nil { + r.log.Warnf("Can't pretty print config file path: %v", err) + } + + r.log.Infof("Used config file %s", usedConfigFile) } - r.log.Infof("Used config file %s", usedConfigFile) - usedConfigDir := filepath.Dir(usedConfigFile) - if usedConfigDir, err = filepath.Abs(usedConfigDir); err != nil { + + usedConfigDir, err := filepath.Abs(filepath.Dir(usedConfigFile)) + if err != nil { return errors.New("can't get config directory") } r.cfg.cfgDir = usedConfigDir - if err := viper.Unmarshal(r.cfg); err != nil { + if err := viper.Unmarshal(r.cfg, viper.DecodeHook(mapstructure.ComposeDecodeHookFunc( + // Default hooks 
(https://github.com/spf13/viper/blob/518241257478c557633ab36e474dfcaeb9a3c623/viper.go#L135-L138). + mapstructure.StringToTimeDurationHookFunc(), + mapstructure.StringToSliceHookFunc(","), + + // Needed for forbidigo. + mapstructure.TextUnmarshallerHookFunc(), + ))); err != nil { return fmt.Errorf("can't unmarshal config by viper: %s", err) } @@ -195,7 +211,7 @@ func (r *FileReader) setupConfigFileSearch() { // find home directory for global config if home, err := homedir.Dir(); err != nil { r.log.Warnf("Can't get user's home directory: %s", err.Error()) - } else if !sliceutil.Contains(configSearchPaths, home) { + } else if !slices.Contains(configSearchPaths, home) { configSearchPaths = append(configSearchPaths, home) } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/fsutils/filecache.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/fsutils/filecache.go index 04c66823df..e8e5ba19b7 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/fsutils/filecache.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/fsutils/filecache.go @@ -5,8 +5,6 @@ import ( "os" "sync" - "github.com/pkg/errors" - "github.com/golangci/golangci-lint/pkg/logutils" ) @@ -26,7 +24,7 @@ func (fc *FileCache) GetFileBytes(filePath string) ([]byte, error) { fileBytes, err := os.ReadFile(filePath) if err != nil { - return nil, errors.Wrapf(err, "can't read file %s", filePath) + return nil, fmt.Errorf("can't read file %s: %w", filePath, err) } fc.files.Store(filePath, fileBytes) @@ -56,7 +54,7 @@ func PrettifyBytesCount(n int64) string { func (fc *FileCache) PrintStats(log logutils.Log) { var size int64 var mapLen int - fc.files.Range(func(_, fileBytes interface{}) bool { + fc.files.Range(func(_, fileBytes any) bool { mapLen++ size += int64(len(fileBytes.([]byte))) diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/fsutils/files.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/fsutils/files.go new file mode 100644 index 0000000000..4398ab9fc1 --- /dev/null +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/fsutils/files.go @@ -0,0 +1,33 @@ +package fsutils + +import "path/filepath" + +// Files combines different operations related to handling file paths and content. +type Files struct { + *LineCache + pathPrefix string +} + +func NewFiles(lc *LineCache, pathPrefix string) *Files { + return &Files{ + LineCache: lc, + pathPrefix: pathPrefix, + } +} + +// WithPathPrefix takes a path that is relative to the current directory (as used in issues) +// and adds the configured path prefix, if there is one. +// The resulting path then can be shown to the user or compared against paths specified in the configuration. +func (f *Files) WithPathPrefix(relativePath string) string { + return WithPathPrefix(f.pathPrefix, relativePath) +} + +// WithPathPrefix takes a path that is relative to the current directory (as used in issues) +// and adds the configured path prefix, if there is one. +// The resulting path then can be shown to the user or compared against paths specified in the configuration. 
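A small usage sketch for the new fsutils.Files helper added here; it assumes the package's existing NewFileCache and NewLineCache constructors, and the prefix and path are invented:

```go
package main

import (
	"fmt"

	"github.com/golangci/golangci-lint/pkg/fsutils"
)

func main() {
	fileCache := fsutils.NewFileCache()
	lineCache := fsutils.NewLineCache(fileCache)

	files := fsutils.NewFiles(lineCache, "backend")
	fmt.Println(files.WithPathPrefix("pkg/commands/run.go")) // => backend/pkg/commands/run.go

	// Package-level variant for callers that only have the prefix at hand.
	fmt.Println(fsutils.WithPathPrefix("", "pkg/commands/run.go")) // => unchanged
}
```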
+func WithPathPrefix(pathPrefix, relativePath string) string { + if pathPrefix == "" { + return relativePath + } + return filepath.Join(pathPrefix, relativePath) +} diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/fsutils/linecache.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/fsutils/linecache.go index b02751537e..2e92264846 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/fsutils/linecache.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/fsutils/linecache.go @@ -4,8 +4,6 @@ import ( "bytes" "fmt" "sync" - - "github.com/pkg/errors" ) type fileLinesCache [][]byte @@ -39,7 +37,7 @@ func (lc *LineCache) GetLine(filePath string, index1 int) (string, error) { func (lc *LineCache) getRawLine(filePath string, index0 int) ([]byte, error) { fc, err := lc.getFileCache(filePath) if err != nil { - return nil, errors.Wrapf(err, "failed to get file %s lines cache", filePath) + return nil, fmt.Errorf("failed to get file %s lines cache: %w", filePath, err) } if index0 < 0 { @@ -61,7 +59,7 @@ func (lc *LineCache) getFileCache(filePath string) (fileLinesCache, error) { fileBytes, err := lc.fileCache.GetFileBytes(filePath) if err != nil { - return nil, errors.Wrapf(err, "can't get file %s bytes from cache", filePath) + return nil, fmt.Errorf("can't get file %s bytes from cache: %w", filePath, err) } fc := bytes.Split(fileBytes, []byte("\n")) diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk.go index 44215b7e90..e1b3471764 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/bidichk.go @@ -13,7 +13,7 @@ import ( func NewBiDiChkFuncName(cfg *config.BiDiChkSettings) *goanalysis.Linter { a := bidichk.NewAnalyzer() - cfgMap := map[string]map[string]interface{}{} + cfgMap := map[string]map[string]any{} if cfg != nil { var opts []string @@ -45,7 +45,7 @@ func NewBiDiChkFuncName(cfg *config.BiDiChkSettings) *goanalysis.Linter { opts = append(opts, "POP-DIRECTIONAL-ISOLATE") } - cfgMap[a.Name] = map[string]interface{}{ + cfgMap[a.Name] = map[string]any{ "disallowed-runes": strings.Join(opts, ","), } } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx.go index 8592eef1f9..8f7859af7d 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/containedctx.go @@ -15,5 +15,5 @@ func NewContainedCtx() *goanalysis.Linter { a.Doc, []*analysis.Analyzer{a}, nil, - ).WithLoadMode(goanalysis.LoadModeSyntax) + ).WithLoadMode(goanalysis.LoadModeTypesInfo) } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop.go index 6f55b27975..5ad65f122c 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop.go @@ -13,9 +13,9 @@ const cyclopName = "cyclop" func NewCyclop(settings *config.Cyclop) *goanalysis.Linter { a := analyzer.NewAnalyzer() - var cfg map[string]map[string]interface{} + var cfg map[string]map[string]any if settings != nil { - d := map[string]interface{}{ + d := map[string]any{ "skipTests": settings.SkipTests, } @@ -27,7 +27,7 @@ func NewCyclop(settings 
*config.Cyclop) *goanalysis.Linter { d["packageAverage"] = settings.PackageAverage } - cfg = map[string]map[string]interface{}{a.Name: d} + cfg = map[string]map[string]any{a.Name: d} } return goanalysis.NewLinter( diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/deadcode.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/deadcode.go index 408b180b93..4f563c3813 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/deadcode.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/deadcode.go @@ -21,7 +21,7 @@ func NewDeadcode() *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: deadcodeName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { prog := goanalysis.MakeFakeLoaderProgram(pass) issues, err := deadcodeAPI.Run(prog) diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder.go index 1c93acaa2c..9d492c4e82 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/decorder.go @@ -14,7 +14,7 @@ func NewDecorder(settings *config.DecorderSettings) *goanalysis.Linter { a := decorder.Analyzer // disable all rules/checks by default - cfg := map[string]interface{}{ + cfg := map[string]any{ "disable-dec-num-check": true, "disable-dec-order-check": true, "disable-init-func-first-check": true, @@ -31,6 +31,6 @@ func NewDecorder(settings *config.DecorderSettings) *goanalysis.Linter { a.Name, a.Doc, []*analysis.Analyzer{a}, - map[string]map[string]interface{}{a.Name: cfg}, + map[string]map[string]any{a.Name: cfg}, ).WithLoadMode(goanalysis.LoadModeSyntax) } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go index eb7b0f3304..23986708c9 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go @@ -1,200 +1,49 @@ package golinters import ( - "fmt" - "strings" - "sync" - - "github.com/OpenPeeDeeP/depguard" + "github.com/OpenPeeDeeP/depguard/v2" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/loader" //nolint:staticcheck // require changes in github.com/OpenPeeDeeP/depguard "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/fsutils" "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" ) -const depguardName = "depguard" - func NewDepguard(settings *config.DepGuardSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue + conf := depguard.LinterSettings{} - analyzer := &analysis.Analyzer{ - Name: depguardName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: goanalysis.DummyRun, - } - - return goanalysis.NewLinter( - depguardName, - "Go linter that checks if package imports are in a list of acceptable packages", - []*analysis.Analyzer{analyzer}, - nil, - ).WithContextSetter(func(lintCtx *linter.Context) { - dg, err := newDepGuard(settings) - - analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { - if err != nil { - return nil, err + if settings != nil { + for s, rule := range settings.Rules { + list := &depguard.List{ + Files: rule.Files, + 
Allow: rule.Allow, } - issues, errRun := dg.run(pass) - if errRun != nil { - return nil, errRun - } + // because of bug with Viper parsing (split on dot) we use a list of struct instead of a map. + // https://github.com/spf13/viper/issues/324 + // https://github.com/golangci/golangci-lint/issues/3749#issuecomment-1492536630 - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() + deny := map[string]string{} + for _, r := range rule.Deny { + deny[r.Pkg] = r.Desc + } + list.Deny = deny - return nil, nil + conf[s] = list } - }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -type depGuard struct { - loadConfig *loader.Config - guardians []*guardian -} - -func newDepGuard(settings *config.DepGuardSettings) (*depGuard, error) { - ps, err := newGuardian(settings) - if err != nil { - return nil, err } - d := &depGuard{ - loadConfig: &loader.Config{ - Cwd: "", // fallbacked to os.Getcwd - Build: nil, // fallbacked to build.Default - }, - guardians: []*guardian{ps}, - } + a := depguard.NewUncompiledAnalyzer(&conf) - for _, additional := range settings.AdditionalGuards { - add := additional - ps, err = newGuardian(&add) + return goanalysis.NewLinter( + a.Analyzer.Name, + a.Analyzer.Doc, + []*analysis.Analyzer{a.Analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + err := a.Compile() if err != nil { - return nil, err - } - - d.guardians = append(d.guardians, ps) - } - - return d, nil -} - -func (d depGuard) run(pass *analysis.Pass) ([]goanalysis.Issue, error) { - prog := goanalysis.MakeFakeLoaderProgram(pass) - - var resIssues []goanalysis.Issue - for _, g := range d.guardians { - issues, errRun := g.run(d.loadConfig, prog, pass) - if errRun != nil { - return nil, errRun - } - - resIssues = append(resIssues, issues...) 
- } - - return resIssues, nil -} - -type guardian struct { - *depguard.Depguard - pkgsWithErrorMessage map[string]string -} - -func newGuardian(settings *config.DepGuardSettings) (*guardian, error) { - var ignoreFileRules []string - for _, rule := range settings.IgnoreFileRules { - ignoreFileRules = append(ignoreFileRules, fsutils.NormalizePathInRegex(rule)) - } - - dg := &depguard.Depguard{ - Packages: settings.Packages, - IncludeGoRoot: settings.IncludeGoRoot, - IgnoreFileRules: ignoreFileRules, - } - - var err error - dg.ListType, err = getDepGuardListType(settings.ListType) - if err != nil { - return nil, err - } - - // if the list type was a denylist the packages with error messages should be included in the denylist package list - if dg.ListType == depguard.LTBlacklist { - noMessagePackages := make(map[string]bool) - for _, pkg := range dg.Packages { - noMessagePackages[pkg] = true + lintCtx.Log.Errorf("create analyzer: %v", err) } - - for pkg := range settings.PackagesWithErrorMessage { - if _, ok := noMessagePackages[pkg]; !ok { - dg.Packages = append(dg.Packages, pkg) - } - } - } - - return &guardian{ - Depguard: dg, - pkgsWithErrorMessage: settings.PackagesWithErrorMessage, - }, nil -} - -func (g guardian) run(loadConfig *loader.Config, prog *loader.Program, pass *analysis.Pass) ([]goanalysis.Issue, error) { - issues, err := g.Run(loadConfig, prog) - if err != nil { - return nil, err - } - - res := make([]goanalysis.Issue, 0, len(issues)) - - for _, issue := range issues { - res = append(res, - goanalysis.NewIssue(&result.Issue{ - Pos: issue.Position, - Text: g.createMsg(issue.PackageName), - FromLinter: depguardName, - }, pass), - ) - } - - return res, nil -} - -func (g guardian) createMsg(pkgName string) string { - msgSuffix := "is in the denylist" - if g.ListType == depguard.LTWhitelist { - msgSuffix = "is not in the allowlist" - } - - var userSuppliedMsgSuffix string - if g.pkgsWithErrorMessage != nil { - userSuppliedMsgSuffix = g.pkgsWithErrorMessage[pkgName] - if userSuppliedMsgSuffix != "" { - userSuppliedMsgSuffix = ": " + userSuppliedMsgSuffix - } - } - - return fmt.Sprintf("%s %s%s", formatCode(pkgName, nil), msgSuffix, userSuppliedMsgSuffix) -} - -func getDepGuardListType(listType string) (depguard.ListType, error) { - if listType == "" { - return depguard.LTBlacklist, nil - } - - listT, found := depguard.StringToListType[strings.ToLower(listType)] - if !found { - return depguard.LTBlacklist, fmt.Errorf("unsure what list type %s is", listType) - } - - return listT, nil + }).WithLoadMode(goanalysis.LoadModeSyntax) } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled.go index 00c32c2dc9..79502fe8be 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled.go @@ -24,7 +24,7 @@ func NewDogsled(settings *config.DogsledSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: dogsledName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues := runDogsled(pass, settings) if len(issues) == 0 { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl.go index fe7b127735..5d772a5f2f 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl.go +++ 
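The depguard rewrite above moves from v1's single allow/deny package lists to v2's named rule lists. The sketch below mirrors the conversion performed in the new NewDepguard using local stand-in types (denyEntry, guardRule and depguardList are illustrative, not the real config.DepGuardSettings or depguard/v2 types): deny rules are kept as a slice of {pkg, desc} pairs to dodge Viper's dot-splitting of map keys, then flattened into the map depguard expects.

package main

import "fmt"

type denyEntry struct{ Pkg, Desc string }

type guardRule struct {
	Files []string
	Allow []string
	Deny  []denyEntry
}

type depguardList struct {
	Files []string
	Allow []string
	Deny  map[string]string
}

// toLinterSettings mirrors the loop in the new NewDepguard: each named rule
// becomes a list, and its deny slice is flattened into a pkg->description map.
func toLinterSettings(rules map[string]guardRule) map[string]*depguardList {
	conf := map[string]*depguardList{}
	for name, r := range rules {
		l := &depguardList{Files: r.Files, Allow: r.Allow, Deny: map[string]string{}}
		for _, d := range r.Deny {
			l.Deny[d.Pkg] = d.Desc
		}
		conf[name] = l
	}
	return conf
}

func main() {
	conf := toLinterSettings(map[string]guardRule{
		"main": {
			Allow: []string{"$gostd"},
			Deny:  []denyEntry{{Pkg: "github.com/pkg/errors", Desc: "use stdlib errors instead"}},
		},
	})
	fmt.Printf("%+v\n", conf["main"])
}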
b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl.go @@ -6,7 +6,6 @@ import ( "sync" duplAPI "github.com/golangci/dupl" - "github.com/pkg/errors" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/config" @@ -26,7 +25,7 @@ func NewDupl(settings *config.DuplSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: duplName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues, err := runDupl(pass, settings) if err != nil { return nil, err @@ -71,7 +70,7 @@ func runDupl(pass *analysis.Pass, settings *config.DuplSettings) ([]goanalysis.I for _, i := range issues { toFilename, err := fsutils.ShortestRelPath(i.To.Filename(), "") if err != nil { - return nil, errors.Wrapf(err, "failed to get shortest rel path for %q", i.To.Filename()) + return nil, fmt.Errorf("failed to get shortest rel path for %q: %w", i.To.Filename(), err) } dupl := fmt.Sprintf("%s:%d-%d", toFilename, i.To.LineStart(), i.To.LineEnd()) diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword.go index ae85a6d002..f5a99bc0df 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupword.go @@ -13,9 +13,9 @@ import ( func NewDupWord(setting *config.DupWordSettings) *goanalysis.Linter { a := dupword.NewAnalyzer() - cfgMap := map[string]map[string]interface{}{} + cfgMap := map[string]map[string]any{} if setting != nil { - cfgMap[a.Name] = map[string]interface{}{ + cfgMap[a.Name] = map[string]any{ "keyword": strings.Join(setting.Keywords, ","), } } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck.go index 53fe22e68c..89b18519c9 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck.go @@ -11,7 +11,6 @@ import ( "sync" "github.com/kisielk/errcheck/errcheck" - "github.com/pkg/errors" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/packages" @@ -36,8 +35,8 @@ func NewErrcheck(settings *config.ErrcheckSettings) *goanalysis.Linter { return goanalysis.NewLinter( errcheckName, - "Errcheck is a program for checking for unchecked errors "+ - "in go programs. These unchecked errors can be critical bugs in some cases", + "errcheck is a program for checking for unchecked errors in Go code. 
"+ + "These unchecked errors can be critical bugs in some cases", []*analysis.Analyzer{analyzer}, nil, ).WithContextSetter(func(lintCtx *linter.Context) { @@ -50,7 +49,7 @@ func NewErrcheck(settings *config.ErrcheckSettings) *goanalysis.Linter { checker.Tags = lintCtx.Cfg.Run.BuildTags - analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + analyzer.Run = func(pass *analysis.Pass) (any, error) { issues := runErrCheck(lintCtx, pass, checker) if err != nil { return nil, err @@ -143,7 +142,7 @@ func parseIgnoreConfig(s string) (map[string]*regexp.Regexp, error) { func getChecker(errCfg *config.ErrcheckSettings) (*errcheck.Checker, error) { ignoreConfig, err := parseIgnoreConfig(errCfg.Ignore) if err != nil { - return nil, errors.Wrap(err, "failed to parse 'ignore' directive") + return nil, fmt.Errorf("failed to parse 'ignore' directive: %w", err) } checker := errcheck.Checker{ @@ -252,7 +251,7 @@ func readExcludeFile(name string) ([]string, error) { } if fh == nil { - return nil, errors.Wrapf(err, "failed reading exclude file: %s", name) + return nil, fmt.Errorf("failed reading exclude file: %s: %w", name, err) } scanner := bufio.NewScanner(fh) @@ -263,7 +262,7 @@ func readExcludeFile(name string) ([]string, error) { } if err := scanner.Err(); err != nil { - return nil, errors.Wrapf(err, "failed scanning file: %s", name) + return nil, fmt.Errorf("failed scanning file: %s: %w", name, err) } return excludes, nil diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go index 6cc2208a36..171de00a4f 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go @@ -11,12 +11,12 @@ import ( func NewErrChkJSONFuncName(cfg *config.ErrChkJSONSettings) *goanalysis.Linter { a := errchkjson.NewAnalyzer() - cfgMap := map[string]map[string]interface{}{} - cfgMap[a.Name] = map[string]interface{}{ + cfgMap := map[string]map[string]any{} + cfgMap[a.Name] = map[string]any{ "omit-safe": true, } if cfg != nil { - cfgMap[a.Name] = map[string]interface{}{ + cfgMap[a.Name] = map[string]any{ "omit-safe": !cfg.CheckErrorFreeEncoding, "report-no-exported": cfg.ReportNoExported, } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go index dd9d901617..cac94159d6 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go @@ -11,13 +11,14 @@ import ( func NewErrorLint(cfg *config.ErrorLintSettings) *goanalysis.Linter { a := errorlint.NewAnalyzer() - cfgMap := map[string]map[string]interface{}{} + cfgMap := map[string]map[string]any{} if cfg != nil { - cfgMap[a.Name] = map[string]interface{}{ - "errorf": cfg.Errorf, - "asserts": cfg.Asserts, - "comparison": cfg.Comparison, + cfgMap[a.Name] = map[string]any{ + "errorf": cfg.Errorf, + "errorf-multi": cfg.ErrorfMulti, + "asserts": cfg.Asserts, + "comparison": cfg.Comparison, } } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go index a66f95190a..3824afa0b9 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go @@ 
-11,9 +11,9 @@ import ( func NewExhaustive(settings *config.ExhaustiveSettings) *goanalysis.Linter { a := exhaustive.Analyzer - var cfg map[string]map[string]interface{} + var cfg map[string]map[string]any if settings != nil { - cfg = map[string]map[string]interface{}{ + cfg = map[string]map[string]any{ a.Name: { exhaustive.CheckFlag: settings.Check, exhaustive.CheckGeneratedFlag: settings.CheckGenerated, diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go index 6a1dbd71c5..9bc9bbfb0b 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go @@ -13,9 +13,9 @@ import ( func NewExhaustiveStruct(settings *config.ExhaustiveStructSettings) *goanalysis.Linter { a := analyzer.Analyzer - var cfg map[string]map[string]interface{} + var cfg map[string]map[string]any if settings != nil { - cfg = map[string]map[string]interface{}{ + cfg = map[string]map[string]any{ a.Name: { "struct_patterns": strings.Join(settings.StructPatterns, ","), }, diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go index 95fb47e47b..6aced29226 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go @@ -1,15 +1,16 @@ package golinters import ( + "fmt" "sync" "github.com/ashanbrown/forbidigo/forbidigo" - "github.com/pkg/errors" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/logutils" "github.com/golangci/golangci-lint/pkg/result" ) @@ -23,7 +24,7 @@ func NewForbidigo(settings *config.ForbidigoSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: forbidigoName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues, err := runForbidigo(pass, settings) if err != nil { return nil, err @@ -40,6 +41,9 @@ func NewForbidigo(settings *config.ForbidigoSettings) *goanalysis.Linter { }, } + // Without AnalyzeTypes, LoadModeSyntax is enough. + // But we cannot make this depend on the settings and have to mirror the mode chosen in GetAllSupportedLinterConfigs, + // therefore we have to use LoadModeTypesInfo in all cases. return goanalysis.NewLinter( forbidigoName, "Forbids identifiers", @@ -47,7 +51,7 @@ func NewForbidigo(settings *config.ForbidigoSettings) *goanalysis.Linter { nil, ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) + }).WithLoadMode(goanalysis.LoadModeTypesInfo) } func runForbidigo(pass *analysis.Pass, settings *config.ForbidigoSettings) ([]goanalysis.Issue, error) { @@ -55,18 +59,36 @@ func runForbidigo(pass *analysis.Pass, settings *config.ForbidigoSettings) ([]go forbidigo.OptionExcludeGodocExamples(settings.ExcludeGodocExamples), // disable "//permit" directives so only "//nolint" directives matters within golangci-lint forbidigo.OptionIgnorePermitDirectives(true), + forbidigo.OptionAnalyzeTypes(settings.AnalyzeTypes), } - forbid, err := forbidigo.NewLinter(settings.Forbid, options...) 
+ // Convert patterns back to strings because that is what NewLinter accepts. + var patterns []string + for _, pattern := range settings.Forbid { + buffer, err := pattern.MarshalString() + if err != nil { + return nil, err + } + patterns = append(patterns, string(buffer)) + } + + forbid, err := forbidigo.NewLinter(patterns, options...) if err != nil { - return nil, errors.Wrapf(err, "failed to create linter %q", forbidigoName) + return nil, fmt.Errorf("failed to create linter %q: %w", forbidigoName, err) } var issues []goanalysis.Issue for _, file := range pass.Files { - hints, err := forbid.RunWithConfig(forbidigo.RunConfig{Fset: pass.Fset}, file) + runConfig := forbidigo.RunConfig{ + Fset: pass.Fset, + DebugLog: logutils.Debug(logutils.DebugKeyForbidigo), + } + if settings != nil && settings.AnalyzeTypes { + runConfig.TypesInfo = pass.TypesInfo + } + hints, err := forbid.RunWithConfig(runConfig, file) if err != nil { - return nil, errors.Wrapf(err, "forbidigo linter failed on file %q", file.Name.String()) + return nil, fmt.Errorf("forbidigo linter failed on file %q: %w", file.Name.String(), err) } for _, hint := range hints { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen.go index c562c2aa04..aae1623c7f 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen.go @@ -24,7 +24,7 @@ func NewFunlen(settings *config.FunlenSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: funlenName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues := runFunlen(pass, settings) if len(issues) == 0 { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go index d07c2126d1..4eb26dbdfc 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go @@ -2,7 +2,6 @@ package golinters import ( "fmt" - "strings" "sync" gcicfg "github.com/daixiang0/gci/pkg/config" @@ -12,7 +11,6 @@ import ( "github.com/hexops/gotextdiff" "github.com/hexops/gotextdiff/myers" "github.com/hexops/gotextdiff/span" - "github.com/pkg/errors" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/config" @@ -58,11 +56,11 @@ func NewGci(settings *config.GciSettings) *goanalysis.Linter { return goanalysis.NewLinter( gciName, - "Gci controls golang package import order and makes it always deterministic.", + "Gci controls Go package import order and makes it always deterministic.", []*analysis.Analyzer{analyzer}, nil, ).WithContextSetter(func(lintCtx *linter.Context) { - analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + analyzer.Run = func(pass *analysis.Pass) (any, error) { issues, err := runGci(pass, lintCtx, cfg, &lock) if err != nil { return nil, err @@ -101,7 +99,7 @@ func runGci(pass *analysis.Pass, lintCtx *linter.Context, cfg *gcicfg.Config, lo is, err := extractIssuesFromPatch(diff, lintCtx, gciName) if err != nil { - return nil, errors.Wrapf(err, "can't extract issues from gci diff output %s", diff) + return nil, fmt.Errorf("can't extract issues from gci diff output %s: %w", diff, err) } for i := range is { @@ -146,7 +144,13 @@ func getErrorTextForGci(settings config.GciSettings) string { } if 
len(settings.Sections) > 0 { - text += " -s " + strings.Join(settings.Sections, ",") + for _, section := range settings.Sections { + text += " -s " + section + } + } + + if settings.CustomOrder { + text += " --custom-order" } return text diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter.go index d97050c2ee..b9e69b265d 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter.go @@ -11,18 +11,21 @@ import ( func NewGinkgoLinter(cfg *config.GinkgoLinterSettings) *goanalysis.Linter { a := ginkgolinter.NewAnalyzer() - cfgMap := make(map[string]map[string]interface{}) + cfgMap := make(map[string]map[string]any) if cfg != nil { - cfgMap[a.Name] = map[string]interface{}{ - "suppress-len-assertion": cfg.SuppressLenAssertion, - "suppress-nil-assertion": cfg.SuppressNilAssertion, - "suppress-err-assertion": cfg.SuppressErrAssertion, + cfgMap[a.Name] = map[string]any{ + "suppress-len-assertion": cfg.SuppressLenAssertion, + "suppress-nil-assertion": cfg.SuppressNilAssertion, + "suppress-err-assertion": cfg.SuppressErrAssertion, + "suppress-compare-assertion": cfg.SuppressCompareAssertion, + "suppress-async-assertion": cfg.SuppressAsyncAssertion, + "allow-havelen-0": cfg.AllowHaveLenZero, } } return goanalysis.NewLinter( a.Name, - a.Doc, + "enforces standards of using ginkgo and gomega", []*analysis.Analyzer{a}, cfgMap, ).WithLoadMode(goanalysis.LoadModeTypesInfo) diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/errors.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/errors.go index 13b9ccf0af..f59e02cc64 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/errors.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/errors.go @@ -1,9 +1,9 @@ package goanalysis import ( + "errors" "fmt" - "github.com/pkg/errors" "golang.org/x/tools/go/packages" "github.com/golangci/golangci-lint/pkg/lint/linter" diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/linter.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/linter.go index 50a4ca088e..f8ca2e7553 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/linter.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/linter.go @@ -2,11 +2,11 @@ package goanalysis import ( "context" + "errors" "flag" "fmt" "strings" - "github.com/pkg/errors" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/lint/linter" @@ -44,14 +44,14 @@ const ( type Linter struct { name, desc string analyzers []*analysis.Analyzer - cfg map[string]map[string]interface{} + cfg map[string]map[string]any issuesReporter func(*linter.Context) []Issue contextSetter func(*linter.Context) loadMode LoadMode needUseOriginalPackages bool } -func NewLinter(name, desc string, analyzers []*analysis.Analyzer, cfg map[string]map[string]interface{}) *Linter { +func NewLinter(name, desc string, analyzers []*analysis.Analyzer, cfg map[string]map[string]any) *Linter { return &Linter{name: name, desc: desc, analyzers: analyzers, cfg: cfg} } @@ -102,7 +102,7 @@ func (lnt *Linter) allAnalyzerNames() []string { return ret } -func (lnt *Linter) configureAnalyzer(a *analysis.Analyzer, cfg map[string]interface{}) error { +func (lnt *Linter) 
configureAnalyzer(a *analysis.Analyzer, cfg map[string]any) error { for k, v := range cfg { f := a.Flags.Lookup(k) if f == nil { @@ -116,7 +116,7 @@ func (lnt *Linter) configureAnalyzer(a *analysis.Analyzer, cfg map[string]interf } if err := f.Value.Set(valueToString(v)); err != nil { - return errors.Wrapf(err, "failed to set analyzer setting %q with value %v", k, v) + return fmt.Errorf("failed to set analyzer setting %q with value %v: %w", k, v, err) } } @@ -137,7 +137,7 @@ func (lnt *Linter) configure() error { } if err := lnt.configureAnalyzer(a, analyzerSettings); err != nil { - return errors.Wrapf(err, "failed to configure analyzer %s", analyzerName) + return fmt.Errorf("failed to configure analyzer %s: %w", analyzerName, err) } } @@ -146,11 +146,11 @@ func (lnt *Linter) configure() error { func (lnt *Linter) preRun(lintCtx *linter.Context) error { if err := analysis.Validate(lnt.analyzers); err != nil { - return errors.Wrap(err, "failed to validate analyzers") + return fmt.Errorf("failed to validate analyzers: %w", err) } if err := lnt.configure(); err != nil { - return errors.Wrap(err, "failed to configure analyzers") + return fmt.Errorf("failed to configure analyzers: %w", err) } if lnt.contextSetter != nil { @@ -195,12 +195,12 @@ func allFlagNames(fs *flag.FlagSet) []string { return ret } -func valueToString(v interface{}) string { +func valueToString(v any) string { if ss, ok := v.([]string); ok { return strings.Join(ss, ",") } - if is, ok := v.([]interface{}); ok { + if is, ok := v.([]any); ok { var ss []string for _, i := range is { ss = append(ss, fmt.Sprint(i)) @@ -212,6 +212,6 @@ func valueToString(v interface{}) string { return fmt.Sprint(v) } -func DummyRun(_ *analysis.Pass) (interface{}, error) { +func DummyRun(_ *analysis.Pass) (any, error) { return nil, nil } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/metalinter.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/metalinter.go index 5c24d10964..333ab20f1f 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/metalinter.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/metalinter.go @@ -2,8 +2,8 @@ package goanalysis import ( "context" + "fmt" - "github.com/pkg/errors" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/lint/linter" @@ -24,7 +24,7 @@ func NewMetaLinter(linters []*Linter) *MetaLinter { func (ml MetaLinter) Run(_ context.Context, lintCtx *linter.Context) ([]result.Issue, error) { for _, l := range ml.linters { if err := l.preRun(lintCtx); err != nil { - return nil, errors.Wrapf(err, "failed to pre-run %s", l.Name()) + return nil, fmt.Errorf("failed to pre-run %s: %w", l.Name(), err) } } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go index 4a52c11002..46871bc5b2 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go @@ -11,12 +11,12 @@ package goanalysis import ( "encoding/gob" + "fmt" "go/token" "runtime" "sort" "sync" - "github.com/pkg/errors" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/packages" @@ -311,7 +311,7 @@ func extractDiagnostics(roots []*action) (retDiags []Diagnostic, retErrors []err if pe, ok := act.err.(*errorutil.PanicError); ok { panic(pe) } - retErrors = append(retErrors, 
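The configureAnalyzer and valueToString changes above are the plumbing that turns every map[string]map[string]any settings block built by the linter wrappers into flag values on the underlying analyzer. A stand-alone sketch of that mechanism, using a plain flag.FlagSet as a stand-in for analysis.Analyzer.Flags and a hypothetical "keyword" flag:

package main

import (
	"flag"
	"fmt"
	"strings"
)

// valueToString mirrors the helper in goanalysis/linter.go: settings values
// decoded from YAML/TOML/JSON are normalized before being handed to flags.
func valueToString(v any) string {
	if ss, ok := v.([]string); ok {
		return strings.Join(ss, ",")
	}
	if is, ok := v.([]any); ok {
		var ss []string
		for _, i := range is {
			ss = append(ss, fmt.Sprint(i))
		}
		return strings.Join(ss, ",")
	}
	return fmt.Sprint(v)
}

func main() {
	// Stand-in for analyzer.Flags (analysis.Analyzer exposes a flag.FlagSet).
	fs := flag.NewFlagSet("dupword", flag.ContinueOnError)
	keyword := fs.String("keyword", "", "comma-separated keywords")

	// Roughly what configureAnalyzer does for each key of the per-linter config map.
	cfg := map[string]any{"keyword": []string{"the", "and"}}
	for k, v := range cfg {
		f := fs.Lookup(k)
		if f == nil {
			continue
		}
		if err := f.Value.Set(valueToString(v)); err != nil {
			fmt.Printf("failed to set analyzer setting %q with value %v: %v\n", k, v, err)
		}
	}
	fmt.Println(*keyword) // the,and
}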
errors.Wrap(act.err, act.a.Name)) + retErrors = append(retErrors, fmt.Errorf("%s: %w", act.a.Name, act.err)) return } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go index d6f40a0c46..5ded9fac9d 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go @@ -1,6 +1,7 @@ package goanalysis import ( + "errors" "fmt" "go/types" "io" @@ -9,7 +10,6 @@ import ( "time" "github.com/hashicorp/go-multierror" - "github.com/pkg/errors" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/packages" "golang.org/x/tools/go/types/objectpath" @@ -50,7 +50,7 @@ type action struct { deps []*action objectFacts map[objectFactKey]analysis.Fact packageFacts map[packageFactKey]analysis.Fact - result interface{} + result any diagnostics []analysis.Diagnostic err error r *runner @@ -98,6 +98,13 @@ func (act *action) waitUntilDependingAnalyzersWorked() { func (act *action) analyzeSafe() { defer func() { if p := recover(); p != nil { + if !act.isroot { + // This line allows to display "hidden" panic with analyzers like buildssa. + // Some linters are dependent of sub-analyzers but when a sub-analyzer fails the linter is not aware of that, + // this results to another panic (ex: "interface conversion: interface {} is nil, not *buildssa.SSA"). + act.r.log.Errorf("%s: panic during analysis: %v, %s", act.a.Name, p, string(debug.Stack())) + } + act.err = errorutil.NewPanicError(fmt.Sprintf("%s: package %q (isInitialPkg: %t, needAnalyzeSource: %t): %s", act.a.Name, act.pkg.Name, act.isInitialPkg, act.needAnalyzeSource, p), debug.Stack()) } @@ -125,7 +132,7 @@ func (act *action) analyze() { continue } - depErrors = multierror.Append(depErrors, errors.Cause(dep.err)) + depErrors = multierror.Append(depErrors, errors.Unwrap(dep.err)) } if depErrors != nil { depErrors.ErrorFormat = func(e []error) string { @@ -138,7 +145,7 @@ func (act *action) analyze() { // Plumb the output values of the dependencies // into the inputs of this action. Also facts. - inputs := make(map[*analysis.Analyzer]interface{}) + inputs := make(map[*analysis.Analyzer]any) startedAt := time.Now() for _, dep := range act.deps { if dep.pkg == act.pkg { @@ -182,7 +189,7 @@ func (act *action) analyze() { // It looks like there should be !pass.Analyzer.RunDespiteErrors // but govet's cgocall crashes on it. Govet itself contains !pass.Analyzer.RunDespiteErrors condition here, // but it exits before it if packages.Load have failed. 
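The analyzeSafe change above logs panics from non-root actions so that failures in sub-analyzers such as buildssa are no longer hidden behind a later, unrelated panic. A simplified stand-alone sketch of the recover-plus-stack idea only; runSafe and the fake analyzer name are illustrative, not the real errorutil.PanicError handling:

package main

import (
	"fmt"
	"runtime/debug"
)

// runSafe runs fn and converts a panic into an error that carries the stack,
// much like analyzeSafe does before handing act.err back to the runner.
func runSafe(name string, fn func() error) (err error) {
	defer func() {
		if p := recover(); p != nil {
			err = fmt.Errorf("%s: panic during analysis: %v\n%s", name, p, debug.Stack())
		}
	}()
	return fn()
}

func main() {
	err := runSafe("buildssa", func() error {
		var ssa *int
		_ = *ssa // simulate a sub-analyzer blowing up with a nil dereference
		return nil
	})
	fmt.Println(err != nil) // true, with the stack preserved in the message
}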
- act.err = errors.Wrap(&IllTypedError{Pkg: act.pkg}, "analysis skipped") + act.err = fmt.Errorf("analysis skipped: %w", &IllTypedError{Pkg: act.pkg}) } else { startedAt = time.Now() act.result, act.err = pass.Analyzer.Run(pass) diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go index e76b6ab6ee..e39e2212c3 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go @@ -1,6 +1,7 @@ package goanalysis import ( + "errors" "fmt" "go/ast" "go/parser" @@ -11,7 +12,6 @@ import ( "sync" "sync/atomic" - "github.com/pkg/errors" "golang.org/x/tools/go/gcexportdata" "golang.org/x/tools/go/packages" @@ -59,7 +59,7 @@ func (lp *loadingPackage) analyze(loadMode LoadMode, loadSem chan struct{}) { defer lp.decUse(loadMode < LoadModeWholeProgram) if err := lp.loadWithFacts(loadMode); err != nil { - werr := errors.Wrapf(err, "failed to load package %s", lp.pkg.Name) + werr := fmt.Errorf("failed to load package %s: %w", lp.pkg.Name, err) // Don't need to write error to errCh, it will be extracted and reported on another layer. // Unblock depending on actions and propagate error. for _, act := range lp.actions { @@ -290,7 +290,7 @@ func (lp *loadingPackage) loadImportedPackageWithFacts(loadMode LoadMode) error Msg: fmt.Sprintf("could not load export data: %s", err), Kind: packages.ParseError, }) - return errors.Wrap(err, "could not load export data") + return fmt.Errorf("could not load export data: %w", err) } } @@ -446,7 +446,7 @@ type importerFunc func(path string) (*types.Package, error) func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } -func sizeOfValueTreeBytes(v interface{}) int { +func sizeOfValueTreeBytes(v any) int { return sizeOfReflectValueTreeBytes(reflect.ValueOf(v), map[uintptr]struct{}{}) } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go index 0fe67e2b0f..6e18aeb27d 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go @@ -14,7 +14,7 @@ func NewGochecknoglobals() *goanalysis.Linter { // pass the `t` flag as true to the analyzer before running it. This can be // turned off by using the regular golangci-lint flags such as `--tests` or // `--skip-files`. 
- linterConfig := map[string]map[string]interface{}{ + linterConfig := map[string]map[string]any{ gochecknoglobals.Name: { "t": true, }, diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go index bb0b783c6c..a51b531b94 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go @@ -22,7 +22,7 @@ func NewGochecknoinits() *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: gochecknoinitsName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { var res []goanalysis.Issue for _, file := range pass.Files { fileIssues := checkFileForInits(file, pass.Fset) diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit.go index 49146c52c8..406d34ed6c 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit.go @@ -24,7 +24,7 @@ func NewGocognit(settings *config.GocognitSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: goanalysis.TheOnlyAnalyzerName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues := runGocognit(pass, settings) if len(issues) == 0 { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go index 24d3198b9a..e277509d2d 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go @@ -23,7 +23,7 @@ func NewGoconst(settings *config.GoConstSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: goconstName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues, err := runGoconst(pass, settings) if err != nil { return nil, err diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go index 41fd600903..1319c72d9d 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go @@ -1,6 +1,7 @@ package golinters import ( + "errors" "fmt" "go/ast" "go/types" @@ -12,8 +13,7 @@ import ( "sync" "github.com/go-critic/go-critic/checkers" - gocriticlinter "github.com/go-critic/go-critic/framework/linter" - "github.com/pkg/errors" + gocriticlinter "github.com/go-critic/go-critic/linter" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/config" @@ -42,7 +42,7 @@ func NewGoCritic(settings *config.GoCriticSettings, cfg *config.Config) *goanaly analyzer := &analysis.Analyzer{ Name: goCriticName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues, err := wrapper.run(pass) if err != nil { return nil, err @@ -113,6 +113,8 @@ func (w *goCriticWrapper) run(pass *analysis.Pass) ([]goanalysis.Issue, error) { linterCtx := 
gocriticlinter.NewContext(pass.Fset, w.sizes) + linterCtx.SetGoVersion(w.settingsWrapper.Go) + enabledCheckers, err := w.buildEnabledCheckers(linterCtx) if err != nil { return nil, err @@ -245,7 +247,7 @@ func normalizeCheckerInfoParams(info *gocriticlinter.CheckerInfo) gocriticlinter // but the file parsers (TOML, YAML, JSON) don't create the same representation for raw type. // then we have to convert value types into the expected value types. // Maybe in the future, this kind of conversion will be done in go-critic itself. -func (w *goCriticWrapper) normalizeCheckerParamsValue(p interface{}) interface{} { +func (w *goCriticWrapper) normalizeCheckerParamsValue(p any) any { rv := reflect.ValueOf(p) switch rv.Type().Kind() { case reflect.Int64, reflect.Int32, reflect.Int16, reflect.Int8, reflect.Int: @@ -425,7 +427,7 @@ func (s *goCriticSettingsWrapper) validate() error { } } else { if err := validateStringsUniq(s.EnabledTags); err != nil { - return errors.Wrap(err, "validate enabled tags") + return fmt.Errorf("validate enabled tags: %w", err) } tagToCheckers := s.buildTagToCheckersMap() @@ -447,15 +449,15 @@ func (s *goCriticSettingsWrapper) validate() error { } if err := validateStringsUniq(s.EnabledChecks); err != nil { - return errors.Wrap(err, "validate enabled checks") + return fmt.Errorf("validate enabled checks: %w", err) } if err := validateStringsUniq(s.DisabledChecks); err != nil { - return errors.Wrap(err, "validate disabled checks") + return fmt.Errorf("validate disabled checks: %w", err) } if err := s.validateCheckerNames(); err != nil { - return errors.Wrap(err, "validation failed") + return fmt.Errorf("validation failed: %w", err) } return nil @@ -621,7 +623,7 @@ func sprintStrings(ss []string) string { return fmt.Sprint(ss) } -func debugChecksListf(checks []string, format string, args ...interface{}) { +func debugChecksListf(checks []string, format string, args ...any) { if !isGoCriticDebug { return } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo.go index ea82195711..b502623ba6 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo.go @@ -23,7 +23,7 @@ func NewGocyclo(settings *config.GoCycloSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: gocycloName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues := runGoCyclo(pass, settings) if len(issues) == 0 { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go index 93ca7577ae..b0ee644349 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go @@ -42,7 +42,7 @@ func NewGodot(settings *config.GodotSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: godotName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues, err := runGodot(pass, dotSettings) if err != nil { return nil, err diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox.go index 4dba9df003..955810417d 100644 --- 
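For context on normalizeCheckerParamsValue above: YAML, TOML and JSON decoders hand back different Go types for the same scalar, while go-critic checker params expect plain int/bool/string values. A trimmed-down stand-alone version of that normalization (normalizeParam is an illustrative name, and the switch covers only the kinds visible in the diff):

package main

import (
	"fmt"
	"reflect"
)

// normalizeParam coerces decoder-specific scalar types into the plain types
// go-critic checker params expect.
func normalizeParam(p any) any {
	rv := reflect.ValueOf(p)
	switch rv.Kind() {
	case reflect.Int64, reflect.Int32, reflect.Int16, reflect.Int8, reflect.Int:
		return int(rv.Int())
	case reflect.Bool:
		return rv.Bool()
	case reflect.String:
		return rv.String()
	default:
		return p
	}
}

func main() {
	fmt.Println(normalizeParam(int64(120)))    // 120, as a plain int
	fmt.Println(normalizeParam("fmt.Sprintf")) // fmt.Sprintf
	fmt.Println(normalizeParam(true))          // true
}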
a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox.go @@ -24,7 +24,7 @@ func NewGodox(settings *config.GodoxSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: godoxName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues := runGodox(pass, settings) if len(issues) == 0 { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goerr113.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goerr113.go index c97b6d5878..10addc57c2 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goerr113.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goerr113.go @@ -10,7 +10,7 @@ import ( func NewGoerr113() *goanalysis.Linter { return goanalysis.NewLinter( "goerr113", - "Golang linter to check the errors handling expressions", + "Go linter to check the errors handling expressions", []*analysis.Analyzer{err113.NewAnalyzer()}, nil, ).WithLoadMode(goanalysis.LoadModeTypesInfo) diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go index 112f422ffe..d2d0d3ccc5 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go @@ -1,10 +1,10 @@ package golinters import ( + "fmt" "sync" gofmtAPI "github.com/golangci/gofmt/gofmt" - "github.com/pkg/errors" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/config" @@ -31,7 +31,7 @@ func NewGofmt(settings *config.GoFmtSettings) *goanalysis.Linter { []*analysis.Analyzer{analyzer}, nil, ).WithContextSetter(func(lintCtx *linter.Context) { - analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + analyzer.Run = func(pass *analysis.Pass) (any, error) { issues, err := runGofmt(lintCtx, pass, settings) if err != nil { return nil, err @@ -73,7 +73,7 @@ func runGofmt(lintCtx *linter.Context, pass *analysis.Pass, settings *config.GoF is, err := extractIssuesFromPatch(string(diff), lintCtx, gofmtName) if err != nil { - return nil, errors.Wrapf(err, "can't extract issues from gofmt diff output %q", string(diff)) + return nil, fmt.Errorf("can't extract issues from gofmt diff output %q: %w", string(diff), err) } for i := range is { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go index 59f4f4590e..cbed4e0bc0 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go @@ -6,7 +6,6 @@ import ( "go/token" "strings" - "github.com/pkg/errors" diffpkg "github.com/sourcegraph/go-diff/diff" "github.com/golangci/golangci-lint/pkg/config" @@ -238,7 +237,7 @@ func getErrorTextForLinter(settings *config.LintersSettings, linterName string) func extractIssuesFromPatch(patch string, lintCtx *linter.Context, linterName string) ([]result.Issue, error) { diffs, err := diffpkg.ParseMultiFileDiff([]byte(patch)) if err != nil { - return nil, errors.Wrap(err, "can't parse patch") + return nil, fmt.Errorf("can't parse patch: %w", err) } if len(diffs) == 0 { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go 
b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go index 312dfd6d93..c2aaf121de 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go @@ -7,7 +7,6 @@ import ( "os" "sync" - "github.com/pkg/errors" "github.com/shazow/go-diff/difflib" "golang.org/x/tools/go/analysis" "mvdan.cc/gofumpt/format" @@ -51,7 +50,7 @@ func NewGofumpt(settings *config.GofumptSettings) *goanalysis.Linter { []*analysis.Analyzer{analyzer}, nil, ).WithContextSetter(func(lintCtx *linter.Context) { - analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + analyzer.Run = func(pass *analysis.Pass) (any, error) { issues, err := runGofumpt(lintCtx, pass, diff, options) if err != nil { return nil, err @@ -89,13 +88,9 @@ func runGofumpt(lintCtx *linter.Context, pass *analysis.Pass, diff differ, optio } if !bytes.Equal(input, output) { - out := bytes.Buffer{} - _, err = out.WriteString(fmt.Sprintf("--- %[1]s\n+++ %[1]s\n", f)) - if err != nil { - return nil, fmt.Errorf("error while running gofumpt: %w", err) - } + out := bytes.NewBufferString(fmt.Sprintf("--- %[1]s\n+++ %[1]s\n", f)) - err = diff.Diff(&out, bytes.NewReader(input), bytes.NewReader(output)) + err := diff.Diff(out, bytes.NewReader(input), bytes.NewReader(output)) if err != nil { return nil, fmt.Errorf("error while running gofumpt: %w", err) } @@ -103,7 +98,7 @@ func runGofumpt(lintCtx *linter.Context, pass *analysis.Pass, diff differ, optio diff := out.String() is, err := extractIssuesFromPatch(diff, lintCtx, gofumptName) if err != nil { - return nil, errors.Wrapf(err, "can't extract issues from gofumpt diff output %q", diff) + return nil, fmt.Errorf("can't extract issues from gofumpt diff output %q: %w", diff, err) } for i := range is { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go index d7d27326ec..d3cfefa90b 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go @@ -31,7 +31,7 @@ func NewGoHeader(settings *config.GoHeaderSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: goHeaderName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues, err := runGoHeader(pass, conf) if err != nil { return nil, err diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go index 97ad6d4606..aac27f38e5 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go @@ -1,10 +1,10 @@ package golinters import ( + "fmt" "sync" goimportsAPI "github.com/golangci/gofmt/goimports" - "github.com/pkg/errors" "golang.org/x/tools/go/analysis" "golang.org/x/tools/imports" @@ -34,7 +34,7 @@ func NewGoimports(settings *config.GoImportsSettings) *goanalysis.Linter { ).WithContextSetter(func(lintCtx *linter.Context) { imports.LocalPrefix = settings.LocalPrefixes - analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + analyzer.Run = func(pass *analysis.Pass) (any, error) { issues, err := runGoImports(lintCtx, pass) if err != nil { return nil, err @@ -71,7 +71,7 @@ func runGoImports(lintCtx *linter.Context, pass 
*analysis.Pass) ([]goanalysis.Is is, err := extractIssuesFromPatch(string(diff), lintCtx, goimportsName) if err != nil { - return nil, errors.Wrapf(err, "can't extract issues from gofmt diff output %q", string(diff)) + return nil, fmt.Errorf("can't extract issues from gofmt diff output %q: %w", string(diff), err) } for i := range is { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/golint.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/golint.go index 95c579e34b..a6fc73c9ec 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/golint.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/golint.go @@ -23,7 +23,7 @@ func NewGolint(settings *config.GoLintSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: golintName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues, err := runGoLint(pass, settings) if err != nil { return nil, err diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go index 15d84b48bf..2e6d77a801 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go @@ -9,14 +9,14 @@ import ( ) func NewGoMND(settings *config.GoMndSettings) *goanalysis.Linter { - var linterCfg map[string]map[string]interface{} + var linterCfg map[string]map[string]any if settings != nil { // TODO(ldez) For compatibility only, must be drop in v2. if len(settings.Settings) > 0 { linterCfg = settings.Settings } else { - cfg := make(map[string]interface{}) + cfg := make(map[string]any) if len(settings.Checks) > 0 { cfg["checks"] = settings.Checks } @@ -30,7 +30,7 @@ func NewGoMND(settings *config.GoMndSettings) *goanalysis.Linter { cfg["ignored-functions"] = settings.IgnoredFunctions } - linterCfg = map[string]map[string]interface{}{ + linterCfg = map[string]map[string]any{ "mnd": cfg, } } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives.go index 81831129a2..56afcd465f 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives.go @@ -39,7 +39,7 @@ func NewGoModDirectives(settings *config.GoModDirectivesSettings) *goanalysis.Li []*analysis.Analyzer{analyzer}, nil, ).WithContextSetter(func(lintCtx *linter.Context) { - analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + analyzer.Run = func(pass *analysis.Pass) (any, error) { once.Do(func() { results, err := gomoddirectives.Analyze(opts) if err != nil { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go index e21658d5d2..157bf56c35 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go @@ -72,7 +72,7 @@ func NewGomodguard(settings *config.GoModGuardSettings) *goanalysis.Linter { return } - analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + analyzer.Run = func(pass *analysis.Pass) (any, error) { gomodguardIssues := processor.ProcessFiles(getFileNames(pass)) mu.Lock() diff --git 
a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go index 3b102a92f5..235f0e9141 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go @@ -9,8 +9,8 @@ import ( "strings" "sync" - "github.com/pkg/errors" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" "github.com/securego/gosec/v2/rules" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/packages" @@ -27,20 +27,11 @@ func NewGosec(settings *config.GoSecSettings) *goanalysis.Linter { var mu sync.Mutex var resIssues []goanalysis.Issue - conf := gosec.NewConfig() - var filters []rules.RuleFilter + conf := gosec.NewConfig() if settings != nil { filters = gosecRuleFilters(settings.Includes, settings.Excludes) - - for k, v := range settings.Config { - if k != gosec.Globals { - // Uses ToUpper because the parsing of the map's key change the key to lowercase. - // The value is not impacted by that: the case is respected. - k = strings.ToUpper(k) - } - conf.Set(k, v) - } + conf = toGosecConfig(settings) } logger := log.New(io.Discard, "", 0) @@ -59,7 +50,7 @@ func NewGosec(settings *config.GoSecSettings) *goanalysis.Linter { []*analysis.Analyzer{analyzer}, nil, ).WithContextSetter(func(lintCtx *linter.Context) { - analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + analyzer.Run = func(pass *analysis.Pass) (any, error) { // The `gosecAnalyzer` is here because of concurrency issue. gosecAnalyzer := gosec.NewAnalyzer(conf, true, settings.ExcludeGenerated, false, settings.Concurrency, logger) gosecAnalyzer.LoadRules(ruleDefinitions.RulesInfo()) @@ -85,7 +76,7 @@ func runGoSec(lintCtx *linter.Context, pass *analysis.Pass, settings *config.GoS TypesInfo: pass.TypesInfo, } - analyzer.Check(pkg) + analyzer.CheckRules(pkg) secIssues, _, _ := analyzer.Report() if len(secIssues) == 0 { @@ -141,6 +132,35 @@ func runGoSec(lintCtx *linter.Context, pass *analysis.Pass, settings *config.GoS return issues } +func toGosecConfig(settings *config.GoSecSettings) gosec.Config { + conf := gosec.NewConfig() + + for k, v := range settings.Config { + if k == gosec.Globals { + convertGosecGlobals(v, conf) + continue + } + + // Uses ToUpper because the parsing of the map's key change the key to lowercase. + // The value is not impacted by that: the case is respected. 
+ conf.Set(strings.ToUpper(k), v) + } + + return conf +} + +// based on https://github.com/securego/gosec/blob/47bfd4eb6fc7395940933388550b547538b4c946/config.go#L52-L62 +func convertGosecGlobals(globalOptionFromConfig any, conf gosec.Config) { + globalOptionMap, ok := globalOptionFromConfig.(map[string]any) + if !ok { + return + } + + for k, v := range globalOptionMap { + conf.SetGlobal(gosec.GlobalOption(k), fmt.Sprintf("%v", v)) + } +} + // based on https://github.com/securego/gosec/blob/569328eade2ccbad4ce2d0f21ee158ab5356a5cf/cmd/gosec/main.go#L170-L188 func gosecRuleFilters(includes, excludes []string) []rules.RuleFilter { var filters []rules.RuleFilter @@ -157,27 +177,29 @@ func gosecRuleFilters(includes, excludes []string) []rules.RuleFilter { } // code borrowed from https://github.com/securego/gosec/blob/69213955dacfd560562e780f723486ef1ca6d486/cmd/gosec/main.go#L250-L262 -func convertToScore(str string) (gosec.Score, error) { +func convertToScore(str string) (issue.Score, error) { str = strings.ToLower(str) switch str { case "", "low": - return gosec.Low, nil + return issue.Low, nil case "medium": - return gosec.Medium, nil + return issue.Medium, nil case "high": - return gosec.High, nil + return issue.High, nil default: - return gosec.Low, errors.Errorf("'%s' is invalid, use low instead. Valid options: low, medium, high", str) + return issue.Low, fmt.Errorf("'%s' is invalid, use low instead. Valid options: low, medium, high", str) } } // code borrowed from https://github.com/securego/gosec/blob/69213955dacfd560562e780f723486ef1ca6d486/cmd/gosec/main.go#L264-L276 -func filterIssues(issues []*gosec.Issue, severity, confidence gosec.Score) []*gosec.Issue { - res := make([]*gosec.Issue, 0) - for _, issue := range issues { - if issue.Severity >= severity && issue.Confidence >= confidence { - res = append(res, issue) +func filterIssues(issues []*issue.Issue, severity, confidence issue.Score) []*issue.Issue { + res := make([]*issue.Issue, 0) + + for _, i := range issues { + if i.Severity >= severity && i.Confidence >= confidence { + res = append(res, i) } } + return res } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan.go new file mode 100644 index 0000000000..2e01fcc70d --- /dev/null +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan.go @@ -0,0 +1,32 @@ +package golinters + +import ( + "strings" + + "github.com/xen0n/gosmopolitan" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewGosmopolitan(s *config.GosmopolitanSettings) *goanalysis.Linter { + a := gosmopolitan.NewAnalyzer() + + cfgMap := map[string]map[string]any{} + if s != nil { + cfgMap[a.Name] = map[string]any{ + "allowtimelocal": s.AllowTimeLocal, + "escapehatches": strings.Join(s.EscapeHatches, ","), + "lookattests": !s.IgnoreTests, + "watchforscripts": strings.Join(s.WatchForScripts, ","), + } + } + + return goanalysis.NewLinter( + a.Name, + a.Doc, + []*analysis.Analyzer{a}, + cfgMap, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet.go index afed08214e..704dd6c577 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet.go +++ 
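The new toGosecConfig and convertGosecGlobals above split gosec settings into rule-level options (keys upper-cased because Viper lowercases map keys) and the special "global" section. The sketch below reproduces only that shape with local stand-ins; gosecConfig and its Set/SetGlobal methods are illustrative, not the real gosec.Config API:

package main

import (
	"fmt"
	"strings"
)

type gosecConfig map[string]any

func (c gosecConfig) Set(k string, v any)          { c[k] = v }
func (c gosecConfig) SetGlobal(name, value string) { c["global:"+name] = value }

const globalsKey = "global"

func toConfig(settings map[string]any) gosecConfig {
	conf := gosecConfig{}
	for k, v := range settings {
		if k == globalsKey {
			convertGlobals(v, conf)
			continue
		}
		// Rule IDs come back lowercased from Viper; gosec expects G101, G204, ...
		conf.Set(strings.ToUpper(k), v)
	}
	return conf
}

func convertGlobals(globalOption any, conf gosecConfig) {
	m, ok := globalOption.(map[string]any)
	if !ok {
		return
	}
	for k, v := range m {
		conf.SetGlobal(k, fmt.Sprintf("%v", v))
	}
}

func main() {
	conf := toConfig(map[string]any{
		"g101":   map[string]any{"pattern": "(?i)passwd|secret"},
		"global": map[string]any{"audit": true},
	})
	fmt.Println(conf["G101"], conf["global:audit"])
}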
b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet.go @@ -123,7 +123,7 @@ var ( ) func NewGovet(settings *config.GovetSettings) *goanalysis.Linter { - var conf map[string]map[string]interface{} + var conf map[string]map[string]any if settings != nil { conf = settings.Settings } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper.go index e8c1340e4b..9feecf3baf 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/grouper.go @@ -9,9 +9,9 @@ import ( ) func NewGrouper(settings *config.GrouperSettings) *goanalysis.Linter { - linterCfg := map[string]map[string]interface{}{} + linterCfg := map[string]map[string]any{} if settings != nil { - linterCfg["grouper"] = map[string]interface{}{ + linterCfg["grouper"] = map[string]any{ "const-require-single-const": settings.ConstRequireSingleConst, "const-require-grouping": settings.ConstRequireGrouping, "import-require-single-import": settings.ImportRequireSingleImport, diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go index c26f08e403..1574eaf709 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go @@ -9,9 +9,9 @@ import ( ) func NewIfshort(settings *config.IfshortSettings) *goanalysis.Linter { - var cfg map[string]map[string]interface{} + var cfg map[string]map[string]any if settings != nil { - cfg = map[string]map[string]interface{}{ + cfg = map[string]map[string]any{ analyzer.Analyzer.Name: { "max-decl-lines": settings.MaxDeclLines, "max-decl-chars": settings.MaxDeclChars, diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go index 1917bbb0c6..b06aec7a3b 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go @@ -3,6 +3,7 @@ package golinters import ( "fmt" "strconv" + "strings" "github.com/julz/importas" //nolint:misspell "golang.org/x/tools/go/analysis" @@ -36,12 +37,27 @@ func NewImportAs(settings *config.ImportAsSettings) *goanalysis.Linter { lintCtx.Log.Errorf("failed to parse configuration: %v", err) } + uniqPackages := make(map[string]config.ImportAsAlias) + uniqAliases := make(map[string]config.ImportAsAlias) for _, a := range settings.Alias { if a.Pkg == "" { lintCtx.Log.Errorf("invalid configuration, empty package: pkg=%s alias=%s", a.Pkg, a.Alias) continue } + if v, ok := uniqPackages[a.Pkg]; ok { + lintCtx.Log.Errorf("invalid configuration, multiple aliases for the same package: pkg=%s aliases=[%s,%s]", a.Pkg, a.Alias, v.Alias) + } else { + uniqPackages[a.Pkg] = a + } + + // skip the duplication check when the alias is a regular expression replacement pattern (ie. contains `$`). 
+ if v, ok := uniqAliases[a.Alias]; ok && !strings.Contains(a.Alias, "$") { + lintCtx.Log.Errorf("invalid configuration, multiple packages with the same alias: alias=%s packages=[%s,%s]", a.Alias, a.Pkg, v.Pkg) + } else { + uniqAliases[a.Alias] = a + } + err := analyzer.Flags.Set("alias", fmt.Sprintf("%s:%s", a.Pkg, a.Alias)) if err != nil { lintCtx.Log.Errorf("failed to parse configuration: %v", err) diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacebloat.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacebloat.go index 044c96f3b1..a6dbfe178f 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacebloat.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacebloat.go @@ -11,9 +11,9 @@ import ( func NewInterfaceBloat(settings *config.InterfaceBloatSettings) *goanalysis.Linter { a := analyzer.New() - var cfg map[string]map[string]interface{} + var cfg map[string]map[string]any if settings != nil { - cfg = map[string]map[string]interface{}{ + cfg = map[string]map[string]any{ a.Name: { analyzer.InterfaceMaxMethodsFlag: settings.Max, }, diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacer.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacer.go index 59125c5c7b..71bdfddbe8 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacer.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacer.go @@ -22,7 +22,7 @@ func NewInterfacer() *goanalysis.Linter { Name: interfacerName, Doc: goanalysis.TheOnlyanalyzerDoc, Requires: []*analysis.Analyzer{buildssa.Analyzer}, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues, err := runInterfacer(pass) if err != nil { return nil, err diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn.go index f2d4aec92f..34dc09d268 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/ireturn.go @@ -13,9 +13,9 @@ import ( func NewIreturn(settings *config.IreturnSettings) *goanalysis.Linter { a := analyzer.NewAnalyzer() - cfg := map[string]map[string]interface{}{} + cfg := map[string]map[string]any{} if settings != nil { - cfg[a.Name] = map[string]interface{}{ + cfg[a.Name] = map[string]any{ "allow": strings.Join(settings.Allow, ","), "reject": strings.Join(settings.Reject, ","), } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go index 551ff98a2c..9ed3201209 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go @@ -19,6 +19,8 @@ import ( const lllName = "lll" +const goCommentDirectivePrefix = "//go:" + //nolint:dupl func NewLLL(settings *config.LllSettings) *goanalysis.Linter { var mu sync.Mutex @@ -27,7 +29,7 @@ func NewLLL(settings *config.LllSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: lllName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues, err := runLll(pass, settings) if err != nil { return nil, err @@ -94,6 +96,10 @@ func getLLLIssuesForFile(filename string, maxLineLen int, tabSpaces 
string) ([]r line := scanner.Text() line = strings.ReplaceAll(line, "\t", tabSpaces) + if strings.HasPrefix(line, goCommentDirectivePrefix) { + continue + } + if strings.HasPrefix(line, "import") { multiImportEnabled = strings.HasSuffix(line, "(") continue diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx.go index 183006b05c..55509d970c 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/maintidx.go @@ -11,12 +11,12 @@ import ( func NewMaintIdx(cfg *config.MaintIdxSettings) *goanalysis.Linter { analyzer := maintidx.Analyzer - cfgMap := map[string]map[string]interface{}{ + cfgMap := map[string]map[string]any{ analyzer.Name: {"under": 20}, } if cfg != nil { - cfgMap[analyzer.Name] = map[string]interface{}{ + cfgMap[analyzer.Name] = map[string]any{ "under": cfg.Under, } } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go index 5d55f01e77..a9828629a2 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go @@ -1,10 +1,10 @@ package golinters import ( + "fmt" "sync" "github.com/ashanbrown/makezero/makezero" - "github.com/pkg/errors" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/config" @@ -23,7 +23,7 @@ func NewMakezero(settings *config.MakezeroSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: makezeroName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues, err := runMakeZero(pass, settings) if err != nil { return nil, err @@ -59,7 +59,7 @@ func runMakeZero(pass *analysis.Pass, settings *config.MakezeroSettings) ([]goan for _, file := range pass.Files { hints, err := zero.Run(pass.Fset, pass.TypesInfo, file) if err != nil { - return nil, errors.Wrapf(err, "makezero linter failed on file %q", file.Name.String()) + return nil, fmt.Errorf("makezero linter failed on file %q: %w", file.Name.String(), err) } for _, hint := range hints { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/maligned.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/maligned.go index 9c3ca8b5fe..0455be76aa 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/maligned.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/maligned.go @@ -23,7 +23,7 @@ func NewMaligned(settings *config.MalignedSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: malignedName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues := runMaligned(pass, settings) if len(issues) == 0 { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/mirror.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/mirror.go new file mode 100644 index 0000000000..4adc001a19 --- /dev/null +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/mirror.go @@ -0,0 +1,70 @@ +package golinters + +import ( + "sync" + + "github.com/butuzov/mirror" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + 
"github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func NewMirror() *goanalysis.Linter { + var ( + mu sync.Mutex + issues []goanalysis.Issue + ) + + a := mirror.NewAnalyzer() + a.Run = func(pass *analysis.Pass) (any, error) { + // mirror only lints test files if the `--with-tests` flag is passed, + // so we pass the `with-tests` flag as true to the analyzer before running it. + // This can be turned off by using the regular golangci-lint flags such as `--tests` or `--skip-files` + // or can be disabled per linter via exclude rules. + // (see https://github.com/golangci/golangci-lint/issues/2527#issuecomment-1023707262) + violations := mirror.Run(pass, true) + + if len(violations) == 0 { + return nil, nil + } + + for index := range violations { + i := violations[index].Issue(pass.Fset) + + issue := result.Issue{ + FromLinter: a.Name, + Text: i.Message, + Pos: i.Start, + } + + if len(i.InlineFix) > 0 { + issue.Replacement = &result.Replacement{ + Inline: &result.InlineFix{ + StartCol: i.Start.Column - 1, + Length: len(i.Original), + NewString: i.InlineFix, + }, + } + } + + mu.Lock() + issues = append(issues, goanalysis.NewIssue(&issue, pass)) + mu.Unlock() + } + + return nil, nil + } + + analyzer := goanalysis.NewLinter( + a.Name, + a.Doc, + []*analysis.Analyzer{a}, + nil, + ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return issues + }).WithLoadMode(goanalysis.LoadModeTypesInfo) + + return analyzer +} diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell.go index b5cc5c8a89..ce2b79a7c1 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell.go @@ -35,7 +35,7 @@ func NewMisspell(settings *config.MisspellSettings) *goanalysis.Linter { ).WithContextSetter(func(lintCtx *linter.Context) { replacer, ruleErr := createMisspellReplacer(settings) - analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + analyzer.Run = func(pass *analysis.Pass) (any, error) { if ruleErr != nil { return nil, ruleErr } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag.go index 75500b5aba..d9ea7efc7e 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/musttag.go @@ -1,7 +1,7 @@ package golinters import ( - "github.com/junk1tm/musttag" + "go.tmz.dev/musttag" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/config" diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret.go index dc2de0345a..d276ac6a9a 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret.go @@ -1,134 +1,27 @@ package golinters import ( - "fmt" - "go/ast" - "go/token" - "sync" - + "github.com/alexkohler/nakedret/v2" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" - "github.com/golangci/golangci-lint/pkg/lint/linter" - "github.com/golangci/golangci-lint/pkg/result" ) const nakedretName = "nakedret" -//nolint:dupl func NewNakedret(settings 
*config.NakedretSettings) *goanalysis.Linter { - var mu sync.Mutex - var resIssues []goanalysis.Issue - - analyzer := &analysis.Analyzer{ - Name: nakedretName, - Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { - issues := runNakedRet(pass, settings) - - if len(issues) == 0 { - return nil, nil - } - - mu.Lock() - resIssues = append(resIssues, issues...) - mu.Unlock() - - return nil, nil - }, + var maxLines int + if settings != nil { + maxLines = settings.MaxFuncLines } + analyzer := nakedret.NakedReturnAnalyzer(uint(maxLines)) + return goanalysis.NewLinter( nakedretName, "Finds naked returns in functions greater than a specified function length", []*analysis.Analyzer{analyzer}, nil, - ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { - return resIssues - }).WithLoadMode(goanalysis.LoadModeSyntax) -} - -func runNakedRet(pass *analysis.Pass, settings *config.NakedretSettings) []goanalysis.Issue { - var issues []goanalysis.Issue - - for _, file := range pass.Files { - v := nakedretVisitor{ - maxLength: settings.MaxFuncLines, - f: pass.Fset, - } - - ast.Walk(&v, file) - - for i := range v.issues { - issues = append(issues, goanalysis.NewIssue(&v.issues[i], pass)) - } - } - - return issues -} - -type nakedretVisitor struct { - maxLength int - f *token.FileSet - issues []result.Issue -} - -func (v *nakedretVisitor) processFuncDecl(funcDecl *ast.FuncDecl) { - file := v.f.File(funcDecl.Pos()) - functionLineLength := file.Position(funcDecl.End()).Line - file.Position(funcDecl.Pos()).Line - - // Scan the body for usage of the named returns - for _, stmt := range funcDecl.Body.List { - s, ok := stmt.(*ast.ReturnStmt) - if !ok { - continue - } - - if len(s.Results) != 0 { - continue - } - - file := v.f.File(s.Pos()) - if file == nil || functionLineLength <= v.maxLength { - continue - } - if funcDecl.Name == nil { - continue - } - - v.issues = append(v.issues, result.Issue{ - FromLinter: nakedretName, - Text: fmt.Sprintf("naked return in func `%s` with %d lines of code", - funcDecl.Name.Name, functionLineLength), - Pos: v.f.Position(s.Pos()), - }) - } -} - -func (v *nakedretVisitor) Visit(node ast.Node) ast.Visitor { - funcDecl, ok := node.(*ast.FuncDecl) - if !ok { - return v - } - - var namedReturns []*ast.Ident - - // We've found a function - if funcDecl.Type != nil && funcDecl.Type.Results != nil { - for _, field := range funcDecl.Type.Results.List { - for _, ident := range field.Names { - if ident != nil { - namedReturns = append(namedReturns, ident) - } - } - } - } - - if len(namedReturns) == 0 || funcDecl.Body == nil { - return v - } - - v.processFuncDecl(funcDecl) - return v + ).WithLoadMode(goanalysis.LoadModeSyntax) } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif.go index 78a516f9d1..12ad69eceb 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif.go @@ -23,7 +23,7 @@ func NewNestif(settings *config.NestifSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: goanalysis.TheOnlyAnalyzerName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues := runNestIf(pass, settings) if len(issues) == 0 { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil.go 
b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil.go index 739b4d4fcb..804557b76d 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil.go @@ -13,9 +13,9 @@ import ( func NewNilNil(cfg *config.NilNilSettings) *goanalysis.Linter { a := analyzer.New() - cfgMap := make(map[string]map[string]interface{}) + cfgMap := make(map[string]map[string]any) if cfg != nil && len(cfg.CheckedTypes) != 0 { - cfgMap[a.Name] = map[string]interface{}{ + cfgMap[a.Name] = map[string]any{ "checked-types": strings.Join(cfg.CheckedTypes, ","), } } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go index fb4919f8a9..a359548f42 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go @@ -11,9 +11,9 @@ import ( func NewNLReturn(settings *config.NlreturnSettings) *goanalysis.Linter { a := nlreturn.NewAnalyzer() - cfg := map[string]map[string]interface{}{} + cfg := map[string]map[string]any{} if settings != nil { - cfg[a.Name] = map[string]interface{}{ + cfg[a.Name] = map[string]any{ "block-size": settings.BlockSize, } } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint.go index a809f44995..00ef1f833f 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint.go @@ -24,7 +24,7 @@ func NewNoLintLint(settings *config.NoLintLintSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: NoLintLintName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues, err := runNoLintLint(pass, settings) if err != nil { return nil, err diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns.go index 3dff2f7590..7856f6d613 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns.go @@ -11,9 +11,9 @@ import ( func NewNoNamedReturns(settings *config.NoNamedReturnsSettings) *goanalysis.Linter { a := analyzer.Analyzer - var cfg map[string]map[string]interface{} + var cfg map[string]map[string]any if settings != nil { - cfg = map[string]map[string]interface{}{ + cfg = map[string]map[string]any{ a.Name: { analyzer.FlagReportErrorInDefer: settings.ReportErrorInDefer, }, diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go index 55af7350a0..92201e4e2f 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go @@ -11,9 +11,9 @@ import ( func NewParallelTest(settings *config.ParallelTestSettings) *goanalysis.Linter { a := paralleltest.Analyzer - var cfg map[string]map[string]interface{} + var cfg map[string]map[string]any if settings != nil { - cfg = map[string]map[string]interface{}{ + cfg = map[string]map[string]any{ a.Name: { "i": 
settings.IgnoreMissing, }, diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc.go index 75a9b4ec2f..f48d57562e 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc.go @@ -23,7 +23,7 @@ func NewPreAlloc(settings *config.PreallocSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: preallocName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues := runPreAlloc(pass, settings) if len(issues) == 0 { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go index caccd48239..d3c25e274b 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go @@ -11,9 +11,9 @@ import ( func NewPredeclared(settings *config.PredeclaredSettings) *goanalysis.Linter { a := predeclared.Analyzer - var cfg map[string]map[string]interface{} + var cfg map[string]map[string]any if settings != nil { - cfg = map[string]map[string]interface{}{ + cfg = map[string]map[string]any{ a.Name: { predeclared.IgnoreFlag: settings.Ignore, predeclared.QualifiedFlag: settings.Qualified, diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter.go index f77847a49a..381c57489d 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter.go @@ -30,7 +30,7 @@ func NewPromlinter(settings *config.PromlinterSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: promlinterName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues := runPromLinter(pass, promSettings) if len(issues) == 0 { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/reassign.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/reassign.go index bc1b93a54a..a6dd670530 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/reassign.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/reassign.go @@ -14,9 +14,9 @@ import ( func NewReassign(settings *config.ReassignSettings) *goanalysis.Linter { a := reassign.NewAnalyzer() - var cfg map[string]map[string]interface{} + var cfg map[string]map[string]any if settings != nil && len(settings.Patterns) > 0 { - cfg = map[string]map[string]interface{}{ + cfg = map[string]map[string]any{ a.Name: { reassign.FlagPattern: fmt.Sprintf("^(%s)$", strings.Join(settings.Patterns, "|")), }, diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go index faa9e0243e..b57566e7af 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go @@ -13,7 +13,6 @@ import ( reviveConfig "github.com/mgechev/revive/config" "github.com/mgechev/revive/lint" "github.com/mgechev/revive/rule" - "github.com/pkg/errors" "golang.org/x/tools/go/analysis" 
"github.com/golangci/golangci-lint/pkg/config" @@ -52,7 +51,7 @@ func NewRevive(settings *config.ReviveSettings) *goanalysis.Linter { []*analysis.Analyzer{analyzer}, nil, ).WithContextSetter(func(lintCtx *linter.Context) { - analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + analyzer.Run = func(pass *analysis.Pass) (any, error) { issues, err := runRevive(lintCtx, pass, settings) if err != nil { return nil, err @@ -171,13 +170,13 @@ func getReviveConfig(cfg *config.ReviveSettings) (*lint.Config, error) { err := toml.NewEncoder(buf).Encode(rawRoot) if err != nil { - return nil, errors.Wrap(err, "failed to encode configuration") + return nil, fmt.Errorf("failed to encode configuration: %w", err) } conf = &lint.Config{} _, err = toml.NewDecoder(buf).Decode(conf) if err != nil { - return nil, errors.Wrap(err, "failed to decode configuration") + return nil, fmt.Errorf("failed to decode configuration: %w", err) } } @@ -188,8 +187,8 @@ func getReviveConfig(cfg *config.ReviveSettings) (*lint.Config, error) { return conf, nil } -func createConfigMap(cfg *config.ReviveSettings) map[string]interface{} { - rawRoot := map[string]interface{}{ +func createConfigMap(cfg *config.ReviveSettings) map[string]any { + rawRoot := map[string]any{ "ignoreGeneratedHeader": cfg.IgnoreGeneratedHeader, "confidence": cfg.Confidence, "severity": cfg.Severity, @@ -198,9 +197,9 @@ func createConfigMap(cfg *config.ReviveSettings) map[string]interface{} { "enableAllRules": cfg.EnableAllRules, } - rawDirectives := map[string]map[string]interface{}{} + rawDirectives := map[string]map[string]any{} for _, directive := range cfg.Directives { - rawDirectives[directive.Name] = map[string]interface{}{ + rawDirectives[directive.Name] = map[string]any{ "severity": directive.Severity, } } @@ -209,9 +208,9 @@ func createConfigMap(cfg *config.ReviveSettings) map[string]interface{} { rawRoot["directive"] = rawDirectives } - rawRules := map[string]map[string]interface{}{} + rawRules := map[string]map[string]any{} for _, s := range cfg.Rules { - rawRules[s.Name] = map[string]interface{}{ + rawRules[s.Name] = map[string]any{ "severity": s.Severity, "arguments": safeTomlSlice(s.Arguments), "disabled": s.Disabled, @@ -225,19 +224,19 @@ func createConfigMap(cfg *config.ReviveSettings) map[string]interface{} { return rawRoot } -func safeTomlSlice(r []interface{}) []interface{} { +func safeTomlSlice(r []any) []any { if len(r) == 0 { return nil } - if _, ok := r[0].(map[interface{}]interface{}); !ok { + if _, ok := r[0].(map[any]any); !ok { return r } - var typed []interface{} + var typed []any for _, elt := range r { - item := map[string]interface{}{} - for k, v := range elt.(map[interface{}]interface{}) { + item := map[string]any{} + for k, v := range elt.(map[any]any) { item[k.(string)] = v } @@ -248,7 +247,7 @@ func safeTomlSlice(r []interface{}) []interface{} { } // This element is not exported by revive, so we need copy the code. 
-// Extracted from https://github.com/mgechev/revive/blob/v1.1.4/config/config.go#L15 +// Extracted from https://github.com/mgechev/revive/blob/v1.3.0/config/config.go#L15 var defaultRules = []lint.Rule{ &rule.VarDeclarationsRule{}, &rule.PackageCommentsRule{}, @@ -268,21 +267,23 @@ var defaultRules = []lint.Rule{ &rule.TimeNamingRule{}, &rule.ContextKeysType{}, &rule.ContextAsArgumentRule{}, + &rule.IfReturnRule{}, + &rule.EmptyBlockRule{}, + &rule.SuperfluousElseRule{}, + &rule.UnusedParamRule{}, + &rule.UnreachableCodeRule{}, + &rule.RedefinesBuiltinIDRule{}, } var allRules = append([]lint.Rule{ &rule.ArgumentsLimitRule{}, &rule.CyclomaticRule{}, &rule.FileHeaderRule{}, - &rule.EmptyBlockRule{}, - &rule.SuperfluousElseRule{}, &rule.ConfusingNamingRule{}, &rule.GetReturnRule{}, &rule.ModifiesParamRule{}, &rule.ConfusingResultsRule{}, &rule.DeepExitRule{}, - &rule.UnusedParamRule{}, - &rule.UnreachableCodeRule{}, &rule.AddConstantRule{}, &rule.FlagParamRule{}, &rule.UnnecessaryStmtRule{}, @@ -290,7 +291,6 @@ var allRules = append([]lint.Rule{ &rule.ModifiesValRecRule{}, &rule.ConstantLogicalExprRule{}, &rule.BoolLiteralRule{}, - &rule.RedefinesBuiltinIDRule{}, &rule.ImportsBlacklistRule{}, &rule.FunctionResultsLimitRule{}, &rule.MaxPublicStructsRule{}, @@ -316,12 +316,13 @@ var allRules = append([]lint.Rule{ &rule.UnexportedNamingRule{}, &rule.FunctionLength{}, &rule.NestedStructs{}, - &rule.IfReturnRule{}, &rule.UselessBreak{}, &rule.TimeEqualRule{}, &rule.BannedCharsRule{}, &rule.OptimizeOperandsOrderRule{}, + &rule.UseAnyRule{}, &rule.DataRaceRule{}, + &rule.CommentSpacingsRule{}, }, defaultRules...) const defaultConfidence = 0.8 diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go index 7054ef33a5..e6ef15ede0 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go @@ -23,7 +23,7 @@ func NewScopelint() *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: scopelintName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues := runScopeLint(pass) if len(issues) == 0 { @@ -176,12 +176,12 @@ func (f *Node) Visit(node ast.Node) ast.Visitor { // and must end with a format string and any arguments. // //nolint:interfacer -func (f *Node) errorf(n ast.Node, format string, args ...interface{}) { +func (f *Node) errorf(n ast.Node, format string, args ...any) { pos := f.fset.Position(n.Pos()) f.errorAtf(pos, format, args...) 
} -func (f *Node) errorAtf(pos token.Position, format string, args ...interface{}) { +func (f *Node) errorAtf(pos token.Position, format string, args ...any) { *f.issues = append(*f.issues, result.Issue{ Pos: pos, Text: fmt.Sprintf(format, args...), diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/structcheck.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/structcheck.go index fe49b1be20..f3df0c2f35 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/structcheck.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/structcheck.go @@ -23,7 +23,7 @@ func NewStructcheck(settings *config.StructCheckSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: structcheckName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues := runStructCheck(pass, settings) if len(issues) == 0 { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck.go index 899f6ff582..2e1e21c5bd 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck.go @@ -15,7 +15,7 @@ func NewStylecheck(settings *config.StaticCheckSettings) *goanalysis.Linter { // `scconfig.Analyzer` is a singleton, then it's not possible to have more than one instance for all staticcheck "sub-linters". // When we will merge the 4 "sub-linters", the problem will disappear: https://github.com/golangci/golangci-lint/issues/357 // Currently only stylecheck analyzer has a configuration in staticcheck. - scconfig.Analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + scconfig.Analyzer.Run = func(pass *analysis.Pass) (any, error) { return cfg, nil } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagalign.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagalign.go new file mode 100644 index 0000000000..07b7564649 --- /dev/null +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagalign.go @@ -0,0 +1,70 @@ +package golinters + +import ( + "sync" + + "github.com/4meepo/tagalign" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func NewTagAlign(settings *config.TagAlignSettings) *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + options := []tagalign.Option{tagalign.WithMode(tagalign.GolangciLintMode)} + + if settings != nil { + options = append(options, tagalign.WithAlign(settings.Align)) + + if settings.Sort || len(settings.Order) > 0 { + options = append(options, tagalign.WithSort(settings.Order...)) + } + } + + analyzer := tagalign.NewAnalyzer(options...) + analyzer.Run = func(pass *analysis.Pass) (any, error) { + taIssues := tagalign.Run(pass, options...) 
+ + issues := make([]goanalysis.Issue, len(taIssues)) + for i, issue := range taIssues { + report := &result.Issue{ + FromLinter: analyzer.Name, + Pos: issue.Pos, + Text: issue.Message, + Replacement: &result.Replacement{ + Inline: &result.InlineFix{ + StartCol: issue.InlineFix.StartCol, + Length: issue.InlineFix.Length, + NewString: issue.InlineFix.NewString, + }, + }, + } + + issues[i] = goanalysis.NewIssue(report, pass) + } + + if len(issues) == 0 { + return nil, nil + } + + mu.Lock() + resIssues = append(resIssues, issues...) + mu.Unlock() + + return nil, nil + } + + return goanalysis.NewLinter( + analyzer.Name, + analyzer.Doc, + []*analysis.Analyzer{analyzer}, + nil, + ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv.go index 174b0dd615..6c6bd3186f 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv.go @@ -11,9 +11,9 @@ import ( func NewTenv(settings *config.TenvSettings) *goanalysis.Linter { a := tenv.Analyzer - var cfg map[string]map[string]interface{} + var cfg map[string]map[string]any if settings != nil { - cfg = map[string]map[string]interface{}{ + cfg = map[string]map[string]any{ a.Name: { tenv.A: settings.All, }, diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage.go index 2cc627595f..db1ead9661 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage.go @@ -13,9 +13,9 @@ import ( func NewTestpackage(cfg *config.TestpackageSettings) *goanalysis.Linter { var a = testpackage.NewAnalyzer() - var settings map[string]map[string]interface{} + var settings map[string]map[string]any if cfg != nil { - settings = map[string]map[string]interface{}{ + settings = map[string]map[string]any{ a.Name: { testpackage.SkipRegexpFlagName: cfg.SkipRegexp, testpackage.AllowPackagesFlagName: strings.Join(cfg.AllowPackages, ","), diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go index 41edbe7616..84a8e9e8be 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go @@ -47,7 +47,7 @@ func NewThelper(cfg *config.ThelperSettings) *goanalysis.Linter { args = append(args, k) } - cfgMap := map[string]map[string]interface{}{ + cfgMap := map[string]map[string]any{ a.Name: { "checks": strings.Join(args, ","), }, @@ -55,7 +55,7 @@ func NewThelper(cfg *config.ThelperSettings) *goanalysis.Linter { return goanalysis.NewLinter( "thelper", - "thelper detects golang test helpers without t.Helper() call and checks the consistency of test helpers", + "thelper detects Go test helpers without t.Helper() call and checks the consistency of test helpers", []*analysis.Analyzer{a}, cfgMap, ).WithLoadMode(goanalysis.LoadModeTypesInfo) diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert.go index def9f15657..aad858dfd6 100644 --- 
a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert.go @@ -21,7 +21,7 @@ func NewUnconvert() *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: unconvertName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues := runUnconvert(pass) if len(issues) == 0 { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam.go index 7accf29566..4078d94988 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam.go @@ -24,7 +24,7 @@ func NewUnparam(settings *config.UnparamSettings) *goanalysis.Linter { Name: unparamName, Doc: goanalysis.TheOnlyanalyzerDoc, Requires: []*analysis.Analyzer{buildssa.Analyzer}, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues, err := runUnparam(pass, settings) if err != nil { return nil, err diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go index d464690549..aa9374d343 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go @@ -27,7 +27,7 @@ func NewUnused(settings *config.StaticCheckSettings) *goanalysis.Linter { Name: unusedName, Doc: unused.Analyzer.Analyzer.Doc, Requires: unused.Analyzer.Analyzer.Requires, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues, err := runUnused(pass) if err != nil { return nil, err diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars.go index 0ea4b563b0..663a841ac7 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/usestdlibvars.go @@ -11,9 +11,9 @@ import ( func NewUseStdlibVars(cfg *config.UseStdlibVarsSettings) *goanalysis.Linter { a := analyzer.New() - cfgMap := make(map[string]map[string]interface{}) + cfgMap := make(map[string]map[string]any) if cfg != nil { - cfgMap[a.Name] = map[string]interface{}{ + cfgMap[a.Name] = map[string]any{ analyzer.ConstantKindFlag: cfg.ConstantKind, analyzer.CryptoHashFlag: cfg.CryptoHash, analyzer.HTTPMethodFlag: cfg.HTTPMethod, diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/varcheck.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/varcheck.go index c2c5b7aa9c..495c5b59fb 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/varcheck.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/varcheck.go @@ -31,7 +31,7 @@ func NewVarcheck(settings *config.VarCheckSettings) *goanalysis.Linter { []*analysis.Analyzer{analyzer}, nil, ).WithContextSetter(func(lintCtx *linter.Context) { - analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + analyzer.Run = func(pass *analysis.Pass) (any, error) { issues := runVarCheck(pass, settings) if len(issues) == 0 { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go 
b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go index d86c04b20a..688dfa8046 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go @@ -13,10 +13,10 @@ import ( func NewVarnamelen(settings *config.VarnamelenSettings) *goanalysis.Linter { analyzer := varnamelen.NewAnalyzer() - cfg := map[string]map[string]interface{}{} + cfg := map[string]map[string]any{} if settings != nil { - vnlCfg := map[string]interface{}{ + vnlCfg := map[string]any{ "checkReceiver": strconv.FormatBool(settings.CheckReceiver), "checkReturn": strconv.FormatBool(settings.CheckReturn), "checkTypeParam": strconv.FormatBool(settings.CheckTypeParam), diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace.go index 1b32a7ad63..e5941fa5dc 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace.go @@ -1,10 +1,10 @@ package golinters import ( + "fmt" "go/token" "sync" - "github.com/pkg/errors" "github.com/ultraware/whitespace" "golang.org/x/tools/go/analysis" @@ -41,7 +41,7 @@ func NewWhitespace(settings *config.WhitespaceSettings) *goanalysis.Linter { []*analysis.Analyzer{analyzer}, nil, ).WithContextSetter(func(lintCtx *linter.Context) { - analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + analyzer.Run = func(pass *analysis.Pass) (any, error) { issues, err := runWhitespace(lintCtx, pass, wsSettings) if err != nil { return nil, err @@ -87,7 +87,7 @@ func runWhitespace(lintCtx *linter.Context, pass *analysis.Pass, wsSettings whit bracketLine, err := lintCtx.LineCache.GetLine(issue.Pos.Filename, issue.Pos.Line) if err != nil { - return nil, errors.Wrapf(err, "failed to get line %s:%d", issue.Pos.Filename, issue.Pos.Line) + return nil, fmt.Errorf("failed to get line %s:%d: %w", issue.Pos.Filename, issue.Pos.Line, err) } switch i.Type { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go index 4bc7024470..05697a629e 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go @@ -40,7 +40,7 @@ func NewWSL(settings *config.WSLSettings) *goanalysis.Linter { analyzer := &analysis.Analyzer{ Name: goanalysis.TheOnlyAnalyzerName, Doc: goanalysis.TheOnlyanalyzerDoc, - Run: func(pass *analysis.Pass) (interface{}, error) { + Run: func(pass *analysis.Pass) (any, error) { issues := runWSL(pass, &conf) if len(issues) == 0 { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/zerologlint.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/zerologlint.go new file mode 100644 index 0000000000..a37bca12e2 --- /dev/null +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/golinters/zerologlint.go @@ -0,0 +1,17 @@ +package golinters + +import ( + "github.com/ykadowak/zerologlint" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewZerologLint() *goanalysis.Linter { + return goanalysis.NewLinter( + "zerologlint", + "Detects the wrong usage of `zerolog` that a user forgets to dispatch with `Send` or `Msg`.", + []*analysis.Analyzer{zerologlint.Analyzer}, + nil, + 
).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go index 4873f3f96c..93922f85a7 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go @@ -3,13 +3,12 @@ package goutil import ( "context" "encoding/json" + "fmt" "os" "os/exec" "strings" "time" - "github.com/pkg/errors" - "github.com/golangci/golangci-lint/pkg/logutils" ) @@ -40,11 +39,11 @@ func (e *Env) Discover(ctx context.Context) error { args = append(args, string(EnvGoCache), string(EnvGoRoot)) out, err := exec.CommandContext(ctx, "go", args...).Output() if err != nil { - return errors.Wrap(err, "failed to run 'go env'") + return fmt.Errorf("failed to run 'go env': %w", err) } if err = json.Unmarshal(out, &e.vars); err != nil { - return errors.Wrapf(err, "failed to parse 'go %s' json", strings.Join(args, " ")) + return fmt.Errorf("failed to parse 'go %s' json: %w", strings.Join(args, " "), err) } e.debugf("Read go env for %s: %#v", time.Since(startedAt), e.vars) diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go index 167ac46259..5891ec2770 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go @@ -50,6 +50,11 @@ type Config struct { Deprecation *Deprecation } +func (lc *Config) WithEnabledByDefault() *Config { + lc.EnabledByDefault = true + return lc +} + func (lc *Config) ConsiderSlow() *Config { lc.IsSlow = true return lc @@ -127,7 +132,7 @@ func (lc *Config) WithNoopFallback(cfg *config.Config) *Config { lc.Linter = &Noop{ name: lc.Linter.Name(), desc: lc.Linter.Desc(), - run: func(pass *analysis.Pass) (interface{}, error) { + run: func(pass *analysis.Pass) (any, error) { return nil, nil }, } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/linter.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/linter.go index 7d3b2260a5..a65d6b9278 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/linter.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/linter.go @@ -17,7 +17,7 @@ type Linter interface { type Noop struct { name string desc string - run func(pass *analysis.Pass) (interface{}, error) + run func(pass *analysis.Pass) (any, error) } func (n Noop) Run(_ context.Context, lintCtx *Context) ([]result.Issue, error) { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go index 6f406f7d26..5a04fe193b 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go @@ -24,6 +24,7 @@ type Manager struct { func NewManager(cfg *config.Config, log logutils.Log) *Manager { m := &Manager{cfg: cfg, log: log} + nameToLCs := make(map[string][]*linter.Config) for _, lc := range m.GetAllSupportedLinterConfigs() { for _, name := range lc.AllNames() { @@ -32,6 +33,7 @@ func NewManager(cfg *config.Config, log logutils.Log) *Manager { } m.nameToLCs = nameToLCs + return m } @@ -87,17 +89,6 @@ func (m Manager) GetLinterConfigs(name string) []*linter.Config { return m.nameToLCs[name] } -func 
enableLinterConfigs(lcs []*linter.Config, isEnabled func(lc *linter.Config) bool) []*linter.Config { - var ret []*linter.Config - for _, lc := range lcs { - lc := lc - lc.EnabledByDefault = isEnabled(lc) - ret = append(ret, lc) - } - - return ret -} - //nolint:funlen func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { var ( @@ -135,6 +126,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { gomodguardCfg *config.GoModGuardSettings gosecCfg *config.GoSecSettings gosimpleCfg *config.StaticCheckSettings + gosmopolitanCfg *config.GosmopolitanSettings govetCfg *config.GovetSettings grouperCfg *config.GrouperSettings ifshortCfg *config.IfshortSettings @@ -164,6 +156,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { staticcheckCfg *config.StaticCheckSettings structcheckCfg *config.StructCheckSettings stylecheckCfg *config.StaticCheckSettings + tagalignCfg *config.TagAlignSettings tagliatelleCfg *config.TagliatelleSettings tenvCfg *config.TenvSettings testpackageCfg *config.TestpackageSettings @@ -213,6 +206,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { gomodguardCfg = &m.cfg.LintersSettings.Gomodguard gosecCfg = &m.cfg.LintersSettings.Gosec gosimpleCfg = &m.cfg.LintersSettings.Gosimple + gosmopolitanCfg = &m.cfg.LintersSettings.Gosmopolitan govetCfg = &m.cfg.LintersSettings.Govet grouperCfg = &m.cfg.LintersSettings.Grouper ifshortCfg = &m.cfg.LintersSettings.Ifshort @@ -242,12 +236,14 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { staticcheckCfg = &m.cfg.LintersSettings.Staticcheck structcheckCfg = &m.cfg.LintersSettings.Structcheck stylecheckCfg = &m.cfg.LintersSettings.Stylecheck + tagalignCfg = &m.cfg.LintersSettings.TagAlign tagliatelleCfg = &m.cfg.LintersSettings.Tagliatelle tenvCfg = &m.cfg.LintersSettings.Tenv testpackageCfg = &m.cfg.LintersSettings.Testpackage thelperCfg = &m.cfg.LintersSettings.Thelper unparamCfg = &m.cfg.LintersSettings.Unparam - unusedCfg = &m.cfg.LintersSettings.Unused + unusedCfg = new(config.StaticCheckSettings) + usestdlibvars = &m.cfg.LintersSettings.UseStdlibVars varcheckCfg = &m.cfg.LintersSettings.Varcheck varnamelenCfg = &m.cfg.LintersSettings.Varnamelen whitespaceCfg = &m.cfg.LintersSettings.Whitespace @@ -258,6 +254,10 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { govetCfg.Go = m.cfg.Run.Go } + if gocriticCfg != nil { + gocriticCfg.Go = m.cfg.Run.Go + } + if gofumptCfg != nil && gofumptCfg.LangVersion == "" { gofumptCfg.LangVersion = m.cfg.Run.Go } @@ -280,7 +280,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { // The linters are sorted in the alphabetical order (case-insensitive). // When a new linter is added the version in `WithSince(...)` must be the next minor version of golangci-lint. - lcs := []*linter.Config{ + return []*linter.Config{ linter.NewConfig(golinters.NewAsasalint(asasalintCfg)). WithSince("1.47.0"). WithPresets(linter.PresetBugs). @@ -305,6 +305,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { linter.NewConfig(golinters.NewContainedCtx()). WithSince("1.44.0"). + WithLoadForGoAnalysis(). WithPresets(linter.PresetStyle). WithURL("https://github.com/sivchari/containedctx"), @@ -360,6 +361,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithURL("https://github.com/charithe/durationcheck"), linter.NewConfig(golinters.NewErrcheck(errcheckCfg)). + WithEnabledByDefault(). WithSince("v1.0.0"). WithLoadForGoAnalysis(). 
WithPresets(linter.PresetBugs, linter.PresetError). @@ -417,6 +419,11 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { linter.NewConfig(golinters.NewForbidigo(forbidigoCfg)). WithSince("v1.34.0"). WithPresets(linter.PresetStyle). + // Strictly speaking, + // the additional information is only needed when forbidigoCfg.AnalyzeTypes is chosen by the user. + // But we don't know that here in all cases (sometimes config is not loaded), + // so we have to assume that it is needed to be on the safe side. + WithLoadForGoAnalysis(). WithURL("https://github.com/ashanbrown/forbidigo"), linter.NewConfig(golinters.NewForceTypeAssert()). @@ -551,13 +558,21 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithAlternativeNames("gas"), linter.NewConfig(golinters.NewGosimple(gosimpleCfg)). + WithEnabledByDefault(). WithSince("v1.20.0"). WithLoadForGoAnalysis(). WithPresets(linter.PresetStyle). WithAlternativeNames(megacheckName). WithURL("https://github.com/dominikh/go-tools/tree/master/simple"), + linter.NewConfig(golinters.NewGosmopolitan(gosmopolitanCfg)). + WithSince("v1.53.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetBugs). + WithURL("https://github.com/xen0n/gosmopolitan"), + linter.NewConfig(golinters.NewGovet(govetCfg)). + WithEnabledByDefault(). WithSince("v1.0.0"). WithLoadForGoAnalysis(). WithPresets(linter.PresetBugs, linter.PresetMetaLinter). @@ -582,6 +597,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithURL("https://github.com/julz/importas"), linter.NewConfig(golinters.NewIneffassign()). + WithEnabledByDefault(). WithSince("v1.0.0"). WithPresets(linter.PresetUnused). WithURL("https://github.com/gordonklaus/ineffassign"), @@ -633,6 +649,12 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithURL("https://github.com/mdempsky/maligned"). Deprecated("The repository of the linter has been archived by the owner.", "v1.38.0", "govet 'fieldalignment'"), + linter.NewConfig(golinters.NewMirror()). + WithSince("v1.53.0"). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/butuzov/mirror"), + linter.NewConfig(golinters.NewMisspell(misspellCfg)). WithSince("v1.8.0"). WithPresets(linter.PresetStyle, linter.PresetComment). @@ -643,7 +665,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithSince("v1.51.0"). WithLoadForGoAnalysis(). WithPresets(linter.PresetStyle, linter.PresetBugs). - WithURL("https://github.com/junk1tm/musttag"), + WithURL("https://github.com/tmzane/musttag"), linter.NewConfig(golinters.NewNakedret(nakedretCfg)). WithSince("v1.19.0"). @@ -732,8 +754,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithSince("v1.23.0"). WithLoadForGoAnalysis(). WithPresets(linter.PresetBugs, linter.PresetSQL). - WithURL("https://github.com/jingyugao/rowserrcheck"). - WithNoopFallback(m.cfg), + WithURL("https://github.com/jingyugao/rowserrcheck"), linter.NewConfig(golinters.NewScopelint()). WithSince("v1.12.0"). @@ -748,6 +769,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithURL("https://github.com/ryanrolds/sqlclosecheck"), linter.NewConfig(golinters.NewStaticcheck(staticcheckCfg)). + WithEnabledByDefault(). WithSince("v1.0.0"). WithLoadForGoAnalysis(). WithPresets(linter.PresetBugs, linter.PresetMetaLinter). @@ -759,8 +781,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithLoadForGoAnalysis(). WithPresets(linter.PresetUnused). 
WithURL("https://github.com/opennota/check"). - Deprecated("The owner seems to have abandoned the linter.", "v1.49.0", "unused"). - WithNoopFallback(m.cfg), + Deprecated("The owner seems to have abandoned the linter.", "v1.49.0", "unused"), linter.NewConfig(golinters.NewStylecheck(stylecheckCfg)). WithSince("v1.20.0"). @@ -768,6 +789,12 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithPresets(linter.PresetStyle). WithURL("https://github.com/dominikh/go-tools/tree/master/stylecheck"), + linter.NewConfig(golinters.NewTagAlign(tagalignCfg)). + WithSince("v1.53.0"). + WithPresets(linter.PresetStyle, linter.PresetFormatting). + WithAutoFix(). + WithURL("https://github.com/4meepo/tagalign"), + linter.NewConfig(golinters.NewTagliatelle(tagliatelleCfg)). WithSince("v1.40.0"). WithPresets(linter.PresetStyle). @@ -802,6 +829,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithURL("https://github.com/moricho/tparallel"), linter.NewConfig(golinters.NewTypecheck()). + WithEnabledByDefault(). WithSince("v1.3.0"). WithLoadForGoAnalysis(). WithPresets(linter.PresetBugs). @@ -820,6 +848,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithURL("https://github.com/mvdan/unparam"), linter.NewConfig(golinters.NewUnused(unusedCfg)). + WithEnabledByDefault(). WithSince("v1.20.0"). WithLoadForGoAnalysis(). WithPresets(linter.PresetUnused). @@ -850,8 +879,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithSince("v1.38.0"). WithPresets(linter.PresetStyle). WithLoadForGoAnalysis(). - WithURL("https://github.com/sanposhiho/wastedassign"). - WithNoopFallback(m.cfg), + WithURL("https://github.com/sanposhiho/wastedassign"), linter.NewConfig(golinters.NewWhitespace(whitespaceCfg)). WithSince("v1.19.0"). @@ -875,20 +903,13 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithSince("v1.26.0"). WithPresets(linter.PresetStyle). WithURL("https://github.com/golangci/golangci-lint/blob/master/pkg/golinters/nolintlint/README.md"), - } - enabledByDefault := map[string]bool{ - golinters.NewGovet(nil).Name(): true, - golinters.NewErrcheck(errcheckCfg).Name(): true, - golinters.NewStaticcheck(staticcheckCfg).Name(): true, - golinters.NewUnused(unusedCfg).Name(): true, - golinters.NewGosimple(gosimpleCfg).Name(): true, - golinters.NewIneffassign().Name(): true, - golinters.NewTypecheck().Name(): true, + linter.NewConfig(golinters.NewZerologLint()). + WithSince("v1.53.0"). + WithPresets(linter.PresetBugs). + WithLoadForGoAnalysis(). + WithURL("https://github.com/ykadowak/zerologlint"), } - return enableLinterConfigs(lcs, func(lc *linter.Config) bool { - return enabledByDefault[lc.Name()] - }) } func (m Manager) GetAllEnabledByDefaultLinters() []*linter.Config { @@ -943,10 +964,12 @@ func (m Manager) loadCustomLinterConfig(name string, settings config.CustomLinte settings.Description, analyzer.GetAnalyzers(), nil).WithLoadMode(goanalysis.LoadModeTypesInfo) - linterConfig := linter.NewConfig(customLinter) - linterConfig.EnabledByDefault = true - linterConfig.IsSlow = false - linterConfig.WithURL(settings.OriginalURL) + + linterConfig := linter.NewConfig(customLinter). + WithEnabledByDefault(). + WithLoadForGoAnalysis(). 
+ WithURL(settings.OriginalURL) + return linterConfig, nil } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go index c4e1e17659..babad5ba60 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go @@ -11,7 +11,6 @@ import ( "strings" "time" - "github.com/pkg/errors" "golang.org/x/tools/go/packages" "github.com/golangci/golangci-lint/internal/pkgcache" @@ -172,11 +171,11 @@ func (cl *ContextLoader) parseLoadedPackagesErrors(pkgs []*packages.Package) err errs = append(errs, err) if strings.Contains(err.Msg, "no Go files") { - return errors.Wrapf(exitcodes.ErrNoGoFiles, "package %s", pkg.PkgPath) + return fmt.Errorf("package %s: %w", pkg.PkgPath, exitcodes.ErrNoGoFiles) } if strings.Contains(err.Msg, "cannot find package") { // when analyzing not existing directory - return errors.Wrap(exitcodes.ErrFailure, err.Msg) + return fmt.Errorf("%v: %w", err.Msg, exitcodes.ErrFailure) } } @@ -195,7 +194,7 @@ func (cl *ContextLoader) loadPackages(ctx context.Context, loadMode packages.Loa buildFlags, err := cl.makeBuildFlags() if err != nil { - return nil, errors.Wrap(err, "failed to make build flags for go list") + return nil, fmt.Errorf("failed to make build flags for go list: %w", err) } conf := &packages.Config{ @@ -211,14 +210,14 @@ func (cl *ContextLoader) loadPackages(ctx context.Context, loadMode packages.Loa cl.debugf("Built loader args are %s", args) pkgs, err := packages.Load(conf, args...) if err != nil { - return nil, errors.Wrap(err, "failed to load with go/packages") + return nil, fmt.Errorf("failed to load with go/packages: %w", err) } // Currently, go/packages doesn't guarantee that error will be returned // if context was canceled. 
See // https://github.com/golang/tools/commit/c5cec6710e927457c3c29d6c156415e8539a5111#r39261855 if ctx.Err() != nil { - return nil, errors.Wrap(ctx.Err(), "timed out to load packages") + return nil, fmt.Errorf("timed out to load packages: %w", ctx.Err()) } if loadMode&packages.NeedSyntax == 0 { @@ -299,7 +298,7 @@ func (cl *ContextLoader) Load(ctx context.Context, linters []*linter.Config) (*l loadMode := cl.findLoadMode(linters) pkgs, err := cl.loadPackages(ctx, loadMode) if err != nil { - return nil, errors.Wrap(err, "failed to load packages") + return nil, fmt.Errorf("failed to load packages: %w", err) } deduplicatedPkgs := cl.filterDuplicatePackages(pkgs) diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go index f285b731b8..d270892d53 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go @@ -7,7 +7,6 @@ import ( "strings" "github.com/hashicorp/go-multierror" - "github.com/pkg/errors" gopackages "golang.org/x/tools/go/packages" "github.com/golangci/golangci-lint/internal/errorutil" @@ -28,9 +27,16 @@ type Runner struct { Log logutils.Log } -func NewRunner(cfg *config.Config, log logutils.Log, goenv *goutil.Env, es *lintersdb.EnabledSet, - lineCache *fsutils.LineCache, dbManager *lintersdb.Manager, pkgs []*gopackages.Package) (*Runner, error) { - skipFilesProcessor, err := processors.NewSkipFiles(cfg.Run.SkipFiles) +func NewRunner(cfg *config.Config, log logutils.Log, goenv *goutil.Env, + es *lintersdb.EnabledSet, + lineCache *fsutils.LineCache, fileCache *fsutils.FileCache, + dbManager *lintersdb.Manager, pkgs []*gopackages.Package) (*Runner, error) { + // Beware that some processors need to add the path prefix when working with paths + // because they get invoked before the path prefixer (exclude and severity rules) + // or process other paths (skip files). + files := fsutils.NewFiles(lineCache, cfg.Output.PathPrefix) + + skipFilesProcessor, err := processors.NewSkipFiles(cfg.Run.SkipFiles, cfg.Output.PathPrefix) if err != nil { return nil, err } @@ -39,14 +45,14 @@ func NewRunner(cfg *config.Config, log logutils.Log, goenv *goutil.Env, es *lint if cfg.Run.UseDefaultSkipDirs { skipDirs = append(skipDirs, packages.StdExcludeDirRegexps...) 
} - skipDirsProcessor, err := processors.NewSkipDirs(skipDirs, log.Child(logutils.DebugKeySkipDirs), cfg.Run.Args) + skipDirsProcessor, err := processors.NewSkipDirs(skipDirs, log.Child(logutils.DebugKeySkipDirs), cfg.Run.Args, cfg.Output.PathPrefix) if err != nil { return nil, err } enabledLinters, err := es.GetEnabledLintersMap() if err != nil { - return nil, errors.Wrap(err, "failed to get enabled linters") + return nil, fmt.Errorf("failed to get enabled linters: %w", err) } // print deprecated messages @@ -83,7 +89,7 @@ func NewRunner(cfg *config.Config, log logutils.Log, goenv *goutil.Env, es *lint processors.NewIdentifierMarker(), getExcludeProcessor(&cfg.Issues), - getExcludeRulesProcessor(&cfg.Issues, log, lineCache), + getExcludeRulesProcessor(&cfg.Issues, log, files), processors.NewNolint(log.Child(logutils.DebugKeyNolint), dbManager, enabledLinters), processors.NewUniqByLine(cfg), @@ -93,7 +99,12 @@ func NewRunner(cfg *config.Config, log logutils.Log, goenv *goutil.Env, es *lint processors.NewMaxFromLinter(cfg.Issues.MaxIssuesPerLinter, log.Child(logutils.DebugKeyMaxFromLinter), cfg), processors.NewSourceCode(lineCache, log.Child(logutils.DebugKeySourceCode)), processors.NewPathShortener(), - getSeverityRulesProcessor(&cfg.Severity, log, lineCache), + getSeverityRulesProcessor(&cfg.Severity, log, files), + + // The fixer still needs to see paths for the issues that are relative to the current directory. + processors.NewFixer(cfg, log, fileCache), + + // Now we can modify the issues for output. processors.NewPathPrefixer(cfg.Output.PathPrefix), processors.NewSortResults(cfg), }, @@ -260,15 +271,16 @@ func getExcludeProcessor(cfg *config.Issues) processors.Processor { return excludeProcessor } -func getExcludeRulesProcessor(cfg *config.Issues, log logutils.Log, lineCache *fsutils.LineCache) processors.Processor { +func getExcludeRulesProcessor(cfg *config.Issues, log logutils.Log, files *fsutils.Files) processors.Processor { var excludeRules []processors.ExcludeRule for _, r := range cfg.ExcludeRules { excludeRules = append(excludeRules, processors.ExcludeRule{ BaseRule: processors.BaseRule{ - Text: r.Text, - Source: r.Source, - Path: r.Path, - Linters: r.Linters, + Text: r.Text, + Source: r.Source, + Path: r.Path, + PathExcept: r.PathExcept, + Linters: r.Linters, }, }) } @@ -288,13 +300,13 @@ func getExcludeRulesProcessor(cfg *config.Issues, log logutils.Log, lineCache *f if cfg.ExcludeCaseSensitive { excludeRulesProcessor = processors.NewExcludeRulesCaseSensitive( excludeRules, - lineCache, + files, log.Child(logutils.DebugKeyExcludeRules), ) } else { excludeRulesProcessor = processors.NewExcludeRules( excludeRules, - lineCache, + files, log.Child(logutils.DebugKeyExcludeRules), ) } @@ -302,16 +314,17 @@ func getExcludeRulesProcessor(cfg *config.Issues, log logutils.Log, lineCache *f return excludeRulesProcessor } -func getSeverityRulesProcessor(cfg *config.Severity, log logutils.Log, lineCache *fsutils.LineCache) processors.Processor { +func getSeverityRulesProcessor(cfg *config.Severity, log logutils.Log, files *fsutils.Files) processors.Processor { var severityRules []processors.SeverityRule for _, r := range cfg.Rules { severityRules = append(severityRules, processors.SeverityRule{ Severity: r.Severity, BaseRule: processors.BaseRule{ - Text: r.Text, - Source: r.Source, - Path: r.Path, - Linters: r.Linters, + Text: r.Text, + Source: r.Source, + Path: r.Path, + PathExcept: r.PathExcept, + Linters: r.Linters, }, }) } @@ -321,14 +334,14 @@ func getSeverityRulesProcessor(cfg 
*config.Severity, log logutils.Log, lineCache severityRulesProcessor = processors.NewSeverityRulesCaseSensitive( cfg.Default, severityRules, - lineCache, + files, log.Child(logutils.DebugKeySeverityRules), ) } else { severityRulesProcessor = processors.NewSeverityRules( cfg.Default, severityRules, - lineCache, + files, log.Child(logutils.DebugKeySeverityRules), ) } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/logutils/log.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/logutils/log.go index 2c9609d894..16067e490e 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/logutils/log.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/logutils/log.go @@ -1,11 +1,11 @@ package logutils type Log interface { - Fatalf(format string, args ...interface{}) - Panicf(format string, args ...interface{}) - Errorf(format string, args ...interface{}) - Warnf(format string, args ...interface{}) - Infof(format string, args ...interface{}) + Fatalf(format string, args ...any) + Panicf(format string, args ...any) + Errorf(format string, args ...any) + Warnf(format string, args ...any) + Infof(format string, args ...any) Child(name string) Log SetLevel(level LogLevel) diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go index 62c521eac3..80c9fed7a9 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go @@ -22,6 +22,7 @@ const ( DebugKeyExcludeRules = "exclude_rules" DebugKeyExec = "exec" DebugKeyFilenameUnadjuster = "filename_unadjuster" + DebugKeyForbidigo = "forbidigo" DebugKeyGoEnv = "goenv" DebugKeyLinter = "linter" DebugKeyLintersContext = "linters_context" @@ -77,9 +78,9 @@ func getEnabledDebugs() map[string]bool { var enabledDebugs = getEnabledDebugs() -type DebugFunc func(format string, args ...interface{}) +type DebugFunc func(format string, args ...any) -func nopDebugf(format string, args ...interface{}) {} +func nopDebugf(_ string, _ ...any) {} func Debug(tag string) DebugFunc { if !enabledDebugs[tag] { @@ -89,7 +90,7 @@ func Debug(tag string) DebugFunc { logger := NewStderrLog(tag) logger.SetLevel(LogLevelDebug) - return func(format string, args ...interface{}) { + return func(format string, args ...any) { logger.Debugf(format, args...) } } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/logutils/mock.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/logutils/mock.go index e897ce1ede..efda8cc20f 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/logutils/mock.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/logutils/mock.go @@ -12,28 +12,28 @@ func NewMockLog() *MockLog { return &MockLog{} } -func (m *MockLog) Fatalf(format string, args ...interface{}) { - mArgs := []interface{}{format} +func (m *MockLog) Fatalf(format string, args ...any) { + mArgs := []any{format} m.Called(append(mArgs, args...)...) } -func (m *MockLog) Panicf(format string, args ...interface{}) { - mArgs := []interface{}{format} +func (m *MockLog) Panicf(format string, args ...any) { + mArgs := []any{format} m.Called(append(mArgs, args...)...) } -func (m *MockLog) Errorf(format string, args ...interface{}) { - mArgs := []interface{}{format} +func (m *MockLog) Errorf(format string, args ...any) { + mArgs := []any{format} m.Called(append(mArgs, args...)...) 
} -func (m *MockLog) Warnf(format string, args ...interface{}) { - mArgs := []interface{}{format} +func (m *MockLog) Warnf(format string, args ...any) { + mArgs := []any{format} m.Called(append(mArgs, args...)...) } -func (m *MockLog) Infof(format string, args ...interface{}) { - mArgs := []interface{}{format} +func (m *MockLog) Infof(format string, args ...any) { + mArgs := []any{format} m.Called(append(mArgs, args...)...) } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go index a68215e705..367c94f385 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go @@ -67,17 +67,17 @@ func (sl StderrLog) prefix() string { return prefix } -func (sl StderrLog) Fatalf(format string, args ...interface{}) { +func (sl StderrLog) Fatalf(format string, args ...any) { sl.logger.Errorf("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) os.Exit(exitcodes.Failure) } -func (sl StderrLog) Panicf(format string, args ...interface{}) { +func (sl StderrLog) Panicf(format string, args ...any) { v := fmt.Sprintf("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) panic(v) } -func (sl StderrLog) Errorf(format string, args ...interface{}) { +func (sl StderrLog) Errorf(format string, args ...any) { if sl.level > LogLevelError { return } @@ -88,7 +88,7 @@ func (sl StderrLog) Errorf(format string, args ...interface{}) { // called on hidden errors, see log levels comments. } -func (sl StderrLog) Warnf(format string, args ...interface{}) { +func (sl StderrLog) Warnf(format string, args ...any) { if sl.level > LogLevelWarn { return } @@ -96,7 +96,7 @@ func (sl StderrLog) Warnf(format string, args ...interface{}) { sl.logger.Warnf("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) } -func (sl StderrLog) Infof(format string, args ...interface{}) { +func (sl StderrLog) Infof(format string, args ...any) { if sl.level > LogLevelInfo { return } @@ -104,7 +104,7 @@ func (sl StderrLog) Infof(format string, args ...interface{}) { sl.logger.Infof("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) } -func (sl StderrLog) Debugf(format string, args ...interface{}) { +func (sl StderrLog) Debugf(format string, args ...any) { if sl.level > LogLevelDebug { return } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go index 72fb8601ab..4898367129 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go @@ -1,12 +1,11 @@ package packages import ( + "errors" "fmt" "go/token" "strconv" "strings" - - "github.com/pkg/errors" ) func ParseErrorPosition(pos string) (*token.Position, error) { @@ -26,7 +25,7 @@ func ParseErrorPosition(pos string) (*token.Position, error) { if len(parts) == 3 { // no column column, err = strconv.Atoi(parts[2]) if err != nil { - return nil, errors.Wrapf(err, "failed to parse column from %q", parts[2]) + return nil, fmt.Errorf("failed to parse column from %q: %w", parts[2], err) } } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go index 307a8e7a0a..3762ca0569 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go +++ 
b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go @@ -1,7 +1,6 @@ package printers import ( - "context" "encoding/xml" "fmt" "io" @@ -41,7 +40,7 @@ func NewCheckstyle(w io.Writer) *Checkstyle { return &Checkstyle{w: w} } -func (p Checkstyle) Print(ctx context.Context, issues []result.Issue) error { +func (p Checkstyle) Print(issues []result.Issue) error { out := checkstyleOutput{ Version: "5.0", } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go index 8a90f145dd..50d6dcff3b 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go @@ -1,9 +1,7 @@ package printers import ( - "context" "encoding/json" - "fmt" "io" "github.com/golangci/golangci-lint/pkg/result" @@ -35,7 +33,7 @@ func NewCodeClimate(w io.Writer) *CodeClimate { return &CodeClimate{w: w} } -func (p CodeClimate) Print(ctx context.Context, issues []result.Issue) error { +func (p CodeClimate) Print(issues []result.Issue) error { codeClimateIssues := make([]CodeClimateIssue, 0, len(issues)) for i := range issues { issue := &issues[i] @@ -53,12 +51,7 @@ func (p CodeClimate) Print(ctx context.Context, issues []result.Issue) error { codeClimateIssues = append(codeClimateIssues, codeClimateIssue) } - outputJSON, err := json.Marshal(codeClimateIssues) - if err != nil { - return err - } - - _, err = fmt.Fprint(p.w, string(outputJSON)) + err := json.NewEncoder(p.w).Encode(codeClimateIssues) if err != nil { return err } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go index 6a4d05d46f..7f148097ab 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go @@ -1,7 +1,6 @@ package printers import ( - "context" "fmt" "io" @@ -36,7 +35,7 @@ func formatIssueAsGithub(issue *result.Issue) string { return ret } -func (p *github) Print(_ context.Context, issues []result.Issue) error { +func (p *github) Print(issues []result.Issue) error { for ind := range issues { _, err := fmt.Fprintln(p.w, formatIssueAsGithub(&issues[ind])) if err != nil { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go index 3d82d7d8bd..7dd1e5c623 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go @@ -1,7 +1,6 @@ package printers import ( - "context" "fmt" "html/template" "io" @@ -131,7 +130,7 @@ func NewHTML(w io.Writer) *HTML { return &HTML{w: w} } -func (p HTML) Print(_ context.Context, issues []result.Issue) error { +func (p HTML) Print(issues []result.Issue) error { var htmlIssues []htmlIssue for i := range issues { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go index cfef51f587..4bae526b87 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go @@ -1,7 +1,6 @@ package printers import ( - "context" "encoding/json" "io" @@ -26,7 +25,7 @@ type JSONResult struct { Report *report.Data } -func (p JSON) Print(ctx context.Context, 
issues []result.Issue) error { +func (p JSON) Print(issues []result.Issue) error { res := JSONResult{ Issues: issues, Report: p.rd, diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go index 0424f78b48..86a3811e47 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go @@ -1,7 +1,6 @@ package printers import ( - "context" "encoding/xml" "fmt" "io" @@ -45,7 +44,7 @@ func NewJunitXML(w io.Writer) *JunitXML { return &JunitXML{w: w} } -func (p JunitXML) Print(ctx context.Context, issues []result.Issue) error { +func (p JunitXML) Print(issues []result.Issue) error { suites := make(map[string]testSuiteXML) // use a map to group by file for ind := range issues { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go index bfafb88e2a..ce3116fa4e 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go @@ -1,11 +1,9 @@ package printers import ( - "context" - "github.com/golangci/golangci-lint/pkg/result" ) type Printer interface { - Print(ctx context.Context, issues []result.Issue) error + Print(issues []result.Issue) error } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go index ffef491085..8ede897402 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go @@ -1,7 +1,6 @@ package printers import ( - "context" "fmt" "io" "text/tabwriter" @@ -14,24 +13,32 @@ import ( type Tab struct { printLinterName bool - log logutils.Log - w io.Writer + useColors bool + + log logutils.Log + w io.Writer } -func NewTab(printLinterName bool, log logutils.Log, w io.Writer) *Tab { +func NewTab(printLinterName, useColors bool, log logutils.Log, w io.Writer) *Tab { return &Tab{ printLinterName: printLinterName, + useColors: useColors, log: log, w: w, } } -func (p *Tab) SprintfColored(ca color.Attribute, format string, args ...interface{}) string { +func (p *Tab) SprintfColored(ca color.Attribute, format string, args ...any) string { c := color.New(ca) + + if !p.useColors { + c.DisableColor() + } + return c.Sprintf(format, args...) } -func (p *Tab) Print(ctx context.Context, issues []result.Issue) error { +func (p *Tab) Print(issues []result.Issue) error { w := tabwriter.NewWriter(p.w, 0, 0, 2, ' ', 0) for i := range issues { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/teamcity.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/teamcity.go new file mode 100644 index 0000000000..d3693e9971 --- /dev/null +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/teamcity.go @@ -0,0 +1,122 @@ +package printers + +import ( + "fmt" + "io" + "strings" + "unicode/utf8" + + "github.com/golangci/golangci-lint/pkg/result" +) + +// Field limits. +const ( + smallLimit = 255 + largeLimit = 4000 +) + +// TeamCity printer for TeamCity format. +type TeamCity struct { + w io.Writer + escaper *strings.Replacer +} + +// NewTeamCity output format outputs issues according to TeamCity service message format. 
+func NewTeamCity(w io.Writer) *TeamCity { + return &TeamCity{ + w: w, + // https://www.jetbrains.com/help/teamcity/service-messages.html#Escaped+Values + escaper: strings.NewReplacer( + "'", "|'", + "\n", "|n", + "\r", "|r", + "|", "||", + "[", "|[", + "]", "|]", + ), + } +} + +func (p *TeamCity) Print(issues []result.Issue) error { + uniqLinters := map[string]struct{}{} + + for i := range issues { + issue := issues[i] + + _, ok := uniqLinters[issue.FromLinter] + if !ok { + inspectionType := InspectionType{ + id: issue.FromLinter, + name: issue.FromLinter, + description: issue.FromLinter, + category: "Golangci-lint reports", + } + + _, err := inspectionType.Print(p.w, p.escaper) + if err != nil { + return err + } + + uniqLinters[issue.FromLinter] = struct{}{} + } + + instance := InspectionInstance{ + typeID: issue.FromLinter, + message: issue.Text, + file: issue.FilePath(), + line: issue.Line(), + severity: issue.Severity, + } + + _, err := instance.Print(p.w, p.escaper) + if err != nil { + return err + } + } + + return nil +} + +// InspectionType is the unique description of the conducted inspection. Each specific warning or +// an error in code (inspection instance) has an inspection type. +// https://www.jetbrains.com/help/teamcity/service-messages.html#Inspection+Type +type InspectionType struct { + id string // (mandatory) limited by 255 characters. + name string // (mandatory) limited by 255 characters. + description string // (mandatory) limited by 255 characters. + category string // (mandatory) limited by 4000 characters. +} + +func (i InspectionType) Print(w io.Writer, escaper *strings.Replacer) (int, error) { + return fmt.Fprintf(w, "##teamcity[InspectionType id='%s' name='%s' description='%s' category='%s']\n", + limit(i.id, smallLimit), limit(i.name, smallLimit), limit(escaper.Replace(i.description), largeLimit), limit(i.category, smallLimit)) +} + +// InspectionInstance reports a specific defect, warning, error message. +// Includes location, description, and various optional and custom attributes. +// https://www.jetbrains.com/help/teamcity/service-messages.html#Inspection+Instance +type InspectionInstance struct { + typeID string // (mandatory) limited by 255 characters. + message string // (optional) limited by 4000 characters. + file string // (mandatory) file path limited by 4000 characters. + line int // (optional) line of the file. + severity string // (optional) any linter severity. 
+} + +func (i InspectionInstance) Print(w io.Writer, replacer *strings.Replacer) (int, error) { + return fmt.Fprintf(w, "##teamcity[inspection typeId='%s' message='%s' file='%s' line='%d' SEVERITY='%s']\n", + limit(i.typeID, smallLimit), + limit(replacer.Replace(i.message), largeLimit), + limit(i.file, largeLimit), + i.line, strings.ToUpper(i.severity)) +} + +func limit(s string, max int) string { + var size, count int + for i := 0; i < max && count < len(s); i++ { + _, size = utf8.DecodeRuneInString(s[count:]) + count += size + } + + return s[:count] +} diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go index d59391b29d..6e29c4b50f 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go @@ -1,7 +1,6 @@ package printers import ( - "context" "fmt" "io" "strings" @@ -14,8 +13,8 @@ import ( type Text struct { printIssuedLine bool - useColors bool printLinterName bool + useColors bool log logutils.Log w io.Writer @@ -24,23 +23,24 @@ type Text struct { func NewText(printIssuedLine, useColors, printLinterName bool, log logutils.Log, w io.Writer) *Text { return &Text{ printIssuedLine: printIssuedLine, - useColors: useColors, printLinterName: printLinterName, + useColors: useColors, log: log, w: w, } } -func (p *Text) SprintfColored(ca color.Attribute, format string, args ...interface{}) string { +func (p *Text) SprintfColored(ca color.Attribute, format string, args ...any) string { + c := color.New(ca) + if !p.useColors { - return fmt.Sprintf(format, args...) + c.DisableColor() } - c := color.New(ca) return c.Sprintf(format, args...) } -func (p *Text) Print(ctx context.Context, issues []result.Issue) error { +func (p *Text) Print(issues []result.Issue) error { for i := range issues { p.printIssue(&issues[i]) @@ -73,7 +73,7 @@ func (p *Text) printSourceCode(i *result.Issue) { } } -func (p Text) printUnderLinePointer(i *result.Issue) { +func (p *Text) printUnderLinePointer(i *result.Issue) { // if column == 0 it means column is unknown (e.g. for gosec) if len(i.SourceLines) != 1 || i.Pos.Column == 0 { return diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/report/log.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/report/log.go index 45ab6cae85..61665f28b7 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/report/log.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/report/log.go @@ -20,20 +20,20 @@ func NewLogWrapper(log logutils.Log, reportData *Data) *LogWrapper { } } -func (lw LogWrapper) Fatalf(format string, args ...interface{}) { +func (lw LogWrapper) Fatalf(format string, args ...any) { lw.origLog.Fatalf(format, args...) } -func (lw LogWrapper) Panicf(format string, args ...interface{}) { +func (lw LogWrapper) Panicf(format string, args ...any) { lw.origLog.Panicf(format, args...) } -func (lw LogWrapper) Errorf(format string, args ...interface{}) { +func (lw LogWrapper) Errorf(format string, args ...any) { lw.origLog.Errorf(format, args...) lw.rd.Error = fmt.Sprintf(format, args...) } -func (lw LogWrapper) Warnf(format string, args ...interface{}) { +func (lw LogWrapper) Warnf(format string, args ...any) { lw.origLog.Warnf(format, args...) 
w := Warning{ Tag: strings.Join(lw.tags, "/"), @@ -43,7 +43,7 @@ func (lw LogWrapper) Warnf(format string, args ...interface{}) { lw.rd.Warnings = append(lw.rd.Warnings, w) } -func (lw LogWrapper) Infof(format string, args ...interface{}) { +func (lw LogWrapper) Infof(format string, args ...any) { lw.origLog.Infof(format, args...) } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go index 5e41fd6a94..c7675fce8f 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go @@ -1,13 +1,13 @@ package processors import ( + "errors" + "fmt" "go/parser" "go/token" "path/filepath" "strings" - "github.com/pkg/errors" - "github.com/golangci/golangci-lint/pkg/logutils" "github.com/golangci/golangci-lint/pkg/result" ) @@ -107,7 +107,7 @@ func (p *AutogeneratedExclude) getOrCreateFileSummary(i *result.Issue) (*ageFile doc, err := getDoc(i.FilePath()) if err != nil { - return nil, errors.Wrapf(err, "failed to get doc of file %s", i.FilePath()) + return nil, fmt.Errorf("failed to get doc of file %s: %w", i.FilePath(), err) } fs.isGenerated = isGeneratedFileByComment(doc) @@ -119,7 +119,7 @@ func getDoc(filePath string) (string, error) { fset := token.NewFileSet() syntax, err := parser.ParseFile(fset, filePath, nil, parser.PackageClauseOnly|parser.ParseComments) if err != nil { - return "", errors.Wrap(err, "failed to parse file") + return "", fmt.Errorf("failed to parse file: %w", err) } var docLines []string diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go index 6958b9f2f3..b5e138806b 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go @@ -9,31 +9,36 @@ import ( ) type BaseRule struct { - Text string - Source string - Path string - Linters []string + Text string + Source string + Path string + PathExcept string + Linters []string } type baseRule struct { - text *regexp.Regexp - source *regexp.Regexp - path *regexp.Regexp - linters []string + text *regexp.Regexp + source *regexp.Regexp + path *regexp.Regexp + pathExcept *regexp.Regexp + linters []string } func (r *baseRule) isEmpty() bool { - return r.text == nil && r.source == nil && r.path == nil && len(r.linters) == 0 + return r.text == nil && r.source == nil && r.path == nil && r.pathExcept == nil && len(r.linters) == 0 } -func (r *baseRule) match(issue *result.Issue, lineCache *fsutils.LineCache, log logutils.Log) bool { +func (r *baseRule) match(issue *result.Issue, files *fsutils.Files, log logutils.Log) bool { if r.isEmpty() { return false } if r.text != nil && !r.text.MatchString(issue.Text) { return false } - if r.path != nil && !r.path.MatchString(issue.FilePath()) { + if r.path != nil && !r.path.MatchString(files.WithPathPrefix(issue.FilePath())) { + return false + } + if r.pathExcept != nil && r.pathExcept.MatchString(issue.FilePath()) { return false } if len(r.linters) != 0 && !r.matchLinter(issue) { @@ -41,7 +46,7 @@ func (r *baseRule) match(issue *result.Issue, lineCache *fsutils.LineCache, log } // the most heavyweight checking last - if r.source != nil && !r.matchSource(issue, lineCache, 
log) { + if r.source != nil && !r.matchSource(issue, files.LineCache, log) { return false } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go index c8793871ac..8e77237518 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go @@ -1,11 +1,10 @@ package processors import ( + "fmt" "path/filepath" "strings" - "github.com/pkg/errors" - "github.com/golangci/golangci-lint/pkg/goutil" "github.com/golangci/golangci-lint/pkg/result" ) @@ -37,7 +36,7 @@ func (p Cgo) Process(issues []result.Issue) ([]result.Issue, error) { if !filepath.IsAbs(i.FilePath()) { absPath, err := filepath.Abs(i.FilePath()) if err != nil { - return false, errors.Wrapf(err, "failed to build abs path for %q", i.FilePath()) + return false, fmt.Errorf("failed to build abs path for %q: %w", i.FilePath(), err) } issueFilePath = absPath } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go index 62533b8115..2f7e30b430 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go @@ -17,15 +17,15 @@ type ExcludeRule struct { } type ExcludeRules struct { - rules []excludeRule - lineCache *fsutils.LineCache - log logutils.Log + rules []excludeRule + files *fsutils.Files + log logutils.Log } -func NewExcludeRules(rules []ExcludeRule, lineCache *fsutils.LineCache, log logutils.Log) *ExcludeRules { +func NewExcludeRules(rules []ExcludeRule, files *fsutils.Files, log logutils.Log) *ExcludeRules { r := &ExcludeRules{ - lineCache: lineCache, - log: log, + files: files, + log: log, } r.rules = createRules(rules, "(?i)") @@ -47,6 +47,10 @@ func createRules(rules []ExcludeRule, prefix string) []excludeRule { path := fsutils.NormalizePathInRegex(rule.Path) parsedRule.path = regexp.MustCompile(path) } + if rule.PathExcept != "" { + pathExcept := fsutils.NormalizePathInRegex(rule.PathExcept) + parsedRule.pathExcept = regexp.MustCompile(pathExcept) + } parsedRules = append(parsedRules, parsedRule) } return parsedRules @@ -59,7 +63,7 @@ func (p ExcludeRules) Process(issues []result.Issue) ([]result.Issue, error) { return filterIssues(issues, func(i *result.Issue) bool { for _, rule := range p.rules { rule := rule - if rule.match(i, p.lineCache, p.log) { + if rule.match(i, p.files, p.log) { return false } } @@ -76,10 +80,10 @@ type ExcludeRulesCaseSensitive struct { *ExcludeRules } -func NewExcludeRulesCaseSensitive(rules []ExcludeRule, lineCache *fsutils.LineCache, log logutils.Log) *ExcludeRulesCaseSensitive { +func NewExcludeRulesCaseSensitive(rules []ExcludeRule, files *fsutils.Files, log logutils.Log) *ExcludeRulesCaseSensitive { r := &ExcludeRules{ - lineCache: lineCache, - log: log, + files: files, + log: log, } r.rules = createRules(rules, "") diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go index d125e15793..a79a846288 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go @@ -8,8 +8,6 @@ import ( "sort" "strings" - 
"github.com/pkg/errors" - "github.com/golangci/golangci-lint/internal/robustio" "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/fsutils" @@ -18,6 +16,8 @@ import ( "github.com/golangci/golangci-lint/pkg/timeutils" ) +var _ Processor = Fixer{} + type Fixer struct { cfg *config.Config log logutils.Log @@ -38,9 +38,9 @@ func (f Fixer) printStat() { f.sw.PrintStages() } -func (f Fixer) Process(issues []result.Issue) []result.Issue { +func (f Fixer) Process(issues []result.Issue) ([]result.Issue, error) { if !f.cfg.Issues.NeedFix { - return issues + return issues, nil } outIssues := make([]result.Issue, 0, len(issues)) @@ -69,22 +69,28 @@ func (f Fixer) Process(issues []result.Issue) []result.Issue { } f.printStat() - return outIssues + return outIssues, nil } +func (f Fixer) Name() string { + return "fixer" +} + +func (f Fixer) Finish() {} + func (f Fixer) fixIssuesInFile(filePath string, issues []result.Issue) error { // TODO: don't read the whole file into memory: read line by line; // can't just use bufio.scanner: it has a line length limit origFileData, err := f.fileCache.GetFileBytes(filePath) if err != nil { - return errors.Wrapf(err, "failed to get file bytes for %s", filePath) + return fmt.Errorf("failed to get file bytes for %s: %w", filePath, err) } origFileLines := bytes.Split(origFileData, []byte("\n")) tmpFileName := filepath.Join(filepath.Dir(filePath), fmt.Sprintf(".%s.golangci_fix", filepath.Base(filePath))) tmpOutFile, err := os.Create(tmpFileName) if err != nil { - return errors.Wrapf(err, "failed to make file %s", tmpFileName) + return fmt.Errorf("failed to make file %s: %w", tmpFileName, err) } // merge multiple issues per line into one issue @@ -112,7 +118,7 @@ func (f Fixer) fixIssuesInFile(filePath string, issues []result.Issue) error { tmpOutFile.Close() if err = robustio.Rename(tmpOutFile.Name(), filePath); err != nil { _ = robustio.RemoveAll(tmpOutFile.Name()) - return errors.Wrapf(err, "failed to rename %s -> %s", tmpOutFile.Name(), filePath) + return fmt.Errorf("failed to rename %s -> %s: %w", tmpOutFile.Name(), filePath, err) } return nil @@ -241,7 +247,7 @@ func (f Fixer) writeFixedFile(origFileLines [][]byte, issues []result.Issue, tmp outLine += "\n" } if _, err := tmpOutFile.WriteString(outLine); err != nil { - return errors.Wrap(err, "failed to write output line") + return fmt.Errorf("failed to write output line: %w", err) } } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/issues.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/issues.go index 8bc3d847d6..4691be38a4 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/issues.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/issues.go @@ -1,7 +1,7 @@ package processors import ( - "github.com/pkg/errors" + "fmt" "github.com/golangci/golangci-lint/pkg/result" ) @@ -22,7 +22,7 @@ func filterIssuesErr(issues []result.Issue, filter func(i *result.Issue) (bool, for i := range issues { ok, err := filter(&issues[i]) if err != nil { - return nil, errors.Wrapf(err, "can't filter issue %#v", issues[i]) + return nil, fmt.Errorf("can't filter issue %#v: %w", issues[i], err) } if ok { diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go index 04ed831266..f6b885011b 100644 --- 
a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go @@ -1,8 +1,7 @@ package processors import ( - "path/filepath" - + "github.com/golangci/golangci-lint/pkg/fsutils" "github.com/golangci/golangci-lint/pkg/result" ) @@ -27,7 +26,7 @@ func (*PathPrefixer) Name() string { func (p *PathPrefixer) Process(issues []result.Issue) ([]result.Issue, error) { if p.prefix != "" { for i := range issues { - issues[i].Pos.Filename = filepath.Join(p.prefix, issues[i].Pos.Filename) + issues[i].Pos.Filename = fsutils.WithPathPrefix(p.prefix, issues[i].Pos.Filename) } } return issues, nil diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go index 85c1866a21..0a4a643b71 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go @@ -21,13 +21,13 @@ type SeverityRule struct { type SeverityRules struct { defaultSeverity string rules []severityRule - lineCache *fsutils.LineCache + files *fsutils.Files log logutils.Log } -func NewSeverityRules(defaultSeverity string, rules []SeverityRule, lineCache *fsutils.LineCache, log logutils.Log) *SeverityRules { +func NewSeverityRules(defaultSeverity string, rules []SeverityRule, files *fsutils.Files, log logutils.Log) *SeverityRules { r := &SeverityRules{ - lineCache: lineCache, + files: files, log: log, defaultSeverity: defaultSeverity, } @@ -52,6 +52,10 @@ func createSeverityRules(rules []SeverityRule, prefix string) []severityRule { path := fsutils.NormalizePathInRegex(rule.Path) parsedRule.path = regexp.MustCompile(path) } + if rule.PathExcept != "" { + pathExcept := fsutils.NormalizePathInRegex(rule.PathExcept) + parsedRule.pathExcept = regexp.MustCompile(pathExcept) + } parsedRules = append(parsedRules, parsedRule) } return parsedRules @@ -70,7 +74,7 @@ func (p SeverityRules) Process(issues []result.Issue) ([]result.Issue, error) { ruleSeverity = rule.severity } - if rule.match(i, p.lineCache, p.log) { + if rule.match(i, p.files, p.log) { i.Severity = ruleSeverity return i } @@ -90,9 +94,9 @@ type SeverityRulesCaseSensitive struct { } func NewSeverityRulesCaseSensitive(defaultSeverity string, rules []SeverityRule, - lineCache *fsutils.LineCache, log logutils.Log) *SeverityRulesCaseSensitive { + files *fsutils.Files, log logutils.Log) *SeverityRulesCaseSensitive { r := &SeverityRules{ - lineCache: lineCache, + files: files, log: log, defaultSeverity: defaultSeverity, } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go index 11ab99104f..e71495fd0b 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go @@ -1,12 +1,11 @@ package processors import ( + "fmt" "path/filepath" "regexp" "strings" - "github.com/pkg/errors" - "github.com/golangci/golangci-lint/pkg/fsutils" "github.com/golangci/golangci-lint/pkg/logutils" "github.com/golangci/golangci-lint/pkg/result" @@ -23,19 +22,20 @@ type SkipDirs struct { skippedDirs map[string]*skipStat absArgsDirs []string skippedDirsCache map[string]bool + pathPrefix string } var _ Processor = 
(*SkipDirs)(nil) const goFileSuffix = ".go" -func NewSkipDirs(patterns []string, log logutils.Log, runArgs []string) (*SkipDirs, error) { +func NewSkipDirs(patterns []string, log logutils.Log, runArgs []string, pathPrefix string) (*SkipDirs, error) { var patternsRe []*regexp.Regexp for _, p := range patterns { p = fsutils.NormalizePathInRegex(p) patternRe, err := regexp.Compile(p) if err != nil { - return nil, errors.Wrapf(err, "can't compile regexp %q", p) + return nil, fmt.Errorf("can't compile regexp %q: %w", p, err) } patternsRe = append(patternsRe, patternRe) } @@ -52,7 +52,7 @@ func NewSkipDirs(patterns []string, log logutils.Log, runArgs []string) (*SkipDi absArg, err := filepath.Abs(arg) if err != nil { - return nil, errors.Wrapf(err, "failed to abs-ify arg %q", arg) + return nil, fmt.Errorf("failed to abs-ify arg %q: %w", arg, err) } absArgsDirs = append(absArgsDirs, absArg) } @@ -63,6 +63,7 @@ func NewSkipDirs(patterns []string, log logutils.Log, runArgs []string) (*SkipDi skippedDirs: map[string]*skipStat{}, absArgsDirs: absArgsDirs, skippedDirsCache: map[string]bool{}, + pathPrefix: pathPrefix, }, nil } @@ -121,8 +122,9 @@ func (p *SkipDirs) shouldPassIssueDirs(issueRelDir, issueAbsDir string) bool { // The alternative solution is to find relative to args path, but it has // disadvantages (https://github.com/golangci/golangci-lint/pull/313). + path := fsutils.WithPathPrefix(p.pathPrefix, issueRelDir) for _, pattern := range p.patterns { - if pattern.MatchString(issueRelDir) { + if pattern.MatchString(path) { ps := pattern.String() if p.skippedDirs[issueRelDir] == nil { p.skippedDirs[issueRelDir] = &skipStat{ diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go index b7b86bed06..9579bee844 100644 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go +++ b/tools/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go @@ -9,12 +9,13 @@ import ( ) type SkipFiles struct { - patterns []*regexp.Regexp + patterns []*regexp.Regexp + pathPrefix string } var _ Processor = (*SkipFiles)(nil) -func NewSkipFiles(patterns []string) (*SkipFiles, error) { +func NewSkipFiles(patterns []string, pathPrefix string) (*SkipFiles, error) { var patternsRe []*regexp.Regexp for _, p := range patterns { p = fsutils.NormalizePathInRegex(p) @@ -26,7 +27,8 @@ func NewSkipFiles(patterns []string) (*SkipFiles, error) { } return &SkipFiles{ - patterns: patternsRe, + patterns: patternsRe, + pathPrefix: pathPrefix, }, nil } @@ -40,8 +42,9 @@ func (p SkipFiles) Process(issues []result.Issue) ([]result.Issue, error) { } return filterIssues(issues, func(i *result.Issue) bool { - for _, p := range p.patterns { - if p.MatchString(i.FilePath()) { + path := fsutils.WithPathPrefix(p.pathPrefix, i.FilePath()) + for _, pattern := range p.patterns { + if pattern.MatchString(path) { return false } } diff --git a/tools/vendor/github.com/golangci/golangci-lint/pkg/sliceutil/sliceutil.go b/tools/vendor/github.com/golangci/golangci-lint/pkg/sliceutil/sliceutil.go deleted file mode 100644 index cb89e34e0c..0000000000 --- a/tools/vendor/github.com/golangci/golangci-lint/pkg/sliceutil/sliceutil.go +++ /dev/null @@ -1,17 +0,0 @@ -package sliceutil - -// IndexOf get the index of the given value in the given string slice, -// or -1 if not found. 
-func IndexOf(slice []string, value string) int { - for i, v := range slice { - if v == value { - return i - } - } - return -1 -} - -// Contains check if a string slice contains a value. -func Contains(slice []string, value string) bool { - return IndexOf(slice, value) != -1 -} diff --git a/tools/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go b/tools/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go index 3d7b18d77b..f9dece8f2b 100644 --- a/tools/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go +++ b/tools/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go @@ -183,8 +183,11 @@ func (bld *builder) Visit(n ast.Node) ast.Visitor { brek.setDestination(bld.newBlock(exits...)) bld.breaks.pop() case *ast.DeferStmt: + bld.walk(n.Call.Fun) + for _, a := range n.Call.Args { + bld.walk(a) + } bld.defers[len(bld.defers)-1] = true - return bld case *ast.LabeledStmt: bld.gotos.get(n.Label).setDestination(bld.newBlock(bld.block)) bld.labelStmt = n diff --git a/tools/vendor/github.com/inconshreveable/mousetrap/trap_others.go b/tools/vendor/github.com/inconshreveable/mousetrap/trap_others.go index 9d2d8a4bab..06a91f0868 100644 --- a/tools/vendor/github.com/inconshreveable/mousetrap/trap_others.go +++ b/tools/vendor/github.com/inconshreveable/mousetrap/trap_others.go @@ -1,3 +1,4 @@ +//go:build !windows // +build !windows package mousetrap diff --git a/tools/vendor/github.com/inconshreveable/mousetrap/trap_windows.go b/tools/vendor/github.com/inconshreveable/mousetrap/trap_windows.go index 336142a5e3..0c56880216 100644 --- a/tools/vendor/github.com/inconshreveable/mousetrap/trap_windows.go +++ b/tools/vendor/github.com/inconshreveable/mousetrap/trap_windows.go @@ -1,81 +1,32 @@ -// +build windows -// +build !go1.4 - package mousetrap import ( - "fmt" - "os" "syscall" "unsafe" ) -const ( - // defined by the Win32 API - th32cs_snapprocess uintptr = 0x2 -) - -var ( - kernel = syscall.MustLoadDLL("kernel32.dll") - CreateToolhelp32Snapshot = kernel.MustFindProc("CreateToolhelp32Snapshot") - Process32First = kernel.MustFindProc("Process32FirstW") - Process32Next = kernel.MustFindProc("Process32NextW") -) - -// ProcessEntry32 structure defined by the Win32 API -type processEntry32 struct { - dwSize uint32 - cntUsage uint32 - th32ProcessID uint32 - th32DefaultHeapID int - th32ModuleID uint32 - cntThreads uint32 - th32ParentProcessID uint32 - pcPriClassBase int32 - dwFlags uint32 - szExeFile [syscall.MAX_PATH]uint16 -} - -func getProcessEntry(pid int) (pe *processEntry32, err error) { - snapshot, _, e1 := CreateToolhelp32Snapshot.Call(th32cs_snapprocess, uintptr(0)) - if snapshot == uintptr(syscall.InvalidHandle) { - err = fmt.Errorf("CreateToolhelp32Snapshot: %v", e1) - return +func getProcessEntry(pid int) (*syscall.ProcessEntry32, error) { + snapshot, err := syscall.CreateToolhelp32Snapshot(syscall.TH32CS_SNAPPROCESS, 0) + if err != nil { + return nil, err } - defer syscall.CloseHandle(syscall.Handle(snapshot)) - - var processEntry processEntry32 - processEntry.dwSize = uint32(unsafe.Sizeof(processEntry)) - ok, _, e1 := Process32First.Call(snapshot, uintptr(unsafe.Pointer(&processEntry))) - if ok == 0 { - err = fmt.Errorf("Process32First: %v", e1) - return + defer syscall.CloseHandle(snapshot) + var procEntry syscall.ProcessEntry32 + procEntry.Size = uint32(unsafe.Sizeof(procEntry)) + if err = syscall.Process32First(snapshot, &procEntry); err != nil { + return nil, err } - for { - if processEntry.th32ProcessID == 
uint32(pid) { - pe = &processEntry - return + if procEntry.ProcessID == uint32(pid) { + return &procEntry, nil } - - ok, _, e1 = Process32Next.Call(snapshot, uintptr(unsafe.Pointer(&processEntry))) - if ok == 0 { - err = fmt.Errorf("Process32Next: %v", e1) - return + err = syscall.Process32Next(snapshot, &procEntry) + if err != nil { + return nil, err } } } -func getppid() (pid int, err error) { - pe, err := getProcessEntry(os.Getpid()) - if err != nil { - return - } - - pid = int(pe.th32ParentProcessID) - return -} - // StartedByExplorer returns true if the program was invoked by the user double-clicking // on the executable from explorer.exe // @@ -83,16 +34,9 @@ func getppid() (pid int, err error) { // It does not guarantee that the program was run from a terminal. It only can tell you // whether it was launched from explorer.exe func StartedByExplorer() bool { - ppid, err := getppid() + pe, err := getProcessEntry(syscall.Getppid()) if err != nil { return false } - - pe, err := getProcessEntry(ppid) - if err != nil { - return false - } - - name := syscall.UTF16ToString(pe.szExeFile[:]) - return name == "explorer.exe" + return "explorer.exe" == syscall.UTF16ToString(pe.ExeFile[:]) } diff --git a/tools/vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go b/tools/vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go deleted file mode 100644 index 9a28e57c3c..0000000000 --- a/tools/vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go +++ /dev/null @@ -1,46 +0,0 @@ -// +build windows -// +build go1.4 - -package mousetrap - -import ( - "os" - "syscall" - "unsafe" -) - -func getProcessEntry(pid int) (*syscall.ProcessEntry32, error) { - snapshot, err := syscall.CreateToolhelp32Snapshot(syscall.TH32CS_SNAPPROCESS, 0) - if err != nil { - return nil, err - } - defer syscall.CloseHandle(snapshot) - var procEntry syscall.ProcessEntry32 - procEntry.Size = uint32(unsafe.Sizeof(procEntry)) - if err = syscall.Process32First(snapshot, &procEntry); err != nil { - return nil, err - } - for { - if procEntry.ProcessID == uint32(pid) { - return &procEntry, nil - } - err = syscall.Process32Next(snapshot, &procEntry) - if err != nil { - return nil, err - } - } -} - -// StartedByExplorer returns true if the program was invoked by the user double-clicking -// on the executable from explorer.exe -// -// It is conservative and returns false if any of the internal calls fail. -// It does not guarantee that the program was run from a terminal. 
It only can tell you -// whether it was launched from explorer.exe -func StartedByExplorer() bool { - pe, err := getProcessEntry(os.Getppid()) - if err != nil { - return false - } - return "explorer.exe" == syscall.UTF16ToString(pe.ExeFile[:]) -} diff --git a/tools/vendor/github.com/junk1tm/musttag/.goreleaser.yml b/tools/vendor/github.com/junk1tm/musttag/.goreleaser.yml deleted file mode 100644 index 6f85d818fb..0000000000 --- a/tools/vendor/github.com/junk1tm/musttag/.goreleaser.yml +++ /dev/null @@ -1,30 +0,0 @@ -builds: - - main: ./cmd/musttag - env: - - CGO_ENABLED=0 - flags: - - -trimpath - ldflags: - - -s -w -X main.version={{.Version}} - targets: - - darwin_amd64 - - darwin_arm64 - - linux_amd64 - - windows_amd64 - -archives: - - replacements: - darwin: macOS - format_overrides: - - goos: windows - format: zip - -brews: - - tap: - owner: junk1tm - name: homebrew-tap - branch: main - token: "{{ .Env.HOMEBREW_TAP_GITHUB_TOKEN }}" - homepage: "https://github.com/junk1tm/musttag" - description: "A Go linter that enforces field tags in (un)marshaled structs" - license: "MIT" diff --git a/tools/vendor/github.com/junk1tm/musttag/README.md b/tools/vendor/github.com/junk1tm/musttag/README.md deleted file mode 100644 index c04eae7c35..0000000000 --- a/tools/vendor/github.com/junk1tm/musttag/README.md +++ /dev/null @@ -1,93 +0,0 @@ -# musttag - -[![ci](https://github.com/junk1tm/musttag/actions/workflows/go.yml/badge.svg)](https://github.com/junk1tm/musttag/actions/workflows/go.yml) -[![docs](https://pkg.go.dev/badge/github.com/junk1tm/musttag.svg)](https://pkg.go.dev/github.com/junk1tm/musttag) -[![report](https://goreportcard.com/badge/github.com/junk1tm/musttag)](https://goreportcard.com/report/github.com/junk1tm/musttag) -[![codecov](https://codecov.io/gh/junk1tm/musttag/branch/main/graph/badge.svg)](https://codecov.io/gh/junk1tm/musttag) - -A Go linter that enforces field tags in (un)marshaled structs - -## 📌 About - -`musttag` checks that exported fields of a struct passed to a `Marshal`-like function are annotated with the relevant tag: - -```go -// BAD: -var user struct { - Name string -} -data, err := json.Marshal(user) - -// GOOD: -var user struct { - Name string `json:"name"` -} -data, err := json.Marshal(user) -``` - -The rational from [Uber Style Guide][1]: - -> The serialized form of the structure is a contract between different systems. -> Changes to the structure of the serialized form, including field names, break this contract. -> Specifying field names inside tags makes the contract explicit, -> and it guards against accidentally breaking the contract by refactoring or renaming fields. - -## 🚀 Features - -`musttag` supports these packages out of the box: - -* `encoding/json` -* `encoding/xml` -* `gopkg.in/yaml.v3` -* `github.com/BurntSushi/toml` -* `github.com/mitchellh/mapstructure` -* ...and any [custom one](#custom-packages) - -## 📦 Install - -### Go - -```shell -go install github.com/junk1tm/musttag/cmd/musttag@latest -``` - -### Brew - -```shell -brew install junk1tm/tap/musttag -``` - -### Manual - -Download a prebuilt binary from the [Releases][2] page. - -## 📋 Usage - -As a standalone binary: - -```shell -musttag ./... -``` - -Via `go vet`: - -```shell -go vet -vettool=$(which musttag) ./... 
-``` - -### Custom packages - -The `-fn=name:tag:argpos` flag can be used to report functions from custom packages, where - -* `name` is the full name of the function, including the package -* `tag` is the struct tag whose presence should be ensured -* `argpos` is the position of the argument to check - -For example, to support the `sqlx.Get` function: - -```shell -musttag -fn="github.com/jmoiron/sqlx.Get:db:1" ./... -``` - -[1]: https://github.com/uber-go/guide/blob/master/style.md#use-field-tags-in-marshaled-structs -[2]: https://github.com/junk1tm/musttag/releases diff --git a/tools/vendor/github.com/junk1tm/musttag/musttag.go b/tools/vendor/github.com/junk1tm/musttag/musttag.go deleted file mode 100644 index 434402263e..0000000000 --- a/tools/vendor/github.com/junk1tm/musttag/musttag.go +++ /dev/null @@ -1,254 +0,0 @@ -// Package musttag implements the musttag analyzer. -package musttag - -import ( - "flag" - "go/ast" - "go/token" - "go/types" - "reflect" - "strconv" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/go/types/typeutil" -) - -// Func describes a function call to look for, e.g. json.Marshal. -type Func struct { - Name string // Name is the full name of the function, including the package. - Tag string // Tag is the struct tag whose presence should be ensured. - ArgPos int // ArgPos is the position of the argument to check. -} - -// builtin is a set of functions supported out of the box. -var builtin = []Func{ - {Name: "encoding/json.Marshal", Tag: "json", ArgPos: 0}, - {Name: "encoding/json.MarshalIndent", Tag: "json", ArgPos: 0}, - {Name: "encoding/json.Unmarshal", Tag: "json", ArgPos: 1}, - {Name: "(*encoding/json.Encoder).Encode", Tag: "json", ArgPos: 0}, - {Name: "(*encoding/json.Decoder).Decode", Tag: "json", ArgPos: 0}, - - {Name: "encoding/xml.Marshal", Tag: "xml", ArgPos: 0}, - {Name: "encoding/xml.MarshalIndent", Tag: "xml", ArgPos: 0}, - {Name: "encoding/xml.Unmarshal", Tag: "xml", ArgPos: 1}, - {Name: "(*encoding/xml.Encoder).Encode", Tag: "xml", ArgPos: 0}, - {Name: "(*encoding/xml.Decoder).Decode", Tag: "xml", ArgPos: 0}, - {Name: "(*encoding/xml.Encoder).EncodeElement", Tag: "xml", ArgPos: 0}, - {Name: "(*encoding/xml.Decoder).DecodeElement", Tag: "xml", ArgPos: 0}, - - {Name: "gopkg.in/yaml.v3.Marshal", Tag: "yaml", ArgPos: 0}, - {Name: "gopkg.in/yaml.v3.Unmarshal", Tag: "yaml", ArgPos: 1}, - {Name: "(*gopkg.in/yaml.v3.Encoder).Encode", Tag: "yaml", ArgPos: 0}, - {Name: "(*gopkg.in/yaml.v3.Decoder).Decode", Tag: "yaml", ArgPos: 0}, - - {Name: "github.com/BurntSushi/toml.Unmarshal", Tag: "toml", ArgPos: 1}, - {Name: "github.com/BurntSushi/toml.Decode", Tag: "toml", ArgPos: 1}, - {Name: "github.com/BurntSushi/toml.DecodeFS", Tag: "toml", ArgPos: 2}, - {Name: "github.com/BurntSushi/toml.DecodeFile", Tag: "toml", ArgPos: 1}, - {Name: "(*github.com/BurntSushi/toml.Encoder).Encode", Tag: "toml", ArgPos: 0}, - {Name: "(*github.com/BurntSushi/toml.Decoder).Decode", Tag: "toml", ArgPos: 0}, - - {Name: "github.com/mitchellh/mapstructure.Decode", Tag: "mapstructure", ArgPos: 1}, - {Name: "github.com/mitchellh/mapstructure.DecodeMetadata", Tag: "mapstructure", ArgPos: 1}, - {Name: "github.com/mitchellh/mapstructure.WeakDecode", Tag: "mapstructure", ArgPos: 1}, - {Name: "github.com/mitchellh/mapstructure.WeakDecodeMetadata", Tag: "mapstructure", ArgPos: 1}, -} - -// flags creates a flag set for the analyzer. 
-// The funcs slice will be filled with custom functions passed via CLI flags. -func flags(funcs *[]Func) flag.FlagSet { - fs := flag.NewFlagSet("musttag", flag.ContinueOnError) - fs.Func("fn", "report custom function (name:tag:argpos)", func(s string) error { - parts := strings.Split(s, ":") - if len(parts) != 3 || parts[0] == "" || parts[1] == "" { - return strconv.ErrSyntax - } - pos, err := strconv.Atoi(parts[2]) - if err != nil { - return err - } - *funcs = append(*funcs, Func{ - Name: parts[0], - Tag: parts[1], - ArgPos: pos, - }) - return nil - }) - return *fs -} - -// New creates a new musttag analyzer. -// To report a custom function provide its description via Func, -// it will be added to the builtin ones. -func New(funcs ...Func) *analysis.Analyzer { - var flagFuncs []Func - return &analysis.Analyzer{ - Name: "musttag", - Doc: "enforce field tags in (un)marshaled structs", - Flags: flags(&flagFuncs), - Requires: []*analysis.Analyzer{inspect.Analyzer}, - Run: func(pass *analysis.Pass) (any, error) { - l := len(builtin) + len(funcs) + len(flagFuncs) - m := make(map[string]Func, l) - toMap := func(slice []Func) { - for _, fn := range slice { - m[fn.Name] = fn - } - } - toMap(builtin) - toMap(funcs) - toMap(flagFuncs) - return run(pass, m) - }, - } -} - -// for tests only. -var ( - // should the same struct be reported only once for the same tag? - reportOnce = true - - // reportf is a wrapper for pass.Reportf (as a variable, so it could be mocked in tests). - reportf = func(pass *analysis.Pass, pos token.Pos, fn Func) { - // TODO(junk1tm): print the name of the struct type as well? - pass.Reportf(pos, "exported fields should be annotated with the %q tag", fn.Tag) - } -) - -// run starts the analysis. -func run(pass *analysis.Pass, funcs map[string]Func) (any, error) { - type report struct { - pos token.Pos // the position for report. - tag string // the missing struct tag. - } - - // store previous reports to prevent reporting - // the same struct more than once (if reportOnce is true). - reports := make(map[report]struct{}) - - walk := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - filter := []ast.Node{(*ast.CallExpr)(nil)} - - walk.Preorder(filter, func(n ast.Node) { - call, ok := n.(*ast.CallExpr) - if !ok { - return // not a function call. - } - - callee := typeutil.StaticCallee(pass.TypesInfo, call) - if callee == nil { - return // not a static call. - } - - fn, ok := funcs[callee.FullName()] - if !ok { - return // the function is not supported. - } - - if len(call.Args) <= fn.ArgPos { - return // TODO(junk1tm): return a proper error. - } - - arg := call.Args[fn.ArgPos] - if unary, ok := arg.(*ast.UnaryExpr); ok { - arg = unary.X // e.g. json.Marshal(&foo) - } - - initialPos := token.NoPos - switch arg := arg.(type) { - case *ast.Ident: // e.g. json.Marshal(foo) - if arg.Obj == nil { - return // e.g. json.Marshal(nil) - } - initialPos = arg.Obj.Pos() - case *ast.CompositeLit: // e.g. json.Marshal(struct{}{}) - initialPos = arg.Pos() - } - - t := pass.TypesInfo.TypeOf(arg) - s, ok := parseStruct(t, initialPos) - if !ok { - return // not a struct argument. - } - - reportPos, ok := checkStruct(s, fn.Tag, make(map[string]struct{})) - if ok { - return // nothing to report. - } - - r := report{reportPos, fn.Tag} - if _, ok := reports[r]; ok && reportOnce { - return // already reported. - } - - reportf(pass, reportPos, fn) - reports[r] = struct{}{} - }) - - return nil, nil -} - -// structInfo expands types.Struct with its position in the source code. 
-// If the struct is anonymous, Pos points to the corresponding identifier. -type structInfo struct { - *types.Struct - Pos token.Pos -} - -// parseStruct parses the given types.Type, returning the underlying struct type. -// If it's a named type, the result will contain the position of its declaration, -// or the given token.Pos otherwise. -func parseStruct(t types.Type, pos token.Pos) (*structInfo, bool) { - for { - // unwrap pointers (if any) first. - ptr, ok := t.(*types.Pointer) - if !ok { - break - } - t = ptr.Elem() - } - - switch t := t.(type) { - case *types.Named: // a struct of the named type. - if s, ok := t.Underlying().(*types.Struct); ok { - return &structInfo{Struct: s, Pos: t.Obj().Pos()}, true - } - case *types.Struct: // an anonymous struct. - return &structInfo{Struct: t, Pos: pos}, true - } - - return nil, false -} - -// checkStruct recursively checks the given struct and returns the position for report, -// in case one of its fields is missing the tag. -func checkStruct(s *structInfo, tag string, visited map[string]struct{}) (token.Pos, bool) { - visited[s.String()] = struct{}{} - for i := 0; i < s.NumFields(); i++ { - if !s.Field(i).Exported() { - continue - } - - st := reflect.StructTag(s.Tag(i)) - if _, ok := st.Lookup(tag); !ok && !s.Field(i).Embedded() { - return s.Pos, false - } - - t := s.Field(i).Type() - nested, ok := parseStruct(t, s.Pos) // TODO(junk1tm): or s.Field(i).Pos()? - if !ok { - continue - } - if _, ok := visited[nested.String()]; ok { - continue - } - if pos, ok := checkStruct(nested, tag, visited); !ok { - return pos, false - } - } - - return token.NoPos, true -} diff --git a/tools/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go b/tools/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go index c7da52a200..9c2fbb9862 100644 --- a/tools/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go +++ b/tools/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go @@ -249,7 +249,9 @@ func isTestFunction(funcDecl *ast.FuncDecl) (bool, string) { if selectExpr, ok := starExp.X.(*ast.SelectorExpr); ok { if selectExpr.Sel.Name == testMethodStruct { if s, ok := selectExpr.X.(*ast.Ident); ok { - return s.Name == testMethodPackageType, param.Names[0].Name + if len(param.Names) > 0 { + return s.Name == testMethodPackageType, param.Names[0].Name + } } } } diff --git a/tools/vendor/github.com/ldez/tagliatelle/readme.md b/tools/vendor/github.com/ldez/tagliatelle/readme.md index 7bd728dd75..55a544db81 100644 --- a/tools/vendor/github.com/ldez/tagliatelle/readme.md +++ b/tools/vendor/github.com/ldez/tagliatelle/readme.md @@ -11,6 +11,7 @@ Supported string casing: - `pascal` - `kebab` - `snake` +- `upperSnake` - `goCamel` Respects [Go's common initialisms](https://github.com/golang/lint/blob/83fdc39ff7b56453e3793356bcff3070b9b96445/lint.go#L770-L809) (e.g. HttpResponse -> HTTPResponse). - `goPascal` Respects [Go's common initialisms](https://github.com/golang/lint/blob/83fdc39ff7b56453e3793356bcff3070b9b96445/lint.go#L770-L809) (e.g. HttpResponse -> HTTPResponse). - `goKebab` Respects [Go's common initialisms](https://github.com/golang/lint/blob/83fdc39ff7b56453e3793356bcff3070b9b96445/lint.go#L770-L809) (e.g. HttpResponse -> HTTPResponse). 
@@ -45,18 +46,18 @@ Supported string casing: | NameJSON | NameJson | NameJSON | | UneTête | UneTête | UneTête | -| Source | Snake Case | Go Snake Case | -|----------------|------------------|------------------| -| GooID | goo_id | goo_ID | -| HTTPStatusCode | http_status_code | HTTP_status_code | -| FooBAR | foo_bar | foo_bar | -| URL | url | URL | -| ID | id | ID | -| hostIP | host_ip | host_IP | -| JSON | json | JSON | -| JSONName | json_name | JSON_name | -| NameJSON | name_json | name_JSON | -| UneTête | une_tête | une_tête | +| Source | Snake Case | Upper Snake Case | Go Snake Case | +|----------------|------------------|------------------|------------------| +| GooID | goo_id | GOO_ID | goo_ID | +| HTTPStatusCode | http_status_code | HTTP_STATUS_CODE | HTTP_status_code | +| FooBAR | foo_bar | FOO_BAR | foo_bar | +| URL | url | URL | URL | +| ID | id | ID | ID | +| hostIP | host_ip | HOST_IP | host_IP | +| JSON | json | JSON | JSON | +| JSONName | json_name | JSON_NAME | JSON_name | +| NameJSON | name_json | NAME_JSON | name_JSON | +| UneTête | une_tête | UNE_TÊTE | une_tête | | Source | Kebab Case | Go KebabCase | |----------------|------------------|------------------| @@ -120,7 +121,7 @@ linters-settings: use-field-name: true rules: # Any struct tag type can be used. - # Support string case: `camel`, `pascal`, `kebab`, `snake`, `goCamel`, `goPascal`, `goKebab`, `goSnake`, `upper`, `lower` + # Support string case: `camel`, `pascal`, `kebab`, `snake`, `upperSnake`, `goCamel`, `goPascal`, `goKebab`, `goSnake`, `upper`, `lower`, `header`. json: camel yaml: camel xml: camel @@ -148,6 +149,7 @@ Here are the default rules for the well known and used tags, when using tagliate - `bson`: `camel` - `avro`: `snake` - `header`: `header` +- `envconfig`: `upperSnake` ### Custom Rules diff --git a/tools/vendor/github.com/ldez/tagliatelle/tagliatelle.go b/tools/vendor/github.com/ldez/tagliatelle/tagliatelle.go index c465376371..22c5feb3d8 100644 --- a/tools/vendor/github.com/ldez/tagliatelle/tagliatelle.go +++ b/tools/vendor/github.com/ldez/tagliatelle/tagliatelle.go @@ -204,6 +204,8 @@ func getConverter(c string) (func(s string) string, error) { return toHeader, nil case "upper": return strings.ToUpper, nil + case "upperSnake": + return strcase.ToSNAKE, nil case "lower": return strings.ToLower, nil default: diff --git a/tools/vendor/github.com/matoous/godox/godox.go b/tools/vendor/github.com/matoous/godox/godox.go index 6d7104b09d..3903525c80 100644 --- a/tools/vendor/github.com/matoous/godox/godox.go +++ b/tools/vendor/github.com/matoous/godox/godox.go @@ -8,31 +8,23 @@ import ( "go/token" "path/filepath" "strings" + "unicode" + "unicode/utf8" ) -var ( - defaultKeywords = []string{"TODO", "BUG", "FIXME"} -) +var defaultKeywords = []string{"TODO", "BUG", "FIXME"} -// Message contains a message and position +// Message contains a message and position. 
type Message struct { Pos token.Position Message string } -func getMessages(c *ast.Comment, fset *token.FileSet, keywords []string) []Message { - commentText := c.Text - switch commentText[1] { - case '/': - commentText = commentText[2:] - if len(commentText) > 0 && commentText[0] == ' ' { - commentText = commentText[1:] - } - case '*': - commentText = commentText[2 : len(commentText)-2] - } +func getMessages(comment *ast.Comment, fset *token.FileSet, keywords []string) []Message { + commentText := extractComment(comment.Text) b := bufio.NewReader(bytes.NewBufferString(commentText)) + var comments []Message for lineNum := 0; ; lineNum++ { @@ -40,45 +32,88 @@ func getMessages(c *ast.Comment, fset *token.FileSet, keywords []string) []Messa if err != nil { break } + + const minimumSize = 4 + sComment := bytes.TrimSpace(line) - if len(sComment) < 4 { + if len(sComment) < minimumSize { continue } + for _, kw := range keywords { - if bytes.EqualFold([]byte(kw), sComment[0:len(kw)]) { - pos := fset.Position(c.Pos()) - // trim the comment - if len(sComment) > 40 { - sComment = []byte(fmt.Sprintf("%.40s...", sComment)) - } - comments = append(comments, Message{ - Pos: pos, - Message: fmt.Sprintf( - "%s:%d: Line contains %s: \"%s\"", - filepath.Join(pos.Filename), - pos.Line+lineNum, - strings.Join(keywords, "/"), - sComment, - ), - }) - break + if lkw := len(kw); !(bytes.EqualFold([]byte(kw), sComment[0:lkw]) && + !hasAlphanumRuneAdjacent(sComment[lkw:])) { + continue + } + + pos := fset.Position(comment.Pos()) + // trim the comment + const commentLimit = 40 + if len(sComment) > commentLimit { + sComment = []byte(fmt.Sprintf("%.40s...", sComment)) } + + comments = append(comments, Message{ + Pos: pos, + Message: fmt.Sprintf( + "%s:%d: Line contains %s: %q", + filepath.Clean(pos.Filename), + pos.Line+lineNum, + strings.Join(keywords, "/"), + sComment, + ), + }) + + break } } + return comments } +func extractComment(commentText string) string { + switch commentText[1] { + case '/': + commentText = commentText[2:] + if len(commentText) > 0 && commentText[0] == ' ' { + commentText = commentText[1:] + } + case '*': + commentText = commentText[2 : len(commentText)-2] + } + + return commentText +} + +func hasAlphanumRuneAdjacent(rest []byte) bool { + if len(rest) == 0 { + return false + } + + switch rest[0] { // most common cases + case ':', ' ', '(': + return false + } + + r, _ := utf8.DecodeRune(rest) + + return unicode.IsLetter(r) || unicode.IsNumber(r) || unicode.IsDigit(r) +} + // Run runs the godox linter on given file. // Godox searches for comments starting with given keywords and reports them. func Run(file *ast.File, fset *token.FileSet, keywords ...string) []Message { if len(keywords) == 0 { keywords = defaultKeywords } + var messages []Message + for _, c := range file.Comments { for _, ci := range c.List { messages = append(messages, getMessages(ci, fset, keywords)...) 
} } + return messages } diff --git a/tools/vendor/github.com/mgechev/revive/config/config.go b/tools/vendor/github.com/mgechev/revive/config/config.go index d6b4f4100d..04cd214042 100644 --- a/tools/vendor/github.com/mgechev/revive/config/config.go +++ b/tools/vendor/github.com/mgechev/revive/config/config.go @@ -3,7 +3,7 @@ package config import ( "errors" "fmt" - "io/ioutil" + "os" "github.com/mgechev/revive/formatter" @@ -31,21 +31,23 @@ var defaultRules = []lint.Rule{ &rule.TimeNamingRule{}, &rule.ContextKeysType{}, &rule.ContextAsArgumentRule{}, + &rule.IfReturnRule{}, + &rule.EmptyBlockRule{}, + &rule.SuperfluousElseRule{}, + &rule.UnusedParamRule{}, + &rule.UnreachableCodeRule{}, + &rule.RedefinesBuiltinIDRule{}, } var allRules = append([]lint.Rule{ &rule.ArgumentsLimitRule{}, &rule.CyclomaticRule{}, &rule.FileHeaderRule{}, - &rule.EmptyBlockRule{}, - &rule.SuperfluousElseRule{}, &rule.ConfusingNamingRule{}, &rule.GetReturnRule{}, &rule.ModifiesParamRule{}, &rule.ConfusingResultsRule{}, &rule.DeepExitRule{}, - &rule.UnusedParamRule{}, - &rule.UnreachableCodeRule{}, &rule.AddConstantRule{}, &rule.FlagParamRule{}, &rule.UnnecessaryStmtRule{}, @@ -53,7 +55,6 @@ var allRules = append([]lint.Rule{ &rule.ModifiesValRecRule{}, &rule.ConstantLogicalExprRule{}, &rule.BoolLiteralRule{}, - &rule.RedefinesBuiltinIDRule{}, &rule.ImportsBlacklistRule{}, &rule.FunctionResultsLimitRule{}, &rule.MaxPublicStructsRule{}, @@ -79,7 +80,6 @@ var allRules = append([]lint.Rule{ &rule.UnexportedNamingRule{}, &rule.FunctionLength{}, &rule.NestedStructs{}, - &rule.IfReturnRule{}, &rule.UselessBreak{}, &rule.TimeEqualRule{}, &rule.BannedCharsRule{}, @@ -140,7 +140,7 @@ func GetLintingRules(config *lint.Config, extraRules []lint.Rule) ([]lint.Rule, } func parseConfig(path string, config *lint.Config) error { - file, err := ioutil.ReadFile(path) + file, err := os.ReadFile(path) if err != nil { return errors.New("cannot read the config file") } diff --git a/tools/vendor/github.com/mgechev/revive/rule/comment-spacings.go b/tools/vendor/github.com/mgechev/revive/rule/comment-spacings.go index abe2ad76d9..0d75c55f30 100644 --- a/tools/vendor/github.com/mgechev/revive/rule/comment-spacings.go +++ b/tools/vendor/github.com/mgechev/revive/rule/comment-spacings.go @@ -23,6 +23,7 @@ func (r *CommentSpacingsRule) configure(arguments lint.Arguments) { r.allowList = []string{ "//go:", "//revive:", + "//nolint:", } for _, arg := range arguments { @@ -47,7 +48,13 @@ func (r *CommentSpacingsRule) Apply(file *lint.File, args lint.Arguments) []lint continue // nothing to do } - isOK := commentLine[2] == ' ' + isMultiLineComment := commentLine[1] == '*' + isOK := commentLine[2] == '\n' + if isMultiLineComment && isOK { + continue + } + + isOK = (commentLine[2] == ' ') || (commentLine[2] == '\t') if isOK { continue } diff --git a/tools/vendor/github.com/mgechev/revive/rule/empty-block.go b/tools/vendor/github.com/mgechev/revive/rule/empty-block.go index 8a4a0fef19..25a052a0ef 100644 --- a/tools/vendor/github.com/mgechev/revive/rule/empty-block.go +++ b/tools/vendor/github.com/mgechev/revive/rule/empty-block.go @@ -40,6 +40,16 @@ func (w lintEmptyBlock) Visit(node ast.Node) ast.Visitor { case *ast.FuncLit: w.ignore[n.Body] = true return w + case *ast.SelectStmt: + w.ignore[n.Body] = true + return w + case *ast.ForStmt: + if len(n.Body.List) == 0 && n.Init == nil && n.Post == nil && n.Cond != nil { + if _, isCall := n.Cond.(*ast.CallExpr); isCall { + w.ignore[n.Body] = true + return w + } + } case *ast.RangeStmt: if len(n.Body.List) 
== 0 { w.onFailure(lint.Failure{ diff --git a/tools/vendor/github.com/mgechev/revive/rule/nested-structs.go b/tools/vendor/github.com/mgechev/revive/rule/nested-structs.go index fd1226991f..147bd482b1 100644 --- a/tools/vendor/github.com/mgechev/revive/rule/nested-structs.go +++ b/tools/vendor/github.com/mgechev/revive/rule/nested-structs.go @@ -14,7 +14,6 @@ func (*NestedStructs) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { var failures []lint.Failure walker := &lintNestedStructs{ - fileAST: file.AST, onFailure: func(failure lint.Failure) { failures = append(failures, failure) }, @@ -31,47 +30,46 @@ func (*NestedStructs) Name() string { } type lintNestedStructs struct { - fileAST *ast.File onFailure func(lint.Failure) } func (l *lintNestedStructs) Visit(n ast.Node) ast.Visitor { - switch v := n.(type) { - case *ast.TypeSpec: - _, isInterface := v.Type.(*ast.InterfaceType) - if isInterface { - return nil // do not analyze interface declarations - } - case *ast.FuncDecl: - if v.Body != nil { - ast.Walk(l, v.Body) - } - return nil - case *ast.Field: - _, isChannelField := v.Type.(*ast.ChanType) - if isChannelField { - return nil - } + if v, ok := n.(*ast.StructType); ok { + ls := &lintStruct{l.onFailure} + ast.Walk(ls, v.Fields) + } - filter := func(n ast.Node) bool { - switch n.(type) { - case *ast.StructType: - return true - default: - return false - } - } - structs := pick(v, filter, nil) - for _, s := range structs { - l.onFailure(lint.Failure{ - Failure: "no nested structs are allowed", - Category: "style", - Node: s, - Confidence: 1, - }) + return l +} + +type lintStruct struct { + onFailure func(lint.Failure) +} + +func (l *lintStruct) Visit(n ast.Node) ast.Visitor { + switch s := n.(type) { + case *ast.StructType: + l.fail(s) + return nil + case *ast.ArrayType: + if _, ok := s.Elt.(*ast.StructType); ok { + l.fail(s) } - return nil // no need to visit (again) the field + return nil + case *ast.ChanType: + return nil + case *ast.MapType: + return nil + default: + return l } +} - return l +func (l *lintStruct) fail(n ast.Node) { + l.onFailure(lint.Failure{ + Failure: "no nested structs are allowed", + Category: "style", + Node: n, + Confidence: 1, + }) } diff --git a/tools/vendor/github.com/mgechev/revive/rule/package-comments.go b/tools/vendor/github.com/mgechev/revive/rule/package-comments.go index 33963ab976..02f246be08 100644 --- a/tools/vendor/github.com/mgechev/revive/rule/package-comments.go +++ b/tools/vendor/github.com/mgechev/revive/rule/package-comments.go @@ -58,12 +58,14 @@ func (l *lintPackageComments) checkPackageComment() []lint.Failure { var packageFile *ast.File // which name is $package.go var firstFile *ast.File var firstFileName string + var fileSource string for name, file := range l.file.Pkg.Files() { if file.AST.Doc != nil { return nil } if name == "doc.go" { docFile = file.AST + fileSource = "doc.go" } if name == file.AST.Name.String()+".go" { packageFile = file.AST @@ -76,14 +78,21 @@ func (l *lintPackageComments) checkPackageComment() []lint.Failure { // prefer warning on doc.go, $package.go over first file if docFile == nil { docFile = packageFile + fileSource = l.fileAst.Name.String() + ".go" } if docFile == nil { docFile = firstFile + fileSource = firstFileName } + if docFile != nil { + pkgFile := l.file.Pkg.Files()[fileSource] return []lint.Failure{{ - Category: "comments", - Node: docFile, + Category: "comments", + Position: lint.FailurePosition{ + Start: pkgFile.ToPosition(docFile.Pos()), + End: pkgFile.ToPosition(docFile.Name.End()), + }, 
Confidence: 1, Failure: "should have a package comment", }} diff --git a/tools/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go b/tools/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go index 23dd85a7ac..b3ff084563 100644 --- a/tools/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go +++ b/tools/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go @@ -89,6 +89,9 @@ func (w *lintRedefinesBuiltinID) Visit(node ast.Node) ast.Visitor { case *ast.GenDecl: switch n.Tok { case token.TYPE: + if len(n.Specs) < 1 { + return nil + } typeSpec, ok := n.Specs[0].(*ast.TypeSpec) if !ok { return nil diff --git a/tools/vendor/github.com/mgechev/revive/rule/struct-tag.go b/tools/vendor/github.com/mgechev/revive/rule/struct-tag.go index 3accf58fb6..d1c8056aa0 100644 --- a/tools/vendor/github.com/mgechev/revive/rule/struct-tag.go +++ b/tools/vendor/github.com/mgechev/revive/rule/struct-tag.go @@ -5,23 +5,55 @@ import ( "go/ast" "strconv" "strings" + "sync" "github.com/fatih/structtag" "github.com/mgechev/revive/lint" ) // StructTagRule lints struct tags. -type StructTagRule struct{} +type StructTagRule struct { + userDefined map[string][]string // map: key -> []option + sync.Mutex +} + +func (r *StructTagRule) configure(arguments lint.Arguments) { + r.Lock() + defer r.Unlock() + if r.userDefined == nil && len(arguments) > 0 { + checkNumberOfArguments(1, arguments, r.Name()) + r.userDefined = make(map[string][]string, len(arguments)) + for _, arg := range arguments { + item, ok := arg.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument to the %s rule. Expecting a string, got %v (of type %T)", r.Name(), arg, arg)) + } + parts := strings.Split(item, ",") + if len(parts) < 2 { + panic(fmt.Sprintf("Invalid argument to the %s rule. Expecting a string of the form key[,option]+, got %s", r.Name(), item)) + } + key := strings.TrimSpace(parts[0]) + for i := 1; i < len(parts); i++ { + option := strings.TrimSpace(parts[i]) + r.userDefined[key] = append(r.userDefined[key], option) + } + } + } +} // Apply applies the rule to given file. 
-func (*StructTagRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { - var failures []lint.Failure +func (r *StructTagRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { + r.configure(args) + var failures []lint.Failure onFailure := func(failure lint.Failure) { failures = append(failures, failure) } - w := lintStructTagRule{onFailure: onFailure} + w := lintStructTagRule{ + onFailure: onFailure, + userDefined: r.userDefined, + } ast.Walk(w, file.AST) @@ -35,8 +67,9 @@ func (*StructTagRule) Name() string { type lintStructTagRule struct { onFailure func(lint.Failure) - usedTagNbr map[int]bool // list of used tag numbers - usedTagName map[string]bool // list of used tag keys + userDefined map[string][]string // map: key -> []option + usedTagNbr map[int]bool // list of used tag numbers + usedTagName map[string]bool // list of used tag keys } func (w lintStructTagRule) Visit(node ast.Node) ast.Visitor { @@ -57,17 +90,26 @@ func (w lintStructTagRule) Visit(node ast.Node) ast.Visitor { return w } +const keyASN1 = "asn1" +const keyBSON = "bson" +const keyDefault = "default" +const keyJSON = "json" +const keyProtobuf = "protobuf" +const keyRequired = "required" +const keyXML = "xml" +const keyYAML = "yaml" + func (w lintStructTagRule) checkTagNameIfNeed(tag *structtag.Tag) (string, bool) { isUnnamedTag := tag.Name == "" || tag.Name == "-" if isUnnamedTag { return "", true } - needsToCheckTagName := tag.Key == "bson" || - tag.Key == "json" || - tag.Key == "xml" || - tag.Key == "yaml" || - tag.Key == "protobuf" + needsToCheckTagName := tag.Key == keyBSON || + tag.Key == keyJSON || + tag.Key == keyXML || + tag.Key == keyYAML || + tag.Key == keyProtobuf if !needsToCheckTagName { return "", true @@ -92,10 +134,10 @@ func (w lintStructTagRule) checkTagNameIfNeed(tag *structtag.Tag) (string, bool) func (lintStructTagRule) getTagName(tag *structtag.Tag) string { switch tag.Key { - case "protobuf": + case keyProtobuf: for _, option := range tag.Options { if strings.HasPrefix(option, "name=") { - return strings.TrimLeft(option, "name=") + return strings.TrimPrefix(option, "name=") } } return "" //protobuf tag lacks 'name' option @@ -123,40 +165,40 @@ func (w lintStructTagRule) checkTaggedField(f *ast.Field) { } switch key := tag.Key; key { - case "asn1": + case keyASN1: msg, ok := w.checkASN1Tag(f.Type, tag) if !ok { w.addFailure(f.Tag, msg) } - case "bson": + case keyBSON: msg, ok := w.checkBSONTag(tag.Options) if !ok { w.addFailure(f.Tag, msg) } - case "default": + case keyDefault: if !w.typeValueMatch(f.Type, tag.Name) { w.addFailure(f.Tag, "field's type and default value's type mismatch") } - case "json": + case keyJSON: msg, ok := w.checkJSONTag(tag.Name, tag.Options) if !ok { w.addFailure(f.Tag, msg) } - case "protobuf": + case keyProtobuf: msg, ok := w.checkProtobufTag(tag) if !ok { w.addFailure(f.Tag, msg) } - case "required": + case keyRequired: if tag.Name != "true" && tag.Name != "false" { w.addFailure(f.Tag, "required should be 'true' or 'false'") } - case "xml": + case keyXML: msg, ok := w.checkXMLTag(tag.Options) if !ok { w.addFailure(f.Tag, msg) } - case "yaml": + case keyYAML: msg, ok := w.checkYAMLTag(tag.Options) if !ok { w.addFailure(f.Tag, msg) @@ -201,6 +243,10 @@ func (w lintStructTagRule) checkASN1Tag(t ast.Expr, tag *structtag.Tag) (string, continue } + if w.isUserDefined(keyASN1, opt) { + continue + } + return fmt.Sprintf("unknown option '%s' in ASN1 tag", opt), false } } @@ -208,11 +254,14 @@ func (w lintStructTagRule) checkASN1Tag(t ast.Expr, tag 
*structtag.Tag) (string, return "", true } -func (lintStructTagRule) checkBSONTag(options []string) (string, bool) { +func (w lintStructTagRule) checkBSONTag(options []string) (string, bool) { for _, opt := range options { switch opt { case "inline", "minsize", "omitempty": default: + if w.isUserDefined(keyBSON, opt) { + continue + } return fmt.Sprintf("unknown option '%s' in BSON tag", opt), false } } @@ -220,7 +269,7 @@ func (lintStructTagRule) checkBSONTag(options []string) (string, bool) { return "", true } -func (lintStructTagRule) checkJSONTag(name string, options []string) (string, bool) { +func (w lintStructTagRule) checkJSONTag(name string, options []string) (string, bool) { for _, opt := range options { switch opt { case "omitempty", "string": @@ -230,6 +279,9 @@ func (lintStructTagRule) checkJSONTag(name string, options []string) (string, bo return "option can not be empty in JSON tag", false } default: + if w.isUserDefined(keyJSON, opt) { + continue + } return fmt.Sprintf("unknown option '%s' in JSON tag", opt), false } } @@ -237,11 +289,14 @@ func (lintStructTagRule) checkJSONTag(name string, options []string) (string, bo return "", true } -func (lintStructTagRule) checkXMLTag(options []string) (string, bool) { +func (w lintStructTagRule) checkXMLTag(options []string) (string, bool) { for _, opt := range options { switch opt { case "any", "attr", "cdata", "chardata", "comment", "innerxml", "omitempty", "typeattr": default: + if w.isUserDefined(keyXML, opt) { + continue + } return fmt.Sprintf("unknown option '%s' in XML tag", opt), false } } @@ -249,11 +304,14 @@ func (lintStructTagRule) checkXMLTag(options []string) (string, bool) { return "", true } -func (lintStructTagRule) checkYAMLTag(options []string) (string, bool) { +func (w lintStructTagRule) checkYAMLTag(options []string) (string, bool) { for _, opt := range options { switch opt { case "flow", "inline", "omitempty": default: + if w.isUserDefined(keyYAML, opt) { + continue + } return fmt.Sprintf("unknown option '%s' in YAML tag", opt), false } } @@ -330,6 +388,9 @@ func (w lintStructTagRule) checkProtobufTag(tag *structtag.Tag) (string, bool) { case "name", "json": // do nothing default: + if w.isUserDefined(keyProtobuf, k) { + continue + } return fmt.Sprintf("unknown option '%s' in protobuf tag", k), false } } @@ -344,3 +405,17 @@ func (w lintStructTagRule) addFailure(n ast.Node, msg string) { Confidence: 1, }) } + +func (w lintStructTagRule) isUserDefined(key, opt string) bool { + if w.userDefined == nil { + return false + } + + options := w.userDefined[key] + for _, o := range options { + if opt == o { + return true + } + } + return false +} diff --git a/tools/vendor/github.com/mgechev/revive/rule/var-naming.go b/tools/vendor/github.com/mgechev/revive/rule/var-naming.go index 3c0c19cdf3..fa4a188642 100644 --- a/tools/vendor/github.com/mgechev/revive/rule/var-naming.go +++ b/tools/vendor/github.com/mgechev/revive/rule/var-naming.go @@ -4,12 +4,15 @@ import ( "fmt" "go/ast" "go/token" + "regexp" "strings" "sync" "github.com/mgechev/revive/lint" ) +var anyCapsRE = regexp.MustCompile(`[A-Z]`) + // VarNamingRule lints given else constructs. type VarNamingRule struct { configured bool @@ -60,6 +63,14 @@ func (r *VarNamingRule) Apply(file *lint.File, arguments lint.Arguments) []lint. 
Category: "naming", }) } + if anyCapsRE.MatchString(walker.fileAst.Name.Name) { + walker.onFailure(lint.Failure{ + Failure: fmt.Sprintf("don't use MixedCaps in package name; %s should be %s", walker.fileAst.Name.Name, strings.ToLower(walker.fileAst.Name.Name)), + Confidence: 1, + Node: walker.fileAst.Name, + Category: "naming", + }) + } ast.Walk(&walker, fileAst) diff --git a/tools/vendor/github.com/moricho/tparallel/.goreleaser.yml b/tools/vendor/github.com/moricho/tparallel/.goreleaser.yaml similarity index 54% rename from tools/vendor/github.com/moricho/tparallel/.goreleaser.yml rename to tools/vendor/github.com/moricho/tparallel/.goreleaser.yaml index e9f6d727e7..4a04fe25b5 100644 --- a/tools/vendor/github.com/moricho/tparallel/.goreleaser.yml +++ b/tools/vendor/github.com/moricho/tparallel/.goreleaser.yaml @@ -1,6 +1,4 @@ project_name: tparallel -env: - - GO111MODULE=on before: hooks: - go mod tidy @@ -13,17 +11,33 @@ builds: - -X main.Revision={{.ShortCommit}} env: - CGO_ENABLED=0 + goos: + - linux + - windows + - darwin + archives: - - name_template: '{{ .ProjectName }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}' - replacements: - darwin: darwin - linux: linux - windows: windows - 386: i386 - amd64: x86_64 + - format: tar.gz + name_template: >- + {{ .ProjectName }}_ + {{- title .Os }}_ + {{- if eq .Arch "amd64" }}x86_64 + {{- else if eq .Arch "386" }}i386 + {{- else }}{{ .Arch }}{{ end }} + {{- if .Arm }}v{{ .Arm }}{{ end }} format_overrides: - - goos: windows - format: zip + - goos: windows + format: zip +checksum: + name_template: 'checksums.txt' +snapshot: + name_template: "{{ incpatch .Version }}-next" +changelog: + sort: asc + filters: + exclude: + - '^docs:' + - '^test:' release: prerelease: auto brews: diff --git a/tools/vendor/github.com/moricho/tparallel/README.md b/tools/vendor/github.com/moricho/tparallel/README.md index cd358d1554..65ed46c422 100644 --- a/tools/vendor/github.com/moricho/tparallel/README.md +++ b/tools/vendor/github.com/moricho/tparallel/README.md @@ -1,37 +1,49 @@ # tparallel + [![tparallel](https://github.com/moricho/tparallel/workflows/tparallel/badge.svg?branch=master)](https://github.com/moricho/tparallel/actions) [![Go Report Card](https://goreportcard.com/badge/github.com/moricho/tparallel)](https://goreportcard.com/report/github.com/moricho/tparallel) [![MIT License](http://img.shields.io/badge/license-MIT-blue.svg?style=flat)](LICENSE) `tparallel` finds inappropriate usage of `t.Parallel()` method in your Go test codes. -It detects the following: +It detects the following: + - `t.Parallel()` is called in either a top-level test function or a sub-test function only - Although `t.Parallel()` is called in the sub-test function, it is post-processed by `defer` instead of `t.Cleanup()` - -This tool was inspired by this blog: [Go言語でのテストの並列化 〜t.Parallel()メソッドを理解する〜](https://engineering.mercari.com/blog/entry/how_to_use_t_parallel/) + +This tool was inspired by this blog: [Go 言語でのテストの並列化 〜t.Parallel()メソッドを理解する〜](https://engineering.mercari.com/blog/entry/how_to_use_t_parallel/) ## Installation ### From GitHub Releases + Please see [GitHub Releases](https://github.com/moricho/tparallel/releases). 
Available binaries are: + - macOS - Linux - Windows ### macOS -``` sh + +```sh $ brew tap moricho/tparallel $ brew install tparallel ``` ### go get + ```sh $ go get -u github.com/moricho/tparallel/cmd/tparallel ``` ## Usage +### golangci-lint + +[golangci-lint](https://github.com/golangci/golangci-lint) now supports `tparallel`, so you can enable this linter and use in it. + +### shell + ```sh $ go vet -vettool=`which tparallel` ``` diff --git a/tools/vendor/github.com/nishanths/exhaustive/README.md b/tools/vendor/github.com/nishanths/exhaustive/README.md index 0f16d81edb..dbb41ab9d0 100644 --- a/tools/vendor/github.com/nishanths/exhaustive/README.md +++ b/tools/vendor/github.com/nishanths/exhaustive/README.md @@ -1,18 +1,17 @@ -# exhaustive [![Godoc][godoc-svg]][godoc] +# exhaustive -Package exhaustive defines an analyzer that checks exhaustiveness of switch -statements of enum-like constants in Go source code. +[![Godoc][godoc-svg]][godoc] -For supported flags, the definition of enum, and the definition of -exhaustiveness used by this package, see [pkg.go.dev][godoc-doc]. For a -changelog, see [CHANGELOG][changelog] in the GitHub wiki. +`exhaustive` checks exhaustiveness of enum switch statements in Go source code. -The analyzer can be configured to additionally check exhaustiveness of map -literals whose key type is enum-like. +For the definition of enum and the definition of exhaustiveness used by this +program, see [godoc][godoc-doc]. For the changelog, see [CHANGELOG][changelog] +in the GitHub wiki. The program can be configured to additionally check +exhaustiveness of keys in map literals whose key type is an enum. ## Usage -Command line program: +Command: ``` go install github.com/nishanths/exhaustive/cmd/exhaustive@latest @@ -20,21 +19,26 @@ go install github.com/nishanths/exhaustive/cmd/exhaustive@latest exhaustive [flags] [packages] ``` +For available flags, refer to the [Flags][godoc-flags] section in godoc or run +`exhaustive -h`. + Package: ``` go get github.com/nishanths/exhaustive + +import "github.com/nishanths/exhaustive" ``` -The `exhaustive.Analyzer` variable follows the guidelines of the +The `exhaustive.Analyzer` variable follows guidelines in the [`golang.org/x/tools/go/analysis`][xanalysis] package. This should make it -possible to integrate `exhaustive` in your own analysis driver program. +possible to integrate `exhaustive` with your own analysis driver program. ## Example Given an enum: -```go +``` package token // import "example.org/token" type Token int @@ -48,49 +52,49 @@ const ( ) ``` -And code that switches on the enum: +and code that switches on the enum: -```go -package calc // import "example.org/calc" +``` +package calc import "example.org/token" -func f(t token.Token) { +func x(t token.Token) { switch t { case token.Add: case token.Subtract: - case token.Multiply: + case token.Remainder: default: } } - -var m = map[token.Token]string{ - token.Add: "add", - token.Subtract: "subtract", - token.Multiply: "multiply", -} ``` -Running `exhaustive` with default options will report: +running `exhaustive` with default flags will produce: ``` -% exhaustive example.org/calc -calc.go:6:2: missing cases in switch of type token.Token: token.Quotient, token.Remainder +calc.go:6:2: missing cases in switch of type token.Token: token.Multiply, token.Quotient ``` -Specify the flag `-check=switch,map` to additionally check exhaustiveness of -map literal keys: +Specify flag `-check=switch,map` to additionally check exhaustiveness of keys +in map literals. 
For example: + +``` +var m = map[token.Token]rune{ + token.Add: '+', + token.Subtract: '-', + token.Multiply: '*', + token.Quotient: '/', +} +``` ``` -% exhaustive -check=switch,map example.org/calc -calc.go:6:2: missing cases in switch of type token.Token: token.Quotient, token.Remainder -calc.go:14:9: missing keys in map of key type token.Token: token.Quotient, token.Remainder +calc.go:14:9: missing keys in map of key type token.Token: token.Remainder ``` ## Contributing -Issues and changes are welcome. Please discuss substantial changes -in an issue first. +Issues and changes are welcome. Please discuss substantial changes in an issue +first. [godoc]: https://pkg.go.dev/github.com/nishanths/exhaustive [godoc-svg]: https://pkg.go.dev/badge/github.com/nishanths/exhaustive.svg @@ -98,4 +102,3 @@ in an issue first. [godoc-flags]: https://pkg.go.dev/github.com/nishanths/exhaustive#hdr-Flags [xanalysis]: https://pkg.go.dev/golang.org/x/tools/go/analysis [changelog]: https://github.com/nishanths/exhaustive/wiki/CHANGELOG -[issue-typeparam]: https://github.com/nishanths/exhaustive/issues/31 diff --git a/tools/vendor/github.com/nishanths/exhaustive/comment.go b/tools/vendor/github.com/nishanths/exhaustive/comment.go index 69b6e5430f..cc84beaf7b 100644 --- a/tools/vendor/github.com/nishanths/exhaustive/comment.go +++ b/tools/vendor/github.com/nishanths/exhaustive/comment.go @@ -44,7 +44,7 @@ const ( enforceComment = "//exhaustive:enforce" ) -func hasComment(comments []*ast.CommentGroup, comment string) bool { +func hasCommentPrefix(comments []*ast.CommentGroup, comment string) bool { for _, c := range comments { for _, cc := range c.List { if strings.HasPrefix(cc.Text, comment) { diff --git a/tools/vendor/github.com/nishanths/exhaustive/common.go b/tools/vendor/github.com/nishanths/exhaustive/common.go index 20fcf04015..f22b0e1479 100644 --- a/tools/vendor/github.com/nishanths/exhaustive/common.go +++ b/tools/vendor/github.com/nishanths/exhaustive/common.go @@ -9,9 +9,127 @@ import ( "sort" "strings" + "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/ast/astutil" ) +// enumTypeAndMembers combines an enumType and its members set. +type enumTypeAndMembers struct { + typ enumType + members enumMembers +} + +func fromNamed(pass *analysis.Pass, t *types.Named, typeparam bool) (result []enumTypeAndMembers, ok bool) { + if tpkg := t.Obj().Pkg(); tpkg == nil { + // go/types documentation says: nil for labels and + // objects in the Universe scope. This happens for the built-in + // error type for example. + return nil, false // not a valid enum type, so ok == false + } + + et := enumType{t.Obj()} + if em, ok := importFact(pass, et); ok { + return []enumTypeAndMembers{{et, em}}, true + } + + if typeparam { + // is it a named interface? + if intf, ok := t.Underlying().(*types.Interface); ok { + return fromInterface(pass, intf, typeparam) + } + } + + return nil, false // not a valid enum type, so ok == false +} + +func fromInterface(pass *analysis.Pass, intf *types.Interface, typeparam bool) (result []enumTypeAndMembers, ok bool) { + allOk := true + for i := 0; i < intf.NumEmbeddeds(); i++ { + r, ok := fromType(pass, intf.EmbeddedType(i), typeparam) + result = append(result, r...) + allOk = allOk && ok + } + return result, allOk +} + +func fromUnion(pass *analysis.Pass, union *types.Union, typeparam bool) (result []enumTypeAndMembers, ok bool) { + allOk := true + // gather from each term in the union. 
+ for i := 0; i < union.Len(); i++ { + r, ok := fromType(pass, union.Term(i).Type(), typeparam) + result = append(result, r...) + allOk = allOk && ok + } + return result, allOk +} + +func fromTypeParam(pass *analysis.Pass, tp *types.TypeParam, typeparam bool) (result []enumTypeAndMembers, ok bool) { + // Does not appear to be explicitly documented, but based on Go language + // spec (see section Type constraints) and Go standard library source code, + // we can expect constraints to have underlying type *types.Interface + // Regardless it will be handled in fromType. + return fromType(pass, tp.Constraint().Underlying(), typeparam) +} + +func fromType(pass *analysis.Pass, t types.Type, typeparam bool) (result []enumTypeAndMembers, ok bool) { + switch t := t.(type) { + case *types.Named: + return fromNamed(pass, t, typeparam) + + case *types.Union: + return fromUnion(pass, t, typeparam) + + case *types.TypeParam: + return fromTypeParam(pass, t, typeparam) + + case *types.Interface: + if !typeparam { + return nil, true + } + // anonymous interface. + // e.g. func foo[T interface { M } | interface { N }](v T) {} + return fromInterface(pass, t, typeparam) + + default: + // ignore these. + return nil, true + } +} + +func composingEnumTypes(pass *analysis.Pass, t types.Type) (result []enumTypeAndMembers, ok bool) { + _, typeparam := t.(*types.TypeParam) + result, ok = fromType(pass, t, typeparam) + + if typeparam { + var kind types.BasicKind + var kindSet bool + + // sameBasicKind reports whether each type t that the function is called + // with has the same underlying basic kind. + sameBasicKind := func(t types.Type) (ok bool) { + basic, ok := t.Underlying().(*types.Basic) + if !ok { + return false + } + if kindSet && kind != basic.Kind() { + return false + } + kind = basic.Kind() + kindSet = true + return true + } + + for _, rr := range result { + if !sameBasicKind(rr.typ.TypeName.Type()) { + ok = false + break + } + } + } + + return result, ok +} + func denotesPackage(ident *ast.Ident, info *types.Info) bool { obj := info.ObjectOf(ident) if obj == nil { @@ -37,19 +155,18 @@ func exprConstVal(e ast.Expr, info *types.Info) (constantValue, bool) { // There are two scenarios. // See related test cases in typealias/quux/quux.go. // - // Scenario 1 + // # Scenario 1 // // Tag package and constant package are the same. This is // simple; we just use fs.ModeDir's value. - // // Example: // - // var mode fs.FileMode - // switch mode { - // case fs.ModeDir: - // } + // var mode fs.FileMode + // switch mode { + // case fs.ModeDir: + // } // - // Scenario 2 + // # Scenario 2 // // Tag package and constant package are different. In this // scenario, too, we accept the case clause expr constant value, @@ -58,19 +175,19 @@ func exprConstVal(e ast.Expr, info *types.Info) (constantValue, bool) { // // Example: // - // var mode fs.FileMode - // switch mode { - // case os.ModeDir: - // } + // var mode fs.FileMode + // switch mode { + // case os.ModeDir: + // } // // Or equivalently: // - // // The type of mode is effectively fs.FileMode, - // // due to type alias. - // var mode os.FileMode - // switch mode { - // case os.ModeDir: - // } + // // The type of mode is effectively fs.FileMode, + // // due to type alias. + // var mode os.FileMode + // switch mode { + // case os.ModeDir: + // } return determineConstVal(ident, info), true } @@ -136,12 +253,6 @@ type member struct { val constantValue } -// typeAndMembers combines an enumType and its members set. 
-type typeAndMembers struct { - et enumType - em enumMembers -} - type checklist struct { info map[enumType]enumMembers checkl map[member]struct{} @@ -227,7 +338,7 @@ func (c *checklist) remaining() map[member]struct{} { // different enum types. type group []member -func groupMissing(missing map[member]struct{}, types []enumType) []group { +func groupify(items map[member]struct{}, types []enumType) []group { // indices maps each element in the input slice to its index. indices := func(vs []enumType) map[enumType]int { ret := make(map[enumType]int, len(vs)) @@ -249,17 +360,17 @@ func groupMissing(missing map[member]struct{}, types []enumType) []group { } // byConstVal groups member names by constant value. - byConstVal := func(members map[member]struct{}) map[constantValue][]member { + byConstVal := func(items map[member]struct{}) map[constantValue][]member { ret := make(map[constantValue][]member) - for m := range members { + for m := range items { ret[m.val] = append(ret[m.val], m) } return ret } var groups []group - for _, members := range byConstVal(missing) { - groups = append(groups, group(members)) + for _, ms := range byConstVal(items) { + groups = append(groups, group(ms)) } // sort members within each group in AST order. @@ -310,17 +421,15 @@ func diagnosticGroups(gs []group) string { return strings.Join(out, ", ") } -func toEnumTypes(es []typeAndMembers) []enumType { +func toEnumTypes(es []enumTypeAndMembers) []enumType { out := make([]enumType, len(es)) for i := range es { - out[i] = es[i].et + out[i] = es[i].typ } return out } func dedupEnumTypes(types []enumType) []enumType { - // TODO(nishanths) this function is a candidate to use generics. - m := make(map[enumType]struct{}) var ret []enumType for _, t := range types { @@ -334,35 +443,32 @@ func dedupEnumTypes(types []enumType) []enumType { return ret } -// TODO(nishanths) If dropping pre-go1.18 support, the following -// types and functions are candidates to use generics. 
- type boolCache struct { - m map[*ast.File]bool - value func(*ast.File) bool + m map[*ast.File]bool + compute func(*ast.File) bool } -func (c boolCache) get(file *ast.File) bool { - if c.m == nil { - c.m = make(map[*ast.File]bool) - } +func (c *boolCache) get(file *ast.File) bool { if _, ok := c.m[file]; !ok { - c.m[file] = c.value(file) + if c.m == nil { + c.m = make(map[*ast.File]bool) + } + c.m[file] = c.compute(file) } return c.m[file] } type commentCache struct { - m map[*ast.File]ast.CommentMap - value func(*token.FileSet, *ast.File) ast.CommentMap + m map[*ast.File]ast.CommentMap + compute func(*token.FileSet, *ast.File) ast.CommentMap } -func (c commentCache) get(fset *token.FileSet, file *ast.File) ast.CommentMap { - if c.m == nil { - c.m = make(map[*ast.File]ast.CommentMap) - } +func (c *commentCache) get(fset *token.FileSet, file *ast.File) ast.CommentMap { if _, ok := c.m[file]; !ok { - c.m[file] = c.value(fset, file) + if c.m == nil { + c.m = make(map[*ast.File]ast.CommentMap) + } + c.m[file] = c.compute(fset, file) } return c.m[file] } diff --git a/tools/vendor/github.com/nishanths/exhaustive/common_go118.go b/tools/vendor/github.com/nishanths/exhaustive/common_go118.go deleted file mode 100644 index aebdd8064e..0000000000 --- a/tools/vendor/github.com/nishanths/exhaustive/common_go118.go +++ /dev/null @@ -1,122 +0,0 @@ -//go:build go1.18 -// +build go1.18 - -package exhaustive - -import ( - "go/types" - - "golang.org/x/tools/go/analysis" -) - -func fromNamed(pass *analysis.Pass, t *types.Named, typeparam bool) (result []typeAndMembers, ok bool) { - if tpkg := t.Obj().Pkg(); tpkg == nil { - // go/types documentation says: nil for labels and - // objects in the Universe scope. This happens for the built-in - // error type for example. - return nil, false // not a valid enum type, so ok == false - } - - et := enumType{t.Obj()} - if em, ok := importFact(pass, et); ok { - return []typeAndMembers{{et, em}}, true - } - - if typeparam { - if intf, ok := t.Underlying().(*types.Interface); ok { - return fromInterface(pass, intf, typeparam) - } - } - - return nil, false // not a valid enum type, so ok == false -} - -func fromInterface(pass *analysis.Pass, intf *types.Interface, typeparam bool) (result []typeAndMembers, all bool) { - all = true - - for i := 0; i < intf.NumEmbeddeds(); i++ { - embed := intf.EmbeddedType(i) - - switch embed.(type) { - case *types.Union: - u := embed.(*types.Union) - // gather from each term in the union. - for i := 0; i < u.Len(); i++ { - r, a := fromType(pass, u.Term(i).Type(), typeparam) - result = append(result, r...) - all = all && a - } - - case *types.Named: - r, a := fromNamed(pass, embed.(*types.Named), typeparam) - result = append(result, r...) - all = all && a - - default: - // don't care about these. - // e.g. basic type - } - } - - return -} - -func fromType(pass *analysis.Pass, t types.Type, typeparam bool) (result []typeAndMembers, ok bool) { - switch t := t.(type) { - case *types.Named: - return fromNamed(pass, t, typeparam) - - case *types.TypeParam: - // does not appear to be explicitly documented, but based on - // spec (see section Type constraints) and source code, we can - // expect constraints to have underlying type *types.Interface. - intf := t.Constraint().Underlying().(*types.Interface) - return fromInterface(pass, intf, typeparam) - - case *types.Interface: - // anonymous interface. - // e.g. 
func foo[T interface { M } | interface { N }](v T) {} - if !typeparam { - return nil, true - } - return fromInterface(pass, t, typeparam) - - default: - // ignore these. - return nil, true - } -} - -func composingEnumTypes(pass *analysis.Pass, t types.Type) (result []typeAndMembers, ok bool) { - _, typeparam := t.(*types.TypeParam) - result, ok = fromType(pass, t, typeparam) - - if typeparam { - var kind types.BasicKind - var kindSet bool - - // sameKind reports whether each type t that the function is called - // with has the same underlying basic kind. - sameBasicKind := func(t types.Type) (ok bool) { - basic, ok := t.Underlying().(*types.Basic) - if !ok { - return false - } - if kindSet && kind != basic.Kind() { - return false - } - kind = basic.Kind() - kindSet = true - return true - } - - for _, rr := range result { - if !sameBasicKind(rr.et.TypeName.Type()) { - ok = false - break - } - } - } - - return result, ok -} diff --git a/tools/vendor/github.com/nishanths/exhaustive/common_pre_go118.go b/tools/vendor/github.com/nishanths/exhaustive/common_pre_go118.go deleted file mode 100644 index f916c17fd4..0000000000 --- a/tools/vendor/github.com/nishanths/exhaustive/common_pre_go118.go +++ /dev/null @@ -1,37 +0,0 @@ -//go:build !go1.18 -// +build !go1.18 - -package exhaustive - -import ( - "go/types" - - "golang.org/x/tools/go/analysis" -) - -func fromNamed(pass *analysis.Pass, t *types.Named) (result typeAndMembers, ok bool) { - if tpkg := t.Obj().Pkg(); tpkg == nil { - return typeAndMembers{}, false - } - - et := enumType{t.Obj()} - em, ok := importFact(pass, et) - if !ok { - return typeAndMembers{}, false - } - - return typeAndMembers{et, em}, true -} - -func composingEnumTypes(pass *analysis.Pass, t types.Type) (result []typeAndMembers, ok bool) { - switch t := t.(type) { - case *types.Named: - e, ok := fromNamed(pass, t) - if !ok { - return nil, false - } - return []typeAndMembers{e}, true - default: - return nil, false - } -} diff --git a/tools/vendor/github.com/nishanths/exhaustive/doc.go b/tools/vendor/github.com/nishanths/exhaustive/doc.go new file mode 100644 index 0000000000..8435e5d245 --- /dev/null +++ b/tools/vendor/github.com/nishanths/exhaustive/doc.go @@ -0,0 +1,215 @@ +/* +Package exhaustive defines an analyzer that checks exhaustiveness of switch +statements of enum-like constants in Go source code. The analyzer can +optionally also check exhaustiveness of keys in map literals whose key type +is enum-like. + +# Definition of enum + +The Go [language spec] does not have an explicit definition for enums. For +the purpose of this analyzer, and by convention, an enum type is any named +type that: + + - has underlying type float, string, or integer (includes byte and rune); + and + - has at least one constant of its type defined in the same [block]. + +In the example below, Biome is an enum type. The three constants are its +enum members. + + package eco + + type Biome int + + const ( + Tundra Biome = 1 + Savanna Biome = 2 + Desert Biome = 3 + ) + +Enum member constants for an enum type must be declared in the same block as +the type. The constant values may be specified using iota, literal values, or +any valid means for declaring a Go constant. It is allowed for multiple enum +member constants for an enum type to have the same constant value. + +# Definition of exhaustiveness + +A switch statement that switches on a value of an enum type is exhaustive if +all enum members are listed in the switch statement's cases. 
If multiple enum +members have the same constant value, it is sufficient for any one of these +same-valued members to be listed. + +For an enum type defined in the same package as the switch statement, both +exported and unexported enum members must be listed to satisfy exhaustiveness. +For an enum type defined in an external package, it is sufficient that only +exported enum members are listed. Only constant identifiers (e.g. Tundra, +eco.Desert) listed in a switch statement's case clause can contribute towards +satisfying exhaustiveness; other expressions, such as literal values and +function calls, listed in case clauses do not contribute towards satisfying +exhaustiveness. + +By default, the existence of a default case in a switch statement does not +unconditionally make a switch statement exhaustive. Use the +-default-signifies-exhaustive flag to adjust this behavior. + +For a map literal whose key type is an enum type, a similar definition of +exhaustiveness applies. The map literal is considered exhaustive if all enum +members are be listed in its keys. Empty map literals are never checked for +exhaustiveness. + +# Type parameters + +A switch statement that switches on a value whose type is a type parameter is +checked for exhaustiveness if and only if each type element in the type +constraint is an enum type and the type elements share the same underlying +[BasicKind]. + +For example, the switch statement below will be checked because each type +element (i.e. M and N) in the type constraint is an enum type and the type +elements share the same underlying BasicKind, namely int8. To satisfy +exhaustiveness, the enum members collectively belonging to the enum types M +and N (i.e. A, B, and C) must be listed in the switch statement's cases. + + func bar[T M | I](v T) { + switch v { + case T(A): + case T(B): + case T(C): + } + } + + type I interface{ N } + + type M int8 + const A M = 1 + + type N int8 + const B N = 2 + const C N = 3 + +# Type aliases + +The analyzer handles type aliases as shown in the example below. newpkg.M is +an enum type. oldpkg.M is an alias for newpkg.M. Note that oldpkg.M isn't +itself an enum type; oldpkg.M is simply an alias for the actual enum type +newpkg.M. + + package oldpkg + type M = newpkg.M + const ( + A = newpkg.A + B = newpkg.B + ) + + package newpkg + type M int + const ( + A M = 1 + B M = 2 + ) + +A switch statement that switches either on a value of type newpkg.M or of type +oldpkg.M (which, being an alias, is just an alternative spelling for newpkg.M) +is exhaustive if all of newpkg.M's enum members are listed in the switch +statement's cases. The following switch statement is exhaustive. + + func f(v newpkg.M) { + switch v { + case newpkg.A: // or equivalently oldpkg.A + case newpkg.B: // or equivalently oldpkg.B + } + } + +The analyzer guarantees that introducing a type alias (such as type M = +newpkg.M) will not result in new diagnostics if the set of enum member +constant values of the RHS type is a subset of the set of enum member constant +values of the LHS type. 
+ +# Flags + +Summary: + + flag type default value + ---- ---- ------------- + -check comma-separated strings switch + -explicit-exhaustive-switch bool false + -explicit-exhaustive-map bool false + -check-generated bool false + -default-signifies-exhaustive bool false + -ignore-enum-members regexp pattern (none) + -ignore-enum-types regexp pattern (none) + -package-scope-only bool false + +Descriptions: + + -check + Comma-separated list of program elements to check for + exhaustiveness. Supported program element values are + "switch" and "map". The default value is "switch", which + means that only switch statements are checked. + + -explicit-exhaustive-switch + Check a switch statement only if it is associated with a + "//exhaustive:enforce" comment. By default the analyzer + checks every switch statement that isn't associated with a + "//exhaustive:ignore" comment. + + -explicit-exhaustive-map + Similar to -explicit-exhaustive-switch but for map literals. + + -check-generated + Check generated files. For the definition of a generated + file, see https://golang.org/s/generatedcode. + + -default-signifies-exhaustive + Consider a switch statement to be exhaustive + unconditionally if it has a default case. (In other words, + all enum members do not have to be listed in its cases if a + default case is present.) Setting this flag usually is + counter to the purpose of exhaustiveness checks, so it is + not recommended to set this flag. + + -ignore-enum-members + Constants that match the specified regular expression (in + package regexp syntax) are not considered enum members and + hence do not have to be listed to satisfy exhaustiveness. + The specified regular expression is matched against the + constant name inclusive of import path. For example, if the + import path for the constant is "example.org/eco" and the + constant name is "Tundra", then the specified regular + expression is matched against the string + "example.org/eco.Tundra". + + -ignore-enum-types + Similar to -ignore-enum-members but for types. + + -package-scope-only + Only discover enums declared in file-level blocks. By + default, the analyzer discovers enums defined in all + blocks. + +# Skip analysis + +To skip analysis of a switch statement or a map literal, associate it with a +comment that begins with "//exhaustive:ignore". For example: + + //exhaustive:ignore ... an optional explanation goes here ... 
+ switch v { + case A: + case B: + } + +To ignore specific constants in exhaustiveness checks, specify the +-ignore-enum-members flag: + + exhaustive -ignore-enum-members '^example\.org/eco\.Tundra$' + +To ignore specific types, specify the -ignore-enum-types flag: + + exhaustive -ignore-enum-types '^time\.Duration$|^example\.org/measure\.Unit$' + +[language spec]: https://golang.org/ref/spec +[block]: https://golang.org/ref/spec#Blocks +[BasicKind]: https://pkg.go.dev/go/types#BasicKind +*/ +package exhaustive diff --git a/tools/vendor/github.com/nishanths/exhaustive/enum.go b/tools/vendor/github.com/nishanths/exhaustive/enum.go index fa46eb9213..cabf1d880d 100644 --- a/tools/vendor/github.com/nishanths/exhaustive/enum.go +++ b/tools/vendor/github.com/nishanths/exhaustive/enum.go @@ -21,11 +21,11 @@ func (et enumType) String() string { return et.TypeName.String() } // func (et enumType) scope() *types.Scope { return et.TypeName.Parent() } // scope that the type is declared in func (et enumType) factObject() types.Object { return et.TypeName } // types.Object for fact export -// enumMembers is set of enum members for a single enum type. +// enumMembers is the set of enum members for a single enum type. // The zero value is ready to use. type enumMembers struct { Names []string // enum member names - NameToPos map[string]token.Pos // member name -> AST position + NameToPos map[string]token.Pos // enum member name -> AST position NameToValue map[string]constantValue // enum member name -> constant value ValueToNames map[constantValue][]string // constant value -> enum member names } @@ -47,11 +47,11 @@ func (em *enumMembers) add(name string, val constantValue, pos token.Pos) { em.ValueToNames[val] = append(em.ValueToNames[val], name) } -func (em enumMembers) String() string { +func (em *enumMembers) String() string { return em.factString() } -func (em enumMembers) factString() string { +func (em *enumMembers) factString() string { var buf strings.Builder for j, vv := range em.Names { buf.WriteString(vv) diff --git a/tools/vendor/github.com/nishanths/exhaustive/exhaustive.go b/tools/vendor/github.com/nishanths/exhaustive/exhaustive.go index ddb9ee0af1..d67a60c329 100644 --- a/tools/vendor/github.com/nishanths/exhaustive/exhaustive.go +++ b/tools/vendor/github.com/nishanths/exhaustive/exhaustive.go @@ -1,229 +1,3 @@ -/* -Package exhaustive defines an analyzer that checks exhaustiveness of switch -statements of enum-like constants in Go source code. The analyzer can be -configured to additionally check exhaustiveness of map literals whose key type -is enum-like. - -# Definition of enum - -The Go [language spec] does not provide an explicit definition for enums. For -the purpose of this analyzer, and by convention, an enum type is any named -type that has: - - - underlying type float, string, or integer (includes byte and - rune, which are aliases for uint8 and int32, respectively); and - - at least one constant of the type defined in the same scope. - -In the example below, Biome is an enum type. The three constants are its -enum members. - - package eco - - type Biome int - - const ( - Tundra Biome = 1 - Savanna Biome = 2 - Desert Biome = 3 - ) - -Enum member constants for a particular enum type do not necessarily all -have to be declared in the same const block. The constant values may be -specified using iota, using literal values, or using any valid means for -declaring a Go constant. 
It is allowed for multiple enum member -constants for a particular enum type to have the same constant value. - -# Definition of exhaustiveness - -A switch statement that switches on a value of an enum type is exhaustive if -all enum members, by constant value, are listed in the switch -statement's cases. If multiple members have the same constant value, it is -sufficient for any one of these same-valued members to be listed. - -For an enum type defined in the same package as the switch statement, both -exported and unexported enum members must be listed to satisfy exhaustiveness. -For an enum type defined in an external package, it is sufficient that only -exported enum members are listed. In a switch statement's cases, only -identifiers (e.g. Tundra) and qualified identifiers (e.g. somepkg.Grassland) -that name constants may contribute towards satisfying exhaustiveness; other -expressions such as literal values and function calls will not. - -By default, the existence of a default case in a switch statement does not -unconditionally make a switch statement exhaustive. Use the --default-signifies-exhaustive flag to adjust this behavior. - -A similar definition of exhaustiveness applies to a map literal whose key type -is an enum type. For the map literal to be considered exhaustive, all enum -members, by constant value, must be listed as keys. Empty map literals are not -checked. For the analyzer to check map literals, the -check flag must include -the value "map". - -# Type parameters - -A switch statement that switches on a value whose type is a type parameter is -checked for exhaustiveness if each type element in the type constraint is an -enum type and shares the same underlying basic type kind. - -In the following example, the switch statement on the value of type parameter -T will be checked, because each type element of T—namely M, N, and O—is an -enum type and shares the same underlying basic type kind (i.e. int8). To -satisfy exhaustiveness, all enum members, by constant value, for each of the -enum types M, N, and O—namely A, B, C, and D—must be listed in the switch -statement's cases. - - func bar[T M | I](v T) { - switch v { - case T(A): - case T(B): - case T(C): - case T(D): - } - } - - type I interface{ N | J } - type J interface{ O } - - type M int8 - const A M = 1 - - type N int8 - const B N = 2 - const C N = 3 - - type O int8 - const D O = 4 - -# Type aliases - -The analyzer handles type aliases as shown in the example below. Here T2 is a -enum type. T1 is an alias for T2. Note that T1 itself isn't considered an enum -type; T1 is only an alias for an enum type. - - package pkg - type T1 = newpkg.T2 - const ( - A = newpkg.A - B = newpkg.B - ) - - package newpkg - type T2 int - const ( - A T2 = 1 - B T2 = 2 - ) - -A switch statement that switches on a value of type T1 (which, in reality, is -just an alternate spelling for type T2) is exhaustive if all of T2's enum -members, by constant value, are listed in the switch statement's cases. -(Recall that only constants declared in the same scope as type T2's scope can -be T2's enum members.) - -The following switch statements are exhaustive. - - // Note: the type of v is effectively newpkg.T2, due to type aliasing. 
- func f(v pkg.T1) { - switch v { - case newpkg.A: - case newpkg.B: - } - } - - func g(v pkg.T1) { - switch v { - case pkg.A: - case pkg.B: - } - } - -The analyzer guarantees that introducing a type alias (such as type T1 = -newpkg.T2) will not result in new diagnostics from the analyzer, as long as -the set of enum member constant values of the alias RHS type is a subset of -the set of enum member constant values of the LHS type. - -# Flags - -Summary: - - flag type default value - ---- ---- ------------- - -check comma-separated string switch - -explicit-exhaustive-switch bool false - -explicit-exhaustive-map bool false - -check-generated bool false - -default-signifies-exhaustive bool false - -ignore-enum-members regexp pattern (none) - -ignore-enum-types regexp pattern (none) - -package-scope-only bool false - -Flag descriptions: - - - The -check flag specifies a comma-separated list of program elements - that should be checked for exhaustiveness; supported program elements - are "switch" and "map". The default flag value is "switch", which means - that only switch statements are checked. Specify the flag value - "switch,map" to check both switch statements and map literals. - - - If -explicit-exhaustive-switch is enabled, the analyzer checks a switch - statement only if it is associated with a comment beginning with - "//exhaustive:enforce". Otherwise, the analyzer checks every enum switch - statement not associated with a comment beginning with - "//exhaustive:ignore". - - - The -explicit-exhaustive-map flag is the map literal counterpart for the - -explicit-exhaustive-switch flag. - - - If -check-generated is enabled, switch statements and map literals in - generated Go source files are checked. By default, the analyzer does not - check generated files. Refer to https://golang.org/s/generatedcode for - the definition of generated files. - - - If -default-signifies-exhaustive is enabled, the presence of a default - case in a switch statement unconditionally satisfies exhaustiveness (all - enum members do not have to be listed). Enabling this flag usually tends - to counter the purpose of exhaustiveness checking, so it is not - recommended that you enable this flag. - - - The -ignore-enum-members flag specifies a regular expression in Go - package regexp syntax. Constants matching the regular expression do not - have to be listed in switch statement cases or map literals in order to - satisfy exhaustiveness. The specified regular expression is matched - against the constant name inclusive of the enum package import path. For - example, if the package import path of the constant is "example.org/eco" - and the constant name is "Tundra", the specified regular expression will - be matched against the string "example.org/eco.Tundra". - - - The -ignore-enum-types flag is similar to the -ignore-enum-members flag, - except that it applies to types. - - - If -package-scope-only is enabled, the analyzer only finds enums defined - in package scope but not in inner scopes such as functions; consequently - only switch statements and map literals that use such enums are checked - for exhaustiveness. By default, the analyzer finds enums defined in all - scopes, including in inner scopes such as functions. - -# Skip analysis - -To skip analysis of a switch statement or a map literal, associate it with a -comment that begins with "//exhaustive:ignore". 
For example: - - //exhaustive:ignore - switch v { - case A: - case B: - } - -To ignore specific constants in exhaustiveness checks, use the --ignore-enum-members flag: - - exhaustive -ignore-enum-members '^example\.org/eco\.Tundra$' - -To ignore specific types, use the -ignore-enum-types flag: - - exhaustive -ignore-enum-types '^time\.Duration$|^example\.org/measure\.Unit$' - -[language spec]: https://golang.org/ref/spec -*/ package exhaustive import ( @@ -236,21 +10,25 @@ import ( ) func init() { - Analyzer.Flags.Var(&fCheck, CheckFlag, "comma-separated list of program `elements` that should be checked for exhaustiveness; supported elements are: switch, map") + registerFlags() +} + +func registerFlags() { + Analyzer.Flags.Var(&fCheck, CheckFlag, "comma-separated list of program `elements` to check for exhaustiveness; supported element values: switch, map") Analyzer.Flags.BoolVar(&fExplicitExhaustiveSwitch, ExplicitExhaustiveSwitchFlag, false, `check switch statement only if associated with "//exhaustive:enforce" comment`) Analyzer.Flags.BoolVar(&fExplicitExhaustiveMap, ExplicitExhaustiveMapFlag, false, `check map literal only if associated with "//exhaustive:enforce" comment`) Analyzer.Flags.BoolVar(&fCheckGenerated, CheckGeneratedFlag, false, "check generated files") - Analyzer.Flags.BoolVar(&fDefaultSignifiesExhaustive, DefaultSignifiesExhaustiveFlag, false, "presence of default case in switch statement unconditionally satisfies exhaustiveness") - Analyzer.Flags.Var(&fIgnoreEnumMembers, IgnoreEnumMembersFlag, "constants matching `regexp` are ignored for exhaustiveness checks") - Analyzer.Flags.Var(&fIgnoreEnumTypes, IgnoreEnumTypesFlag, "types matching `regexp` are ignored for exhaustiveness checks") - Analyzer.Flags.BoolVar(&fPackageScopeOnly, PackageScopeOnlyFlag, false, "find enums only in package scopes, not inner scopes") + Analyzer.Flags.BoolVar(&fDefaultSignifiesExhaustive, DefaultSignifiesExhaustiveFlag, false, "switch statement is unconditionally exhaustive if it has a default case") + Analyzer.Flags.Var(&fIgnoreEnumMembers, IgnoreEnumMembersFlag, "ignore constants matching `regexp`") + Analyzer.Flags.Var(&fIgnoreEnumTypes, IgnoreEnumTypesFlag, "ignore types matching `regexp`") + Analyzer.Flags.BoolVar(&fPackageScopeOnly, PackageScopeOnlyFlag, false, "only discover enums declared in file-level blocks") var unused string Analyzer.Flags.StringVar(&unused, IgnorePatternFlag, "", "no effect (deprecated); use -"+IgnoreEnumMembersFlag) Analyzer.Flags.StringVar(&unused, CheckingStrategyFlag, "", "no effect (deprecated)") } -// Flag names used by the analyzer. They are exported for use by analyzer +// Flag names used by the analyzer. These are exported for use by analyzer // driver programs. const ( CheckFlag = "check" @@ -262,33 +40,11 @@ const ( IgnoreEnumTypesFlag = "ignore-enum-types" PackageScopeOnlyFlag = "package-scope-only" + // Deprecated flag names. IgnorePatternFlag = "ignore-pattern" // Deprecated: use IgnoreEnumMembersFlag. - CheckingStrategyFlag = "checking-strategy" // Deprecated. -) - -// checkElement is a program element supported by the -check flag. -type checkElement string - -const ( - elementSwitch checkElement = "switch" - elementMap checkElement = "map" + CheckingStrategyFlag = "checking-strategy" // Deprecated: no longer applicable. 
) -func validCheckElement(s string) error { - switch checkElement(s) { - case elementSwitch: - return nil - case elementMap: - return nil - default: - return fmt.Errorf("invalid program element %q", s) - } -} - -var defaultCheckElements = []string{ - string(elementSwitch), -} - // Flag values. var ( fCheck = stringsFlag{elements: defaultCheckElements, filter: validCheckElement} @@ -301,7 +57,7 @@ var ( fPackageScopeOnly bool ) -// resetFlags resets the flag variables to their default values. +// resetFlags resets the flag variables to default values. // Useful in tests. func resetFlags() { fCheck = stringsFlag{elements: defaultCheckElements, filter: validCheckElement} @@ -314,6 +70,29 @@ func resetFlags() { fPackageScopeOnly = false } +// checkElement is a program element supported by the -check flag. +type checkElement string + +const ( + elementSwitch checkElement = "switch" + elementMap checkElement = "map" +) + +func validCheckElement(s string) error { + switch checkElement(s) { + case elementSwitch: + return nil + case elementMap: + return nil + default: + return fmt.Errorf("invalid program element %q", s) + } +} + +var defaultCheckElements = []string{ + string(elementSwitch), +} + var Analyzer = &analysis.Analyzer{ Name: "exhaustive", Doc: "check exhaustiveness of enum switch statements", @@ -325,32 +104,12 @@ var Analyzer = &analysis.Analyzer{ func run(pass *analysis.Pass) (interface{}, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - for typ, members := range findEnums( - fPackageScopeOnly, - pass.Pkg, - inspect, - pass.TypesInfo, - ) { + for typ, members := range findEnums(fPackageScopeOnly, pass.Pkg, inspect, pass.TypesInfo) { exportFact(pass, typ, members) } - generated := boolCache{value: isGeneratedFile} - comments := commentCache{value: fileCommentMap} - swConf := switchConfig{ - explicit: fExplicitExhaustiveSwitch, - defaultSignifiesExhaustive: fDefaultSignifiesExhaustive, - checkGenerated: fCheckGenerated, - ignoreConstant: fIgnoreEnumMembers.re, - ignoreType: fIgnoreEnumTypes.re, - } - mapConf := mapConfig{ - explicit: fExplicitExhaustiveMap, - checkGenerated: fCheckGenerated, - ignoreConstant: fIgnoreEnumMembers.re, - ignoreType: fIgnoreEnumTypes.re, - } - swChecker := switchChecker(pass, swConf, generated, comments) - mapChecker := mapChecker(pass, mapConf, generated, comments) + generated := boolCache{compute: isGeneratedFile} + comments := commentCache{compute: fileCommentMap} // NOTE: should not share the same inspect.WithStack call for different // program elements: the visitor function for a program element may @@ -359,12 +118,30 @@ func run(pass *analysis.Pass) (interface{}, error) { for _, e := range fCheck.elements { switch checkElement(e) { case elementSwitch: - inspect.WithStack([]ast.Node{&ast.SwitchStmt{}}, toVisitor(swChecker)) + conf := switchConfig{ + explicit: fExplicitExhaustiveSwitch, + defaultSignifiesExhaustive: fDefaultSignifiesExhaustive, + checkGenerated: fCheckGenerated, + ignoreConstant: fIgnoreEnumMembers.re, + ignoreType: fIgnoreEnumTypes.re, + } + checker := switchChecker(pass, conf, generated, comments) + inspect.WithStack([]ast.Node{&ast.SwitchStmt{}}, toVisitor(checker)) + case elementMap: - inspect.WithStack([]ast.Node{&ast.CompositeLit{}}, toVisitor(mapChecker)) + conf := mapConfig{ + explicit: fExplicitExhaustiveMap, + checkGenerated: fCheckGenerated, + ignoreConstant: fIgnoreEnumMembers.re, + ignoreType: fIgnoreEnumTypes.re, + } + checker := mapChecker(pass, conf, generated, comments) + 
inspect.WithStack([]ast.Node{&ast.CompositeLit{}}, toVisitor(checker)) + default: panic(fmt.Sprintf("unknown checkElement %v", e)) } } + return nil, nil } diff --git a/tools/vendor/github.com/nishanths/exhaustive/map.go b/tools/vendor/github.com/nishanths/exhaustive/map.go index 0692333841..23b4bef1ce 100644 --- a/tools/vendor/github.com/nishanths/exhaustive/map.go +++ b/tools/vendor/github.com/nishanths/exhaustive/map.go @@ -77,13 +77,13 @@ func mapChecker(pass *analysis.Pass, cfg mapConfig, generated boolCache, comment } } - if !cfg.explicit && hasComment(relatedComments, ignoreComment) { + if !cfg.explicit && hasCommentPrefix(relatedComments, ignoreComment) { // Skip checking of this map literal due to ignore // comment. Still return true because there may be nested // map literals that are not to be ignored. return true, resultIgnoreComment } - if cfg.explicit && !hasComment(relatedComments, enforceComment) { + if cfg.explicit && !hasCommentPrefix(relatedComments, enforceComment) { return true, resultNoEnforceComment } @@ -97,7 +97,7 @@ func mapChecker(pass *analysis.Pass, cfg mapConfig, generated boolCache, comment checkl.ignoreType(cfg.ignoreType) for _, e := range es { - checkl.add(e.et, e.em, pass.Pkg == e.et.Pkg()) + checkl.add(e.typ, e.members, pass.Pkg == e.typ.Pkg()) } analyzeMapLiteral(lit, pass.TypesInfo, checkl.found) @@ -128,7 +128,7 @@ func makeMapDiagnostic(lit *ast.CompositeLit, enumTypes []enumType, missing map[ Message: fmt.Sprintf( "missing keys in map of key type %s: %s", diagnosticEnumTypes(enumTypes), - diagnosticGroups(groupMissing(missing, enumTypes)), + diagnosticGroups(groupify(missing, enumTypes)), ), } } diff --git a/tools/vendor/github.com/nishanths/exhaustive/switch.go b/tools/vendor/github.com/nishanths/exhaustive/switch.go index 2e99d28307..000ef9886e 100644 --- a/tools/vendor/github.com/nishanths/exhaustive/switch.go +++ b/tools/vendor/github.com/nishanths/exhaustive/switch.go @@ -80,13 +80,13 @@ func switchChecker(pass *analysis.Pass, cfg switchConfig, generated boolCache, c sw := n.(*ast.SwitchStmt) switchComments := comments.get(pass.Fset, file)[sw] - if !cfg.explicit && hasComment(switchComments, ignoreComment) { + if !cfg.explicit && hasCommentPrefix(switchComments, ignoreComment) { // Skip checking of this switch statement due to ignore // comment. Still return true because there may be nested // switch statements that are not to be ignored. return true, resultIgnoreComment } - if cfg.explicit && !hasComment(switchComments, enforceComment) { + if cfg.explicit && !hasCommentPrefix(switchComments, enforceComment) { // Skip checking of this switch statement due to missing // enforce comment. 
return true, resultNoEnforceComment @@ -111,7 +111,7 @@ func switchChecker(pass *analysis.Pass, cfg switchConfig, generated boolCache, c checkl.ignoreType(cfg.ignoreType) for _, e := range es { - checkl.add(e.et, e.em, pass.Pkg == e.et.Pkg()) + checkl.add(e.typ, e.members, pass.Pkg == e.typ.Pkg()) } def := analyzeSwitchClauses(sw, pass.TypesInfo, checkl.found) @@ -163,7 +163,7 @@ func makeSwitchDiagnostic(sw *ast.SwitchStmt, enumTypes []enumType, missing map[ Message: fmt.Sprintf( "missing cases in switch of type %s: %s", diagnosticEnumTypes(enumTypes), - diagnosticGroups(groupMissing(missing, enumTypes)), + diagnosticGroups(groupify(missing, enumTypes)), ), } } diff --git a/tools/vendor/github.com/nunnatsa/ginkgolinter/Makefile b/tools/vendor/github.com/nunnatsa/ginkgolinter/Makefile index f370798396..e8efae583e 100644 --- a/tools/vendor/github.com/nunnatsa/ginkgolinter/Makefile +++ b/tools/vendor/github.com/nunnatsa/ginkgolinter/Makefile @@ -1,14 +1,24 @@ +VERSION ?= "unknown" +VERSION_FLAG := -X github.com/nunnatsa/ginkgolinter/version.version=$(VERSION) +COMMIT_HASH := $(shell git rev-parse HEAD) +HASH_FLAG := -X github.com/nunnatsa/ginkgolinter/version.gitHash=$(COMMIT_HASH) + +BUILD_ARGS := -ldflags "$(VERSION_FLAG) $(HASH_FLAG)" + build: - go build -o ginkgolinter ./cmd/ginkgolinter + go build $(BUILD_ARGS) -o ginkgolinter ./cmd/ginkgolinter build-for-windows: - GOOS=windows GOARCH=amd64 go build -o bin/ginkgolinter-amd64.exe ./cmd/ginkgolinter + GOOS=windows GOARCH=amd64 go build $(BUILD_ARGS) -o bin/ginkgolinter-amd64.exe ./cmd/ginkgolinter build-for-mac: - GOOS=darwin GOARCH=amd64 go build -o bin/ginkgolinter-amd64-darwin ./cmd/ginkgolinter + GOOS=darwin GOARCH=amd64 go build $(BUILD_ARGS) -o bin/ginkgolinter-amd64-darwin ./cmd/ginkgolinter build-for-linux: - GOOS=linux GOARCH=amd64 go build -o bin/ginkgolinter-amd64-linux ./cmd/ginkgolinter - GOOS=linux GOARCH=386 go build -o bin/ginkgolinter-386-linux ./cmd/ginkgolinter + GOOS=linux GOARCH=amd64 go build $(BUILD_ARGS) -o bin/ginkgolinter-amd64-linux ./cmd/ginkgolinter + GOOS=linux GOARCH=386 go build $(BUILD_ARGS) -o bin/ginkgolinter-386-linux ./cmd/ginkgolinter build-all: build build-for-linux build-for-mac build-for-windows + +test: build + ./tests/e2e.sh diff --git a/tools/vendor/github.com/nunnatsa/ginkgolinter/README.md b/tools/vendor/github.com/nunnatsa/ginkgolinter/README.md index fa31117af8..e0a4b0739f 100644 --- a/tools/vendor/github.com/nunnatsa/ginkgolinter/README.md +++ b/tools/vendor/github.com/nunnatsa/ginkgolinter/README.md @@ -1,5 +1,10 @@ [![Go Report Card](https://goreportcard.com/badge/github.com/nunnatsa/ginkgolinter)](https://goreportcard.com/report/github.com/nunnatsa/ginkgolinter) [![Coverage Status](https://coveralls.io/repos/github/nunnatsa/ginkgolinter/badge.svg?branch=main)](https://coveralls.io/github/nunnatsa/ginkgolinter?branch=main) +![Build Status](https://github.com/nunnatsa/ginkgolinter/workflows/CI/badge.svg) +[![License](https://img.shields.io/github/license/nunnatsa/ginkgolinter)](/LICENSE) +[![Release](https://img.shields.io/github/release/nunnatsa/ginkgolinter.svg)](https://github.com/nunnatsa/ginkgolinter/releases/latest) +[![GitHub Releases Stats of ginkgolinter](https://img.shields.io/github/downloads/nunnatsa/ginkgolinter/total.svg?logo=github)](https://somsubhra.github.io/github-release-stats/?username=nunnatsa&repository=ginkgolinter) + # ginkgo-linter [ginkgo](https://onsi.github.io/ginkgo/) is a popular testing framework and [gomega](https://onsi.github.io/gomega/) is its assertion 
package.
@@ -18,8 +23,27 @@ Then add the new executable to your PATH.
 ```shell
 ginkgolinter [-fix] ./...
 ```
+
 Use the `-fix` flag to apply the fix suggestions to the source code.
+### Use ginkgolinter with golangci-lint
+The ginkgolinter is now part of the popular [golangci-lint](https://golangci-lint.run/), starting from version `v0.51.1`.
+
+It is not enabled by default, though. There are two ways to run ginkgolinter with golangci-lint:
+
+* From command line:
+  ```shell
+  golangci-lint run -E ginkgolinter ./...
+  ```
+* From configuration:
+
+  Add ginkgolinter to the enabled linters list in the .golangci.yml file in your project. For more details, see
+  the [golangci-lint documentation](https://golangci-lint.run/usage/configuration/); e.g.
+  ```yaml
+  linters:
+    enable:
+      - ginkgolinter
+  ```
 ## Linter Checks
 The linter checks the gomega assertions in golang test code. Gomega may be used together with ginkgo tests. For example:
 ```go
@@ -78,6 +102,8 @@ The output of the linter, when finding issues, looks like this:
 ./testdata/src/a/a.go:18:5: ginkgo-linter: wrong length assertion; consider using `Expect("").Should(BeEmpty())` instead
 ./testdata/src/a/a.go:22:5: ginkgo-linter: wrong length assertion; consider using `Expect("").Should(BeEmpty())` instead
 ```
+#### Using the `HaveLen(0)` matcher
+The linter will also warn about the `HaveLen(0)` matcher and will suggest replacing it with `BeEmpty()`.
 ### Wrong `nil` Assertion
 The linter finds assertions of comparison to nil, with all kinds of matchers, instead of using the existing `BeNil()`
 matcher; we want to assert the item, rather than a comparison result.
@@ -113,7 +139,7 @@ Expect(x).To(Equal(false)) // should be: Expect(x).To(BeFalse())
 ```
 It also supports the embedded `Not()` matcher; e.g.
-`Ω(x).Should(Not(Equal(True)))` => `Ω(x).ShouldNot(BeBeTrue())`
+`Ω(x).Should(Not(Equal(True)))` => `Ω(x).ShouldNot(BeTrue())`
 ### Wrong Error Assertion
 The linter finds assertions of errors compared with nil, or to be equal to nil, or to be nil. The linter suggests using
 `Succeed` for functions or `HaveOccurred` for error values.
@@ -133,11 +159,133 @@ It also supports the embedded `Not()` matcher; e.g.
 `Ω(err == nil).Should(Not(BeTrue()))` => `Ω(x).Should(HaveOccurred())`
+### Wrong Comparison Assertion
+The linter finds assertions of boolean comparisons that are already covered by existing gomega matchers.
+
+The linter assumes that when something is compared to a literal or a constant, that value should be used for the assertion,
+and it will do its best to suggest the right assertion expression accordingly.
+
+There are several wrong patterns:
+```go
+var x = 10
+var s = "abcd"
+
+...
+
+Expect(x == 10).Should(BeTrue()) // should be Expect(x).Should(Equal(10))
+Expect(10 == x).Should(BeTrue()) // should be Expect(x).Should(Equal(10))
+Expect(x != 5).Should(Equal(true)) // should be Expect(x).ShouldNot(Equal(5))
+Expect(x != 0).Should(Equal(true)) // should be Expect(x).ShouldNot(BeZero())
+
+Expect(s != "abcd").Should(BeFalse()) // should be Expect(s).Should(Equal("abcd"))
+Expect("abcd" != s).Should(BeFalse()) // should be Expect(s).Should(Equal("abcd"))
+```
+Or non-equal comparisons:
+```go
+Expect(x > 10).To(BeTrue()) // ==> Expect(x).To(BeNumerically(">", 10))
+Expect(x >= 15).To(BeTrue()) // ==> Expect(x).To(BeNumerically(">=", 15))
+Expect(3 > y).To(BeTrue()) // ==> Expect(y).To(BeNumerically("<", 3))
+// and so on ...
+```
+
+This check includes limited support for constant values. For example:
+```go
+const c1 = 5
+
+...
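+// x1 is assumed to be an int variable declared in the elided code above, for example:
+var x1 = 5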
+
+Expect(x1 == c1).Should(BeTrue()) // ==> Expect(x1).Should(Equal(c1))
+Expect(c1 == x1).Should(BeTrue()) // ==> Expect(x1).Should(Equal(c1))
+```
+
+### Using a function call in async assertion
+This rule finds an actual bug in tests, where a function call is asserted in an async assertion such as `Eventually`. For
+example:
+```go
+func slowInt(val int) int {
+    time.Sleep(time.Second)
+    return val
+}
+
+...
+
+It("should test that slowInt returns 42, eventually", func() {
+    Eventually(slowInt(42)).WithPolling(time.Millisecond * 100).WithTimeout(time.Second * 2).Should(Equal(42))
+})
+```
+The problem with the above code is that it **should** poll - call the function - until it returns 42, but what actually
+happens is that the function is called first, and its return value, `42`, is passed to `Eventually` - not the function
+itself. This is not what we intended here.
+
+The linter will suggest replacing this code with:
+```go
+It("should test that slowInt returns 42, eventually", func() {
+    Eventually(slowInt).WithArguments(42).WithPolling(time.Millisecond * 100).WithTimeout(time.Second * 2).Should(Equal(42))
+})
+```
+
+The linter suggests replacing the function call with the function name.
+
+If function arguments are used, the linter will add the `WithArguments()` method to pass them.
+
+Please notice that `WithArguments()` is only supported from gomega v1.22.0.
+
+When using an older version of gomega, change the code manually. For example:
+
+```go
+It("should test that slowInt returns 42, eventually", func() {
+    Eventually(func() int {
+        return slowInt(42)
+    }).WithPolling(time.Millisecond * 100).WithTimeout(time.Second * 2).Should(Equal(42))
+})
+```
+
+### Comparing a pointer with a value
+The linter warns when comparing a pointer with a value.
+These comparisons are always wrong and will always fail.
+
+In the case of a positive assertion (`To()` or `Should()`), the test will just fail.
+
+But the main concern is for false positive tests, when using a negative assertion (`NotTo()`, `ToNot()`, `ShouldNot()`,
+`Should(Not())` etc.); e.g.
+```go
+num := 5
+...
+pNum := &num
+...
+Expect(pNum).ShouldNot(Equal(6))
+```
+This assertion will pass, but for the wrong reason: `pNum` is not equal to `6`, not because `num == 5`, but because `pNum` is
+a pointer, while `6` is an `int`.
+
+In the case above, the linter will suggest `Expect(pNum).ShouldNot(HaveValue(Equal(6)))`.
+
+This also applies to additional matchers: `BeTrue()` and `BeFalse()`, `BeIdenticalTo()`, `BeEquivalentTo()`
+and `BeNumerically()`.
+
+### Missing Assertion Method
+The linter warns when calling an "actual" method (e.g. `Expect()`, `Eventually()` etc.) without an assertion method (e.g.
+`Should()`, `NotTo()` etc.).
+
+For example:
+```go
+// no assertion for the result
+Eventually(doSomething).WithTimeout(time.Second * 5).WithPolling(time.Millisecond * 100)
+```
+
+The linter will not suggest a fix for this warning; a possible corrected form is sketched at the end of this section.
+
+This rule cannot be suppressed.
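+A possible corrected form - a sketch only, assuming `doSomething` returns an `error` - is to end the chain with an
+assertion method and a matcher:
+```go
+// assert that doSomething eventually stops returning an error
+Eventually(doSomething).WithTimeout(time.Second * 5).WithPolling(time.Millisecond * 100).Should(Succeed())
+```
+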
+
 ## Suppress the linter
 ### Suppress warning from command line
 * Use the `--suppress-len-assertion=true` flag to suppress the wrong length assertion warning
 * Use the `--suppress-nil-assertion=true` flag to suppress the wrong nil assertion warning
 * Use the `--suppress-err-assertion=true` flag to suppress the wrong error assertion warning
+* Use the `--suppress-compare-assertion=true` flag to suppress the wrong comparison assertion warning
+* Use the `--suppress-async-assertion=true` flag to suppress the function call in async assertion warning
+* Use the `--allow-havelen-0=true` flag to avoid warnings about `HaveLen(0)`; Note: this parameter is only supported from the
+  command line, and not from a comment.
 ### Suppress warning from the code
 To suppress the wrong length assertion warning, add a comment with (only)
@@ -152,6 +300,14 @@
 To suppress the wrong error assertion warning, add a comment with (only)
 `ginkgo-linter:ignore-err-assert-warning`.
+To suppress the wrong comparison assertion warning, add a comment with (only)
+
+`ginkgo-linter:ignore-compare-assert-warning`.
+
+To suppress the wrong async assertion warning, add a comment with (only)
+
+`ginkgo-linter:ignore-async-assert-warning`.
+
 There are two options to use these comments:
 1. If the comment is at the top of the file, suppress the warning for the whole file; e.g.:
 ```go
diff --git a/tools/vendor/github.com/nunnatsa/ginkgolinter/ginkgo_linter.go b/tools/vendor/github.com/nunnatsa/ginkgolinter/ginkgo_linter.go
index fc089be106..2ae6b77511 100644
--- a/tools/vendor/github.com/nunnatsa/ginkgolinter/ginkgo_linter.go
+++ b/tools/vendor/github.com/nunnatsa/ginkgolinter/ginkgo_linter.go
@@ -4,7 +4,9 @@ import (
 "bytes"
 "flag"
 "fmt"
+ "github.com/nunnatsa/ginkgolinter/version"
 "go/ast"
+ "go/constant"
 "go/printer"
 "go/token"
 gotypes "go/types"
@@ -19,105 +21,136 @@ import (
 // The ginkgolinter enforces standards of using ginkgo and gomega.
// -// The current checks are: -// * enforce right length assertion - warn for assertion of len(something): -// -// This check finds the following patterns and suggests an alternative -// * Expect(len(something)).To(Equal(number)) ===> Expect(x).To(HaveLen(number)) -// * ExpectWithOffset(1, len(something)).ShouldNot(Equal(0)) ===> ExpectWithOffset(1, something).ShouldNot(BeEmpty()) -// * Ω(len(something)).NotTo(BeZero()) ===> Ω(something).NotTo(BeEmpty()) -// * Expect(len(something)).To(BeNumerically(">", 0)) ===> Expect(something).ToNot(BeEmpty()) -// * Expect(len(something)).To(BeNumerically(">=", 1)) ===> Expect(something).ToNot(BeEmpty()) -// * Expect(len(something)).To(BeNumerically("==", number)) ===> Expect(something).To(HaveLen(number)) -// -// * enforce right nil assertion - warn for assertion of x == nil: -// This check finds the following patterns and suggests an alternative -// * Expect(x == nil).Should(Equal(true)) ===> Expect(x).Should(BeNil()) -// * Expect(nil == x).Should(BeTrue()) ===> Expect(x).Should(BeNil()) -// * Expect(x != nil).Should(Equal(false)) ===> Expect(x).Should(BeNil()) -// * Expect(nil == x).Should(BeFalse()) ===> Expect(x).Should(BeNil()) -// * Expect(x).Should(Equal(nil) // ===> Expect(x).Should(BeNil()) +// For more details, look at the README.md file const ( - linterName = "ginkgo-linter" - wrongLengthWarningTemplate = linterName + ": wrong length assertion; consider using `%s` instead" - wrongNilWarningTemplate = linterName + ": wrong nil assertion; consider using `%s` instead" - wrongBoolWarningTemplate = linterName + ": wrong boolean assertion; consider using `%s` instead" - wrongErrWarningTemplate = linterName + ": wrong error assertion; consider using `%s` instead" - beEmpty = "BeEmpty" - beNil = "BeNil" - beTrue = "BeTrue" - beFalse = "BeFalse" - equal = "Equal" - not = "Not" - haveLen = "HaveLen" - succeed = "Succeed" - haveOccurred = "HaveOccurred" - expect = "Expect" - omega = "Ω" - expectWithOffset = "ExpectWithOffset" + linterName = "ginkgo-linter" + wrongLengthWarningTemplate = linterName + ": wrong length assertion; consider using `%s` instead" + wrongNilWarningTemplate = linterName + ": wrong nil assertion; consider using `%s` instead" + wrongBoolWarningTemplate = linterName + ": wrong boolean assertion; consider using `%s` instead" + wrongErrWarningTemplate = linterName + ": wrong error assertion; consider using `%s` instead" + wrongCompareWarningTemplate = linterName + ": wrong comparison assertion; consider using `%s` instead" + doubleNegativeWarningTemplate = linterName + ": avoid double negative assertion; consider using `%s` instead" + valueInEventually = linterName + ": use a function call in %s. This actually checks nothing, because %s receives the function returned value, instead of function itself, and this value is never changed" + comparePointerToValue = linterName + ": comparing a pointer to a value will always fail. consider using `%s` instead" + missingAssertionMessage = linterName + `: %q: missing assertion method. Expected "Should()", "To()", "ShouldNot()", "ToNot()" or "NotTo()"` + missingAsyncAssertionMessage = linterName + `: %q: missing assertion method. 
Expected "Should()" or "ShouldNot()"` +) +const ( // gomega matchers + beEmpty = "BeEmpty" + beEquivalentTo = "BeEquivalentTo" + beFalse = "BeFalse" + beIdenticalTo = "BeIdenticalTo" + beNil = "BeNil" + beNumerically = "BeNumerically" + beTrue = "BeTrue" + beZero = "BeZero" + equal = "Equal" + haveLen = "HaveLen" + haveOccurred = "HaveOccurred" + haveValue = "HaveValue" + not = "Not" + omega = "Ω" + succeed = "Succeed" +) + +const ( // gomega actuals + expect = "Expect" + expectWithOffset = "ExpectWithOffset" + eventually = "Eventually" + eventuallyWithOffset = "EventuallyWithOffset" + consistently = "Consistently" + consistentlyWithOffset = "ConsistentlyWithOffset" ) // Analyzer is the interface to go_vet var Analyzer = NewAnalyzer() type ginkgoLinter struct { - suppress *types.Suppress + config *types.Config } // NewAnalyzer returns an Analyzer - the package interface with nogo func NewAnalyzer() *analysis.Analyzer { linter := ginkgoLinter{ - suppress: &types.Suppress{ - Len: false, - Nil: false, - Err: false, + config: &types.Config{ + SuppressLen: false, + SuppressNil: false, + SuppressErr: false, + SuppressCompare: false, + AllowHaveLen0: false, }, } a := &analysis.Analyzer{ Name: "ginkgolinter", - Doc: `enforces standards of using ginkgo and gomega + Doc: fmt.Sprintf(doc, version.Version()), + Run: linter.run, + } + + a.Flags.Init("ginkgolinter", flag.ExitOnError) + a.Flags.Var(&linter.config.SuppressLen, "suppress-len-assertion", "Suppress warning for wrong length assertions") + a.Flags.Var(&linter.config.SuppressNil, "suppress-nil-assertion", "Suppress warning for wrong nil assertions") + a.Flags.Var(&linter.config.SuppressErr, "suppress-err-assertion", "Suppress warning for wrong error assertions") + a.Flags.Var(&linter.config.SuppressCompare, "suppress-compare-assertion", "Suppress warning for wrong comparison assertions") + a.Flags.Var(&linter.config.SuppressAsync, "suppress-async-assertion", "Suppress warning for function call in async assertion, like Eventually") + a.Flags.Var(&linter.config.AllowHaveLen0, "allow-havelen-0", "Do not warn for HaveLen(0); default = false") + + return a +} + +const doc = `enforces standards of using ginkgo and gomega + +or + ginkgolinter version + +version: %s + currently, the linter searches for following: -* wrong length assertions. We want to assert the item rather than its length. +* trigger a warning when using Eventually or Constantly with a function call. This is in order to prevent the case when + using a function call instead of a function. Function call returns a value only once, and so the original value + is tested again and again and is never changed. [Bug] + +* trigger a warning when comparing a pointer to a value. [Bug] + +* trigger a warning for missing assertion method: [Bug] + Eventually(checkSomething) + +* wrong length assertions. We want to assert the item rather than its length. [Style] For example: Expect(len(x)).Should(Equal(1)) This should be replaced with: Expect(x)).Should(HavelLen(1)) -* wrong nil assertions. We want to assert the item rather than a comparison result. +* wrong nil assertions. We want to assert the item rather than a comparison result. 
[Style] For example: Expect(x == nil).Should(BeTrue()) This should be replaced with: Expect(x).Should(BeNil()) - `, - Run: linter.run, - RunDespiteErrors: true, - } - a.Flags.Init("ginkgolinter", flag.ExitOnError) - a.Flags.Var(&linter.suppress.Len, "suppress-len-assertion", "Suppress warning for wrong length assertions") - a.Flags.Var(&linter.suppress.Nil, "suppress-nil-assertion", "Suppress warning for wrong nil assertions") - a.Flags.Var(&linter.suppress.Err, "suppress-err-assertion", "Suppress warning for wrong error assertions") +* wrong error assertions. For example: [Style] + Expect(err == nil).Should(BeTrue()) +This should be replaced with: + Expect(err).ShouldNot(HaveOccurred()) - return a -} +* wrong boolean comparison, for example: [Style] + Expect(x == 8).Should(BeTrue()) +This should be replaced with: + Expect(x).Should(BeEqual(8)) + +* replaces Equal(true/false) with BeTrue()/BeFalse() [Style] + +* replaces HaveLen(0) with BeEmpty() [Style] +` // main assertion function func (l *ginkgoLinter) run(pass *analysis.Pass) (interface{}, error) { - if l.suppress.AllTrue() { - return nil, nil - } - for _, file := range pass.Files { - fileSuppress := l.suppress.Clone() + fileConfig := l.config.Clone() cm := ast.NewCommentMap(pass.Fset, file, file.Comments) - fileSuppress.UpdateFromFile(cm) - if fileSuppress.AllTrue() { - continue - } + fileConfig.UpdateFromFile(cm) handler := gomegahandler.GetGomegaHandler(file) if handler == nil { // no gomega import => no use in gomega in this file; nothing to do here @@ -125,16 +158,15 @@ func (l *ginkgoLinter) run(pass *analysis.Pass) (interface{}, error) { } ast.Inspect(file, func(n ast.Node) bool { - stmt, ok := n.(*ast.ExprStmt) if !ok { return true } - exprSuppress := fileSuppress.Clone() + config := fileConfig.Clone() if comments, ok := cm[stmt]; ok { - exprSuppress.UpdateFromComment(comments) + config.UpdateFromComment(comments) } // search for function calls @@ -144,49 +176,325 @@ func (l *ginkgoLinter) run(pass *analysis.Pass) (interface{}, error) { } assertionFunc, ok := assertionExp.Fun.(*ast.SelectorExpr) - if !ok || !isAssertionFunc(assertionFunc.Sel.Name) { + if !ok { + checkNoAssertion(pass, assertionExp, handler) return true } - actualArg := getActualArg(assertionFunc, handler) - if actualArg == nil { + if !isAssertionFunc(assertionFunc.Sel.Name) { + checkNoAssertion(pass, assertionExp, handler) return true } - return checkExpression(pass, exprSuppress, actualArg, assertionExp, handler) + actualExpr := handler.GetActualExpr(assertionFunc) + if actualExpr == nil { + return true + } + return checkExpression(pass, config, assertionExp, actualExpr, handler) }) } return nil, nil } -func checkExpression(pass *analysis.Pass, exprSuppress types.Suppress, actualArg ast.Expr, assertionExp *ast.CallExpr, handler gomegahandler.Handler) bool { - assertionExp = astcopy.CallExpr(assertionExp) - oldExpr := goFmt(pass.Fset, assertionExp) - if !bool(exprSuppress.Len) && isActualIsLenFunc(actualArg) { +func checkExpression(pass *analysis.Pass, config types.Config, assertionExp *ast.CallExpr, actualExpr *ast.CallExpr, handler gomegahandler.Handler) bool { + expr := astcopy.CallExpr(assertionExp) + oldExpr := goFmt(pass.Fset, expr) + + if checkAsyncAssertion(pass, config, expr, actualExpr, handler, oldExpr) { + return true + } + + actualArg := getActualArg(actualExpr, handler) + if actualArg == nil { + return true + } + + if !bool(config.SuppressLen) && isActualIsLenFunc(actualArg) { + return checkLengthMatcher(expr, pass, handler, oldExpr) + } else if 
nilable, compOp := getNilableFromComparison(actualArg); nilable != nil { + if isExprError(pass, nilable) { + if config.SuppressErr { + return true + } + } else if config.SuppressNil { + return true + } + + return checkNilMatcher(expr, pass, nilable, handler, compOp == token.NEQ, oldExpr) + + } else if first, second, op, ok := isComparison(pass, actualArg); ok { + matcher, shouldContinue := startCheckComparison(expr, handler) + if !shouldContinue { + return false + } + if !bool(config.SuppressLen) && isActualIsLenFunc(first) { + if handleLenComparison(pass, expr, matcher, first, second, op, handler, oldExpr) { + return false + } + } + return bool(config.SuppressCompare) || checkComparison(expr, pass, matcher, handler, first, second, op, oldExpr) + + } else if isExprError(pass, actualArg) { + return bool(config.SuppressErr) || checkNilError(pass, expr, handler, actualArg, oldExpr) - return checkLengthMatcher(assertionExp, pass, handler, oldExpr) + } else if checkPointerComparison(pass, config, assertionExp, expr, actualArg, handler, oldExpr) { + return false } else { - if nilable, compOp := getNilableFromComparison(actualArg); nilable != nil { - if isExprError(pass, nilable) { - if exprSuppress.Err { + return handleAssertionOnly(pass, config, expr, handler, actualArg, oldExpr, true) + } +} + +// be careful - never change origExp!!! only modify its clone, expr!!! +func checkPointerComparison(pass *analysis.Pass, config types.Config, origExp *ast.CallExpr, expr *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, oldExpr string) bool { + if !isPointer(pass, actualArg) { + return false + } + matcher, ok := origExp.Args[0].(*ast.CallExpr) + if !ok { + return false + } + + matcherFuncName, ok := handler.GetActualFuncName(matcher) + if !ok { + return false + } + + // not using recurse here, since we need the original expression, in order to get the TypeInfo, while we should not + // modify it. + for matcherFuncName == not { + reverseAssertionFuncLogic(expr) + expr.Args[0] = expr.Args[0].(*ast.CallExpr).Args[0] + matcher, ok = matcher.Args[0].(*ast.CallExpr) + if !ok { + return false + } + + matcherFuncName, ok = handler.GetActualFuncName(matcher) + if !ok { + return false + } + } + + switch matcherFuncName { + case equal, beIdenticalTo, beEquivalentTo: + arg := matcher.Args[0] + if isPointer(pass, arg) { + return false + } + if isNil(arg) { + return false + } + if isInterface(pass, arg) { + return false + } + case beFalse, beTrue, beNumerically: + default: + return false + } + + handleAssertionOnly(pass, config, expr, handler, actualArg, oldExpr, false) + + args := []ast.Expr{astcopy.CallExpr(expr.Args[0].(*ast.CallExpr))} + handler.ReplaceFunction(expr.Args[0].(*ast.CallExpr), ast.NewIdent(haveValue)) + expr.Args[0].(*ast.CallExpr).Args = args + report(pass, expr, comparePointerToValue, oldExpr) + + return true + +} + +// check async assertion does not assert function call. This is a real bug in the test. In this case, the assertion is +// done on the returned value, instead of polling the result of a function, for instance. 
+func checkAsyncAssertion(pass *analysis.Pass, config types.Config, expr *ast.CallExpr, actualExpr *ast.CallExpr, handler gomegahandler.Handler, oldExpr string) bool { + funcName, ok := handler.GetActualFuncName(actualExpr) + if !ok { + return false + } + + var funcIndex int + switch funcName { + case eventually, consistently: + funcIndex = 0 + case eventuallyWithOffset, consistentlyWithOffset: + funcIndex = 1 + default: + return false + } + + if !config.SuppressAsync && len(actualExpr.Args) > funcIndex { + t := pass.TypesInfo.TypeOf(actualExpr.Args[funcIndex]) + + // skip context variable, if used as first argument + if "context.Context" == t.String() { + funcIndex++ + } + + if len(actualExpr.Args) > funcIndex { + if fun, funcCall := actualExpr.Args[funcIndex].(*ast.CallExpr); funcCall { + t = pass.TypesInfo.TypeOf(fun) + switch t.(type) { + // allow functions that return function or channel. + case *gotypes.Signature, *gotypes.Chan, *gotypes.Pointer: + default: + actualExpr = handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr)) + + if len(fun.Args) > 0 { + origArgs := actualExpr.Args + origFunc := actualExpr.Fun + actualExpr.Args = fun.Args + + origArgs[funcIndex] = fun.Fun + call := &ast.SelectorExpr{ + Sel: ast.NewIdent("WithArguments"), + X: &ast.CallExpr{ + Fun: origFunc, + Args: origArgs, + }, + } + + actualExpr.Fun = call + actualExpr.Args = fun.Args + } else { + actualExpr.Args[funcIndex] = fun.Fun + } + + handleAssertionOnly(pass, config, expr, handler, actualExpr, oldExpr, false) + report(pass, expr, fmt.Sprintf(valueInEventually, funcName, funcName)+"; consider using `%s` instead", oldExpr) return true } - } else if exprSuppress.Nil { - return true } + } + } + + handleAssertionOnly(pass, config, expr, handler, actualExpr, oldExpr, true) + return true +} + +func startCheckComparison(exp *ast.CallExpr, handler gomegahandler.Handler) (*ast.CallExpr, bool) { + matcher, ok := exp.Args[0].(*ast.CallExpr) + if !ok { + return nil, false + } + + matcherFuncName, ok := handler.GetActualFuncName(matcher) + if !ok { + return nil, false + } + + switch matcherFuncName { + case beTrue: + case beFalse: + reverseAssertionFuncLogic(exp) + case equal: + boolean, found := matcher.Args[0].(*ast.Ident) + if !found { + return nil, false + } + + if boolean.Name == "false" { + reverseAssertionFuncLogic(exp) + } else if boolean.Name != "true" { + return nil, false + } - return checkNilMatcher(assertionExp, pass, nilable, handler, compOp == token.NEQ, oldExpr) + case not: + reverseAssertionFuncLogic(exp) + exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0] + return startCheckComparison(exp, handler) - } else if isExprError(pass, actualArg) { - return bool(exprSuppress.Err) || checkNilError(pass, assertionExp, handler, actualArg, oldExpr) + default: + return nil, false + } + + return matcher, true +} +func checkComparison(exp *ast.CallExpr, pass *analysis.Pass, matcher *ast.CallExpr, handler gomegahandler.Handler, first ast.Expr, second ast.Expr, op token.Token, oldExp string) bool { + fun, ok := exp.Fun.(*ast.SelectorExpr) + if !ok { + return true + } + + call := handler.GetActualExpr(fun) + if call == nil { + return true + } + + switch op { + case token.EQL: + handleEqualComparison(pass, matcher, first, second, handler) + + case token.NEQ: + reverseAssertionFuncLogic(exp) + handleEqualComparison(pass, matcher, first, second, handler) + case token.GTR, token.GEQ, token.LSS, token.LEQ: + if !isNumeric(pass, first) { + return true + } + handler.ReplaceFunction(matcher, ast.NewIdent(beNumerically)) + 
matcher.Args = []ast.Expr{ + &ast.BasicLit{Kind: token.STRING, Value: fmt.Sprintf(`"%s"`, op.String())}, + second, + } + default: + return true + } + + call.Args = []ast.Expr{first} + report(pass, exp, wrongCompareWarningTemplate, oldExp) + return false +} + +func handleEqualComparison(pass *analysis.Pass, matcher *ast.CallExpr, first ast.Expr, second ast.Expr, handler gomegahandler.Handler) { + if isZero(pass, second) { + handler.ReplaceFunction(matcher, ast.NewIdent(beZero)) + matcher.Args = nil + } else { + t := pass.TypesInfo.TypeOf(first) + if gotypes.IsInterface(t) { + handler.ReplaceFunction(matcher, ast.NewIdent(beIdenticalTo)) + } else if _, ok := t.(*gotypes.Pointer); ok { + handler.ReplaceFunction(matcher, ast.NewIdent(beIdenticalTo)) } else { - return simplifyEqual(pass, exprSuppress, assertionExp, handler, actualArg, oldExpr) + handler.ReplaceFunction(matcher, ast.NewIdent(equal)) } + + matcher.Args = []ast.Expr{second} } } +func handleLenComparison(pass *analysis.Pass, exp *ast.CallExpr, matcher *ast.CallExpr, first ast.Expr, second ast.Expr, op token.Token, handler gomegahandler.Handler, oldExpr string) bool { + switch op { + case token.EQL: + case token.NEQ: + reverseAssertionFuncLogic(exp) + default: + return false + } + + var eql *ast.Ident + if isZero(pass, second) { + eql = ast.NewIdent(beEmpty) + } else { + eql = ast.NewIdent(haveLen) + matcher.Args = []ast.Expr{second} + } + + handler.ReplaceFunction(matcher, eql) + firstLen, ok := first.(*ast.CallExpr) // assuming it's len() + if !ok { + return false // should never happen + } + + val := firstLen.Args[0] + fun := handler.GetActualExpr(exp.Fun.(*ast.SelectorExpr)) + fun.Args = []ast.Expr{val} + + report(pass, exp, wrongLengthWarningTemplate, oldExpr) + return true +} + // Check if the "actual" argument is a call to the golang built-in len() function func isActualIsLenFunc(actualArg ast.Expr) bool { lenArgExp, ok := actualArg.(*ast.CallExpr) @@ -215,11 +523,11 @@ func checkLengthMatcher(exp *ast.CallExpr, pass *analysis.Pass, handler gomegaha handleEqualMatcher(matcher, pass, exp, handler, oldExp) return false - case "BeZero": + case beZero: handleBeZero(pass, exp, handler, oldExp) return false - case "BeNumerically": + case beNumerically: return handleBeNumerically(matcher, pass, exp, handler, oldExp) case not: @@ -317,13 +625,19 @@ func checkNilError(pass *analysis.Pass, assertionExp *ast.CallExpr, handler gome return false } -// handle Equal(nil), Equal(true) and Equal(false) -func simplifyEqual(pass *analysis.Pass, exprSuppress types.Suppress, assertionExp *ast.CallExpr, handler gomegahandler.Handler, actualArg ast.Expr, oldExpr string) bool { - if len(assertionExp.Args) == 0 { +// handleAssertionOnly checks use-cases when the actual value is valid, but only the assertion should be fixed +// it handles: +// +// Equal(nil) => BeNil() +// Equal(true) => BeTrue() +// Equal(false) => BeFalse() +// HaveLen(0) => BeEmpty() +func handleAssertionOnly(pass *analysis.Pass, config types.Config, expr *ast.CallExpr, handler gomegahandler.Handler, actualArg ast.Expr, oldExpr string, shouldReport bool) bool { + if len(expr.Args) == 0 { return true } - equalFuncExpr, ok := assertionExp.Args[0].(*ast.CallExpr) + equalFuncExpr, ok := expr.Args[0].(*ast.CallExpr) if !ok { return true } @@ -339,16 +653,16 @@ func simplifyEqual(pass *analysis.Pass, exprSuppress types.Suppress, assertionEx return true } - token, ok := equalFuncExpr.Args[0].(*ast.Ident) + tkn, ok := equalFuncExpr.Args[0].(*ast.Ident) if !ok { return true } var 
replacement string var template string - switch token.Name { + switch tkn.Name { case "nil": - if exprSuppress.Nil { + if config.SuppressNil { return true } replacement = beNil @@ -357,7 +671,12 @@ func simplifyEqual(pass *analysis.Pass, exprSuppress types.Suppress, assertionEx replacement = beTrue template = wrongBoolWarningTemplate case "false": - replacement = beFalse + if isNegativeAssertion(expr) { + reverseAssertionFuncLogic(expr) + replacement = beTrue + } else { + replacement = beFalse + } template = wrongBoolWarningTemplate default: return true @@ -366,27 +685,76 @@ func simplifyEqual(pass *analysis.Pass, exprSuppress types.Suppress, assertionEx handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(replacement)) equalFuncExpr.Args = nil - report(pass, assertionExp, template, oldExpr) + if shouldReport { + report(pass, expr, template, oldExpr) + } + + return false + case beFalse: + if isNegativeAssertion(expr) { + reverseAssertionFuncLogic(expr) + handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(beTrue)) + if shouldReport { + report(pass, expr, doubleNegativeWarningTemplate, oldExpr) + } + } return false + case haveLen: + if config.AllowHaveLen0 { + return true + } + + if len(equalFuncExpr.Args) > 0 { + if isZero(pass, equalFuncExpr.Args[0]) { + handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(beEmpty)) + equalFuncExpr.Args = nil + if shouldReport { + report(pass, expr, wrongLengthWarningTemplate, oldExpr) + } + return false + } + } + + return true + case not: - reverseAssertionFuncLogic(assertionExp) - assertionExp.Args[0] = assertionExp.Args[0].(*ast.CallExpr).Args[0] - return simplifyEqual(pass, exprSuppress, assertionExp, handler, actualArg, oldExpr) + reverseAssertionFuncLogic(expr) + expr.Args[0] = expr.Args[0].(*ast.CallExpr).Args[0] + return handleAssertionOnly(pass, config, expr, handler, actualArg, oldExpr, shouldReport) default: return true } } -// checks that the function is an assertion's actual function and return the "actual" parameter. If the function -// is not assertion's actual function, return nil. -func getActualArg(assertionFunc *ast.SelectorExpr, handler gomegahandler.Handler) ast.Expr { - actualExpr, ok := assertionFunc.X.(*ast.CallExpr) - if !ok { - return nil +func isZero(pass *analysis.Pass, arg ast.Expr) bool { + if val, ok := arg.(*ast.BasicLit); ok && val.Kind == token.INT && val.Value == "0" { + return true + } + info, ok := pass.TypesInfo.Types[arg] + if ok { + if t, ok := info.Type.(*gotypes.Basic); ok && t.Kind() == gotypes.Int && info.Value != nil { + if i, ok := constant.Int64Val(info.Value); ok && i == 0 { + return true + } + } + } else if val, ok := arg.(*ast.Ident); ok && val.Obj != nil && val.Obj.Kind == ast.Con { + if spec, ok := val.Obj.Decl.(*ast.ValueSpec); ok { + if len(spec.Values) == 1 { + if value, ok := spec.Values[0].(*ast.BasicLit); ok && value.Kind == token.INT && value.Value == "0" { + return true + } + } + } } + return false +} + +// getActualArg checks that the function is an assertion's actual function and return the "actual" parameter. If the +// function is not assertion's actual function, return nil. 
+func getActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler) ast.Expr { funcName, ok := handler.GetActualFuncName(actualExpr) if !ok { return nil @@ -462,13 +830,13 @@ func handleBeNumerically(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.Ca reportLengthAssertion(pass, exp, handler, oldExp) return false } else if op == `"=="` { - chooseNumericMatcher(exp, handler, valExp) + chooseNumericMatcher(pass, exp, handler, valExp) reportLengthAssertion(pass, exp, handler, oldExp) return false } else if op == `"!="` { reverseAssertionFuncLogic(exp) - chooseNumericMatcher(exp, handler, valExp) + chooseNumericMatcher(pass, exp, handler, valExp) reportLengthAssertion(pass, exp, handler, oldExp) return false @@ -477,9 +845,9 @@ func handleBeNumerically(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.Ca return true } -func chooseNumericMatcher(exp *ast.CallExpr, handler gomegahandler.Handler, valExp *ast.BasicLit) { +func chooseNumericMatcher(pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, valExp ast.Expr) { caller := exp.Args[0].(*ast.CallExpr) - if valExp.Value == "0" { + if isZero(pass, valExp) { handler.ReplaceFunction(caller, ast.NewIdent(beEmpty)) exp.Args[0].(*ast.CallExpr).Args = nil } else { @@ -493,10 +861,15 @@ func reverseAssertionFuncLogic(exp *ast.CallExpr) { assertionFunc.Name = reverseassertion.ChangeAssertionLogic(assertionFunc.Name) } +func isNegativeAssertion(exp *ast.CallExpr) bool { + assertionFunc := exp.Fun.(*ast.SelectorExpr).Sel + return reverseassertion.IsNegativeLogic(assertionFunc.Name) +} + func handleEqualMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, oldExp string) { equalTo, ok := matcher.Args[0].(*ast.BasicLit) if ok { - chooseNumericMatcher(exp, handler, equalTo) + chooseNumericMatcher(pass, exp, handler, equalTo) } else { handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveLen)) exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{matcher.Args[0]} @@ -552,6 +925,7 @@ func handleNilComparisonErr(pass *analysis.Pass, exp *ast.CallExpr, nilable ast. return newFuncName, isItError } + func isAssertionFunc(name string) bool { switch name { case "To", "ToNot", "NotTo", "Should", "ShouldNot": @@ -561,13 +935,15 @@ func isAssertionFunc(name string) bool { } func reportLengthAssertion(pass *analysis.Pass, expr *ast.CallExpr, handler gomegahandler.Handler, oldExpr string) { - replaceLenActualArg(expr.Fun.(*ast.SelectorExpr).X.(*ast.CallExpr), handler) + actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr)) + replaceLenActualArg(actualExpr, handler) report(pass, expr, wrongLengthWarningTemplate, oldExpr) } func reportNilAssertion(pass *analysis.Pass, expr *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, oldExpr string, isItError bool) { - changed := replaceNilActualArg(expr.Fun.(*ast.SelectorExpr).X.(*ast.CallExpr), handler, nilable) + actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr)) + changed := replaceNilActualArg(actualExpr, handler, nilable) if !changed { return } @@ -603,6 +979,17 @@ func report(pass *analysis.Pass, expr *ast.CallExpr, messageTemplate, oldExpr st }) } +func reportNoFix(pass *analysis.Pass, pos token.Pos, message string, args ...any) { + if len(args) > 0 { + message = fmt.Sprintf(message, args...) 
+ } + + pass.Report(analysis.Diagnostic{ + Pos: pos, + Message: message, + }) +} + func getNilableFromComparison(actualArg ast.Expr) (ast.Expr, token.Token) { bin, ok := actualArg.(*ast.BinaryExpr) if !ok { @@ -625,6 +1012,44 @@ func isNil(expr ast.Expr) bool { return ok && nilObject.Name == "nil" && nilObject.Obj == nil } +func isComparison(pass *analysis.Pass, actualArg ast.Expr) (ast.Expr, ast.Expr, token.Token, bool) { + bin, ok := actualArg.(*ast.BinaryExpr) + if !ok { + return nil, nil, token.ILLEGAL, false + } + + first, second, op := bin.X, bin.Y, bin.Op + replace := false + switch realFirst := first.(type) { + case *ast.Ident: // check if const + info, ok := pass.TypesInfo.Types[realFirst] + if ok { + if _, ok := info.Type.(*gotypes.Basic); ok && info.Value != nil { + replace = true + } + } + + case *ast.BasicLit: + replace = true + } + + if replace { + first, second = second, first + } + + switch op { + case token.EQL: + case token.NEQ: + case token.GTR, token.GEQ, token.LSS, token.LEQ: + if replace { + op = reverseassertion.ChangeCompareOperator(op) + } + default: + return nil, nil, token.ILLEGAL, false + } + return first, second, op, true +} + func goFmt(fset *token.FileSet, x ast.Expr) string { var b bytes.Buffer _ = printer.Fprint(&b, fset, x) @@ -660,3 +1085,48 @@ func isExprError(pass *analysis.Pass, expr ast.Expr) bool { } return false } + +func isPointer(pass *analysis.Pass, expr ast.Expr) bool { + t := pass.TypesInfo.TypeOf(expr) + _, ok := t.(*gotypes.Pointer) + return ok +} + +func isInterface(pass *analysis.Pass, expr ast.Expr) bool { + t := pass.TypesInfo.TypeOf(expr) + _, ok := t.(*gotypes.Named) + return ok +} + +func isNumeric(pass *analysis.Pass, node ast.Expr) bool { + t := pass.TypesInfo.TypeOf(node) + + switch t.String() { + case "int", "uint", "int8", "uint8", "int16", "uint16", "int32", "uint32", "int64", "uint64", "float32", "float64": + return true + } + return false +} + +func checkNoAssertion(pass *analysis.Pass, expr *ast.CallExpr, handler gomegahandler.Handler) { + funcName, ok := handler.GetActualFuncName(expr) + if ok { + if isActualFunc(funcName) { + reportNoFix(pass, expr.Pos(), missingAssertionMessage, funcName) + } else if isActualAsyncFunc(funcName) { + reportNoFix(pass, expr.Pos(), missingAsyncAssertionMessage, funcName) + } + } +} + +func isActualFunc(name string) bool { + return name == expect || name == expectWithOffset +} + +func isActualAsyncFunc(name string) bool { + switch name { + case eventually, eventuallyWithOffset, consistently, consistentlyWithOffset: + return true + } + return false +} diff --git a/tools/vendor/github.com/nunnatsa/ginkgolinter/gomegahandler/handler.go b/tools/vendor/github.com/nunnatsa/ginkgolinter/gomegahandler/handler.go index d57ba29136..1e8765148c 100644 --- a/tools/vendor/github.com/nunnatsa/ginkgolinter/gomegahandler/handler.go +++ b/tools/vendor/github.com/nunnatsa/ginkgolinter/gomegahandler/handler.go @@ -16,6 +16,8 @@ type Handler interface { getDefFuncName(expr *ast.CallExpr) string getFieldType(field *ast.Field) string + + GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr } // GetGomegaHandler returns a gomegar handler according to the way gomega was imported in the specific file @@ -50,7 +52,13 @@ func (h dotHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) { case *ast.SelectorExpr: if isGomegaVar(actualFunc.X, h) { return actualFunc.Sel.Name, true + } else { + if x, ok := actualFunc.X.(*ast.CallExpr); ok { + return h.GetActualFuncName(x) + } } + case *ast.CallExpr: + return 
h.GetActualFuncName(actualFunc) } return "", false } @@ -93,18 +101,21 @@ func (g nameHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) { return "", false } - x, ok := selector.X.(*ast.Ident) - if !ok { - return "", false - } - - if x.Name != string(g) { - if !isGomegaVar(x, g) { - return "", false + switch x := selector.X.(type) { + case *ast.Ident: + if x.Name != string(g) { + if !isGomegaVar(x, g) { + return "", false + } } + + return selector.Sel.Name, true + + case *ast.CallExpr: + return g.GetActualFuncName(x) } - return selector.Sel.Name, true + return "", false } // ReplaceFunction replaces the function with another one, for fix suggestions @@ -165,3 +176,56 @@ func isGomegaVar(x ast.Expr, handler Handler) bool { } return false } + +func (h dotHandler) GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr { + actualExpr, ok := assertionFunc.X.(*ast.CallExpr) + if !ok { + return nil + } + + switch fun := actualExpr.Fun.(type) { + case *ast.Ident: + return actualExpr + case *ast.SelectorExpr: + if isHelperMethods(fun.Sel.Name) { + return h.GetActualExpr(fun) + } + if isGomegaVar(fun.X, h) { + return actualExpr + } + } + return nil +} + +func (g nameHandler) GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr { + actualExpr, ok := assertionFunc.X.(*ast.CallExpr) + if !ok { + return nil + } + + switch fun := actualExpr.Fun.(type) { + case *ast.Ident: + return actualExpr + case *ast.SelectorExpr: + if x, ok := fun.X.(*ast.Ident); ok && x.Name == string(g) { + return actualExpr + } + if isHelperMethods(fun.Sel.Name) { + return g.GetActualExpr(fun) + } + + if isGomegaVar(fun.X, g) { + return actualExpr + } + } + return nil +} + +func isHelperMethods(funcName string) bool { + switch funcName { + case "WithOffset", "WithTimeout", "WithPolling", "Within", "ProbeEvery", "WithContext", "WithArguments", "MustPassRepeatedly": + return true + } + + return false +} diff --git a/tools/vendor/github.com/nunnatsa/ginkgolinter/reverseassertion/reverse_assertion.go b/tools/vendor/github.com/nunnatsa/ginkgolinter/reverseassertion/reverse_assertion.go index 42c914e503..1dbd898106 100644 --- a/tools/vendor/github.com/nunnatsa/ginkgolinter/reverseassertion/reverse_assertion.go +++ b/tools/vendor/github.com/nunnatsa/ginkgolinter/reverseassertion/reverse_assertion.go @@ -1,5 +1,7 @@ package reverseassertion +import "go/token" + var reverseLogicAssertions = map[string]string{ "To": "ToNot", "ToNot": "To", @@ -15,3 +17,26 @@ func ChangeAssertionLogic(funcName string) string { } return funcName } + +func IsNegativeLogic(funcName string) bool { + switch funcName { + case "ToNot", "NotTo", "ShouldNot": + return true + } + return false +} + +var reverseCompareOperators = map[token.Token]token.Token{ + token.LSS: token.GTR, + token.GTR: token.LSS, + token.LEQ: token.GEQ, + token.GEQ: token.LEQ, +} + +// ChangeCompareOperator return the reversed comparison operator +func ChangeCompareOperator(op token.Token) token.Token { + if revOp, ok := reverseCompareOperators[op]; ok { + return revOp + } + return op +} diff --git a/tools/vendor/github.com/nunnatsa/ginkgolinter/types/config.go b/tools/vendor/github.com/nunnatsa/ginkgolinter/types/config.go new file mode 100644 index 0000000000..43145d8471 --- /dev/null +++ b/tools/vendor/github.com/nunnatsa/ginkgolinter/types/config.go @@ -0,0 +1,77 @@ +package types + +import ( + "strings" + + "go/ast" +) + +const ( + suppressPrefix = "ginkgo-linter:" + suppressLengthAssertionWarning = suppressPrefix + "ignore-len-assert-warning" + 
suppressNilAssertionWarning = suppressPrefix + "ignore-nil-assert-warning" + suppressErrAssertionWarning = suppressPrefix + "ignore-err-assert-warning" + suppressCompareAssertionWarning = suppressPrefix + "ignore-compare-assert-warning" + suppressAsyncAsertWarning = suppressPrefix + "ignore-async-assert-warning" +) + +type Config struct { + SuppressLen Boolean + SuppressNil Boolean + SuppressErr Boolean + SuppressCompare Boolean + SuppressAsync Boolean + AllowHaveLen0 Boolean +} + +func (s *Config) AllTrue() bool { + return bool(s.SuppressLen && s.SuppressNil && s.SuppressErr && s.SuppressCompare && s.SuppressAsync) +} + +func (s *Config) Clone() Config { + return Config{ + SuppressLen: s.SuppressLen, + SuppressNil: s.SuppressNil, + SuppressErr: s.SuppressErr, + SuppressCompare: s.SuppressCompare, + SuppressAsync: s.SuppressAsync, + AllowHaveLen0: s.AllowHaveLen0, + } +} + +func (s *Config) UpdateFromComment(commentGroup []*ast.CommentGroup) { + for _, cmntList := range commentGroup { + if s.AllTrue() { + break + } + + for _, cmnt := range cmntList.List { + commentLines := strings.Split(cmnt.Text, "\n") + for _, comment := range commentLines { + comment = strings.TrimPrefix(comment, "//") + comment = strings.TrimPrefix(comment, "/*") + comment = strings.TrimSuffix(comment, "*/") + comment = strings.TrimSpace(comment) + + s.SuppressLen = s.SuppressLen || (comment == suppressLengthAssertionWarning) + s.SuppressNil = s.SuppressNil || (comment == suppressNilAssertionWarning) + s.SuppressErr = s.SuppressErr || (comment == suppressErrAssertionWarning) + s.SuppressCompare = s.SuppressCompare || (comment == suppressCompareAssertionWarning) + s.SuppressAsync = s.SuppressAsync || (comment == suppressAsyncAsertWarning) + } + } + } +} + +func (s *Config) UpdateFromFile(cm ast.CommentMap) { + + for key, commentGroup := range cm { + if s.AllTrue() { + break + } + + if _, ok := key.(*ast.GenDecl); ok { + s.UpdateFromComment(commentGroup) + } + } +} diff --git a/tools/vendor/github.com/nunnatsa/ginkgolinter/types/suppress.go b/tools/vendor/github.com/nunnatsa/ginkgolinter/types/suppress.go deleted file mode 100644 index a703eb2d38..0000000000 --- a/tools/vendor/github.com/nunnatsa/ginkgolinter/types/suppress.go +++ /dev/null @@ -1,67 +0,0 @@ -package types - -import ( - "strings" - - "go/ast" -) - -const ( - suppressPrefix = "ginkgo-linter:" - suppressLengthAssertionWarning = suppressPrefix + "ignore-len-assert-warning" - suppressNilAssertionWarning = suppressPrefix + "ignore-nil-assert-warning" - suppressErrAssertionWarning = suppressPrefix + "ignore-err-assert-warning" -) - -type Suppress struct { - Len Boolean - Nil Boolean - Err Boolean -} - -func (s Suppress) AllTrue() bool { - return bool(s.Len && s.Nil && s.Err) -} - -func (s Suppress) Clone() Suppress { - return Suppress{ - Len: s.Len, - Nil: s.Nil, - Err: s.Err, - } -} - -func (s *Suppress) UpdateFromComment(commentGroup []*ast.CommentGroup) { - for _, cmntList := range commentGroup { - if s.AllTrue() { - break - } - - for _, cmnt := range cmntList.List { - commentLines := strings.Split(cmnt.Text, "\n") - for _, comment := range commentLines { - comment = strings.TrimPrefix(comment, "//") - comment = strings.TrimPrefix(comment, "/*") - comment = strings.TrimSuffix(comment, "*/") - comment = strings.TrimSpace(comment) - - s.Len = s.Len || (comment == suppressLengthAssertionWarning) - s.Nil = s.Nil || (comment == suppressNilAssertionWarning) - s.Err = s.Err || (comment == suppressErrAssertionWarning) - } - } - } -} - -func (s *Suppress) 
UpdateFromFile(cm ast.CommentMap) { - - for key, commentGroup := range cm { - if s.AllTrue() { - break - } - - if _, ok := key.(*ast.GenDecl); ok { - s.UpdateFromComment(commentGroup) - } - } -} diff --git a/tools/vendor/github.com/nunnatsa/ginkgolinter/version/version.go b/tools/vendor/github.com/nunnatsa/ginkgolinter/version/version.go new file mode 100644 index 0000000000..7bf181a8e8 --- /dev/null +++ b/tools/vendor/github.com/nunnatsa/ginkgolinter/version/version.go @@ -0,0 +1,14 @@ +package version + +var ( + version = "unknown" + gitHash = "unknown" +) + +func Version() string { + return version +} + +func GitHash() string { + return gitHash +} diff --git a/tools/vendor/github.com/pkg/errors/.gitignore b/tools/vendor/github.com/pkg/errors/.gitignore deleted file mode 100644 index daf913b1b3..0000000000 --- a/tools/vendor/github.com/pkg/errors/.gitignore +++ /dev/null @@ -1,24 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe -*.test -*.prof diff --git a/tools/vendor/github.com/pkg/errors/.travis.yml b/tools/vendor/github.com/pkg/errors/.travis.yml deleted file mode 100644 index 9159de03e0..0000000000 --- a/tools/vendor/github.com/pkg/errors/.travis.yml +++ /dev/null @@ -1,10 +0,0 @@ -language: go -go_import_path: github.com/pkg/errors -go: - - 1.11.x - - 1.12.x - - 1.13.x - - tip - -script: - - make check diff --git a/tools/vendor/github.com/pkg/errors/LICENSE b/tools/vendor/github.com/pkg/errors/LICENSE deleted file mode 100644 index 835ba3e755..0000000000 --- a/tools/vendor/github.com/pkg/errors/LICENSE +++ /dev/null @@ -1,23 +0,0 @@ -Copyright (c) 2015, Dave Cheney -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
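A note on the ginkgolinter additions above, before the rest of the github.com/pkg/errors removal: the new types/config.go recognises `ginkgo-linter:`-prefixed suppression comments and widens the set with ignore-compare-assert-warning and ignore-async-assert-warning. The sketch below shows what such a directive could look like in a hypothetical test file; the package name, imports and exact placement are illustrative assumptions, and UpdateFromComment only matches when the trimmed comment text equals the directive exactly.

```go
package demo_test

import (
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

// ginkgo-linter:ignore-len-assert-warning
var _ = Describe("suppression sketch", func() {
	It("keeps a plain len() assertion", func() {
		items := []string{"a", "b", "c"}
		// Without the directive above, ginkgolinter would normally suggest HaveLen(3).
		Expect(len(items)).To(Equal(3))
	})
})
```

The older ignore-len/nil/err directives from the deleted suppress.go keep their meaning; config.go carries them over alongside the new ones.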
diff --git a/tools/vendor/github.com/pkg/errors/Makefile b/tools/vendor/github.com/pkg/errors/Makefile deleted file mode 100644 index ce9d7cded6..0000000000 --- a/tools/vendor/github.com/pkg/errors/Makefile +++ /dev/null @@ -1,44 +0,0 @@ -PKGS := github.com/pkg/errors -SRCDIRS := $(shell go list -f '{{.Dir}}' $(PKGS)) -GO := go - -check: test vet gofmt misspell unconvert staticcheck ineffassign unparam - -test: - $(GO) test $(PKGS) - -vet: | test - $(GO) vet $(PKGS) - -staticcheck: - $(GO) get honnef.co/go/tools/cmd/staticcheck - staticcheck -checks all $(PKGS) - -misspell: - $(GO) get github.com/client9/misspell/cmd/misspell - misspell \ - -locale GB \ - -error \ - *.md *.go - -unconvert: - $(GO) get github.com/mdempsky/unconvert - unconvert -v $(PKGS) - -ineffassign: - $(GO) get github.com/gordonklaus/ineffassign - find $(SRCDIRS) -name '*.go' | xargs ineffassign - -pedantic: check errcheck - -unparam: - $(GO) get mvdan.cc/unparam - unparam ./... - -errcheck: - $(GO) get github.com/kisielk/errcheck - errcheck $(PKGS) - -gofmt: - @echo Checking code is gofmted - @test -z "$(shell gofmt -s -l -d -e $(SRCDIRS) | tee /dev/stderr)" diff --git a/tools/vendor/github.com/pkg/errors/README.md b/tools/vendor/github.com/pkg/errors/README.md deleted file mode 100644 index 54dfdcb12e..0000000000 --- a/tools/vendor/github.com/pkg/errors/README.md +++ /dev/null @@ -1,59 +0,0 @@ -# errors [![Travis-CI](https://travis-ci.org/pkg/errors.svg)](https://travis-ci.org/pkg/errors) [![AppVeyor](https://ci.appveyor.com/api/projects/status/b98mptawhudj53ep/branch/master?svg=true)](https://ci.appveyor.com/project/davecheney/errors/branch/master) [![GoDoc](https://godoc.org/github.com/pkg/errors?status.svg)](http://godoc.org/github.com/pkg/errors) [![Report card](https://goreportcard.com/badge/github.com/pkg/errors)](https://goreportcard.com/report/github.com/pkg/errors) [![Sourcegraph](https://sourcegraph.com/github.com/pkg/errors/-/badge.svg)](https://sourcegraph.com/github.com/pkg/errors?badge) - -Package errors provides simple error handling primitives. - -`go get github.com/pkg/errors` - -The traditional error handling idiom in Go is roughly akin to -```go -if err != nil { - return err -} -``` -which applied recursively up the call stack results in error reports without context or debugging information. The errors package allows programmers to add context to the failure path in their code in a way that does not destroy the original value of the error. - -## Adding context to an error - -The errors.Wrap function returns a new error that adds context to the original error. For example -```go -_, err := ioutil.ReadAll(r) -if err != nil { - return errors.Wrap(err, "read failed") -} -``` -## Retrieving the cause of an error - -Using `errors.Wrap` constructs a stack of errors, adding context to the preceding error. Depending on the nature of the error it may be necessary to reverse the operation of errors.Wrap to retrieve the original error for inspection. Any error value which implements this interface can be inspected by `errors.Cause`. -```go -type causer interface { - Cause() error -} -``` -`errors.Cause` will recursively retrieve the topmost error which does not implement `causer`, which is assumed to be the original cause. For example: -```go -switch err := errors.Cause(err).(type) { -case *MyError: - // handle specifically -default: - // unknown error -} -``` - -[Read the package documentation for more information](https://godoc.org/github.com/pkg/errors). 
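The Wrap/Cause idioms this (now deleted) README describes map directly onto the standard library, which is consistent with dropping github.com/pkg/errors from the vendor tree. A stdlib-only sketch of the equivalent pattern, not part of the patch:

```go
package main

import (
	"errors"
	"fmt"
	"os"
)

// readConfig shows the stdlib counterpart of errors.Wrap: annotate with context
// while keeping the original error reachable through the %w verb.
func readConfig(path string) error {
	if _, err := os.ReadFile(path); err != nil {
		return fmt.Errorf("read config: %w", err)
	}
	return nil
}

func main() {
	err := readConfig("/does/not/exist")

	// errors.Cause plus a type switch becomes errors.Is / errors.As.
	fmt.Println(errors.Is(err, os.ErrNotExist)) // true

	var pathErr *os.PathError
	if errors.As(err, &pathErr) {
		fmt.Println("failing path:", pathErr.Path)
	}
}
```

errors.Is walks the %w chain, so the added context does not hide os.ErrNotExist, much as errors.Wrap kept the cause reachable for errors.Cause.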
- -## Roadmap - -With the upcoming [Go2 error proposals](https://go.googlesource.com/proposal/+/master/design/go2draft.md) this package is moving into maintenance mode. The roadmap for a 1.0 release is as follows: - -- 0.9. Remove pre Go 1.9 and Go 1.10 support, address outstanding pull requests (if possible) -- 1.0. Final release. - -## Contributing - -Because of the Go2 errors changes, this package is not accepting proposals for new functionality. With that said, we welcome pull requests, bug fixes and issue reports. - -Before sending a PR, please discuss your change by raising an issue. - -## License - -BSD-2-Clause diff --git a/tools/vendor/github.com/pkg/errors/appveyor.yml b/tools/vendor/github.com/pkg/errors/appveyor.yml deleted file mode 100644 index a932eade02..0000000000 --- a/tools/vendor/github.com/pkg/errors/appveyor.yml +++ /dev/null @@ -1,32 +0,0 @@ -version: build-{build}.{branch} - -clone_folder: C:\gopath\src\github.com\pkg\errors -shallow_clone: true # for startup speed - -environment: - GOPATH: C:\gopath - -platform: - - x64 - -# http://www.appveyor.com/docs/installed-software -install: - # some helpful output for debugging builds - - go version - - go env - # pre-installed MinGW at C:\MinGW is 32bit only - # but MSYS2 at C:\msys64 has mingw64 - - set PATH=C:\msys64\mingw64\bin;%PATH% - - gcc --version - - g++ --version - -build_script: - - go install -v ./... - -test_script: - - set PATH=C:\gopath\bin;%PATH% - - go test -v ./... - -#artifacts: -# - path: '%GOPATH%\bin\*.exe' -deploy: off diff --git a/tools/vendor/github.com/pkg/errors/errors.go b/tools/vendor/github.com/pkg/errors/errors.go deleted file mode 100644 index 161aea2582..0000000000 --- a/tools/vendor/github.com/pkg/errors/errors.go +++ /dev/null @@ -1,288 +0,0 @@ -// Package errors provides simple error handling primitives. -// -// The traditional error handling idiom in Go is roughly akin to -// -// if err != nil { -// return err -// } -// -// which when applied recursively up the call stack results in error reports -// without context or debugging information. The errors package allows -// programmers to add context to the failure path in their code in a way -// that does not destroy the original value of the error. -// -// Adding context to an error -// -// The errors.Wrap function returns a new error that adds context to the -// original error by recording a stack trace at the point Wrap is called, -// together with the supplied message. For example -// -// _, err := ioutil.ReadAll(r) -// if err != nil { -// return errors.Wrap(err, "read failed") -// } -// -// If additional control is required, the errors.WithStack and -// errors.WithMessage functions destructure errors.Wrap into its component -// operations: annotating an error with a stack trace and with a message, -// respectively. -// -// Retrieving the cause of an error -// -// Using errors.Wrap constructs a stack of errors, adding context to the -// preceding error. Depending on the nature of the error it may be necessary -// to reverse the operation of errors.Wrap to retrieve the original error -// for inspection. Any error value which implements this interface -// -// type causer interface { -// Cause() error -// } -// -// can be inspected by errors.Cause. errors.Cause will recursively retrieve -// the topmost error that does not implement causer, which is assumed to be -// the original cause. 
For example: -// -// switch err := errors.Cause(err).(type) { -// case *MyError: -// // handle specifically -// default: -// // unknown error -// } -// -// Although the causer interface is not exported by this package, it is -// considered a part of its stable public interface. -// -// Formatted printing of errors -// -// All error values returned from this package implement fmt.Formatter and can -// be formatted by the fmt package. The following verbs are supported: -// -// %s print the error. If the error has a Cause it will be -// printed recursively. -// %v see %s -// %+v extended format. Each Frame of the error's StackTrace will -// be printed in detail. -// -// Retrieving the stack trace of an error or wrapper -// -// New, Errorf, Wrap, and Wrapf record a stack trace at the point they are -// invoked. This information can be retrieved with the following interface: -// -// type stackTracer interface { -// StackTrace() errors.StackTrace -// } -// -// The returned errors.StackTrace type is defined as -// -// type StackTrace []Frame -// -// The Frame type represents a call site in the stack trace. Frame supports -// the fmt.Formatter interface that can be used for printing information about -// the stack trace of this error. For example: -// -// if err, ok := err.(stackTracer); ok { -// for _, f := range err.StackTrace() { -// fmt.Printf("%+s:%d\n", f, f) -// } -// } -// -// Although the stackTracer interface is not exported by this package, it is -// considered a part of its stable public interface. -// -// See the documentation for Frame.Format for more details. -package errors - -import ( - "fmt" - "io" -) - -// New returns an error with the supplied message. -// New also records the stack trace at the point it was called. -func New(message string) error { - return &fundamental{ - msg: message, - stack: callers(), - } -} - -// Errorf formats according to a format specifier and returns the string -// as a value that satisfies error. -// Errorf also records the stack trace at the point it was called. -func Errorf(format string, args ...interface{}) error { - return &fundamental{ - msg: fmt.Sprintf(format, args...), - stack: callers(), - } -} - -// fundamental is an error that has a message and a stack, but no caller. -type fundamental struct { - msg string - *stack -} - -func (f *fundamental) Error() string { return f.msg } - -func (f *fundamental) Format(s fmt.State, verb rune) { - switch verb { - case 'v': - if s.Flag('+') { - io.WriteString(s, f.msg) - f.stack.Format(s, verb) - return - } - fallthrough - case 's': - io.WriteString(s, f.msg) - case 'q': - fmt.Fprintf(s, "%q", f.msg) - } -} - -// WithStack annotates err with a stack trace at the point WithStack was called. -// If err is nil, WithStack returns nil. -func WithStack(err error) error { - if err == nil { - return nil - } - return &withStack{ - err, - callers(), - } -} - -type withStack struct { - error - *stack -} - -func (w *withStack) Cause() error { return w.error } - -// Unwrap provides compatibility for Go 1.13 error chains. -func (w *withStack) Unwrap() error { return w.error } - -func (w *withStack) Format(s fmt.State, verb rune) { - switch verb { - case 'v': - if s.Flag('+') { - fmt.Fprintf(s, "%+v", w.Cause()) - w.stack.Format(s, verb) - return - } - fallthrough - case 's': - io.WriteString(s, w.Error()) - case 'q': - fmt.Fprintf(s, "%q", w.Error()) - } -} - -// Wrap returns an error annotating err with a stack trace -// at the point Wrap is called, and the supplied message. -// If err is nil, Wrap returns nil. 
-func Wrap(err error, message string) error { - if err == nil { - return nil - } - err = &withMessage{ - cause: err, - msg: message, - } - return &withStack{ - err, - callers(), - } -} - -// Wrapf returns an error annotating err with a stack trace -// at the point Wrapf is called, and the format specifier. -// If err is nil, Wrapf returns nil. -func Wrapf(err error, format string, args ...interface{}) error { - if err == nil { - return nil - } - err = &withMessage{ - cause: err, - msg: fmt.Sprintf(format, args...), - } - return &withStack{ - err, - callers(), - } -} - -// WithMessage annotates err with a new message. -// If err is nil, WithMessage returns nil. -func WithMessage(err error, message string) error { - if err == nil { - return nil - } - return &withMessage{ - cause: err, - msg: message, - } -} - -// WithMessagef annotates err with the format specifier. -// If err is nil, WithMessagef returns nil. -func WithMessagef(err error, format string, args ...interface{}) error { - if err == nil { - return nil - } - return &withMessage{ - cause: err, - msg: fmt.Sprintf(format, args...), - } -} - -type withMessage struct { - cause error - msg string -} - -func (w *withMessage) Error() string { return w.msg + ": " + w.cause.Error() } -func (w *withMessage) Cause() error { return w.cause } - -// Unwrap provides compatibility for Go 1.13 error chains. -func (w *withMessage) Unwrap() error { return w.cause } - -func (w *withMessage) Format(s fmt.State, verb rune) { - switch verb { - case 'v': - if s.Flag('+') { - fmt.Fprintf(s, "%+v\n", w.Cause()) - io.WriteString(s, w.msg) - return - } - fallthrough - case 's', 'q': - io.WriteString(s, w.Error()) - } -} - -// Cause returns the underlying cause of the error, if possible. -// An error value has a cause if it implements the following -// interface: -// -// type causer interface { -// Cause() error -// } -// -// If the error does not implement Cause, the original error will -// be returned. If the error is nil, nil will be returned without further -// investigation. -func Cause(err error) error { - type causer interface { - Cause() error - } - - for err != nil { - cause, ok := err.(causer) - if !ok { - break - } - err = cause.Cause() - } - return err -} diff --git a/tools/vendor/github.com/pkg/errors/go113.go b/tools/vendor/github.com/pkg/errors/go113.go deleted file mode 100644 index be0d10d0c7..0000000000 --- a/tools/vendor/github.com/pkg/errors/go113.go +++ /dev/null @@ -1,38 +0,0 @@ -// +build go1.13 - -package errors - -import ( - stderrors "errors" -) - -// Is reports whether any error in err's chain matches target. -// -// The chain consists of err itself followed by the sequence of errors obtained by -// repeatedly calling Unwrap. -// -// An error is considered to match a target if it is equal to that target or if -// it implements a method Is(error) bool such that Is(target) returns true. -func Is(err, target error) bool { return stderrors.Is(err, target) } - -// As finds the first error in err's chain that matches target, and if so, sets -// target to that error value and returns true. -// -// The chain consists of err itself followed by the sequence of errors obtained by -// repeatedly calling Unwrap. -// -// An error matches target if the error's concrete value is assignable to the value -// pointed to by target, or if the error has a method As(interface{}) bool such that -// As(target) returns true. In the latter case, the As method is responsible for -// setting target. 
-// -// As will panic if target is not a non-nil pointer to either a type that implements -// error, or to any interface type. As returns false if err is nil. -func As(err error, target interface{}) bool { return stderrors.As(err, target) } - -// Unwrap returns the result of calling the Unwrap method on err, if err's -// type contains an Unwrap method returning error. -// Otherwise, Unwrap returns nil. -func Unwrap(err error) error { - return stderrors.Unwrap(err) -} diff --git a/tools/vendor/github.com/pkg/errors/stack.go b/tools/vendor/github.com/pkg/errors/stack.go deleted file mode 100644 index 779a8348fb..0000000000 --- a/tools/vendor/github.com/pkg/errors/stack.go +++ /dev/null @@ -1,177 +0,0 @@ -package errors - -import ( - "fmt" - "io" - "path" - "runtime" - "strconv" - "strings" -) - -// Frame represents a program counter inside a stack frame. -// For historical reasons if Frame is interpreted as a uintptr -// its value represents the program counter + 1. -type Frame uintptr - -// pc returns the program counter for this frame; -// multiple frames may have the same PC value. -func (f Frame) pc() uintptr { return uintptr(f) - 1 } - -// file returns the full path to the file that contains the -// function for this Frame's pc. -func (f Frame) file() string { - fn := runtime.FuncForPC(f.pc()) - if fn == nil { - return "unknown" - } - file, _ := fn.FileLine(f.pc()) - return file -} - -// line returns the line number of source code of the -// function for this Frame's pc. -func (f Frame) line() int { - fn := runtime.FuncForPC(f.pc()) - if fn == nil { - return 0 - } - _, line := fn.FileLine(f.pc()) - return line -} - -// name returns the name of this function, if known. -func (f Frame) name() string { - fn := runtime.FuncForPC(f.pc()) - if fn == nil { - return "unknown" - } - return fn.Name() -} - -// Format formats the frame according to the fmt.Formatter interface. -// -// %s source file -// %d source line -// %n function name -// %v equivalent to %s:%d -// -// Format accepts flags that alter the printing of some verbs, as follows: -// -// %+s function name and path of source file relative to the compile time -// GOPATH separated by \n\t (\n\t) -// %+v equivalent to %+s:%d -func (f Frame) Format(s fmt.State, verb rune) { - switch verb { - case 's': - switch { - case s.Flag('+'): - io.WriteString(s, f.name()) - io.WriteString(s, "\n\t") - io.WriteString(s, f.file()) - default: - io.WriteString(s, path.Base(f.file())) - } - case 'd': - io.WriteString(s, strconv.Itoa(f.line())) - case 'n': - io.WriteString(s, funcname(f.name())) - case 'v': - f.Format(s, 's') - io.WriteString(s, ":") - f.Format(s, 'd') - } -} - -// MarshalText formats a stacktrace Frame as a text string. The output is the -// same as that of fmt.Sprintf("%+v", f), but without newlines or tabs. -func (f Frame) MarshalText() ([]byte, error) { - name := f.name() - if name == "unknown" { - return []byte(name), nil - } - return []byte(fmt.Sprintf("%s %s:%d", name, f.file(), f.line())), nil -} - -// StackTrace is stack of Frames from innermost (newest) to outermost (oldest). -type StackTrace []Frame - -// Format formats the stack of Frames according to the fmt.Formatter interface. -// -// %s lists source files for each Frame in the stack -// %v lists the source file and line number for each Frame in the stack -// -// Format accepts flags that alter the printing of some verbs, as follows: -// -// %+v Prints filename, function, and line number for each Frame in the stack. 
-func (st StackTrace) Format(s fmt.State, verb rune) { - switch verb { - case 'v': - switch { - case s.Flag('+'): - for _, f := range st { - io.WriteString(s, "\n") - f.Format(s, verb) - } - case s.Flag('#'): - fmt.Fprintf(s, "%#v", []Frame(st)) - default: - st.formatSlice(s, verb) - } - case 's': - st.formatSlice(s, verb) - } -} - -// formatSlice will format this StackTrace into the given buffer as a slice of -// Frame, only valid when called with '%s' or '%v'. -func (st StackTrace) formatSlice(s fmt.State, verb rune) { - io.WriteString(s, "[") - for i, f := range st { - if i > 0 { - io.WriteString(s, " ") - } - f.Format(s, verb) - } - io.WriteString(s, "]") -} - -// stack represents a stack of program counters. -type stack []uintptr - -func (s *stack) Format(st fmt.State, verb rune) { - switch verb { - case 'v': - switch { - case st.Flag('+'): - for _, pc := range *s { - f := Frame(pc) - fmt.Fprintf(st, "\n%+v", f) - } - } - } -} - -func (s *stack) StackTrace() StackTrace { - f := make([]Frame, len(*s)) - for i := 0; i < len(f); i++ { - f[i] = Frame((*s)[i]) - } - return f -} - -func callers() *stack { - const depth = 32 - var pcs [depth]uintptr - n := runtime.Callers(3, pcs[:]) - var st stack = pcs[0:n] - return &st -} - -// funcname removes the path prefix component of a function's name reported by func.Name(). -func funcname(name string) string { - i := strings.LastIndex(name, "/") - name = name[i+1:] - i = strings.Index(name, ".") - return name[i+1:] -} diff --git a/tools/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go b/tools/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go index c9dcf5e551..366b5c6b0f 100644 --- a/tools/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go +++ b/tools/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go @@ -10,8 +10,8 @@ var allowedErrors = []struct { fun string }{ // pkg/archive/tar - {err: "io.EOF", fun: "(*tar.Reader).Next"}, - {err: "io.EOF", fun: "(*tar.Reader).Read"}, + {err: "io.EOF", fun: "(*archive/tar.Reader).Next"}, + {err: "io.EOF", fun: "(*archive/tar.Reader).Read"}, // pkg/bufio {err: "io.EOF", fun: "(*bufio.Reader).Discard"}, {err: "io.EOF", fun: "(*bufio.Reader).Peek"}, @@ -154,10 +154,10 @@ func assigningCallExprs(info *TypesInfoExt, subject *ast.Ident) []*ast.CallExpr assigningExpr := assignment.Rhs[0] // If the assignment is comprised of multiple expressions, find out - // which LHS expression we should use by finding its index in the LHS. - if len(assignment.Rhs) > 1 { + // which RHS expression we should use by finding its index in the LHS. 
+ if len(assignment.Lhs) == len(assignment.Rhs) { for i, lhs := range assignment.Lhs { - if subject.Name == lhs.(*ast.Ident).Name { + if ident, ok := lhs.(*ast.Ident); ok && subject.Name == ident.Name { assigningExpr = assignment.Rhs[i] break } diff --git a/tools/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go b/tools/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go index ab02136f47..c65c4ee62b 100644 --- a/tools/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go +++ b/tools/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go @@ -19,20 +19,22 @@ func NewAnalyzer() *analysis.Analyzer { } var ( - flagSet flag.FlagSet - checkComparison bool - checkAsserts bool - checkErrorf bool + flagSet flag.FlagSet + checkComparison bool + checkAsserts bool + checkErrorf bool + checkErrorfMulti bool ) func init() { flagSet.BoolVar(&checkComparison, "comparison", true, "Check for plain error comparisons") flagSet.BoolVar(&checkAsserts, "asserts", true, "Check for plain type assertions and type switches") flagSet.BoolVar(&checkErrorf, "errorf", false, "Check whether fmt.Errorf uses the %w verb for formatting errors. See the readme for caveats") + flagSet.BoolVar(&checkErrorfMulti, "errorf-multi", true, "Permit more than 1 %w verb, valid per Go 1.20 (Requires -errorf=true)") } func run(pass *analysis.Pass) (interface{}, error) { - lints := []Lint{} + lints := []analysis.Diagnostic{} extInfo := newTypesInfoExt(pass.TypesInfo) if checkComparison { l := LintErrorComparisons(pass.Fset, extInfo) @@ -43,13 +45,13 @@ func run(pass *analysis.Pass) (interface{}, error) { lints = append(lints, l...) } if checkErrorf { - l := LintFmtErrorfCalls(pass.Fset, *pass.TypesInfo) + l := LintFmtErrorfCalls(pass.Fset, *pass.TypesInfo, checkErrorfMulti) lints = append(lints, l...) } sort.Sort(ByPosition(lints)) for _, l := range lints { - pass.Report(analysis.Diagnostic{Pos: l.Pos, Message: l.Message}) + pass.Report(l) } return nil, nil } diff --git a/tools/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go b/tools/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go index b9ebe6efe1..920dc56e79 100644 --- a/tools/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go +++ b/tools/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go @@ -6,14 +6,11 @@ import ( "go/constant" "go/token" "go/types" -) -type Lint struct { - Message string - Pos token.Pos -} + "golang.org/x/tools/go/analysis" +) -type ByPosition []Lint +type ByPosition []analysis.Diagnostic func (l ByPosition) Len() int { return len(l) } func (l ByPosition) Swap(i, j int) { l[i], l[j] = l[j], l[i] } @@ -22,8 +19,8 @@ func (l ByPosition) Less(i, j int) bool { return l[i].Pos < l[j].Pos } -func LintFmtErrorfCalls(fset *token.FileSet, info types.Info) []Lint { - lints := []Lint{} +func LintFmtErrorfCalls(fset *token.FileSet, info types.Info, multipleWraps bool) []analysis.Diagnostic { + lints := []analysis.Diagnostic{} for expr, t := range info.Types { // Search for error expressions that are the result of fmt.Errorf // invocations. @@ -42,52 +39,87 @@ func LintFmtErrorfCalls(fset *token.FileSet, info types.Info) []Lint { } // For any arguments that are errors, check whether the wrapping verb is used. %w may occur - // for multiple errors in one Errorf invocation. We raise an issue if at least one error - // does not have a corresponding wrapping verb. - var lintArg ast.Expr + // for multiple errors in one Errorf invocation, unless multipleWraps is true. 
We raise an + // issue if at least one error does not have a corresponding wrapping verb. args := call.Args[1:] - for i := 0; i < len(args) && i < len(formatVerbs); i++ { - if !implementsError(info.Types[args[i]].Type) && !isErrorStringCall(info, args[i]) { - continue - } + if !multipleWraps { + wrapCount := 0 + for i := 0; i < len(args) && i < len(formatVerbs); i++ { + arg := args[i] + if !implementsError(info.Types[arg].Type) { + continue + } + verb := formatVerbs[i] + + if verb.format == "w" { + wrapCount++ + if wrapCount > 1 { + lints = append(lints, analysis.Diagnostic{ + Message: "only one %w verb is permitted per format string", + Pos: arg.Pos(), + }) + break + } + } - if formatVerbs[i] == "w" { - continue + if wrapCount == 0 { + lints = append(lints, analysis.Diagnostic{ + Message: "non-wrapping format verb for fmt.Errorf. Use `%w` to format errors", + Pos: args[i].Pos(), + }) + break + } } - if lintArg == nil { - lintArg = args[i] - break - } - } - if lintArg != nil { - lints = append(lints, Lint{ - Message: "non-wrapping format verb for fmt.Errorf. Use `%w` to format errors", - Pos: lintArg.Pos(), - }) - } - } - return lints -} + } else { + var lint *analysis.Diagnostic + argIndex := 0 + for _, verb := range formatVerbs { + if verb.index != -1 { + argIndex = verb.index + } else { + argIndex++ + } + + if verb.format == "w" { + continue + } + if argIndex-1 >= len(args) { + continue + } + arg := args[argIndex-1] + if !implementsError(info.Types[arg].Type) { + continue + } -// isErrorStringCall tests whether the expression is a string expression that -// is the result of an `(error).Error()` method call. -func isErrorStringCall(info types.Info, expr ast.Expr) bool { - if info.Types[expr].Type.String() == "string" { - if call, ok := expr.(*ast.CallExpr); ok { - if callSel, ok := call.Fun.(*ast.SelectorExpr); ok { - fun := info.Uses[callSel.Sel].(*types.Func) - return fun.Type().String() == "func() string" && fun.Name() == "Error" + strStart := call.Args[0].Pos() + if lint == nil { + lint = &analysis.Diagnostic{ + Message: "non-wrapping format verb for fmt.Errorf. Use `%w` to format errors", + Pos: arg.Pos(), + } + } + lint.SuggestedFixes = append(lint.SuggestedFixes, analysis.SuggestedFix{ + Message: "Use `%w` to format errors", + TextEdits: []analysis.TextEdit{{ + Pos: strStart + token.Pos(verb.formatOffset) + 1, + End: strStart + token.Pos(verb.formatOffset) + 2, + NewText: []byte("w"), + }}, + }) + } + if lint != nil { + lints = append(lints, *lint) } } } - return false + return lints } // printfFormatStringVerbs returns a normalized list of all the verbs that are used per argument to // the printf function. The index of each returned element corresponds to the index of the // respective argument. 
-func printfFormatStringVerbs(info types.Info, call *ast.CallExpr) ([]string, bool) { +func printfFormatStringVerbs(info types.Info, call *ast.CallExpr) ([]verb, bool) { if len(call.Args) <= 1 { return nil, false } @@ -103,18 +135,8 @@ func printfFormatStringVerbs(info types.Info, call *ast.CallExpr) ([]string, boo if err != nil { return nil, false } - orderedVerbs := verbOrder(verbs, len(call.Args)-1) - - resolvedVerbs := make([]string, len(orderedVerbs)) - for i, vv := range orderedVerbs { - for _, v := range vv { - resolvedVerbs[i] = v.format - if v.format == "w" { - break - } - } - } - return resolvedVerbs, true + + return verbs, true } func isFmtErrorfCallExpr(info types.Info, expr ast.Expr) (*ast.CallExpr, bool) { @@ -136,8 +158,8 @@ func isFmtErrorfCallExpr(info types.Info, expr ast.Expr) (*ast.CallExpr, bool) { return nil, false } -func LintErrorComparisons(fset *token.FileSet, info *TypesInfoExt) []Lint { - lints := []Lint{} +func LintErrorComparisons(fset *token.FileSet, info *TypesInfoExt) []analysis.Diagnostic { + lints := []analysis.Diagnostic{} for expr := range info.Types { // Find == and != operations. @@ -165,7 +187,7 @@ func LintErrorComparisons(fset *token.FileSet, info *TypesInfoExt) []Lint { continue } - lints = append(lints, Lint{ + lints = append(lints, analysis.Diagnostic{ Message: fmt.Sprintf("comparing with %s will fail on wrapped errors. Use errors.Is to check for a specific error", binExpr.Op), Pos: binExpr.Pos(), }) @@ -190,7 +212,7 @@ func LintErrorComparisons(fset *token.FileSet, info *TypesInfoExt) []Lint { } if switchComparesNonNil(switchStmt) { - lints = append(lints, Lint{ + lints = append(lints, analysis.Diagnostic{ Message: "switch on an error will fail on wrapped errors. Use errors.Is to check for specific errors", Pos: switchStmt.Pos(), }) @@ -266,8 +288,8 @@ func switchComparesNonNil(switchStmt *ast.SwitchStmt) bool { return false } -func LintErrorTypeAssertions(fset *token.FileSet, info types.Info) []Lint { - lints := []Lint{} +func LintErrorTypeAssertions(fset *token.FileSet, info types.Info) []analysis.Diagnostic { + lints := []analysis.Diagnostic{} for expr := range info.Types { // Find type assertions. @@ -281,7 +303,7 @@ func LintErrorTypeAssertions(fset *token.FileSet, info types.Info) []Lint { continue } - lints = append(lints, Lint{ + lints = append(lints, analysis.Diagnostic{ Message: "type assertion on error will fail on wrapped errors. Use errors.As to check for specific errors", Pos: typeAssert.Pos(), }) @@ -308,7 +330,7 @@ func LintErrorTypeAssertions(fset *token.FileSet, info types.Info) []Lint { continue } - lints = append(lints, Lint{ + lints = append(lints, analysis.Diagnostic{ Message: "type switch on error will fail on wrapped errors. 
Use errors.As to check for specific errors", Pos: typeAssert.Pos(), }) diff --git a/tools/vendor/github.com/polyfloyd/go-errorlint/errorlint/printf.go b/tools/vendor/github.com/polyfloyd/go-errorlint/errorlint/printf.go index d9a935ff2b..9737525924 100644 --- a/tools/vendor/github.com/polyfloyd/go-errorlint/errorlint/printf.go +++ b/tools/vendor/github.com/polyfloyd/go-errorlint/errorlint/printf.go @@ -25,12 +25,14 @@ func verbOrder(verbs []verb, numArgs int) [][]verb { } type verb struct { - format string - index int + format string + formatOffset int + index int } type printfParser struct { str string + at int } func (pp *printfParser) ParseAllVerbs() ([]verb, error) { @@ -80,7 +82,7 @@ func (pp *printfParser) parseVerb() (*verb, error) { format := pp.next() - return &verb{format: string(format), index: index}, nil + return &verb{format: string(format), formatOffset: pp.at - 1, index: index}, nil } func (pp *printfParser) parseIndex() (int, error) { @@ -96,6 +98,7 @@ func (pp *printfParser) parseIndex() (int, error) { return -1, err } pp.str = pp.str[end+1:] + pp.at += end + 1 return index, nil } @@ -114,6 +117,7 @@ func (pp *printfParser) skipToPercent() error { return io.EOF } pp.str = pp.str[i:] + pp.at += i return nil } @@ -130,5 +134,6 @@ func (pp *printfParser) next() rune { } r := rune(pp.str[0]) pp.str = pp.str[1:] + pp.at++ return r } diff --git a/tools/vendor/github.com/quasilyte/regex/syntax/README.md b/tools/vendor/github.com/quasilyte/regex/syntax/README.md index 13064ec39a..b70e25ad96 100644 --- a/tools/vendor/github.com/quasilyte/regex/syntax/README.md +++ b/tools/vendor/github.com/quasilyte/regex/syntax/README.md @@ -4,19 +4,17 @@ Package `syntax` provides regular expressions parser as well as AST definitions. ## Rationale -There are several problems with the stdlib [regexp/syntax](https://golang.org/pkg/regexp/syntax/) package: +The advantages of this package over stdlib [regexp/syntax](https://golang.org/pkg/regexp/syntax/): -1. It does several transformations during the parsing that make it - hard to do any kind of syntax analysis afterward. +1. Does not transformations/optimizations during the parsing. + The produced parse tree is loseless. -2. The AST used there is optimized for the compilation and - execution inside the [regexp](https://golang.org/pkg/regexp) package. - It's somewhat complicated, especially in a way character ranges are encoded. +2. Simpler AST representation. -3. It only supports [re2](https://github.com/google/re2/wiki/Syntax) syntax. - This parser recognizes most PCRE operations. +3. Can parse most PCRE operations in addition to [re2](https://github.com/google/re2/wiki/Syntax) syntax. + It can also handle PHP/Perl style patterns with delimiters. -4. It's easier to extend this package than something from the standard library. +4. This package is easier to extend than something from the standard library. This package does almost no assumptions about how generated AST is going to be used so it preserves as much syntax information as possible. @@ -24,3 +22,8 @@ so it preserves as much syntax information as possible. It's easy to write another intermediate representation on top of it. The main function of this package is to convert a textual regexp pattern into a more structured form that can be processed more easily. 
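Returning to the go-errorlint changes that end just above: the new errorf-multi flag (default true, only meaningful together with -errorf) accepts format strings that wrap several errors, in line with fmt.Errorf in Go 1.20. A stdlib-only illustration of what is now permitted, not part of the patch:

```go
package main

import (
	"errors"
	"fmt"
)

var (
	errAuth = errors.New("auth failed")
	errConn = errors.New("connection reset")
)

func main() {
	// Since Go 1.20, fmt.Errorf may wrap more than one error via multiple %w verbs;
	// with -errorf-multi=true go-errorlint no longer reports this pattern.
	err := fmt.Errorf("login: %w (network: %w)", errAuth, errConn)

	fmt.Println(errors.Is(err, errAuth)) // true
	fmt.Println(errors.Is(err, errConn)) // true
}
```

With -errorf-multi=false the pre-1.20 rule applies and the second %w triggers the "only one %w verb is permitted per format string" diagnostic added in lint.go above.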
+ +## Users + +* [go-critic](https://github.com/go-critic/go-critic) - Go static analyzer +* [NoVerify](https://github.com/VKCOM/noverify) - PHP static analyzer diff --git a/tools/vendor/github.com/quasilyte/regex/syntax/ast.go b/tools/vendor/github.com/quasilyte/regex/syntax/ast.go index 44b7b61bb3..4d21a9432b 100644 --- a/tools/vendor/github.com/quasilyte/regex/syntax/ast.go +++ b/tools/vendor/github.com/quasilyte/regex/syntax/ast.go @@ -1,7 +1,6 @@ package syntax import ( - "fmt" "strings" ) @@ -63,85 +62,3 @@ func (e Expr) LastArg() Expr { type Operation byte type Form byte - -func FormatSyntax(re *Regexp) string { - return formatExprSyntax(re, re.Expr) -} - -func formatExprSyntax(re *Regexp, e Expr) string { - switch e.Op { - case OpChar, OpLiteral: - switch e.Value { - case "{": - return "'{'" - case "}": - return "'}'" - default: - return e.Value - } - case OpString, OpEscapeChar, OpEscapeMeta, OpEscapeOctal, OpEscapeUni, OpEscapeHex, OpPosixClass: - return e.Value - case OpRepeat: - return fmt.Sprintf("(repeat %s %s)", formatExprSyntax(re, e.Args[0]), e.Args[1].Value) - case OpCaret: - return "^" - case OpDollar: - return "$" - case OpDot: - return "." - case OpQuote: - return fmt.Sprintf("(q %s)", e.Value) - case OpCharRange: - return fmt.Sprintf("%s-%s", formatExprSyntax(re, e.Args[0]), formatExprSyntax(re, e.Args[1])) - case OpCharClass: - return fmt.Sprintf("[%s]", formatArgsSyntax(re, e.Args)) - case OpNegCharClass: - return fmt.Sprintf("[^%s]", formatArgsSyntax(re, e.Args)) - case OpConcat: - return fmt.Sprintf("{%s}", formatArgsSyntax(re, e.Args)) - case OpAlt: - return fmt.Sprintf("(or %s)", formatArgsSyntax(re, e.Args)) - case OpCapture: - return fmt.Sprintf("(capture %s)", formatExprSyntax(re, e.Args[0])) - case OpNamedCapture: - return fmt.Sprintf("(capture %s %s)", formatExprSyntax(re, e.Args[0]), e.Args[1].Value) - case OpGroup: - return fmt.Sprintf("(group %s)", formatExprSyntax(re, e.Args[0])) - case OpAtomicGroup: - return fmt.Sprintf("(atomic %s)", formatExprSyntax(re, e.Args[0])) - case OpGroupWithFlags: - return fmt.Sprintf("(group %s ?%s)", formatExprSyntax(re, e.Args[0]), e.Args[1].Value) - case OpFlagOnlyGroup: - return fmt.Sprintf("(flags ?%s)", formatExprSyntax(re, e.Args[0])) - case OpPositiveLookahead: - return fmt.Sprintf("(?= %s)", formatExprSyntax(re, e.Args[0])) - case OpNegativeLookahead: - return fmt.Sprintf("(?! 
%s)", formatExprSyntax(re, e.Args[0])) - case OpPositiveLookbehind: - return fmt.Sprintf("(?<= %s)", formatExprSyntax(re, e.Args[0])) - case OpNegativeLookbehind: - return fmt.Sprintf("(?", e.Op) - } -} - -func formatArgsSyntax(re *Regexp, args []Expr) string { - parts := make([]string, len(args)) - for i, e := range args { - parts[i] = formatExprSyntax(re, e) - } - return strings.Join(parts, " ") -} diff --git a/tools/vendor/github.com/quasilyte/regex/syntax/errors.go b/tools/vendor/github.com/quasilyte/regex/syntax/errors.go index cfafc1d0e8..beefba5f9c 100644 --- a/tools/vendor/github.com/quasilyte/regex/syntax/errors.go +++ b/tools/vendor/github.com/quasilyte/regex/syntax/errors.go @@ -1,9 +1,5 @@ package syntax -import ( - "fmt" -) - type ParseError struct { Pos Position Message string @@ -11,17 +7,21 @@ type ParseError struct { func (e ParseError) Error() string { return e.Message } -func throwfPos(pos Position, format string, args ...interface{}) { - panic(ParseError{ - Pos: pos, - Message: fmt.Sprintf(format, args...), - }) +func throw(pos Position, message string) { + panic(ParseError{Pos: pos, Message: message}) +} + +func throwExpectedFound(pos Position, expected, found string) { + throw(pos, "expected '"+expected+"', found '"+found+"'") +} + +func throwUnexpectedToken(pos Position, token string) { + throw(pos, "unexpected token: "+token) } -func throwErrorf(posBegin, posEnd int, format string, args ...interface{}) { - pos := Position{ - Begin: uint16(posBegin), - End: uint16(posEnd), +func newPos(begin, end int) Position { + return Position{ + Begin: uint16(begin), + End: uint16(end), } - throwfPos(pos, format, args...) } diff --git a/tools/vendor/github.com/quasilyte/regex/syntax/lexer.go b/tools/vendor/github.com/quasilyte/regex/syntax/lexer.go index e92b038c20..aae146c2e6 100644 --- a/tools/vendor/github.com/quasilyte/regex/syntax/lexer.go +++ b/tools/vendor/github.com/quasilyte/regex/syntax/lexer.go @@ -2,7 +2,6 @@ package syntax import ( "strings" - "unicode" "unicode/utf8" ) @@ -111,7 +110,7 @@ func (l *lexer) Peek() token { func (l *lexer) scan() { for l.pos < len(l.input) { ch := l.input[l.pos] - if ch > unicode.MaxASCII { + if ch >= utf8.RuneSelf { _, size := utf8.DecodeRuneInString(l.input[l.pos:]) l.pushTok(tokChar, size) l.maybeInsertConcat() @@ -161,7 +160,7 @@ func (l *lexer) scan() { } else if l.tryScanGroupName(l.pos + 2) { } else if l.tryScanGroupFlags(l.pos + 2) { } else { - throwErrorf(l.pos, l.pos+1, "group token is incomplete") + throw(newPos(l.pos, l.pos+1), "group token is incomplete") } } } else { @@ -190,7 +189,7 @@ func (l *lexer) scanCharClass() { for l.pos < len(l.input) { ch := l.input[l.pos] - if ch > unicode.MaxASCII { + if ch >= utf8.RuneSelf { _, size := utf8.DecodeRuneInString(l.input[l.pos:]) l.pushTok(tokChar, size) continue @@ -224,17 +223,17 @@ func (l *lexer) scanCharClass() { func (l *lexer) scanEscape(insideCharClass bool) { s := l.input if l.pos+1 >= len(s) { - throwErrorf(l.pos, l.pos+1, `unexpected end of pattern: trailing '\'`) + throw(newPos(l.pos, l.pos+1), `unexpected end of pattern: trailing '\'`) } switch { case s[l.pos+1] == 'p' || s[l.pos+1] == 'P': if l.pos+2 >= len(s) { - throwErrorf(l.pos, l.pos+2, "unexpected end of pattern: expected uni-class-short or '{'") + throw(newPos(l.pos, l.pos+2), "unexpected end of pattern: expected uni-class-short or '{'") } if s[l.pos+2] == '{' { j := strings.IndexByte(s[l.pos+2:], '}') if j < 0 { - throwErrorf(l.pos, l.pos+2, "can't find closing '}'") + throw(newPos(l.pos, l.pos+2), "can't 
find closing '}'") } l.pushTok(tokEscapeUniFull, len(`\p{`)+j) } else { @@ -242,12 +241,12 @@ func (l *lexer) scanEscape(insideCharClass bool) { } case s[l.pos+1] == 'x': if l.pos+2 >= len(s) { - throwErrorf(l.pos, l.pos+2, "unexpected end of pattern: expected hex-digit or '{'") + throw(newPos(l.pos, l.pos+2), "unexpected end of pattern: expected hex-digit or '{'") } if s[l.pos+2] == '{' { j := strings.IndexByte(s[l.pos+2:], '}') if j < 0 { - throwErrorf(l.pos, l.pos+2, "can't find closing '}'") + throw(newPos(l.pos, l.pos+2), "can't find closing '}'") } l.pushTok(tokEscapeHexFull, len(`\x{`)+j) } else { @@ -277,7 +276,7 @@ func (l *lexer) scanEscape(insideCharClass bool) { default: ch := l.byteAt(l.pos + 1) - if ch > unicode.MaxASCII { + if ch >= utf8.RuneSelf { _, size := utf8.DecodeRuneInString(l.input[l.pos+1:]) l.pushTok(tokEscapeChar, len(`\`)+size) return diff --git a/tools/vendor/github.com/quasilyte/regex/syntax/operation.go b/tools/vendor/github.com/quasilyte/regex/syntax/operation.go index 284e5dc5b4..0fc8fc521a 100644 --- a/tools/vendor/github.com/quasilyte/regex/syntax/operation.go +++ b/tools/vendor/github.com/quasilyte/regex/syntax/operation.go @@ -65,46 +65,51 @@ const ( // OpQuote is a \Q...\E enclosed literal. // Examples: `\Q.?\E` `\Q?q[]=1` - // - // Note that closing \E is not mandatory. + // FormQuoteUnclosed: `\Qabc` + // Args[0] - literal value (OpString) OpQuote // OpEscapeChar is a single char escape. // Examples: `\d` `\a` `\n` + // Args[0] - escaped value (OpString) OpEscapeChar // OpEscapeMeta is an escaped meta char. // Examples: `\(` `\[` `\+` + // Args[0] - escaped value (OpString) OpEscapeMeta // OpEscapeOctal is an octal char code escape (up to 3 digits). // Examples: `\123` `\12` + // Args[0] - escaped value (OpString) OpEscapeOctal // OpEscapeHex is a hex char code escape. // Examples: `\x7F` `\xF7` // FormEscapeHexFull examples: `\x{10FFFF}` `\x{F}`. + // Args[0] - escaped value (OpString) OpEscapeHex // OpEscapeUni is a Unicode char class escape. // Examples: `\pS` `\pL` `\PL` // FormEscapeUniFull examples: `\p{Greek}` `\p{Symbol}` `\p{^L}` + // Args[0] - escaped value (OpString) OpEscapeUni // OpCharClass is a char class enclosed in []. // Examples: `[abc]` `[a-z0-9\]]` - // Args - char class elements (can include OpCharRange and OpPosixClass). + // Args - char class elements (can include OpCharRange and OpPosixClass) OpCharClass // OpNegCharClass is a negated char class enclosed in []. // Examples: `[^abc]` `[^a-z0-9\]]` - // Args - char class elements (can include OpCharRange and OpPosixClass). + // Args - char class elements (can include OpCharRange and OpPosixClass) OpNegCharClass // OpCharRange is an inclusive char range inside a char class. // Examples: `0-9` `A-Z` - // Args[0] - range lower bound (OpChar or OpEscape). - // Args[1] - range upper bound (OpChar or OpEscape). + // Args[0] - range lower bound + // Args[1] - range upper bound OpCharRange // OpPosixClass is a named ASCII char set inside a char class. 
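The operation docs just above, together with the README rationale earlier, describe a lossless tree in which escape and quote nodes now carry their literal payload as an OpString argument. Below is a rough sketch of parsing a pattern and walking that tree; the exported NewParser/Parse entry points and the empty ParserOptions are assumptions to verify against the vendored sources, while the Expr fields Op, Value and Args are taken from ast.go.

```go
package main

import (
	"fmt"

	"github.com/quasilyte/regex/syntax"
)

// walk prints every node of the parse tree; Expr exposes Op, Value and Args
// directly, so a quick inspection needs no visitor machinery.
func walk(e syntax.Expr, depth int) {
	fmt.Printf("%*sop=%d value=%q args=%d\n", depth*2, "", e.Op, e.Value, len(e.Args))
	for _, arg := range e.Args {
		walk(arg, depth+1)
	}
}

func main() {
	// Assumed entry points: NewParser(*ParserOptions) and (*Parser).Parse(string).
	p := syntax.NewParser(&syntax.ParserOptions{})
	re, err := p.Parse(`\x{7F}[a-z]+`)
	if err != nil {
		panic(err)
	}
	walk(re.Expr, 0)
}
```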
@@ -186,4 +191,5 @@ const ( FormEscapeUniFull FormNamedCaptureAngle FormNamedCaptureQuote + FormQuoteUnclosed ) diff --git a/tools/vendor/github.com/quasilyte/regex/syntax/parser.go b/tools/vendor/github.com/quasilyte/regex/syntax/parser.go index faf0f8b212..f1c154f315 100644 --- a/tools/vendor/github.com/quasilyte/regex/syntax/parser.go +++ b/tools/vendor/github.com/quasilyte/regex/syntax/parser.go @@ -2,7 +2,6 @@ package syntax import ( "errors" - "fmt" "strings" ) @@ -94,13 +93,39 @@ func newParser(opts *ParserOptions) *Parser { } } + p.prefixParselets[tokQ] = func(tok token) *Expr { + litPos := tok.pos + litPos.Begin += uint16(len(`\Q`)) + form := FormQuoteUnclosed + if strings.HasSuffix(p.tokenValue(tok), `\E`) { + litPos.End -= uint16(len(`\E`)) + form = FormDefault + } + lit := p.newExpr(OpString, litPos) + return p.newExprForm(OpQuote, form, tok.pos, lit) + } + p.prefixParselets[tokEscapeHexFull] = func(tok token) *Expr { - return p.newExprForm(OpEscapeHex, FormEscapeHexFull, tok.pos) + litPos := tok.pos + litPos.Begin += uint16(len(`\x{`)) + litPos.End -= uint16(len(`}`)) + lit := p.newExpr(OpString, litPos) + return p.newExprForm(OpEscapeHex, FormEscapeHexFull, tok.pos, lit) } p.prefixParselets[tokEscapeUniFull] = func(tok token) *Expr { - return p.newExprForm(OpEscapeUni, FormEscapeUniFull, tok.pos) + litPos := tok.pos + litPos.Begin += uint16(len(`\p{`)) + litPos.End -= uint16(len(`}`)) + lit := p.newExpr(OpString, litPos) + return p.newExprForm(OpEscapeUni, FormEscapeUniFull, tok.pos, lit) } + p.prefixParselets[tokEscapeHex] = func(tok token) *Expr { return p.parseEscape(OpEscapeHex, `\x`, tok) } + p.prefixParselets[tokEscapeOctal] = func(tok token) *Expr { return p.parseEscape(OpEscapeOctal, `\`, tok) } + p.prefixParselets[tokEscapeChar] = func(tok token) *Expr { return p.parseEscape(OpEscapeChar, `\`, tok) } + p.prefixParselets[tokEscapeMeta] = func(tok token) *Expr { return p.parseEscape(OpEscapeMeta, `\`, tok) } + p.prefixParselets[tokEscapeUni] = func(tok token) *Expr { return p.parseEscape(OpEscapeUni, `\p`, tok) } + p.prefixParselets[tokLparen] = func(tok token) *Expr { return p.parseGroup(OpCapture, tok) } p.prefixParselets[tokLparenAtomic] = func(tok token) *Expr { return p.parseGroup(OpAtomicGroup, tok) } p.prefixParselets[tokLparenPositiveLookahead] = func(tok token) *Expr { return p.parseGroup(OpPositiveLookahead, tok) } @@ -163,6 +188,10 @@ func (p *Parser) setValues(e *Expr) { e.Value = p.exprValue(e) } +func (p *Parser) tokenValue(tok token) string { + return p.out.Pattern[tok.pos.Begin:tok.pos.End] +} + func (p *Parser) exprValue(e *Expr) string { return p.out.Pattern[e.Begin():e.End()] } @@ -239,7 +268,7 @@ func (p *Parser) allocExpr() *Expr { func (p *Parser) expect(kind tokenKind) Position { tok := p.lexer.NextToken() if tok.kind != kind { - throwErrorf(int(tok.pos.Begin), int(tok.pos.End), "expected '%s', found '%s'", kind, tok.kind) + throwExpectedFound(tok.pos, kind.String(), tok.kind.String()) } return tok.pos } @@ -248,7 +277,7 @@ func (p *Parser) parseExpr(precedence int) *Expr { tok := p.lexer.NextToken() prefix := p.prefixParselets[tok.kind] if prefix == nil { - throwfPos(tok.pos, "unexpected token: %v", tok) + throwUnexpectedToken(tok.pos, tok.String()) } left := prefix(tok) @@ -277,7 +306,7 @@ func (p *Parser) parseCharClass(op Operation, tok token) *Expr { break } if next.kind == tokNone { - throwfPos(tok.pos, "unterminated '['") + throw(tok.pos, "unterminated '['") } } @@ -400,6 +429,13 @@ func (p *Parser) parseGroupWithFlags(tok token) *Expr { 
return result } +func (p *Parser) parseEscape(op Operation, prefix string, tok token) *Expr { + litPos := tok.pos + litPos.Begin += uint16(len(prefix)) + lit := p.newExpr(OpString, litPos) + return p.newExpr(op, tok.pos, lit) +} + func (p *Parser) precedenceOf(tok token) int { switch tok.kind { case tokPipe: @@ -436,36 +472,32 @@ func (p *Parser) newPCRE(source string) (*RegexpPCRE, error) { return nil, errors.New("whitespace is not a valid delimiter") } if isAlphanumeric(delim) { - return nil, fmt.Errorf("'%c' is not a valid delimiter", delim) + return nil, errors.New("'" + string(delim) + "' is not a valid delimiter") } } - j := strings.LastIndexByte(source, endDelim) + const delimLen = 1 + j := strings.LastIndexByte(source[delimLen:], endDelim) if j == -1 { - return nil, fmt.Errorf("can't find '%c' ending delimiter", endDelim) + return nil, errors.New("can't find '" + string(endDelim) + "' ending delimiter") } + j += delimLen pcre := &RegexpPCRE{ - Pattern: source[1:j], + Pattern: source[delimLen:j], Source: source, Delim: [2]byte{delim, endDelim}, - Modifiers: source[j+1:], + Modifiers: source[j+delimLen:], } return pcre, nil } var tok2op = [256]Operation{ - tokDollar: OpDollar, - tokCaret: OpCaret, - tokDot: OpDot, - tokChar: OpChar, - tokMinus: OpChar, - tokEscapeChar: OpEscapeChar, - tokEscapeMeta: OpEscapeMeta, - tokEscapeHex: OpEscapeHex, - tokEscapeOctal: OpEscapeOctal, - tokEscapeUni: OpEscapeUni, - tokPosixClass: OpPosixClass, - tokQ: OpQuote, - tokComment: OpComment, + tokDollar: OpDollar, + tokCaret: OpCaret, + tokDot: OpDot, + tokChar: OpChar, + tokMinus: OpChar, + tokPosixClass: OpPosixClass, + tokComment: OpComment, } diff --git a/tools/vendor/github.com/quasilyte/regex/syntax/utils.go b/tools/vendor/github.com/quasilyte/regex/syntax/utils.go index 934680c8ba..e5b6548254 100644 --- a/tools/vendor/github.com/quasilyte/regex/syntax/utils.go +++ b/tools/vendor/github.com/quasilyte/regex/syntax/utils.go @@ -2,7 +2,7 @@ package syntax func isSpace(ch byte) bool { switch ch { - case '\r', '\n', '\t', '\f', '\v': + case '\r', '\n', '\t', '\f', '\v', ' ': return true default: return false diff --git a/tools/vendor/github.com/securego/gosec/v2/.golangci.yml b/tools/vendor/github.com/securego/gosec/v2/.golangci.yml index 64e4e4515c..b12140a25f 100644 --- a/tools/vendor/github.com/securego/gosec/v2/.golangci.yml +++ b/tools/vendor/github.com/securego/gosec/v2/.golangci.yml @@ -1,33 +1,41 @@ linters: enable: - - asciicheck - - bodyclose - - deadcode - - depguard - - dogsled - - durationcheck - - errcheck - - errorlint - - exportloopref - - gci - - gofmt - - gofumpt - - goimports - - gosec - - gosimple - - govet - - importas - - ineffassign - - megacheck - - misspell - - nakedret - - nolintlint - - revive - - staticcheck - - structcheck - - typecheck - - unconvert - - unparam - - unused - - varcheck - - wastedassign + - asciicheck + - bodyclose + - depguard + - dogsled + - durationcheck + - errcheck + - errorlint + - exportloopref + - gci + - ginkgolinter + - gofmt + - gofumpt + - goimports + - gosec + - gosimple + - govet + - importas + - ineffassign + - megacheck + - misspell + - nakedret + - nolintlint + - revive + - staticcheck + - typecheck + - unconvert + - unparam + - unused + - wastedassign + +linters-settings: + gci: + sections: + - standard + - default + - prefix(github.com/securego) + +run: + timeout: 5m diff --git a/tools/vendor/github.com/securego/gosec/v2/.goreleaser.yml b/tools/vendor/github.com/securego/gosec/v2/.goreleaser.yml index 25a81b5297..e3c903e7a7 100644 --- 
a/tools/vendor/github.com/securego/gosec/v2/.goreleaser.yml +++ b/tools/vendor/github.com/securego/gosec/v2/.goreleaser.yml @@ -26,6 +26,11 @@ builds: signs: - cmd: cosign stdin: '{{ .Env.COSIGN_PASSWORD}}' - args: ["sign-blob", "--key=/tmp/cosign.key", "--output=${signature}", "${artifact}"] + args: + - "sign-blob" + - "--key=/tmp/cosign.key" + - "--output=${signature}" + - "${artifact}" + - "--yes" artifacts: all diff --git a/tools/vendor/github.com/securego/gosec/v2/Dockerfile b/tools/vendor/github.com/securego/gosec/v2/Dockerfile index b57c981fb3..1bf94da7d7 100644 --- a/tools/vendor/github.com/securego/gosec/v2/Dockerfile +++ b/tools/vendor/github.com/securego/gosec/v2/Dockerfile @@ -1,11 +1,11 @@ ARG GO_VERSION FROM golang:${GO_VERSION}-alpine AS builder -RUN apk add --no-cache ca-certificates make git curl gcc libc-dev -RUN mkdir -p /build +RUN apk add --no-cache ca-certificates make git curl gcc libc-dev \ + && mkdir -p /build WORKDIR /build COPY . /build/ -RUN go mod download -RUN make build-linux +RUN go mod download \ + && make build-linux FROM golang:${GO_VERSION}-alpine RUN apk add --no-cache ca-certificates bash git gcc libc-dev openssh diff --git a/tools/vendor/github.com/securego/gosec/v2/Makefile b/tools/vendor/github.com/securego/gosec/v2/Makefile index 093c8a99c0..09303d11a0 100644 --- a/tools/vendor/github.com/securego/gosec/v2/Makefile +++ b/tools/vendor/github.com/securego/gosec/v2/Makefile @@ -87,4 +87,7 @@ image-push: image docker push $(IMAGE_REPO)/$(BIN):$(GIT_TAG) docker push $(IMAGE_REPO)/$(BIN):latest -.PHONY: test build clean release image image-push +tlsconfig: + go generate ./... + +.PHONY: test build clean release image image-push tlsconfig diff --git a/tools/vendor/github.com/securego/gosec/v2/README.md b/tools/vendor/github.com/securego/gosec/v2/README.md index bf7aa08875..71e032d808 100644 --- a/tools/vendor/github.com/securego/gosec/v2/README.md +++ b/tools/vendor/github.com/securego/gosec/v2/README.md @@ -68,7 +68,7 @@ jobs: GO111MODULE: on steps: - name: Checkout Source - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Run Gosec Security Scanner uses: securego/gosec@master with: @@ -98,7 +98,7 @@ jobs: GO111MODULE: on steps: - name: Checkout Source - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Run Gosec Security Scanner uses: securego/gosec@master with: @@ -157,7 +157,6 @@ directory you can supply `./...` as the input argument. - G304: File path provided as taint input - G305: File traversal when extracting zip/tar archive - G306: Poor file permissions used when writing to a new file -- G307: Deferring a method which returns an error - G401: Detect the usage of DES, RC4, MD5 or SHA1 - G402: Look for bad TLS connection settings - G403: Ensure minimum RSA key length of 2048 bits @@ -172,6 +171,7 @@ directory you can supply `./...` as the input argument. ### Retired rules - G105: Audit the use of math/big.Int.Exp - [CVE is fixed](https://github.com/golang/go/issues/15184) +- G307: Deferring a method which returns an error - causing more inconvenience than fixing a security issue, despite the details from this [blog post](https://www.joeshaw.org/dont-defer-close-on-writable-files/) ### Selecting rules @@ -188,7 +188,7 @@ $ gosec -exclude=G303 ./... ### CWE Mapping -Every issue detected by `gosec` is mapped to a [CWE (Common Weakness Enumeration)](http://cwe.mitre.org/data/index.html) which describes in more generic terms the vulnerability. 
The exact mapping can be found [here](https://github.com/securego/gosec/blob/master/issue.go#L50). +Every issue detected by `gosec` is mapped to a [CWE (Common Weakness Enumeration)](http://cwe.mitre.org/data/index.html) which describes in more generic terms the vulnerability. The exact mapping can be found [here](https://github.com/securego/gosec/blob/master/issue/issue.go#L50). ### Configuration diff --git a/tools/vendor/github.com/securego/gosec/v2/action.yml b/tools/vendor/github.com/securego/gosec/v2/action.yml index aab6c8039d..0320f0c21a 100644 --- a/tools/vendor/github.com/securego/gosec/v2/action.yml +++ b/tools/vendor/github.com/securego/gosec/v2/action.yml @@ -10,7 +10,7 @@ inputs: runs: using: 'docker' - image: 'docker://securego/gosec' + image: 'docker://securego/gosec:2.15.0' args: - ${{ inputs.args }} diff --git a/tools/vendor/github.com/securego/gosec/v2/analyzer.go b/tools/vendor/github.com/securego/gosec/v2/analyzer.go index 5f778791e3..830d338e4f 100644 --- a/tools/vendor/github.com/securego/gosec/v2/analyzer.go +++ b/tools/vendor/github.com/securego/gosec/v2/analyzer.go @@ -31,6 +31,10 @@ import ( "strings" "sync" + "github.com/securego/gosec/v2/analyzers" + "github.com/securego/gosec/v2/issue" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" "golang.org/x/tools/go/packages" ) @@ -42,7 +46,10 @@ const LoadMode = packages.NeedName | packages.NeedTypes | packages.NeedTypesSizes | packages.NeedTypesInfo | - packages.NeedSyntax + packages.NeedSyntax | + packages.NeedModule | + packages.NeedEmbedFiles | + packages.NeedEmbedPatterns const externalSuppressionJustification = "Globally suppressed." @@ -60,12 +67,24 @@ type Context struct { Pkg *types.Package PkgFiles []*ast.File Root *ast.File - Config Config Imports *ImportTracker - Ignores []map[string][]SuppressionInfo + Config Config + Ignores []map[string][]issue.SuppressionInfo PassedValues map[string]interface{} } +// GetFileAtNodePos returns the file at the node position in the file set available in the context. +func (ctx *Context) GetFileAtNodePos(node ast.Node) *token.File { + return ctx.FileSet.File(node.Pos()) +} + +// NewIssue creates a new issue +func (ctx *Context) NewIssue(node ast.Node, ruleID, desc string, + severity, confidence issue.Score, +) *issue.Issue { + return issue.New(ctx.GetFileAtNodePos(node), node, ruleID, desc, severity, confidence) +} + // Metrics used when reporting information about a scanning run. type Metrics struct { NumFiles int `json:"files"` @@ -82,7 +101,7 @@ type Analyzer struct { context *Context config Config logger *log.Logger - issues []*Issue + issues []*issue.Issue stats *Metrics errors map[string][]Error // keys are file paths; values are the golang errors in those files tests bool @@ -90,13 +109,7 @@ type Analyzer struct { showIgnored bool trackSuppressions bool concurrency int -} - -// SuppressionInfo object is to record the kind and the justification that used -// to suppress violations. -type SuppressionInfo struct { - Kind string `json:"kind"` - Justification string `json:"justification"` + analyzerList []*analysis.Analyzer } // NewAnalyzer builds a new analyzer. 
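To make the refactor above concrete, here is a minimal hypothetical rule written against the new API: rules embed issue.MetaData instead of gosec.MetaData and report through the Context.NewIssue helper added above, which is the same pattern the vendored rule files later in this diff follow. The rule name, the call it matches, and its message are illustrative only and are not part of gosec.

```go
package rules

import (
	"go/ast"

	"github.com/securego/gosec/v2"
	"github.com/securego/gosec/v2/issue"
)

// exampleRule is a hypothetical rule; only the API shapes are taken from this diff.
type exampleRule struct {
	issue.MetaData
	calls gosec.CallList
}

func (r *exampleRule) ID() string { return r.MetaData.ID }

// Match satisfies the updated gosec.Rule interface, which now returns *issue.Issue.
func (r *exampleRule) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) {
	if node := r.calls.ContainsPkgCallExpr(n, c, false); node != nil {
		// Context.NewIssue looks up the token.File for the node and delegates to issue.New.
		return c.NewIssue(node, r.ID(), r.What, r.Severity, r.Confidence), nil
	}
	return nil, nil
}

// NewExampleRule wires the rule up the way the other constructors in this diff do.
func NewExampleRule(id string, _ gosec.Config) (gosec.Rule, []ast.Node) {
	calls := gosec.NewCallList()
	calls.Add("example.com/unsafe", "Do") // illustrative target package/function
	return &exampleRule{
		calls: calls,
		MetaData: issue.MetaData{
			ID:         id,
			Severity:   issue.Medium,
			Confidence: issue.High,
			What:       "Example finding reported via Context.NewIssue",
		},
	}, []ast.Node{(*ast.CallExpr)(nil)}
}
```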
@@ -119,13 +132,14 @@ func NewAnalyzer(conf Config, tests bool, excludeGenerated bool, trackSuppressio context: &Context{}, config: conf, logger: logger, - issues: make([]*Issue, 0, 16), + issues: make([]*issue.Issue, 0, 16), stats: &Metrics{}, errors: make(map[string][]Error), tests: tests, concurrency: concurrency, excludeGenerated: excludeGenerated, trackSuppressions: trackSuppressions, + analyzerList: analyzers.BuildDefaultAnalyzers(), } } @@ -216,7 +230,10 @@ func (gosec *Analyzer) Process(buildTags []string, packagePaths ...string) error wg.Wait() // wait for the goroutines to stop return fmt.Errorf("parsing errors in pkg %q: %w", pkg.Name, err) } - gosec.Check(pkg) + gosec.CheckRules(pkg) + if on, err := gosec.config.IsGlobalEnabled(SSA); err == nil && on { + gosec.CheckAnalyzers(pkg) + } } } } @@ -267,8 +284,8 @@ func (gosec *Analyzer) load(pkgPath string, conf *packages.Config) ([]*packages. return pkgs, nil } -// Check runs analysis on the given package -func (gosec *Analyzer) Check(pkg *packages.Package) { +// CheckRules runs analysis on the given package +func (gosec *Analyzer) CheckRules(pkg *packages.Package) { gosec.logger.Println("Checking package:", pkg.Name) for _, file := range pkg.Syntax { fp := pkg.Fset.File(file.Pos()) @@ -303,6 +320,70 @@ func (gosec *Analyzer) Check(pkg *packages.Package) { } } +// CheckAnalyzers runs analyzers on a given package +func (gosec *Analyzer) CheckAnalyzers(pkg *packages.Package) { + ssaPass := &analysis.Pass{ + Analyzer: buildssa.Analyzer, + Fset: pkg.Fset, + Files: pkg.Syntax, + OtherFiles: pkg.OtherFiles, + IgnoredFiles: pkg.IgnoredFiles, + Pkg: pkg.Types, + TypesInfo: pkg.TypesInfo, + TypesSizes: pkg.TypesSizes, + ResultOf: nil, + Report: nil, + ImportObjectFact: nil, + ExportObjectFact: nil, + ImportPackageFact: nil, + ExportPackageFact: nil, + AllObjectFacts: nil, + AllPackageFacts: nil, + } + ssaResult, err := ssaPass.Analyzer.Run(ssaPass) + if err != nil { + gosec.logger.Printf("Error running SSA analyser on package %q: %s", pkg.Name, err) + return + } + resultMap := map[*analysis.Analyzer]interface{}{ + buildssa.Analyzer: &analyzers.SSAAnalyzerResult{ + Config: gosec.Config(), + Logger: gosec.logger, + SSA: ssaResult.(*buildssa.SSA), + }, + } + for _, analyzer := range gosec.analyzerList { + pass := &analysis.Pass{ + Analyzer: analyzer, + Fset: pkg.Fset, + Files: pkg.Syntax, + OtherFiles: pkg.OtherFiles, + IgnoredFiles: pkg.IgnoredFiles, + Pkg: pkg.Types, + TypesInfo: pkg.TypesInfo, + TypesSizes: pkg.TypesSizes, + ResultOf: resultMap, + Report: func(d analysis.Diagnostic) {}, + ImportObjectFact: nil, + ExportObjectFact: nil, + ImportPackageFact: nil, + ExportPackageFact: nil, + AllObjectFacts: nil, + AllPackageFacts: nil, + } + result, err := pass.Analyzer.Run(pass) + if err != nil { + gosec.logger.Printf("Error running analyzer %s: %s\n", analyzer.Name, err) + continue + } + if result != nil { + if aissue, ok := result.(*issue.Issue); ok { + gosec.updateIssues(aissue, false, []issue.SuppressionInfo{}) + } + } + } +} + func isGeneratedFile(file *ast.File) bool { for _, comment := range file.Comments { for _, row := range comment.List { @@ -364,7 +445,7 @@ func (gosec *Analyzer) AppendError(file string, err error) { } // ignore a node (and sub-tree) if it is tagged with a nosec tag comment -func (gosec *Analyzer) ignore(n ast.Node) map[string]SuppressionInfo { +func (gosec *Analyzer) ignore(n ast.Node) map[string]issue.SuppressionInfo { if groups, ok := gosec.context.Comments[n]; ok && !gosec.ignoreNosec { // Checks if an 
alternative for #nosec is set and, if not, uses the default. @@ -401,13 +482,13 @@ func (gosec *Analyzer) ignore(n ast.Node) map[string]SuppressionInfo { re := regexp.MustCompile(`(G\d{3})`) matches := re.FindAllStringSubmatch(directive, -1) - suppression := SuppressionInfo{ + suppression := issue.SuppressionInfo{ Kind: "inSource", Justification: justification, } // Find the rule IDs to ignore. - ignores := make(map[string]SuppressionInfo) + ignores := make(map[string]issue.SuppressionInfo) for _, v := range matches { ignores[v[1]] = suppression } @@ -426,25 +507,42 @@ func (gosec *Analyzer) ignore(n ast.Node) map[string]SuppressionInfo { // Visit runs the gosec visitor logic over an AST created by parsing go code. // Rule methods added with AddRule will be invoked as necessary. func (gosec *Analyzer) Visit(n ast.Node) ast.Visitor { - // If we've reached the end of this branch, pop off the ignores stack. - if n == nil { - if len(gosec.context.Ignores) > 0 { - gosec.context.Ignores = gosec.context.Ignores[1:] - } + ignores, ok := gosec.updateIgnoredRules(n) + if !ok { return gosec } + + // Using ast.File instead of ast.ImportSpec, so that we can track all imports at once. switch i := n.(type) { case *ast.File: - // Using ast.File instead of ast.ImportSpec, so that we can track - // all imports at once. gosec.context.Imports.TrackFile(i) } + for _, rule := range gosec.ruleset.RegisteredFor(n) { + suppressions, ignored := gosec.updateSuppressions(rule.ID(), ignores) + issue, err := rule.Match(n, gosec.context) + if err != nil { + file, line := GetLocation(n, gosec.context) + file = path.Base(file) + gosec.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line) + } + gosec.updateIssues(issue, ignored, suppressions) + } + return gosec +} + +func (gosec *Analyzer) updateIgnoredRules(n ast.Node) (map[string][]issue.SuppressionInfo, bool) { + if n == nil { + if len(gosec.context.Ignores) > 0 { + gosec.context.Ignores = gosec.context.Ignores[1:] + } + return nil, false + } // Get any new rule exclusions. ignoredRules := gosec.ignore(n) // Now create the union of exclusions. - ignores := map[string][]SuppressionInfo{} + ignores := map[string][]issue.SuppressionInfo{} if len(gosec.context.Ignores) > 0 { for k, v := range gosec.context.Ignores[0] { ignores[k] = v @@ -456,59 +554,57 @@ func (gosec *Analyzer) Visit(n ast.Node) ast.Visitor { } // Push the new set onto the stack. - gosec.context.Ignores = append([]map[string][]SuppressionInfo{ignores}, gosec.context.Ignores...) + gosec.context.Ignores = append([]map[string][]issue.SuppressionInfo{ignores}, gosec.context.Ignores...) - for _, rule := range gosec.ruleset.RegisteredFor(n) { - // Check if all rules are ignored. - generalSuppressions, generalIgnored := ignores[aliasOfAllRules] - // Check if the specific rule is ignored - ruleSuppressions, ruleIgnored := ignores[rule.ID()] - - ignored := generalIgnored || ruleIgnored - suppressions := append(generalSuppressions, ruleSuppressions...) - - // Track external suppressions. 
- if gosec.ruleset.IsRuleSuppressed(rule.ID()) { - ignored = true - suppressions = append(suppressions, SuppressionInfo{ - Kind: "external", - Justification: externalSuppressionJustification, - }) - } + return ignores, true +} - issue, err := rule.Match(n, gosec.context) - if err != nil { - file, line := GetLocation(n, gosec.context) - file = path.Base(file) - gosec.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line) +func (gosec *Analyzer) updateSuppressions(id string, ignores map[string][]issue.SuppressionInfo) ([]issue.SuppressionInfo, bool) { + // Check if all rules are ignored. + generalSuppressions, generalIgnored := ignores[aliasOfAllRules] + // Check if the specific rule is ignored + ruleSuppressions, ruleIgnored := ignores[id] + + ignored := generalIgnored || ruleIgnored + suppressions := append(generalSuppressions, ruleSuppressions...) + + // Track external suppressions. + if gosec.ruleset.IsRuleSuppressed(id) { + ignored = true + suppressions = append(suppressions, issue.SuppressionInfo{ + Kind: "external", + Justification: externalSuppressionJustification, + }) + } + return suppressions, ignored +} + +func (gosec *Analyzer) updateIssues(issue *issue.Issue, ignored bool, suppressions []issue.SuppressionInfo) { + if issue != nil { + if gosec.showIgnored { + issue.NoSec = ignored } - if issue != nil { - if gosec.showIgnored { - issue.NoSec = ignored - } - if !ignored || !gosec.showIgnored { - gosec.stats.NumFound++ - } - if ignored && gosec.trackSuppressions { - issue.WithSuppressions(suppressions) - gosec.issues = append(gosec.issues, issue) - } else if !ignored || gosec.showIgnored || gosec.ignoreNosec { - gosec.issues = append(gosec.issues, issue) - } + if !ignored || !gosec.showIgnored { + gosec.stats.NumFound++ + } + if ignored && gosec.trackSuppressions { + issue.WithSuppressions(suppressions) + gosec.issues = append(gosec.issues, issue) + } else if !ignored || gosec.showIgnored || gosec.ignoreNosec { + gosec.issues = append(gosec.issues, issue) } } - return gosec } // Report returns the current issues discovered and the metrics about the scan -func (gosec *Analyzer) Report() ([]*Issue, *Metrics, map[string][]Error) { +func (gosec *Analyzer) Report() ([]*issue.Issue, *Metrics, map[string][]Error) { return gosec.issues, gosec.stats, gosec.errors } // Reset clears state such as context, issues and metrics from the configured analyzer func (gosec *Analyzer) Reset() { gosec.context = &Context{} - gosec.issues = make([]*Issue, 0, 16) + gosec.issues = make([]*issue.Issue, 0, 16) gosec.stats = &Metrics{} gosec.ruleset = NewRuleSet() } diff --git a/tools/vendor/github.com/securego/gosec/v2/analyzers/ssrf.go b/tools/vendor/github.com/securego/gosec/v2/analyzers/ssrf.go new file mode 100644 index 0000000000..a9dbd95008 --- /dev/null +++ b/tools/vendor/github.com/securego/gosec/v2/analyzers/ssrf.go @@ -0,0 +1,57 @@ +// (c) Copyright gosec's authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package analyzers + +import ( + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" + "golang.org/x/tools/go/ssa" + + "github.com/securego/gosec/v2/issue" +) + +func newSSRFAnalyzer(id string, description string) *analysis.Analyzer { + return &analysis.Analyzer{ + Name: id, + Doc: description, + Run: runSSRF, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + } +} + +func runSSRF(pass *analysis.Pass) (interface{}, error) { + ssaResult, err := getSSAResult(pass) + if err != nil { + return nil, err + } + // TODO: implement the analysis + for _, fn := range ssaResult.SSA.SrcFuncs { + for _, block := range fn.DomPreorder() { + for _, instr := range block.Instrs { + switch instr := instr.(type) { + case *ssa.Call: + callee := instr.Call.StaticCallee() + if callee != nil { + ssaResult.Logger.Printf("callee: %s\n", callee) + return newIssue(pass.Analyzer.Name, + "not implemeted", + pass.Fset, instr.Call.Pos(), issue.Low, issue.High), nil + } + } + } + } + } + return nil, nil +} diff --git a/tools/vendor/github.com/securego/gosec/v2/analyzers/util.go b/tools/vendor/github.com/securego/gosec/v2/analyzers/util.go new file mode 100644 index 0000000000..b090a3e454 --- /dev/null +++ b/tools/vendor/github.com/securego/gosec/v2/analyzers/util.go @@ -0,0 +1,98 @@ +// (c) Copyright gosec's authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package analyzers + +import ( + "fmt" + "go/token" + "log" + "os" + "strconv" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" + + "github.com/securego/gosec/v2/issue" +) + +// SSAAnalyzerResult contains various information returned by the +// SSA analysis along with some configuraion +type SSAAnalyzerResult struct { + Config map[string]interface{} + Logger *log.Logger + SSA *buildssa.SSA +} + +// BuildDefaultAnalyzers returns the default list of analyzers +func BuildDefaultAnalyzers() []*analysis.Analyzer { + return []*analysis.Analyzer{ + newSSRFAnalyzer("G107", "URL provided to HTTP request as taint input"), + } +} + +// getSSAResult retrives the SSA result from analysis pass +func getSSAResult(pass *analysis.Pass) (*SSAAnalyzerResult, error) { + result, ok := pass.ResultOf[buildssa.Analyzer] + if !ok { + return nil, fmt.Errorf("no SSA result found in the analysis pass") + } + ssaResult, ok := result.(*SSAAnalyzerResult) + if !ok { + return nil, fmt.Errorf("the analysis pass result is not of type SSA") + } + return ssaResult, nil +} + +// newIssue creates a new gosec issue +func newIssue(analyzerID string, desc string, fileSet *token.FileSet, + pos token.Pos, severity, confidence issue.Score, +) *issue.Issue { + file := fileSet.File(pos) + line := file.Line(pos) + col := file.Position(pos).Column + + return &issue.Issue{ + RuleID: analyzerID, + File: file.Name(), + Line: strconv.Itoa(line), + Col: strconv.Itoa(col), + Severity: severity, + Confidence: confidence, + What: desc, + Cwe: issue.GetCweByRule(analyzerID), + Code: issueCodeSnippet(fileSet, pos), + } +} + +func issueCodeSnippet(fileSet *token.FileSet, pos token.Pos) string { + file := fileSet.File(pos) + + start := (int64)(file.Line(pos)) + if start-issue.SnippetOffset > 0 { + start = start - issue.SnippetOffset + } + end := (int64)(file.Line(pos)) + end = end + issue.SnippetOffset + + var code string + if file, err := os.Open(file.Name()); err == nil { + defer file.Close() // #nosec + code, err = issue.CodeSnippet(file, start, end) + if err != nil { + return err.Error() + } + } + return code +} diff --git a/tools/vendor/github.com/securego/gosec/v2/config.go b/tools/vendor/github.com/securego/gosec/v2/config.go index 443d45f78b..ca4cf21757 100644 --- a/tools/vendor/github.com/securego/gosec/v2/config.go +++ b/tools/vendor/github.com/securego/gosec/v2/config.go @@ -29,6 +29,8 @@ const ( ExcludeRules GlobalOption = "exclude" // IncludeRules global option for should be load IncludeRules GlobalOption = "include" + // SSA global option to enable go analysis framework with SSA support + SSA GlobalOption = "ssa" ) // Config is used to provide configuration and customization to each of the rules. 
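The `ssa` global option introduced in config.go above is what gates the new CheckAnalyzers path in analyzer.go. Below is a minimal sketch of enabling it programmatically, assuming gosec's existing NewConfig and Config.SetGlobal helpers (which are not part of this diff); IsGlobalEnabled is the same check Analyzer.Process uses.

```go
package main

import (
	"fmt"

	"github.com/securego/gosec/v2"
)

func main() {
	conf := gosec.NewConfig()

	// Turn on the go/analysis + SSA based analyzers (e.g. the new G107 SSRF analyzer).
	conf.SetGlobal(gosec.SSA, "true")

	// Analyzer.Process runs CheckAnalyzers for a package only when this reports true.
	if on, err := conf.IsGlobalEnabled(gosec.SSA); err == nil && on {
		fmt.Println("SSA-backed analyzers enabled")
	}
}
```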
diff --git a/tools/vendor/github.com/securego/gosec/v2/helpers.go b/tools/vendor/github.com/securego/gosec/v2/helpers.go index 62ede05494..08b7893eb2 100644 --- a/tools/vendor/github.com/securego/gosec/v2/helpers.go +++ b/tools/vendor/github.com/securego/gosec/v2/helpers.go @@ -182,7 +182,7 @@ func GetCallInfo(n ast.Node, ctx *Context) (string, string, error) { } // GetCallStringArgsValues returns the values of strings arguments if they can be resolved -func GetCallStringArgsValues(n ast.Node, ctx *Context) []string { +func GetCallStringArgsValues(n ast.Node, _ *Context) []string { values := []string{} switch node := n.(type) { case *ast.CallExpr: diff --git a/tools/vendor/github.com/securego/gosec/v2/import_tracker.go b/tools/vendor/github.com/securego/gosec/v2/import_tracker.go index 30e7c009b8..7984e99f42 100644 --- a/tools/vendor/github.com/securego/gosec/v2/import_tracker.go +++ b/tools/vendor/github.com/securego/gosec/v2/import_tracker.go @@ -51,9 +51,7 @@ func (t *ImportTracker) TrackPackages(pkgs ...*types.Package) { func (t *ImportTracker) TrackImport(imported *ast.ImportSpec) { importPath := strings.Trim(imported.Path.Value, `"`) if imported.Name != nil { - if imported.Name.Name == "_" { - // Initialization only import - } else { + if imported.Name.Name != "_" { // Aliased import t.Imported[importPath] = append(t.Imported[importPath], imported.Name.String()) } diff --git a/tools/vendor/github.com/securego/gosec/v2/issue.go b/tools/vendor/github.com/securego/gosec/v2/issue/issue.go similarity index 87% rename from tools/vendor/github.com/securego/gosec/v2/issue.go rename to tools/vendor/github.com/securego/gosec/v2/issue/issue.go index d8faf4bf98..5bf00dec2d 100644 --- a/tools/vendor/github.com/securego/gosec/v2/issue.go +++ b/tools/vendor/github.com/securego/gosec/v2/issue/issue.go @@ -12,7 +12,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package gosec +package issue import ( "bufio" @@ -77,7 +77,6 @@ var ruleToCWE = map[string]string{ "G304": "22", "G305": "22", "G306": "276", - "G307": "703", "G401": "326", "G402": "295", "G403": "310", @@ -105,8 +104,15 @@ type Issue struct { Suppressions []SuppressionInfo `json:"suppressions"` // Suppression info of the issue } +// SuppressionInfo object is to record the kind and the justification that used +// to suppress violations. 
+type SuppressionInfo struct { + Kind string `json:"kind"` + Justification string `json:"justification"` +} + // FileLocation point out the file path and line number in file -func (i Issue) FileLocation() string { +func (i *Issue) FileLocation() string { return fmt.Sprintf("%s:%s", i.File, i.Line) } @@ -137,11 +143,8 @@ func (c Score) String() string { return "UNDEFINED" } -// codeSnippet extracts a code snippet based on the ast reference -func codeSnippet(file *os.File, start int64, end int64, n ast.Node) (string, error) { - if n == nil { - return "", fmt.Errorf("invalid AST node provided") - } +// CodeSnippet extracts a code snippet based on the ast reference +func CodeSnippet(file *os.File, start int64, end int64) (string, error) { var pos int64 var buf bytes.Buffer scanner := bufio.NewScanner(file) @@ -171,9 +174,8 @@ func codeSnippetEndLine(node ast.Node, fobj *token.File) int64 { return e + SnippetOffset } -// NewIssue creates a new Issue -func NewIssue(ctx *Context, node ast.Node, ruleID, desc string, severity Score, confidence Score) *Issue { - fobj := ctx.FileSet.File(node.Pos()) +// New creates a new Issue +func New(fobj *token.File, node ast.Node, ruleID, desc string, severity, confidence Score) *Issue { name := fobj.Name() start, end := fobj.Line(node.Pos()), fobj.Line(node.End()) line := strconv.Itoa(start) @@ -183,11 +185,14 @@ func NewIssue(ctx *Context, node ast.Node, ruleID, desc string, severity Score, col := strconv.Itoa(fobj.Position(node.Pos()).Column) var code string - if file, err := os.Open(fobj.Name()); err == nil { + if node == nil { + code = "invalid AST node provided" + } + if file, err := os.Open(fobj.Name()); err == nil && node != nil { defer file.Close() // #nosec s := codeSnippetStartLine(node, fobj) e := codeSnippetEndLine(node, fobj) - code, err = codeSnippet(file, s, e, node) + code, err = CodeSnippet(file, s, e) if err != nil { code = err.Error() } diff --git a/tools/vendor/github.com/securego/gosec/v2/report.go b/tools/vendor/github.com/securego/gosec/v2/report.go index 96b1466d58..4fdeea5206 100644 --- a/tools/vendor/github.com/securego/gosec/v2/report.go +++ b/tools/vendor/github.com/securego/gosec/v2/report.go @@ -1,15 +1,19 @@ package gosec +import ( + "github.com/securego/gosec/v2/issue" +) + // ReportInfo this is report information type ReportInfo struct { Errors map[string][]Error `json:"Golang errors"` - Issues []*Issue + Issues []*issue.Issue Stats *Metrics GosecVersion string } // NewReportInfo instantiate a ReportInfo -func NewReportInfo(issues []*Issue, metrics *Metrics, errors map[string][]Error) *ReportInfo { +func NewReportInfo(issues []*issue.Issue, metrics *Metrics, errors map[string][]Error) *ReportInfo { return &ReportInfo{ Errors: errors, Issues: issues, diff --git a/tools/vendor/github.com/securego/gosec/v2/resolve.go b/tools/vendor/github.com/securego/gosec/v2/resolve.go index cdc287e8e5..a201b8d32b 100644 --- a/tools/vendor/github.com/securego/gosec/v2/resolve.go +++ b/tools/vendor/github.com/securego/gosec/v2/resolve.go @@ -66,7 +66,7 @@ func resolveBinExpr(n *ast.BinaryExpr, c *Context) bool { return (TryResolve(n.X, c) && TryResolve(n.Y, c)) } -func resolveCallExpr(n *ast.CallExpr, c *Context) bool { +func resolveCallExpr(_ *ast.CallExpr, _ *Context) bool { // TODO(tkelsey): next step, full function resolution return false } diff --git a/tools/vendor/github.com/securego/gosec/v2/rule.go b/tools/vendor/github.com/securego/gosec/v2/rule.go index c0429c4c23..5e973b6acf 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rule.go 
+++ b/tools/vendor/github.com/securego/gosec/v2/rule.go @@ -15,12 +15,14 @@ package gosec import ( "go/ast" "reflect" + + "github.com/securego/gosec/v2/issue" ) // The Rule interface used by all rules supported by gosec. type Rule interface { ID() string - Match(ast.Node, *Context) (*Issue, error) + Match(ast.Node, *Context) (*issue.Issue, error) } // RuleBuilder is used to register a rule definition with the analyzer diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/archive.go b/tools/vendor/github.com/securego/gosec/v2/rules/archive.go index 92c7e4481c..987047435b 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/archive.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/archive.go @@ -5,10 +5,11 @@ import ( "go/types" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type archive struct { - gosec.MetaData + issue.MetaData calls gosec.CallList argTypes []string } @@ -18,7 +19,7 @@ func (a *archive) ID() string { } // Match inspects AST nodes to determine if the filepath.Joins uses any argument derived from type zip.File or tar.Header -func (a *archive) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { +func (a *archive) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { if node := a.calls.ContainsPkgCallExpr(n, c, false); node != nil { for _, arg := range node.Args { var argType types.Type @@ -38,7 +39,7 @@ func (a *archive) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { if argType != nil { for _, t := range a.argTypes { if argType.String() == t { - return gosec.NewIssue(c, n, a.ID(), a.What, a.Severity, a.Confidence), nil + return c.NewIssue(n, a.ID(), a.What, a.Severity, a.Confidence), nil } } } @@ -48,17 +49,17 @@ func (a *archive) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { } // NewArchive creates a new rule which detects the file traversal when extracting zip/tar archives -func NewArchive(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewArchive(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { calls := gosec.NewCallList() calls.Add("path/filepath", "Join") calls.Add("path", "Join") return &archive{ calls: calls, argTypes: []string{"*archive/zip.File", "*archive/tar.Header"}, - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.Medium, - Confidence: gosec.High, + Severity: issue.Medium, + Confidence: issue.High, What: "File traversal when extracting zip/tar archive", }, }, []ast.Node{(*ast.CallExpr)(nil)} diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/bad_defer.go b/tools/vendor/github.com/securego/gosec/v2/rules/bad_defer.go deleted file mode 100644 index 141a4a9390..0000000000 --- a/tools/vendor/github.com/securego/gosec/v2/rules/bad_defer.go +++ /dev/null @@ -1,96 +0,0 @@ -package rules - -import ( - "fmt" - "go/ast" - "strings" - - "github.com/securego/gosec/v2" -) - -type deferType struct { - typ string - methods []string -} - -type badDefer struct { - gosec.MetaData - types []deferType -} - -func (r *badDefer) ID() string { - return r.MetaData.ID -} - -func normalize(typ string) string { - return strings.TrimPrefix(typ, "*") -} - -func contains(methods []string, method string) bool { - for _, m := range methods { - if m == method { - return true - } - } - return false -} - -func (r *badDefer) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { - if deferStmt, ok := n.(*ast.DeferStmt); ok { - for _, deferTyp := range r.types { - if typ, method, err := gosec.GetCallInfo(deferStmt.Call, c); err == nil { - if 
normalize(typ) == deferTyp.typ && contains(deferTyp.methods, method) { - return gosec.NewIssue(c, n, r.ID(), fmt.Sprintf(r.What, method, typ), r.Severity, r.Confidence), nil - } - } - } - } - - return nil, nil -} - -// NewDeferredClosing detects unsafe defer of error returning methods -func NewDeferredClosing(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - return &badDefer{ - types: []deferType{ - { - typ: "os.File", - methods: []string{"Close"}, - }, - { - typ: "io.ReadCloser", - methods: []string{"Close"}, - }, - { - typ: "io.WriteCloser", - methods: []string{"Close"}, - }, - { - typ: "io.ReadWriteCloser", - methods: []string{"Close"}, - }, - { - typ: "io.ReadSeekCloser", - methods: []string{"Close"}, - }, - { - typ: "io.Closer", - methods: []string{"Close"}, - }, - { - typ: "net.Conn", - methods: []string{"Close"}, - }, - { - typ: "net.Listener", - methods: []string{"Close"}, - }, - }, - MetaData: gosec.MetaData{ - ID: id, - Severity: gosec.Medium, - Confidence: gosec.High, - What: "Deferring unsafe method %q on type %q", - }, - }, []ast.Node{(*ast.DeferStmt)(nil)} -} diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/bind.go b/tools/vendor/github.com/securego/gosec/v2/rules/bind.go index 8f6af067ad..fef760c808 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/bind.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/bind.go @@ -19,11 +19,12 @@ import ( "regexp" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) // Looks for net.Listen("0.0.0.0") or net.Listen(":8080") type bindsToAllNetworkInterfaces struct { - gosec.MetaData + issue.MetaData calls gosec.CallList pattern *regexp.Regexp } @@ -32,7 +33,7 @@ func (r *bindsToAllNetworkInterfaces) ID() string { return r.MetaData.ID } -func (r *bindsToAllNetworkInterfaces) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { +func (r *bindsToAllNetworkInterfaces) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { callExpr := r.calls.ContainsPkgCallExpr(n, c, false) if callExpr == nil { return nil, nil @@ -42,14 +43,14 @@ func (r *bindsToAllNetworkInterfaces) Match(n ast.Node, c *gosec.Context) (*gose if bl, ok := arg.(*ast.BasicLit); ok { if arg, err := gosec.GetString(bl); err == nil { if r.pattern.MatchString(arg) { - return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil } } } else if ident, ok := arg.(*ast.Ident); ok { values := gosec.GetIdentStringValues(ident) for _, value := range values { if r.pattern.MatchString(value) { - return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil } } } @@ -57,7 +58,7 @@ func (r *bindsToAllNetworkInterfaces) Match(n ast.Node, c *gosec.Context) (*gose values := gosec.GetCallStringArgsValues(callExpr.Args[0], c) for _, value := range values { if r.pattern.MatchString(value) { - return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil } } } @@ -66,17 +67,17 @@ func (r *bindsToAllNetworkInterfaces) Match(n ast.Node, c *gosec.Context) (*gose // NewBindsToAllNetworkInterfaces detects socket connections that are setup to // listen on all network interfaces. 
-func NewBindsToAllNetworkInterfaces(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewBindsToAllNetworkInterfaces(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { calls := gosec.NewCallList() calls.Add("net", "Listen") calls.Add("crypto/tls", "Listen") return &bindsToAllNetworkInterfaces{ calls: calls, pattern: regexp.MustCompile(`^(0.0.0.0|:).*$`), - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.Medium, - Confidence: gosec.High, + Severity: issue.Medium, + Confidence: issue.High, What: "Binds to all network interfaces", }, }, []ast.Node{(*ast.CallExpr)(nil)} diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/blocklist.go b/tools/vendor/github.com/securego/gosec/v2/rules/blocklist.go index afd4ee56b7..5e03cf7a0c 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/blocklist.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/blocklist.go @@ -19,27 +19,28 @@ import ( "strings" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type blocklistedImport struct { - gosec.MetaData + issue.MetaData Blocklisted map[string]string } func unquote(original string) string { - copy := strings.TrimSpace(original) - copy = strings.TrimLeft(copy, `"`) - return strings.TrimRight(copy, `"`) + cleaned := strings.TrimSpace(original) + cleaned = strings.TrimLeft(cleaned, `"`) + return strings.TrimRight(cleaned, `"`) } func (r *blocklistedImport) ID() string { return r.MetaData.ID } -func (r *blocklistedImport) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { +func (r *blocklistedImport) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { if node, ok := n.(*ast.ImportSpec); ok { if description, ok := r.Blocklisted[unquote(node.Path.Value)]; ok { - return gosec.NewIssue(c, node, r.ID(), description, r.Severity, r.Confidence), nil + return c.NewIssue(node, r.ID(), description, r.Severity, r.Confidence), nil } } return nil, nil @@ -47,12 +48,12 @@ func (r *blocklistedImport) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, e // NewBlocklistedImports reports when a blocklisted import is being used. // Typically when a deprecated technology is being used. 
-func NewBlocklistedImports(id string, conf gosec.Config, blocklist map[string]string) (gosec.Rule, []ast.Node) { +func NewBlocklistedImports(id string, _ gosec.Config, blocklist map[string]string) (gosec.Rule, []ast.Node) { return &blocklistedImport{ - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.Medium, - Confidence: gosec.High, + Severity: issue.Medium, + Confidence: issue.High, }, Blocklisted: blocklist, }, []ast.Node{(*ast.ImportSpec)(nil)} diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/decompression-bomb.go b/tools/vendor/github.com/securego/gosec/v2/rules/decompression-bomb.go index 02256faa98..7e57f1a5b1 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/decompression-bomb.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/decompression-bomb.go @@ -19,10 +19,11 @@ import ( "go/ast" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type decompressionBombCheck struct { - gosec.MetaData + issue.MetaData readerCalls gosec.CallList copyCalls gosec.CallList } @@ -40,7 +41,7 @@ func containsReaderCall(node ast.Node, ctx *gosec.Context, list gosec.CallList) return list.Contains(s, idt) } -func (d *decompressionBombCheck) Match(node ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { +func (d *decompressionBombCheck) Match(node ast.Node, ctx *gosec.Context) (*issue.Issue, error) { var readerVarObj map[*ast.Object]struct{} // To check multiple lines, ctx.PassedValues is used to store temporary data. @@ -72,7 +73,7 @@ func (d *decompressionBombCheck) Match(node ast.Node, ctx *gosec.Context) (*gose if idt, ok := n.Args[1].(*ast.Ident); ok { if _, ok := readerVarObj[idt.Obj]; ok { // Detect io.Copy(x, r) - return gosec.NewIssue(ctx, n, d.ID(), d.What, d.Severity, d.Confidence), nil + return ctx.NewIssue(n, d.ID(), d.What, d.Severity, d.Confidence), nil } } } @@ -82,7 +83,7 @@ func (d *decompressionBombCheck) Match(node ast.Node, ctx *gosec.Context) (*gose } // NewDecompressionBombCheck detects if there is potential DoS vulnerability via decompression bomb -func NewDecompressionBombCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewDecompressionBombCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { readerCalls := gosec.NewCallList() readerCalls.Add("compress/gzip", "NewReader") readerCalls.AddAll("compress/zlib", "NewReader", "NewReaderDict") @@ -98,10 +99,10 @@ func NewDecompressionBombCheck(id string, conf gosec.Config) (gosec.Rule, []ast. 
copyCalls.Add("io", "CopyBuffer") return &decompressionBombCheck{ - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.Medium, - Confidence: gosec.Medium, + Severity: issue.Medium, + Confidence: issue.Medium, What: "Potential DoS vulnerability via decompression bomb", }, readerCalls: readerCalls, diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/directory-traversal.go b/tools/vendor/github.com/securego/gosec/v2/rules/directory-traversal.go index c373427b8f..47bcb2dc4a 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/directory-traversal.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/directory-traversal.go @@ -5,18 +5,19 @@ import ( "regexp" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type traversal struct { pattern *regexp.Regexp - gosec.MetaData + issue.MetaData } func (r *traversal) ID() string { return r.MetaData.ID } -func (r *traversal) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { +func (r *traversal) Match(n ast.Node, ctx *gosec.Context) (*issue.Issue, error) { switch node := n.(type) { case *ast.CallExpr: return r.matchCallExpr(node, ctx) @@ -24,14 +25,14 @@ func (r *traversal) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error) return nil, nil } -func (r *traversal) matchCallExpr(assign *ast.CallExpr, ctx *gosec.Context) (*gosec.Issue, error) { +func (r *traversal) matchCallExpr(assign *ast.CallExpr, ctx *gosec.Context) (*issue.Issue, error) { for _, i := range assign.Args { if basiclit, ok1 := i.(*ast.BasicLit); ok1 { if fun, ok2 := assign.Fun.(*ast.SelectorExpr); ok2 { if x, ok3 := fun.X.(*ast.Ident); ok3 { - string := x.Name + "." + fun.Sel.Name + "(" + basiclit.Value + ")" - if r.pattern.MatchString(string) { - return gosec.NewIssue(ctx, assign, r.ID(), r.What, r.Severity, r.Confidence), nil + str := x.Name + "." 
+ fun.Sel.Name + "(" + basiclit.Value + ")" + if r.pattern.MatchString(str) { + return ctx.NewIssue(assign, r.ID(), r.What, r.Severity, r.Confidence), nil } } } @@ -54,11 +55,11 @@ func NewDirectoryTraversal(id string, conf gosec.Config) (gosec.Rule, []ast.Node return &traversal{ pattern: regexp.MustCompile(pattern), - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, What: "Potential directory traversal", - Confidence: gosec.Medium, - Severity: gosec.Medium, + Confidence: issue.Medium, + Severity: issue.Medium, }, }, []ast.Node{(*ast.CallExpr)(nil)} } diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/errors.go b/tools/vendor/github.com/securego/gosec/v2/rules/errors.go index 0838382b32..d31248ccb4 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/errors.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/errors.go @@ -19,10 +19,11 @@ import ( "go/types" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type noErrorCheck struct { - gosec.MetaData + issue.MetaData whitelist gosec.CallList } @@ -49,7 +50,7 @@ func returnsError(callExpr *ast.CallExpr, ctx *gosec.Context) int { return -1 } -func (r *noErrorCheck) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { +func (r *noErrorCheck) Match(n ast.Node, ctx *gosec.Context) (*issue.Issue, error) { switch stmt := n.(type) { case *ast.AssignStmt: cfg := ctx.Config @@ -61,7 +62,7 @@ func (r *noErrorCheck) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, erro return nil, nil } if id, ok := stmt.Lhs[pos].(*ast.Ident); ok && id.Name == "_" { - return gosec.NewIssue(ctx, n, r.ID(), r.What, r.Severity, r.Confidence), nil + return ctx.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil } } } @@ -70,7 +71,7 @@ func (r *noErrorCheck) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, erro if callExpr, ok := stmt.X.(*ast.CallExpr); ok && r.whitelist.ContainsCallExpr(stmt.X, ctx) == nil { pos := returnsError(callExpr, ctx) if pos >= 0 { - return gosec.NewIssue(ctx, n, r.ID(), r.What, r.Severity, r.Confidence), nil + return ctx.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil } } } @@ -100,10 +101,10 @@ func NewNoErrorCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { } return &noErrorCheck{ - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.Low, - Confidence: gosec.High, + Severity: issue.Low, + Confidence: issue.High, What: "Errors unhandled.", }, whitelist: whitelist, diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/fileperms.go b/tools/vendor/github.com/securego/gosec/v2/rules/fileperms.go index e89b56369d..0376b6a03c 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/fileperms.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/fileperms.go @@ -20,10 +20,11 @@ import ( "strconv" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type filePermissions struct { - gosec.MetaData + issue.MetaData mode int64 pkgs []string calls []string @@ -54,12 +55,12 @@ func modeIsSubset(subset int64, superset int64) bool { return (subset | superset) == superset } -func (r *filePermissions) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { +func (r *filePermissions) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { for _, pkg := range r.pkgs { if callexpr, matched := gosec.MatchCallByPackage(n, c, pkg, r.calls...); matched { modeArg := callexpr.Args[len(callexpr.Args)-1] if mode, err := gosec.GetInt(modeArg); err == nil && !modeIsSubset(mode, r.mode) { - return 
gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil } } } @@ -73,10 +74,10 @@ func NewWritePerms(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { mode: mode, pkgs: []string{"io/ioutil", "os"}, calls: []string{"WriteFile"}, - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.Medium, - Confidence: gosec.High, + Severity: issue.Medium, + Confidence: issue.High, What: fmt.Sprintf("Expect WriteFile permissions to be %#o or less", mode), }, }, []ast.Node{(*ast.CallExpr)(nil)} @@ -90,10 +91,10 @@ func NewFilePerms(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { mode: mode, pkgs: []string{"os"}, calls: []string{"OpenFile", "Chmod"}, - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.Medium, - Confidence: gosec.High, + Severity: issue.Medium, + Confidence: issue.High, What: fmt.Sprintf("Expect file permissions to be %#o or less", mode), }, }, []ast.Node{(*ast.CallExpr)(nil)} @@ -107,10 +108,10 @@ func NewMkdirPerms(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { mode: mode, pkgs: []string{"os"}, calls: []string{"Mkdir", "MkdirAll"}, - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.Medium, - Confidence: gosec.High, + Severity: issue.Medium, + Confidence: issue.High, What: fmt.Sprintf("Expect directory permissions to be %#o or less", mode), }, }, []ast.Node{(*ast.CallExpr)(nil)} diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go b/tools/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go index b9e5756541..eac50d7c96 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go @@ -21,11 +21,13 @@ import ( "strconv" zxcvbn "github.com/nbutton23/zxcvbn-go" + "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type credentials struct { - gosec.MetaData + issue.MetaData pattern *regexp.Regexp entropyThreshold float64 perCharThreshold float64 @@ -53,7 +55,7 @@ func (r *credentials) isHighEntropyString(str string) bool { entropyPerChar >= r.perCharThreshold)) } -func (r *credentials) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { +func (r *credentials) Match(n ast.Node, ctx *gosec.Context) (*issue.Issue, error) { switch node := n.(type) { case *ast.AssignStmt: return r.matchAssign(node, ctx) @@ -65,14 +67,14 @@ func (r *credentials) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error return nil, nil } -func (r *credentials) matchAssign(assign *ast.AssignStmt, ctx *gosec.Context) (*gosec.Issue, error) { +func (r *credentials) matchAssign(assign *ast.AssignStmt, ctx *gosec.Context) (*issue.Issue, error) { for _, i := range assign.Lhs { if ident, ok := i.(*ast.Ident); ok { if r.pattern.MatchString(ident.Name) { for _, e := range assign.Rhs { if val, err := gosec.GetString(e); err == nil { if r.ignoreEntropy || (!r.ignoreEntropy && r.isHighEntropyString(val)) { - return gosec.NewIssue(ctx, assign, r.ID(), r.What, r.Severity, r.Confidence), nil + return ctx.NewIssue(assign, r.ID(), r.What, r.Severity, r.Confidence), nil } } } @@ -82,7 +84,7 @@ func (r *credentials) matchAssign(assign *ast.AssignStmt, ctx *gosec.Context) (* return nil, nil } -func (r *credentials) matchValueSpec(valueSpec *ast.ValueSpec, ctx *gosec.Context) (*gosec.Issue, error) { +func (r *credentials) matchValueSpec(valueSpec *ast.ValueSpec, ctx 
*gosec.Context) (*issue.Issue, error) { for index, ident := range valueSpec.Names { if r.pattern.MatchString(ident.Name) && valueSpec.Values != nil { // const foo, bar = "same value" @@ -91,7 +93,7 @@ func (r *credentials) matchValueSpec(valueSpec *ast.ValueSpec, ctx *gosec.Contex } if val, err := gosec.GetString(valueSpec.Values[index]); err == nil { if r.ignoreEntropy || (!r.ignoreEntropy && r.isHighEntropyString(val)) { - return gosec.NewIssue(ctx, valueSpec, r.ID(), r.What, r.Severity, r.Confidence), nil + return ctx.NewIssue(valueSpec, r.ID(), r.What, r.Severity, r.Confidence), nil } } } @@ -99,7 +101,7 @@ func (r *credentials) matchValueSpec(valueSpec *ast.ValueSpec, ctx *gosec.Contex return nil, nil } -func (r *credentials) matchEqualityCheck(binaryExpr *ast.BinaryExpr, ctx *gosec.Context) (*gosec.Issue, error) { +func (r *credentials) matchEqualityCheck(binaryExpr *ast.BinaryExpr, ctx *gosec.Context) (*issue.Issue, error) { if binaryExpr.Op == token.EQL || binaryExpr.Op == token.NEQ { ident, ok := binaryExpr.X.(*ast.Ident) if !ok { @@ -113,7 +115,7 @@ func (r *credentials) matchEqualityCheck(binaryExpr *ast.BinaryExpr, ctx *gosec. } if val, err := gosec.GetString(valueNode); err == nil { if r.ignoreEntropy || (!r.ignoreEntropy && r.isHighEntropyString(val)) { - return gosec.NewIssue(ctx, binaryExpr, r.ID(), r.What, r.Severity, r.Confidence), nil + return ctx.NewIssue(binaryExpr, r.ID(), r.What, r.Severity, r.Confidence), nil } } } @@ -170,11 +172,11 @@ func NewHardcodedCredentials(id string, conf gosec.Config) (gosec.Rule, []ast.No perCharThreshold: perCharThreshold, ignoreEntropy: ignoreEntropy, truncate: truncateString, - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, What: "Potential hardcoded credentials", - Confidence: gosec.Low, - Severity: gosec.High, + Confidence: issue.Low, + Severity: issue.High, }, }, []ast.Node{(*ast.AssignStmt)(nil), (*ast.ValueSpec)(nil), (*ast.BinaryExpr)(nil)} } diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/http_serve.go b/tools/vendor/github.com/securego/gosec/v2/rules/http_serve.go index e460b3a680..525ed4ebc7 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/http_serve.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/http_serve.go @@ -4,10 +4,11 @@ import ( "go/ast" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type httpServeWithoutTimeouts struct { - gosec.MetaData + issue.MetaData pkg string calls []string } @@ -16,23 +17,23 @@ func (r *httpServeWithoutTimeouts) ID() string { return r.MetaData.ID } -func (r *httpServeWithoutTimeouts) Match(n ast.Node, c *gosec.Context) (gi *gosec.Issue, err error) { +func (r *httpServeWithoutTimeouts) Match(n ast.Node, c *gosec.Context) (gi *issue.Issue, err error) { if _, matches := gosec.MatchCallByPackage(n, c, r.pkg, r.calls...); matches { - return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil } return nil, nil } // NewHTTPServeWithoutTimeouts detects use of net/http serve functions that have no support for setting timeouts. 
-func NewHTTPServeWithoutTimeouts(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewHTTPServeWithoutTimeouts(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { return &httpServeWithoutTimeouts{ pkg: "net/http", calls: []string{"ListenAndServe", "ListenAndServeTLS", "Serve", "ServeTLS"}, - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, What: "Use of net/http serve function that has no support for setting timeouts", - Severity: gosec.Medium, - Confidence: gosec.High, + Severity: issue.Medium, + Confidence: issue.High, }, }, []ast.Node{(*ast.CallExpr)(nil)} } diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go b/tools/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go index b2668dec88..70678e29a3 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go @@ -5,10 +5,11 @@ import ( "go/token" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type implicitAliasing struct { - gosec.MetaData + issue.MetaData aliases map[*ast.Object]struct{} rightBrace token.Pos acceptableAlias []*ast.UnaryExpr @@ -27,7 +28,7 @@ func containsUnary(exprs []*ast.UnaryExpr, expr *ast.UnaryExpr) bool { return false } -func (r *implicitAliasing) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { +func (r *implicitAliasing) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { switch node := n.(type) { case *ast.RangeStmt: // When presented with a range statement, get the underlying Object bound to @@ -73,7 +74,7 @@ func (r *implicitAliasing) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, er // If we find a unary op of & (reference) of an object within r.aliases, complain. if ident, ok := node.X.(*ast.Ident); ok && node.Op.String() == "&" { if _, contains := r.aliases[ident.Obj]; contains { - return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil } } case *ast.ReturnStmt: @@ -89,15 +90,15 @@ func (r *implicitAliasing) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, er } // NewImplicitAliasing detects implicit memory aliasing of type: for blah := SomeCall() {... 
SomeOtherCall(&blah) ...} -func NewImplicitAliasing(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewImplicitAliasing(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { return &implicitAliasing{ aliases: make(map[*ast.Object]struct{}), rightBrace: token.NoPos, acceptableAlias: make([]*ast.UnaryExpr, 0), - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.Medium, - Confidence: gosec.Medium, + Severity: issue.Medium, + Confidence: issue.Medium, What: "Implicit memory aliasing in for loop.", }, }, []ast.Node{(*ast.RangeStmt)(nil), (*ast.UnaryExpr)(nil), (*ast.ReturnStmt)(nil)} diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/integer_overflow.go b/tools/vendor/github.com/securego/gosec/v2/rules/integer_overflow.go index f55211a923..1d57906642 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/integer_overflow.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/integer_overflow.go @@ -19,10 +19,11 @@ import ( "go/ast" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type integerOverflowCheck struct { - gosec.MetaData + issue.MetaData calls gosec.CallList } @@ -30,7 +31,7 @@ func (i *integerOverflowCheck) ID() string { return i.MetaData.ID } -func (i *integerOverflowCheck) Match(node ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { +func (i *integerOverflowCheck) Match(node ast.Node, ctx *gosec.Context) (*issue.Issue, error) { var atoiVarObj map[*ast.Object]ast.Node // To check multiple lines, ctx.PassedValues is used to store temporary data. @@ -63,7 +64,7 @@ func (i *integerOverflowCheck) Match(node ast.Node, ctx *gosec.Context) (*gosec. if idt, ok := n.Args[0].(*ast.Ident); ok { if _, ok := atoiVarObj[idt.Obj]; ok { // Detect int32(v) and int16(v) - return gosec.NewIssue(ctx, n, i.ID(), i.What, i.Severity, i.Confidence), nil + return ctx.NewIssue(n, i.ID(), i.What, i.Severity, i.Confidence), nil } } } @@ -74,14 +75,14 @@ func (i *integerOverflowCheck) Match(node ast.Node, ctx *gosec.Context) (*gosec. 
} // NewIntegerOverflowCheck detects if there is potential Integer OverFlow -func NewIntegerOverflowCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewIntegerOverflowCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { calls := gosec.NewCallList() calls.Add("strconv", "Atoi") return &integerOverflowCheck{ - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.High, - Confidence: gosec.Medium, + Severity: issue.High, + Confidence: issue.Medium, What: "Potential Integer overflow made by strconv.Atoi result conversion to int16/32", }, calls: calls, diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/math_big_rat.go b/tools/vendor/github.com/securego/gosec/v2/rules/math_big_rat.go index 69037e18f9..1aac1fa201 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/math_big_rat.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/math_big_rat.go @@ -4,10 +4,11 @@ import ( "go/ast" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type usingOldMathBig struct { - gosec.MetaData + issue.MetaData calls gosec.CallList } @@ -15,18 +16,18 @@ func (r *usingOldMathBig) ID() string { return r.MetaData.ID } -func (r *usingOldMathBig) Match(node ast.Node, ctx *gosec.Context) (gi *gosec.Issue, err error) { +func (r *usingOldMathBig) Match(node ast.Node, ctx *gosec.Context) (gi *issue.Issue, err error) { if callExpr := r.calls.ContainsPkgCallExpr(node, ctx, false); callExpr == nil { return nil, nil } - confidence := gosec.Low + confidence := issue.Low major, minor, build := gosec.GoVersion() if major == 1 && (minor == 16 && build < 14 || minor == 17 && build < 7) { - confidence = gosec.Medium + confidence = issue.Medium } - return gosec.NewIssue(ctx, node, r.ID(), r.What, r.Severity, confidence), nil + return ctx.NewIssue(node, r.ID(), r.What, r.Severity, confidence), nil } // NewUsingOldMathBig rule detects the use of Rat.SetString from math/big. 
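For context, a small illustrative snippet (not taken from gosec's test suite) of the pattern this rule reports: parsing untrusted input with math/big's Rat.SetString, where CVE-2022-23772 allowed uncontrolled memory consumption before the fixes in Go 1.16.14 / 1.17.7.

```go
package main

import (
	"fmt"
	"math/big"
)

func parseRatio(userInput string) (*big.Rat, error) {
	r := new(big.Rat)
	// This call is what the rule flags; per the hunk above, confidence is only
	// raised from Low to Medium when the Go toolchain predates the upstream fix.
	if _, ok := r.SetString(userInput); !ok {
		return nil, fmt.Errorf("invalid ratio %q", userInput)
	}
	return r, nil
}

func main() {
	r, err := parseRatio("1/3")
	fmt.Println(r, err)
}
```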
@@ -35,10 +36,10 @@ func NewUsingOldMathBig(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { calls.Add("math/big.Rat", "SetString") return &usingOldMathBig{ calls: calls, - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, What: "Potential uncontrolled memory consumption in Rat.SetString (CVE-2022-23772)", - Severity: gosec.High, + Severity: issue.High, }, }, []ast.Node{(*ast.CallExpr)(nil)} } diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/pprof.go b/tools/vendor/github.com/securego/gosec/v2/rules/pprof.go index 4c99af7523..68498dd5e0 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/pprof.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/pprof.go @@ -4,10 +4,11 @@ import ( "go/ast" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type pprofCheck struct { - gosec.MetaData + issue.MetaData importPath string importName string } @@ -18,22 +19,22 @@ func (p *pprofCheck) ID() string { } // Match checks for pprof imports -func (p *pprofCheck) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { +func (p *pprofCheck) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { if node, ok := n.(*ast.ImportSpec); ok { if p.importPath == unquote(node.Path.Value) && node.Name != nil && p.importName == node.Name.Name { - return gosec.NewIssue(c, node, p.ID(), p.What, p.Severity, p.Confidence), nil + return c.NewIssue(node, p.ID(), p.What, p.Severity, p.Confidence), nil } } return nil, nil } // NewPprofCheck detects when the profiling endpoint is automatically exposed -func NewPprofCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewPprofCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { return &pprofCheck{ - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.High, - Confidence: gosec.High, + Severity: issue.High, + Confidence: issue.High, What: "Profiling endpoint is automatically exposed on /debug/pprof", }, importPath: "net/http/pprof", diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/rand.go b/tools/vendor/github.com/securego/gosec/v2/rules/rand.go index 055adce4d4..4491fd9284 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/rand.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/rand.go @@ -18,10 +18,11 @@ import ( "go/ast" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type weakRand struct { - gosec.MetaData + issue.MetaData funcNames []string packagePath string } @@ -30,10 +31,10 @@ func (w *weakRand) ID() string { return w.MetaData.ID } -func (w *weakRand) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { +func (w *weakRand) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { for _, funcName := range w.funcNames { if _, matched := gosec.MatchCallByPackage(n, c, w.packagePath, funcName); matched { - return gosec.NewIssue(c, n, w.ID(), w.What, w.Severity, w.Confidence), nil + return c.NewIssue(n, w.ID(), w.What, w.Severity, w.Confidence), nil } } @@ -41,17 +42,17 @@ func (w *weakRand) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { } // NewWeakRandCheck detects the use of random number generator that isn't cryptographically secure -func NewWeakRandCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewWeakRandCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { return &weakRand{ funcNames: []string{ "New", "Read", "Float32", "Float64", "Int", "Int31", "Int31n", "Int63", "Int63n", "Intn", "NormalFloat64", "Uint32", "Uint64", }, packagePath: "math/rand", - 
MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.High, - Confidence: gosec.Medium, + Severity: issue.High, + Confidence: issue.Medium, What: "Use of weak random number generator (math/rand instead of crypto/rand)", }, }, []ast.Node{(*ast.CallExpr)(nil)} diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/readfile.go b/tools/vendor/github.com/securego/gosec/v2/rules/readfile.go index 8dcf053290..7ef4bbad13 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/readfile.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/readfile.go @@ -19,10 +19,11 @@ import ( "go/types" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type readfile struct { - gosec.MetaData + issue.MetaData gosec.CallList pathJoin gosec.CallList clean gosec.CallList @@ -80,13 +81,17 @@ func (r *readfile) isFilepathClean(n *ast.Ident, c *gosec.Context) bool { func (r *readfile) trackFilepathClean(n ast.Node) { if clean, ok := n.(*ast.CallExpr); ok && len(clean.Args) > 0 { if ident, ok := clean.Args[0].(*ast.Ident); ok { - r.cleanedVar[ident.Obj.Decl] = n + // ident.Obj may be nil if the referenced declaration is in another file. It also may be incorrect. + // if it is nil, do not follow it. + if ident.Obj != nil { + r.cleanedVar[ident.Obj.Decl] = n + } } } } // Match inspects AST nodes to determine if the match the methods `os.Open` or `ioutil.ReadFile` -func (r *readfile) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { +func (r *readfile) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { if node := r.clean.ContainsPkgCallExpr(n, c, false); node != nil { r.trackFilepathClean(n) return nil, nil @@ -96,14 +101,14 @@ func (r *readfile) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { // eg. os.Open(filepath.Join("/tmp/", file)) if callExpr, ok := arg.(*ast.CallExpr); ok { if r.isJoinFunc(callExpr, c) { - return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil } } // handles binary string concatenation eg. 
ioutil.Readfile("/tmp/" + file + "/blob") if binExp, ok := arg.(*ast.BinaryExpr); ok { // resolve all found identities from the BinaryExpr if _, ok := gosec.FindVarIdentities(binExp, c); ok { - return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil } } @@ -112,7 +117,7 @@ func (r *readfile) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { if _, ok := obj.(*types.Var); ok && !gosec.TryResolve(ident, c) && !r.isFilepathClean(ident, c) { - return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil } } } @@ -121,16 +126,16 @@ func (r *readfile) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { } // NewReadFile detects cases where we read files -func NewReadFile(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewReadFile(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { rule := &readfile{ pathJoin: gosec.NewCallList(), clean: gosec.NewCallList(), CallList: gosec.NewCallList(), - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, What: "Potential file inclusion via variable", - Severity: gosec.Medium, - Confidence: gosec.High, + Severity: issue.Medium, + Confidence: issue.High, }, cleanedVar: map[any]ast.Node{}, } diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/rsa.go b/tools/vendor/github.com/securego/gosec/v2/rules/rsa.go index f2ed5db53d..331e7fc80a 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/rsa.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/rsa.go @@ -19,10 +19,11 @@ import ( "go/ast" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type weakKeyStrength struct { - gosec.MetaData + issue.MetaData calls gosec.CallList bits int } @@ -31,27 +32,27 @@ func (w *weakKeyStrength) ID() string { return w.MetaData.ID } -func (w *weakKeyStrength) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { +func (w *weakKeyStrength) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { if callExpr := w.calls.ContainsPkgCallExpr(n, c, false); callExpr != nil { if bits, err := gosec.GetInt(callExpr.Args[1]); err == nil && bits < (int64)(w.bits) { - return gosec.NewIssue(c, n, w.ID(), w.What, w.Severity, w.Confidence), nil + return c.NewIssue(n, w.ID(), w.What, w.Severity, w.Confidence), nil } } return nil, nil } // NewWeakKeyStrength builds a rule that detects RSA keys < 2048 bits -func NewWeakKeyStrength(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewWeakKeyStrength(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { calls := gosec.NewCallList() calls.Add("crypto/rsa", "GenerateKey") bits := 2048 return &weakKeyStrength{ calls: calls, bits: bits, - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.Medium, - Confidence: gosec.High, + Severity: issue.Medium, + Confidence: issue.High, What: fmt.Sprintf("RSA keys should be at least %d bits", bits), }, }, []ast.Node{(*ast.CallExpr)(nil)} diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/rulelist.go b/tools/vendor/github.com/securego/gosec/v2/rules/rulelist.go index b97813ed02..d856eccad2 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/rulelist.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/rulelist.go @@ -91,7 +91,6 @@ func Generate(trackSuppressions bool, filters ...RuleFilter) RuleList { {"G304", "File path provided as taint input", NewReadFile}, {"G305", "File path traversal when 
extracting zip archive", NewArchive}, {"G306", "Poor file permissions used when writing to a file", NewWritePerms}, - {"G307", "Unsafe defer call of a method returning an error", NewDeferredClosing}, // crypto {"G401", "Detect the usage of DES, RC4, MD5 or SHA1", NewUsesWeakCryptography}, diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/slowloris.go b/tools/vendor/github.com/securego/gosec/v2/rules/slowloris.go index 60b5e95211..70db73f5f3 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/slowloris.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/slowloris.go @@ -18,10 +18,11 @@ import ( "go/ast" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type slowloris struct { - gosec.MetaData + issue.MetaData } func (r *slowloris) ID() string { @@ -44,13 +45,13 @@ func containsReadHeaderTimeout(node *ast.CompositeLit) bool { return false } -func (r *slowloris) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { +func (r *slowloris) Match(n ast.Node, ctx *gosec.Context) (*issue.Issue, error) { switch node := n.(type) { case *ast.CompositeLit: actualType := ctx.Info.TypeOf(node.Type) if actualType != nil && actualType.String() == "net/http.Server" { if !containsReadHeaderTimeout(node) { - return gosec.NewIssue(ctx, node, r.ID(), r.What, r.Severity, r.Confidence), nil + return ctx.NewIssue(node, r.ID(), r.What, r.Severity, r.Confidence), nil } } } @@ -58,13 +59,13 @@ func (r *slowloris) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error) } // NewSlowloris attempts to find the http.Server struct and check if the ReadHeaderTimeout is configured. -func NewSlowloris(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewSlowloris(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { return &slowloris{ - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, What: "Potential Slowloris Attack because ReadHeaderTimeout is not configured in the http.Server", - Confidence: gosec.Low, - Severity: gosec.Medium, + Confidence: issue.Low, + Severity: issue.Medium, }, }, []ast.Node{(*ast.CompositeLit)(nil)} } diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/sql.go b/tools/vendor/github.com/securego/gosec/v2/rules/sql.go index ee99737d64..4085b5d26a 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/sql.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/sql.go @@ -20,10 +20,11 @@ import ( "regexp" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type sqlStatement struct { - gosec.MetaData + issue.MetaData gosec.CallList // Contains a list of patterns which must all match for the rule to match. 
@@ -113,7 +114,7 @@ func (s *sqlStrConcat) checkObject(n *ast.Ident, c *gosec.Context) bool { } // checkQuery verifies if the query parameters is a string concatenation -func (s *sqlStrConcat) checkQuery(call *ast.CallExpr, ctx *gosec.Context) (*gosec.Issue, error) { +func (s *sqlStrConcat) checkQuery(call *ast.CallExpr, ctx *gosec.Context) (*issue.Issue, error) { query, err := findQueryArg(call, ctx) if err != nil { return nil, err @@ -134,7 +135,7 @@ func (s *sqlStrConcat) checkQuery(call *ast.CallExpr, ctx *gosec.Context) (*gose if op, ok := op.(*ast.Ident); ok && s.checkObject(op, ctx) { continue } - return gosec.NewIssue(ctx, be, s.ID(), s.What, s.Severity, s.Confidence), nil + return ctx.NewIssue(be, s.ID(), s.What, s.Severity, s.Confidence), nil } } } @@ -143,7 +144,7 @@ func (s *sqlStrConcat) checkQuery(call *ast.CallExpr, ctx *gosec.Context) (*gose } // Checks SQL query concatenation issues such as "SELECT * FROM table WHERE " + " ' OR 1=1" -func (s *sqlStrConcat) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { +func (s *sqlStrConcat) Match(n ast.Node, ctx *gosec.Context) (*issue.Issue, error) { switch stmt := n.(type) { case *ast.AssignStmt: for _, expr := range stmt.Rhs { @@ -160,16 +161,16 @@ func (s *sqlStrConcat) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, erro } // NewSQLStrConcat looks for cases where we are building SQL strings via concatenation -func NewSQLStrConcat(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewSQLStrConcat(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { rule := &sqlStrConcat{ sqlStatement: sqlStatement{ patterns: []*regexp.Regexp{ regexp.MustCompile(`(?i)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE) `), }, - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.Medium, - Confidence: gosec.High, + Severity: issue.Medium, + Confidence: issue.High, What: "SQL string concatenation", }, CallList: gosec.NewCallList(), @@ -212,7 +213,7 @@ func (s *sqlStrFormat) constObject(e ast.Expr, c *gosec.Context) bool { return false } -func (s *sqlStrFormat) checkQuery(call *ast.CallExpr, ctx *gosec.Context) (*gosec.Issue, error) { +func (s *sqlStrFormat) checkQuery(call *ast.CallExpr, ctx *gosec.Context) (*issue.Issue, error) { query, err := findQueryArg(call, ctx) if err != nil { return nil, err @@ -233,7 +234,7 @@ func (s *sqlStrFormat) checkQuery(call *ast.CallExpr, ctx *gosec.Context) (*gose return nil, nil } -func (s *sqlStrFormat) checkFormatting(n ast.Node, ctx *gosec.Context) *gosec.Issue { +func (s *sqlStrFormat) checkFormatting(n ast.Node, ctx *gosec.Context) *issue.Issue { // argIndex changes the function argument which gets matched to the regex argIndex := 0 if node := s.fmtCalls.ContainsPkgCallExpr(n, ctx, false); node != nil { @@ -286,14 +287,14 @@ func (s *sqlStrFormat) checkFormatting(n ast.Node, ctx *gosec.Context) *gosec.Is } } if s.MatchPatterns(formatter) { - return gosec.NewIssue(ctx, n, s.ID(), s.What, s.Severity, s.Confidence) + return ctx.NewIssue(n, s.ID(), s.What, s.Severity, s.Confidence) } } return nil } // Check SQL query formatting issues such as "fmt.Sprintf("SELECT * FROM foo where '%s', userInput)" -func (s *sqlStrFormat) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { +func (s *sqlStrFormat) Match(n ast.Node, ctx *gosec.Context) (*issue.Issue, error) { switch stmt := n.(type) { case *ast.AssignStmt: for _, expr := range stmt.Rhs { @@ -323,7 +324,7 @@ func (s *sqlStrFormat) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, erro } // NewSQLStrFormat 
looks for cases where we're building SQL query strings using format strings -func NewSQLStrFormat(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewSQLStrFormat(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { rule := &sqlStrFormat{ CallList: gosec.NewCallList(), fmtCalls: gosec.NewCallList(), @@ -334,10 +335,10 @@ func NewSQLStrFormat(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { regexp.MustCompile("(?i)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE)( |\n|\r|\t)"), regexp.MustCompile("%[^bdoxXfFp]"), }, - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.Medium, - Confidence: gosec.High, + Severity: issue.Medium, + Confidence: issue.High, What: "SQL string formatting", }, }, diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/ssh.go b/tools/vendor/github.com/securego/gosec/v2/rules/ssh.go index 01f37da510..e2ba5a3f4e 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/ssh.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/ssh.go @@ -4,10 +4,11 @@ import ( "go/ast" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type sshHostKey struct { - gosec.MetaData + issue.MetaData pkg string calls []string } @@ -16,23 +17,23 @@ func (r *sshHostKey) ID() string { return r.MetaData.ID } -func (r *sshHostKey) Match(n ast.Node, c *gosec.Context) (gi *gosec.Issue, err error) { +func (r *sshHostKey) Match(n ast.Node, c *gosec.Context) (gi *issue.Issue, err error) { if _, matches := gosec.MatchCallByPackage(n, c, r.pkg, r.calls...); matches { - return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil } return nil, nil } // NewSSHHostKey rule detects the use of insecure ssh HostKeyCallback. 
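As a purely illustrative aside (assuming the standard golang.org/x/crypto/ssh API, not code from this repository), the host-key rule mentioned in the comment above fires on configurations like the following:

```go
package main

import "golang.org/x/crypto/ssh"

func newClientConfig() *ssh.ClientConfig {
	return &ssh.ClientConfig{
		User: "git",
		// gosec reports this: InsecureIgnoreHostKey disables host key
		// verification entirely, so its use should at least be audited.
		HostKeyCallback: ssh.InsecureIgnoreHostKey(),
	}
}

func main() {
	_ = newClientConfig()
}
```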
-func NewSSHHostKey(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewSSHHostKey(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { return &sshHostKey{ pkg: "golang.org/x/crypto/ssh", calls: []string{"InsecureIgnoreHostKey"}, - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, What: "Use of ssh InsecureIgnoreHostKey should be audited", - Severity: gosec.Medium, - Confidence: gosec.High, + Severity: issue.Medium, + Confidence: issue.High, }, }, []ast.Node{(*ast.CallExpr)(nil)} } diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/ssrf.go b/tools/vendor/github.com/securego/gosec/v2/rules/ssrf.go index 86bb8278d3..dbf01081b2 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/ssrf.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/ssrf.go @@ -5,10 +5,11 @@ import ( "go/types" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type ssrf struct { - gosec.MetaData + issue.MetaData gosec.CallList } @@ -40,25 +41,25 @@ func (r *ssrf) ResolveVar(n *ast.CallExpr, c *gosec.Context) bool { } // Match inspects AST nodes to determine if certain net/http methods are called with variable input -func (r *ssrf) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { +func (r *ssrf) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { // Call expression is using http package directly if node := r.ContainsPkgCallExpr(n, c, false); node != nil { if r.ResolveVar(node, c) { - return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil } } return nil, nil } // NewSSRFCheck detects cases where HTTP requests are sent -func NewSSRFCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewSSRFCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { rule := &ssrf{ CallList: gosec.NewCallList(), - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, What: "Potential HTTP request made with variable url", - Severity: gosec.Medium, - Confidence: gosec.Medium, + Severity: issue.Medium, + Confidence: issue.Medium, }, } rule.AddAll("net/http", "Do", "Get", "Head", "Post", "PostForm", "RoundTrip") diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/subproc.go b/tools/vendor/github.com/securego/gosec/v2/rules/subproc.go index 2b6cb186cd..ea50d692d5 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/subproc.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/subproc.go @@ -19,10 +19,11 @@ import ( "go/types" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type subprocess struct { - gosec.MetaData + issue.MetaData gosec.CallList } @@ -39,7 +40,7 @@ func (r *subprocess) ID() string { // is unsafe. 
For example: // // syscall.Exec("echo", "foobar" + tainted) -func (r *subprocess) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { +func (r *subprocess) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { if node := r.ContainsPkgCallExpr(n, c, false); node != nil { args := node.Args if r.isContext(n, c) { @@ -64,7 +65,7 @@ func (r *subprocess) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { _, assignment := ident.Obj.Decl.(*ast.AssignStmt) if variable && assignment { if !gosec.TryResolve(ident, c) { - return gosec.NewIssue(c, n, r.ID(), "Subprocess launched with variable", gosec.Medium, gosec.High), nil + return c.NewIssue(n, r.ID(), "Subprocess launched with variable", issue.Medium, issue.High), nil } } case *ast.Field: @@ -74,21 +75,21 @@ func (r *subprocess) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { vv, vvok := obj.(*types.Var) if vvok && vv.Parent().Lookup(ident.Name) == nil { - return gosec.NewIssue(c, n, r.ID(), "Subprocess launched with variable", gosec.Medium, gosec.High), nil + return c.NewIssue(n, r.ID(), "Subprocess launched with variable", issue.Medium, issue.High), nil } } case *ast.ValueSpec: _, valueSpec := ident.Obj.Decl.(*ast.ValueSpec) if variable && valueSpec { if !gosec.TryResolve(ident, c) { - return gosec.NewIssue(c, n, r.ID(), "Subprocess launched with variable", gosec.Medium, gosec.High), nil + return c.NewIssue(n, r.ID(), "Subprocess launched with variable", issue.Medium, issue.High), nil } } } } } else if !gosec.TryResolve(arg, c) { // the arg is not a constant or a variable but instead a function call or os.Args[i] - return gosec.NewIssue(c, n, r.ID(), "Subprocess launched with a potential tainted input or cmd arguments", gosec.Medium, gosec.High), nil + return c.NewIssue(n, r.ID(), "Subprocess launched with a potential tainted input or cmd arguments", issue.Medium, issue.High), nil } } } @@ -109,8 +110,8 @@ func (r *subprocess) isContext(n ast.Node, ctx *gosec.Context) bool { } // NewSubproc detects cases where we are forking out to an external process -func NewSubproc(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { - rule := &subprocess{gosec.MetaData{ID: id}, gosec.NewCallList()} +func NewSubproc(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { + rule := &subprocess{issue.MetaData{ID: id}, gosec.NewCallList()} rule.Add("os/exec", "Command") rule.Add("os/exec", "CommandContext") rule.Add("syscall", "Exec") diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/tempfiles.go b/tools/vendor/github.com/securego/gosec/v2/rules/tempfiles.go index 63822c093c..6fef52a2cb 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/tempfiles.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/tempfiles.go @@ -19,10 +19,11 @@ import ( "regexp" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type badTempFile struct { - gosec.MetaData + issue.MetaData calls gosec.CallList args *regexp.Regexp argCalls gosec.CallList @@ -33,15 +34,15 @@ func (t *badTempFile) ID() string { return t.MetaData.ID } -func (t *badTempFile) findTempDirArgs(n ast.Node, c *gosec.Context, suspect ast.Node) *gosec.Issue { +func (t *badTempFile) findTempDirArgs(n ast.Node, c *gosec.Context, suspect ast.Node) *issue.Issue { if s, e := gosec.GetString(suspect); e == nil { if t.args.MatchString(s) { - return gosec.NewIssue(c, n, t.ID(), t.What, t.Severity, t.Confidence) + return c.NewIssue(n, t.ID(), t.What, t.Severity, t.Confidence) } return nil } if ce := t.argCalls.ContainsPkgCallExpr(suspect, c, false); ce 
!= nil { - return gosec.NewIssue(c, n, t.ID(), t.What, t.Severity, t.Confidence) + return c.NewIssue(n, t.ID(), t.What, t.Severity, t.Confidence) } if be, ok := suspect.(*ast.BinaryExpr); ok { if ops := gosec.GetBinaryExprOperands(be); len(ops) != 0 { @@ -55,7 +56,7 @@ func (t *badTempFile) findTempDirArgs(n ast.Node, c *gosec.Context, suspect ast. return nil } -func (t *badTempFile) Match(n ast.Node, c *gosec.Context) (gi *gosec.Issue, err error) { +func (t *badTempFile) Match(n ast.Node, c *gosec.Context) (gi *issue.Issue, err error) { if node := t.calls.ContainsPkgCallExpr(n, c, false); node != nil { return t.findTempDirArgs(n, c, node.Args[0]), nil } @@ -63,7 +64,7 @@ func (t *badTempFile) Match(n ast.Node, c *gosec.Context) (gi *gosec.Issue, err } // NewBadTempFile detects direct writes to predictable path in temporary directory -func NewBadTempFile(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewBadTempFile(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { calls := gosec.NewCallList() calls.Add("io/ioutil", "WriteFile") calls.AddAll("os", "Create", "WriteFile") @@ -77,10 +78,10 @@ func NewBadTempFile(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { args: regexp.MustCompile(`^(/(usr|var))?/tmp(/.*)?$`), argCalls: argCalls, nestedCalls: nestedCalls, - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.Medium, - Confidence: gosec.High, + Severity: issue.Medium, + Confidence: issue.High, What: "File creation in shared tmp directory without using ioutil.Tempfile", }, }, []ast.Node{(*ast.CallExpr)(nil)} diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/templates.go b/tools/vendor/github.com/securego/gosec/v2/rules/templates.go index 1eec7fba10..728766f457 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/templates.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/templates.go @@ -18,10 +18,11 @@ import ( "go/ast" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type templateCheck struct { - gosec.MetaData + issue.MetaData calls gosec.CallList } @@ -29,11 +30,11 @@ func (t *templateCheck) ID() string { return t.MetaData.ID } -func (t *templateCheck) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { +func (t *templateCheck) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { if node := t.calls.ContainsPkgCallExpr(n, c, false); node != nil { for _, arg := range node.Args { if _, ok := arg.(*ast.BasicLit); !ok { // basic lits are safe - return gosec.NewIssue(c, n, t.ID(), t.What, t.Severity, t.Confidence), nil + return c.NewIssue(n, t.ID(), t.What, t.Severity, t.Confidence), nil } } } @@ -42,7 +43,7 @@ func (t *templateCheck) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error // NewTemplateCheck constructs the template check rule. This rule is used to // find use of templates where HTML/JS escaping is not being used -func NewTemplateCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewTemplateCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { calls := gosec.NewCallList() calls.Add("html/template", "HTML") calls.Add("html/template", "HTMLAttr") @@ -50,10 +51,10 @@ func NewTemplateCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { calls.Add("html/template", "URL") return &templateCheck{ calls: calls, - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.Medium, - Confidence: gosec.Low, + Severity: issue.Medium, + Confidence: issue.Low, What: "The used method does not auto-escape HTML. 
This can potentially lead to 'Cross-site Scripting' vulnerabilities, in case the attacker controls the input.", }, }, []ast.Node{(*ast.CallExpr)(nil)} diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/tls.go b/tools/vendor/github.com/securego/gosec/v2/rules/tls.go index 1cc3a298f6..65a0b5a33a 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/tls.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/tls.go @@ -24,10 +24,11 @@ import ( "strconv" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type insecureConfigTLS struct { - gosec.MetaData + issue.MetaData MinVersion int64 MaxVersion int64 requiredType string @@ -49,13 +50,13 @@ func stringInSlice(a string, list []string) bool { return false } -func (t *insecureConfigTLS) processTLSCipherSuites(n ast.Node, c *gosec.Context) *gosec.Issue { +func (t *insecureConfigTLS) processTLSCipherSuites(n ast.Node, c *gosec.Context) *issue.Issue { if ciphers, ok := n.(*ast.CompositeLit); ok { for _, cipher := range ciphers.Elts { if ident, ok := cipher.(*ast.SelectorExpr); ok { if !stringInSlice(ident.Sel.Name, t.goodCiphers) { err := fmt.Sprintf("TLS Bad Cipher Suite: %s", ident.Sel.Name) - return gosec.NewIssue(c, ident, t.ID(), err, gosec.High, gosec.High) + return c.NewIssue(ident, t.ID(), err, issue.High, issue.High) } } } @@ -63,7 +64,7 @@ func (t *insecureConfigTLS) processTLSCipherSuites(n ast.Node, c *gosec.Context) return nil } -func (t *insecureConfigTLS) processTLSConf(n ast.Node, c *gosec.Context) *gosec.Issue { +func (t *insecureConfigTLS) processTLSConf(n ast.Node, c *gosec.Context) *issue.Issue { if kve, ok := n.(*ast.KeyValueExpr); ok { issue := t.processTLSConfVal(kve.Key, kve.Value, c) if issue != nil { @@ -83,27 +84,27 @@ func (t *insecureConfigTLS) processTLSConf(n ast.Node, c *gosec.Context) *gosec. 
return nil } -func (t *insecureConfigTLS) processTLSConfVal(key ast.Expr, value ast.Expr, c *gosec.Context) *gosec.Issue { +func (t *insecureConfigTLS) processTLSConfVal(key ast.Expr, value ast.Expr, c *gosec.Context) *issue.Issue { if ident, ok := key.(*ast.Ident); ok { switch ident.Name { case "InsecureSkipVerify": if node, ok := value.(*ast.Ident); ok { if node.Name != "false" { - return gosec.NewIssue(c, value, t.ID(), "TLS InsecureSkipVerify set true.", gosec.High, gosec.High) + return c.NewIssue(value, t.ID(), "TLS InsecureSkipVerify set true.", issue.High, issue.High) } } else { // TODO(tk): symbol tab look up to get the actual value - return gosec.NewIssue(c, value, t.ID(), "TLS InsecureSkipVerify may be true.", gosec.High, gosec.Low) + return c.NewIssue(value, t.ID(), "TLS InsecureSkipVerify may be true.", issue.High, issue.Low) } case "PreferServerCipherSuites": if node, ok := value.(*ast.Ident); ok { if node.Name == "false" { - return gosec.NewIssue(c, value, t.ID(), "TLS PreferServerCipherSuites set false.", gosec.Medium, gosec.High) + return c.NewIssue(value, t.ID(), "TLS PreferServerCipherSuites set false.", issue.Medium, issue.High) } } else { // TODO(tk): symbol tab look up to get the actual value - return gosec.NewIssue(c, value, t.ID(), "TLS PreferServerCipherSuites may be false.", gosec.Medium, gosec.Low) + return c.NewIssue(value, t.ID(), "TLS PreferServerCipherSuites may be false.", issue.Medium, issue.Low) } case "MinVersion": @@ -188,16 +189,16 @@ func (t *insecureConfigTLS) mapVersion(version string) int64 { return v } -func (t *insecureConfigTLS) checkVersion(n ast.Node, c *gosec.Context) *gosec.Issue { +func (t *insecureConfigTLS) checkVersion(n ast.Node, c *gosec.Context) *issue.Issue { if t.actualMaxVersion == 0 && t.actualMinVersion >= t.MinVersion { // no warning is generated since the min version is greater than the secure min version return nil } if t.actualMinVersion < t.MinVersion { - return gosec.NewIssue(c, n, t.ID(), "TLS MinVersion too low.", gosec.High, gosec.High) + return c.NewIssue(n, t.ID(), "TLS MinVersion too low.", issue.High, issue.High) } if t.actualMaxVersion < t.MaxVersion { - return gosec.NewIssue(c, n, t.ID(), "TLS MaxVersion too low.", gosec.High, gosec.High) + return c.NewIssue(n, t.ID(), "TLS MaxVersion too low.", issue.High, issue.High) } return nil } @@ -207,7 +208,7 @@ func (t *insecureConfigTLS) resetVersion() { t.actualMinVersion = 0 } -func (t *insecureConfigTLS) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { +func (t *insecureConfigTLS) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { if complit, ok := n.(*ast.CompositeLit); ok && complit.Type != nil { actualType := c.Info.TypeOf(complit.Type) if actualType != nil && actualType.String() == t.requiredType { diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/tls_config.go b/tools/vendor/github.com/securego/gosec/v2/rules/tls_config.go index 9bb17c2439..cbbdf7983a 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/tls_config.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/tls_config.go @@ -4,13 +4,14 @@ import ( "go/ast" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) // NewModernTLSCheck creates a check for Modern TLS ciphers // DO NOT EDIT - generated by tlsconfig tool -func NewModernTLSCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewModernTLSCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { return &insecureConfigTLS{ - MetaData: gosec.MetaData{ID: id}, + MetaData: 
issue.MetaData{ID: id}, requiredType: "crypto/tls.Config", MinVersion: 0x0304, MaxVersion: 0x0304, @@ -24,9 +25,9 @@ func NewModernTLSCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { // NewIntermediateTLSCheck creates a check for Intermediate TLS ciphers // DO NOT EDIT - generated by tlsconfig tool -func NewIntermediateTLSCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewIntermediateTLSCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { return &insecureConfigTLS{ - MetaData: gosec.MetaData{ID: id}, + MetaData: issue.MetaData{ID: id}, requiredType: "crypto/tls.Config", MinVersion: 0x0303, MaxVersion: 0x0304, @@ -50,9 +51,9 @@ func NewIntermediateTLSCheck(id string, conf gosec.Config) (gosec.Rule, []ast.No // NewOldTLSCheck creates a check for Old TLS ciphers // DO NOT EDIT - generated by tlsconfig tool -func NewOldTLSCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewOldTLSCheck(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { return &insecureConfigTLS{ - MetaData: gosec.MetaData{ID: id}, + MetaData: issue.MetaData{ID: id}, requiredType: "crypto/tls.Config", MinVersion: 0x0301, MaxVersion: 0x0304, diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/unsafe.go b/tools/vendor/github.com/securego/gosec/v2/rules/unsafe.go index 88a298fb52..e1e8d02310 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/unsafe.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/unsafe.go @@ -18,10 +18,11 @@ import ( "go/ast" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type usingUnsafe struct { - gosec.MetaData + issue.MetaData pkg string calls []string } @@ -30,24 +31,24 @@ func (r *usingUnsafe) ID() string { return r.MetaData.ID } -func (r *usingUnsafe) Match(n ast.Node, c *gosec.Context) (gi *gosec.Issue, err error) { +func (r *usingUnsafe) Match(n ast.Node, c *gosec.Context) (gi *issue.Issue, err error) { if _, matches := gosec.MatchCallByPackage(n, c, r.pkg, r.calls...); matches { - return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil } return nil, nil } // NewUsingUnsafe rule detects the use of the unsafe package. This is only // really useful for auditing purposes. 
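For illustration only (a small hypothetical program, not part of this diff), the unsafe-audit rule described above flags the calls listed in its call set — Alignof, Offsetof, Sizeof and Pointer — at sites such as:

```go
package main

import (
	"fmt"
	"unsafe"
)

func main() {
	x := int64(42)
	// gosec reports uses of unsafe.Sizeof and unsafe.Pointer so they can be audited.
	fmt.Println(unsafe.Sizeof(x)) // 8
	p := unsafe.Pointer(&x)       // flagged as well
	fmt.Println(*(*int64)(p))     // 42
}
```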
-func NewUsingUnsafe(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewUsingUnsafe(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { return &usingUnsafe{ pkg: "unsafe", calls: []string{"Alignof", "Offsetof", "Sizeof", "Pointer"}, - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, What: "Use of unsafe calls should be audited", - Severity: gosec.Low, - Confidence: gosec.High, + Severity: issue.Low, + Confidence: issue.High, }, }, []ast.Node{(*ast.CallExpr)(nil)} } diff --git a/tools/vendor/github.com/securego/gosec/v2/rules/weakcrypto.go b/tools/vendor/github.com/securego/gosec/v2/rules/weakcrypto.go index eecb88f046..4f2ab11d15 100644 --- a/tools/vendor/github.com/securego/gosec/v2/rules/weakcrypto.go +++ b/tools/vendor/github.com/securego/gosec/v2/rules/weakcrypto.go @@ -18,10 +18,11 @@ import ( "go/ast" "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/issue" ) type usesWeakCryptography struct { - gosec.MetaData + issue.MetaData blocklist map[string][]string } @@ -29,17 +30,17 @@ func (r *usesWeakCryptography) ID() string { return r.MetaData.ID } -func (r *usesWeakCryptography) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { +func (r *usesWeakCryptography) Match(n ast.Node, c *gosec.Context) (*issue.Issue, error) { for pkg, funcs := range r.blocklist { if _, matched := gosec.MatchCallByPackage(n, c, pkg, funcs...); matched { - return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + return c.NewIssue(n, r.ID(), r.What, r.Severity, r.Confidence), nil } } return nil, nil } // NewUsesWeakCryptography detects uses of des.* md5.* or rc4.* -func NewUsesWeakCryptography(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { +func NewUsesWeakCryptography(id string, _ gosec.Config) (gosec.Rule, []ast.Node) { calls := make(map[string][]string) calls["crypto/des"] = []string{"NewCipher", "NewTripleDESCipher"} calls["crypto/md5"] = []string{"New", "Sum"} @@ -47,10 +48,10 @@ func NewUsesWeakCryptography(id string, conf gosec.Config) (gosec.Rule, []ast.No calls["crypto/rc4"] = []string{"NewCipher"} rule := &usesWeakCryptography{ blocklist: calls, - MetaData: gosec.MetaData{ + MetaData: issue.MetaData{ ID: id, - Severity: gosec.Medium, - Confidence: gosec.High, + Severity: issue.Medium, + Confidence: issue.High, What: "Use of weak cryptographic primitive", }, } diff --git a/tools/vendor/github.com/sirupsen/logrus/README.md b/tools/vendor/github.com/sirupsen/logrus/README.md index b042c896f2..d1d4a85fd7 100644 --- a/tools/vendor/github.com/sirupsen/logrus/README.md +++ b/tools/vendor/github.com/sirupsen/logrus/README.md @@ -9,7 +9,7 @@ the last thing you want from your Logging library (again...). This does not mean Logrus is dead. Logrus will continue to be maintained for security, (backwards compatible) bug fixes, and performance (where we are -limited by the interface). +limited by the interface). I believe Logrus' biggest contribution is to have played a part in today's widespread use of structured logging in Golang. 
There doesn't seem to be a @@ -43,7 +43,7 @@ plain text): With `log.SetFormatter(&log.JSONFormatter{})`, for easy parsing by logstash or Splunk: -```json +```text {"animal":"walrus","level":"info","msg":"A group of walrus emerges from the ocean","size":10,"time":"2014-03-10 19:57:38.562264131 -0400 EDT"} @@ -99,7 +99,7 @@ time="2015-03-26T01:27:38-04:00" level=fatal method=github.com/sirupsen/arcticcr ``` Note that this does add measurable overhead - the cost will depend on the version of Go, but is between 20 and 40% in recent tests with 1.6 and 1.7. You can validate this in your -environment via benchmarks: +environment via benchmarks: ``` go test -bench=.*CallerTracing ``` @@ -317,6 +317,8 @@ log.SetLevel(log.InfoLevel) It may be useful to set `log.Level = logrus.DebugLevel` in a debug or verbose environment if your application has that. +Note: If you want different log levels for global (`log.SetLevel(...)`) and syslog logging, please check the [syslog hook README](hooks/syslog/README.md#different-log-levels-for-local-and-remote-logging). + #### Entries Besides the fields added with `WithField` or `WithFields` some fields are diff --git a/tools/vendor/github.com/sirupsen/logrus/writer.go b/tools/vendor/github.com/sirupsen/logrus/writer.go index 72e8e3a1b6..074fd4b8bd 100644 --- a/tools/vendor/github.com/sirupsen/logrus/writer.go +++ b/tools/vendor/github.com/sirupsen/logrus/writer.go @@ -4,6 +4,7 @@ import ( "bufio" "io" "runtime" + "strings" ) // Writer at INFO level. See WriterLevel for details. @@ -20,15 +21,18 @@ func (logger *Logger) WriterLevel(level Level) *io.PipeWriter { return NewEntry(logger).WriterLevel(level) } +// Writer returns an io.Writer that writes to the logger at the info log level func (entry *Entry) Writer() *io.PipeWriter { return entry.WriterLevel(InfoLevel) } +// WriterLevel returns an io.Writer that writes to the logger at the given log level func (entry *Entry) WriterLevel(level Level) *io.PipeWriter { reader, writer := io.Pipe() var printFunc func(args ...interface{}) + // Determine which log function to use based on the specified log level switch level { case TraceLevel: printFunc = entry.Trace @@ -48,23 +52,51 @@ func (entry *Entry) WriterLevel(level Level) *io.PipeWriter { printFunc = entry.Print } + // Start a new goroutine to scan the input and write it to the logger using the specified print function. + // It splits the input into chunks of up to 64KB to avoid buffer overflows. 
go entry.writerScanner(reader, printFunc) + + // Set a finalizer function to close the writer when it is garbage collected runtime.SetFinalizer(writer, writerFinalizer) return writer } +// writerScanner scans the input from the reader and writes it to the logger func (entry *Entry) writerScanner(reader *io.PipeReader, printFunc func(args ...interface{})) { scanner := bufio.NewScanner(reader) + + // Set the buffer size to the maximum token size to avoid buffer overflows + scanner.Buffer(make([]byte, bufio.MaxScanTokenSize), bufio.MaxScanTokenSize) + + // Define a split function to split the input into chunks of up to 64KB + chunkSize := bufio.MaxScanTokenSize // 64KB + splitFunc := func(data []byte, atEOF bool) (int, []byte, error) { + if len(data) >= chunkSize { + return chunkSize, data[:chunkSize], nil + } + + return bufio.ScanLines(data, atEOF) + } + + // Use the custom split function to split the input + scanner.Split(splitFunc) + + // Scan the input and write it to the logger using the specified print function for scanner.Scan() { - printFunc(scanner.Text()) + printFunc(strings.TrimRight(scanner.Text(), "\r\n")) } + + // If there was an error while scanning the input, log an error if err := scanner.Err(); err != nil { entry.Errorf("Error while reading from Writer: %s", err) } + + // Close the reader when we are done reader.Close() } +// WriterFinalizer is a finalizer function that closes then given writer when it is garbage collected func writerFinalizer(writer *io.PipeWriter) { writer.Close() } diff --git a/tools/vendor/github.com/sivchari/containedctx/containedctx.go b/tools/vendor/github.com/sivchari/containedctx/containedctx.go index 5a2c2dafd7..0260d6a6eb 100644 --- a/tools/vendor/github.com/sivchari/containedctx/containedctx.go +++ b/tools/vendor/github.com/sivchari/containedctx/containedctx.go @@ -34,16 +34,7 @@ func run(pass *analysis.Pass) (interface{}, error) { return } for _, field := range structTyp.Fields.List { - selectorExpr, ok := field.Type.(*ast.SelectorExpr) - if !ok { - continue - } - xname, ok := selectorExpr.X.(*ast.Ident) - if !ok { - continue - } - selname := selectorExpr.Sel.Name - if xname.Name+"."+selname == "context.Context" { + if pass.TypesInfo.TypeOf(field.Type).String() == "context.Context" { pass.Reportf(field.Pos(), "found a struct that contains a context.Context field") } } diff --git a/tools/vendor/github.com/sonatard/noctx/.golangci.yml b/tools/vendor/github.com/sonatard/noctx/.golangci.yml index 1580acde27..55ebeebdb0 100644 --- a/tools/vendor/github.com/sonatard/noctx/.golangci.yml +++ b/tools/vendor/github.com/sonatard/noctx/.golangci.yml @@ -1,20 +1,14 @@ run: - -linters-settings: - govet: + linters-settings: + govet: + enable-all: true + linters: enable-all: true - -linters: - enable-all: true - disable: - - gochecknoglobals - - gomnd - - gocognit - - nestif - -issues: - exclude-rules: - - path: reqwithoutctx/ssa.go - text: "Consider preallocating `exts`" - linters: - - prealloc + disable: + - gochecknoglobals + - gomnd + - gocognit + - nestif + - nilnil + - paralleltest + - varnamelen \ No newline at end of file diff --git a/tools/vendor/github.com/sonatard/noctx/README.md b/tools/vendor/github.com/sonatard/noctx/README.md index bfe9782c6d..b3793fc968 100644 --- a/tools/vendor/github.com/sonatard/noctx/README.md +++ b/tools/vendor/github.com/sonatard/noctx/README.md @@ -1,25 +1,60 @@ # noctx -![](https://github.com/sonatard/noctx/workflows/.github/workflows/ci.yml/badge.svg) +![](https://github.com/sonatard/noctx/workflows/CI/badge.svg) 
`noctx` finds sending http request without context.Context. -You should use `noctx` if sending http request in your library. +You should use `noctx` if sending http request in your library. Passing `context.Context` enables library user to cancel http request, getting trace information and so on. -## Install +## Usage + + +### noctx with go vet + +go vet is a Go standard tool for analyzing source code. +1. Install noctx. ```sh -$ go get -u github.com/sonatard/noctx/cmd/noctx +$ go install github.com/sonatard/noctx/cmd/noctx@latest ``` -## Usage - +2. noctx execute ```sh $ go vet -vettool=`which noctx` main.go ./main.go:6:11: net/http.Get must not be called ``` +### noctx with golangci-lint + +golangci-lint is a fast Go linters runner. + +1. Install golangci-lint. +[golangci-lint - Install](https://golangci-lint.run/usage/install/) + +2. Setup .golangci.yml +```yaml: +# Add noctx to enable linters. +linters: + enable: + - noctx + +# Or enable-all is true. +linters: + enable-all: true + disable: + - xxx # Add unused linter to disable linters. +``` + +3. noctx execute +```sh +# Use .golangci.yml +$ golangci-lint run + +# Only noctx execute +golangci-lint run --disable-all -E noctx +``` + ## Detection rules - Executing following functions - `net/http.Get` @@ -39,7 +74,51 @@ $ go vet -vettool=`which noctx` main.go `(http.Request).WithContext(ctx)` has a disadvantage of performance because it returns a copy of `http.Request`. Use `http.NewRequestWithContext` function if you only support Go1.13 or later. -## Sample Code + +If your library already provides functions that don't accept context, you define a new function that accepts context and make the existing function a wrapper for a new function. + + +```go +// Before fix code +// Sending an HTTP request but not accepting context +func Send(body io.Reader) error { + req,err := http.NewRequest(http.MethodPost, "http://example.com", body) + if err != nil { + return err + } + _, err = http.DefaultClient.Do(req) + if err != nil{ + return err + } + + return nil +} +``` + +```go +// After fix code +func Send(body io.Reader) error { + // Pass context.Background() to SendWithContext + return SendWithContext(context.Background(), body) +} + +// Sending an HTTP request and accepting context +func SendWithContext(ctx context.Context, body io.Reader) error { + // Change NewRequest to NewRequestWithContext and pass context it + req, err := http.NewRequestWithContext(ctx, http.MethodPost, "http://example.com", body) + if err != nil { + return nil + } + _, err = http.DefaultClient.Do(req) + if err != nil { + return err + } + + return nil +} +``` + +## Detection sample ```go package main diff --git a/tools/vendor/github.com/sonatard/noctx/ngfunc/main.go b/tools/vendor/github.com/sonatard/noctx/ngfunc/main.go index cfeb0f0010..46306218d2 100644 --- a/tools/vendor/github.com/sonatard/noctx/ngfunc/main.go +++ b/tools/vendor/github.com/sonatard/noctx/ngfunc/main.go @@ -1,6 +1,7 @@ package ngfunc import ( + "fmt" "go/types" "github.com/gostaticanalysis/analysisutil" @@ -34,8 +35,11 @@ func Run(pass *analysis.Pass) (interface{}, error) { func ngCalledFuncs(pass *analysis.Pass, ngFuncs []*types.Func) []*Report { var reports []*Report - srcFuncs := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs - for _, sf := range srcFuncs { + ssa, ok := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) + if !ok { + panic(fmt.Sprintf("%T is not *buildssa.SSA", pass.ResultOf[buildssa.Analyzer])) + } + for _, sf := range ssa.SrcFuncs { for _, b := range sf.Blocks { for _, 
instr := range b.Instrs { for _, ngFunc := range ngFuncs { diff --git a/tools/vendor/github.com/sonatard/noctx/noctx.go b/tools/vendor/github.com/sonatard/noctx/noctx.go index 478ad8855d..89e0446ecd 100644 --- a/tools/vendor/github.com/sonatard/noctx/noctx.go +++ b/tools/vendor/github.com/sonatard/noctx/noctx.go @@ -1,6 +1,7 @@ package noctx import ( + "fmt" "github.com/sonatard/noctx/ngfunc" "github.com/sonatard/noctx/reqwithoutctx" "golang.org/x/tools/go/analysis" @@ -8,23 +9,26 @@ import ( ) var Analyzer = &analysis.Analyzer{ - Name: "noctx", - Doc: Doc, - Run: run, + Name: "noctx", + Doc: Doc, + Run: run, + RunDespiteErrors: false, Requires: []*analysis.Analyzer{ buildssa.Analyzer, }, + ResultType: nil, + FactTypes: nil, } const Doc = "noctx finds sending http request without context.Context" func run(pass *analysis.Pass) (interface{}, error) { if _, err := ngfunc.Run(pass); err != nil { - return nil, err + return nil, fmt.Errorf("run: %w", err) } if _, err := reqwithoutctx.Run(pass); err != nil { - return nil, err + return nil, fmt.Errorf("run: %w", err) } return nil, nil diff --git a/tools/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go b/tools/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go index 35751269ee..d7e0f5084d 100644 --- a/tools/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go +++ b/tools/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go @@ -1,6 +1,7 @@ package reqwithoutctx import ( + "fmt" "go/types" "github.com/gostaticanalysis/analysisutil" @@ -10,6 +11,7 @@ import ( "golang.org/x/tools/go/ssa" ) +//nolint:govet type Analyzer struct { Funcs []*ssa.Function newRequestType types.Type @@ -20,10 +22,13 @@ func NewAnalyzer(pass *analysis.Pass) *Analyzer { newRequestType := analysisutil.TypeOf(pass, "net/http", "NewRequest") requestType := analysisutil.TypeOf(pass, "net/http", "*Request") - srcFuncs := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs + ssa, ok := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) + if !ok { + panic(fmt.Sprintf("%T is not *buildssa.SSA", pass.ResultOf[buildssa.Analyzer])) + } return &Analyzer{ - Funcs: srcFuncs, + Funcs: ssa.SrcFuncs, newRequestType: newRequestType, requestType: requestType, } @@ -88,14 +93,14 @@ func (a *Analyzer) usedReqs() map[string]*ssa.Extract { } func (a *Analyzer) usedReqByCall(call *ssa.Call) []*ssa.Extract { - var exts []*ssa.Extract + args := call.Common().Args + exts := make([]*ssa.Extract, 0, len(args)) // skip net/http.Request method call if call.Common().Signature().Recv() != nil && types.Identical(call.Value().Type(), a.requestType) { return exts } - args := call.Common().Args if len(args) == 0 { return exts } diff --git a/tools/vendor/github.com/spf13/cobra/.golangci.yml b/tools/vendor/github.com/spf13/cobra/.golangci.yml index 439d3e1de4..2578d94b5e 100644 --- a/tools/vendor/github.com/spf13/cobra/.golangci.yml +++ b/tools/vendor/github.com/spf13/cobra/.golangci.yml @@ -1,4 +1,4 @@ -# Copyright 2013-2022 The Cobra Authors +# Copyright 2013-2023 The Cobra Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
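A standalone sketch (plain interface{} values, nothing from the analysis packages) of the guarded comma-ok assertion pattern that the noctx changes above adopt in place of bare type assertions on pass.ResultOf:

```go
package main

import "fmt"

// asInt mirrors the pattern above: instead of letting a bare assertion panic
// with a generic runtime message, check the ok flag and fail with a
// descriptive one that names the unexpected dynamic type.
func asInt(v interface{}) int {
	n, ok := v.(int)
	if !ok {
		panic(fmt.Sprintf("%T is not int", v))
	}
	return n
}

func main() {
	fmt.Println(asInt(42)) // 42
}
```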
diff --git a/tools/vendor/github.com/spf13/cobra/Makefile b/tools/vendor/github.com/spf13/cobra/Makefile index c433a01bce..0da8d7aa08 100644 --- a/tools/vendor/github.com/spf13/cobra/Makefile +++ b/tools/vendor/github.com/spf13/cobra/Makefile @@ -5,10 +5,6 @@ ifeq (, $(shell which golangci-lint)) $(warning "could not find golangci-lint in $(PATH), run: curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh") endif -ifeq (, $(shell which richgo)) -$(warning "could not find richgo in $(PATH), run: go install github.com/kyoh86/richgo@latest") -endif - .PHONY: fmt lint test install_deps clean default: all @@ -25,6 +21,10 @@ lint: test: install_deps $(info ******************** running tests ********************) + go test -v ./... + +richtest: install_deps + $(info ******************** running tests with kyoh86/richgo ********************) richgo test -v ./... install_deps: diff --git a/tools/vendor/github.com/spf13/cobra/README.md b/tools/vendor/github.com/spf13/cobra/README.md index 7cc726beb4..592c0b8ab0 100644 --- a/tools/vendor/github.com/spf13/cobra/README.md +++ b/tools/vendor/github.com/spf13/cobra/README.md @@ -1,4 +1,4 @@ -![cobra logo](https://cloud.githubusercontent.com/assets/173412/10886352/ad566232-814f-11e5-9cd0-aa101788c117.png) +![cobra logo](assets/CobraMain.png) Cobra is a library for creating powerful modern CLI applications. @@ -6,7 +6,7 @@ Cobra is used in many Go projects such as [Kubernetes](https://kubernetes.io/), [Hugo](https://gohugo.io), and [GitHub CLI](https://github.com/cli/cli) to name a few. [This list](./projects_using_cobra.md) contains a more extensive list of projects using Cobra. -[![](https://img.shields.io/github/workflow/status/spf13/cobra/Test?longCache=tru&label=Test&logo=github%20actions&logoColor=fff)](https://github.com/spf13/cobra/actions?query=workflow%3ATest) +[![](https://img.shields.io/github/actions/workflow/status/spf13/cobra/test.yml?branch=main&longCache=true&label=Test&logo=github%20actions&logoColor=fff)](https://github.com/spf13/cobra/actions?query=workflow%3ATest) [![Go Reference](https://pkg.go.dev/badge/github.com/spf13/cobra.svg)](https://pkg.go.dev/github.com/spf13/cobra) [![Go Report Card](https://goreportcard.com/badge/github.com/spf13/cobra)](https://goreportcard.com/report/github.com/spf13/cobra) [![Slack](https://img.shields.io/badge/Slack-cobra-brightgreen)](https://gophers.slack.com/archives/CD3LP1199) diff --git a/tools/vendor/github.com/spf13/cobra/active_help.go b/tools/vendor/github.com/spf13/cobra/active_help.go index 95e03aecb6..2d0239437a 100644 --- a/tools/vendor/github.com/spf13/cobra/active_help.go +++ b/tools/vendor/github.com/spf13/cobra/active_help.go @@ -1,4 +1,4 @@ -// Copyright 2013-2022 The Cobra Authors +// Copyright 2013-2023 The Cobra Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tools/vendor/github.com/spf13/cobra/args.go b/tools/vendor/github.com/spf13/cobra/args.go index 2c1f99e787..e79ec33a81 100644 --- a/tools/vendor/github.com/spf13/cobra/args.go +++ b/tools/vendor/github.com/spf13/cobra/args.go @@ -1,4 +1,4 @@ -// Copyright 2013-2022 The Cobra Authors +// Copyright 2013-2023 The Cobra Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -21,7 +21,7 @@ import ( type PositionalArgs func(cmd *Command, args []string) error -// Legacy arg validation has the following behaviour: +// legacyArgs validation has the following behaviour: // - root commands with no subcommands can take arbitrary arguments // - root commands with subcommands will do subcommand validity checking // - subcommands will always accept arbitrary arguments diff --git a/tools/vendor/github.com/spf13/cobra/bash_completions.go b/tools/vendor/github.com/spf13/cobra/bash_completions.go index 3acdb27974..10c78847de 100644 --- a/tools/vendor/github.com/spf13/cobra/bash_completions.go +++ b/tools/vendor/github.com/spf13/cobra/bash_completions.go @@ -1,4 +1,4 @@ -// Copyright 2013-2022 The Cobra Authors +// Copyright 2013-2023 The Cobra Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -532,7 +532,7 @@ func writeLocalNonPersistentFlag(buf io.StringWriter, flag *pflag.Flag) { } } -// Setup annotations for go completions for registered flags +// prepareCustomAnnotationsForFlags setup annotations for go completions for registered flags func prepareCustomAnnotationsForFlags(cmd *Command) { flagCompletionMutex.RLock() defer flagCompletionMutex.RUnlock() diff --git a/tools/vendor/github.com/spf13/cobra/bash_completionsV2.go b/tools/vendor/github.com/spf13/cobra/bash_completionsV2.go index bb4b71892c..19b09560c1 100644 --- a/tools/vendor/github.com/spf13/cobra/bash_completionsV2.go +++ b/tools/vendor/github.com/spf13/cobra/bash_completionsV2.go @@ -1,4 +1,4 @@ -// Copyright 2013-2022 The Cobra Authors +// Copyright 2013-2023 The Cobra Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -38,7 +38,7 @@ func genBashComp(buf io.StringWriter, name string, includeDesc bool) { __%[1]s_debug() { - if [[ -n ${BASH_COMP_DEBUG_FILE:-} ]]; then + if [[ -n ${BASH_COMP_DEBUG_FILE-} ]]; then echo "$*" >> "${BASH_COMP_DEBUG_FILE}" fi } @@ -65,7 +65,7 @@ __%[1]s_get_completion_results() { lastChar=${lastParam:$((${#lastParam}-1)):1} __%[1]s_debug "lastParam ${lastParam}, lastChar ${lastChar}" - if [ -z "${cur}" ] && [ "${lastChar}" != "=" ]; then + if [[ -z ${cur} && ${lastChar} != = ]]; then # If the last parameter is complete (there is a space following it) # We add an extra empty parameter so we can indicate this to the go method. __%[1]s_debug "Adding extra empty parameter" @@ -75,7 +75,7 @@ __%[1]s_get_completion_results() { # When completing a flag with an = (e.g., %[1]s -n=) # bash focuses on the part after the =, so we need to remove # the flag part from $cur - if [[ "${cur}" == -*=* ]]; then + if [[ ${cur} == -*=* ]]; then cur="${cur#*=}" fi @@ -87,7 +87,7 @@ __%[1]s_get_completion_results() { directive=${out##*:} # Remove the directive out=${out%%:*} - if [ "${directive}" = "${out}" ]; then + if [[ ${directive} == "${out}" ]]; then # There is not directive specified directive=0 fi @@ -101,22 +101,36 @@ __%[1]s_process_completion_results() { local shellCompDirectiveNoFileComp=%[5]d local shellCompDirectiveFilterFileExt=%[6]d local shellCompDirectiveFilterDirs=%[7]d + local shellCompDirectiveKeepOrder=%[8]d - if [ $((directive & shellCompDirectiveError)) -ne 0 ]; then + if (((directive & shellCompDirectiveError) != 0)); then # Error code. No completion. 
__%[1]s_debug "Received error from custom completion go code" return else - if [ $((directive & shellCompDirectiveNoSpace)) -ne 0 ]; then - if [[ $(type -t compopt) = "builtin" ]]; then + if (((directive & shellCompDirectiveNoSpace) != 0)); then + if [[ $(type -t compopt) == builtin ]]; then __%[1]s_debug "Activating no space" compopt -o nospace else __%[1]s_debug "No space directive not supported in this version of bash" fi fi - if [ $((directive & shellCompDirectiveNoFileComp)) -ne 0 ]; then - if [[ $(type -t compopt) = "builtin" ]]; then + if (((directive & shellCompDirectiveKeepOrder) != 0)); then + if [[ $(type -t compopt) == builtin ]]; then + # no sort isn't supported for bash less than < 4.4 + if [[ ${BASH_VERSINFO[0]} -lt 4 || ( ${BASH_VERSINFO[0]} -eq 4 && ${BASH_VERSINFO[1]} -lt 4 ) ]]; then + __%[1]s_debug "No sort directive not supported in this version of bash" + else + __%[1]s_debug "Activating keep order" + compopt -o nosort + fi + else + __%[1]s_debug "No sort directive not supported in this version of bash" + fi + fi + if (((directive & shellCompDirectiveNoFileComp) != 0)); then + if [[ $(type -t compopt) == builtin ]]; then __%[1]s_debug "Activating no file completion" compopt +o default else @@ -130,7 +144,7 @@ __%[1]s_process_completion_results() { local activeHelp=() __%[1]s_extract_activeHelp - if [ $((directive & shellCompDirectiveFilterFileExt)) -ne 0 ]; then + if (((directive & shellCompDirectiveFilterFileExt) != 0)); then # File extension filtering local fullFilter filter filteringCmd @@ -143,13 +157,12 @@ __%[1]s_process_completion_results() { filteringCmd="_filedir $fullFilter" __%[1]s_debug "File filtering command: $filteringCmd" $filteringCmd - elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then + elif (((directive & shellCompDirectiveFilterDirs) != 0)); then # File completion for directories only - # Use printf to strip any trailing newline local subdir - subdir=$(printf "%%s" "${completions[0]}") - if [ -n "$subdir" ]; then + subdir=${completions[0]} + if [[ -n $subdir ]]; then __%[1]s_debug "Listing directories in $subdir" pushd "$subdir" >/dev/null 2>&1 && _filedir -d && popd >/dev/null 2>&1 || return else @@ -164,7 +177,7 @@ __%[1]s_process_completion_results() { __%[1]s_handle_special_char "$cur" = # Print the activeHelp statements before we finish - if [ ${#activeHelp[*]} -ne 0 ]; then + if ((${#activeHelp[*]} != 0)); then printf "\n"; printf "%%s\n" "${activeHelp[@]}" printf "\n" @@ -184,21 +197,21 @@ __%[1]s_process_completion_results() { # Separate activeHelp lines from real completions. # Fills the $activeHelp and $completions arrays. 
__%[1]s_extract_activeHelp() { - local activeHelpMarker="%[8]s" + local activeHelpMarker="%[9]s" local endIndex=${#activeHelpMarker} while IFS='' read -r comp; do - if [ "${comp:0:endIndex}" = "$activeHelpMarker" ]; then + if [[ ${comp:0:endIndex} == $activeHelpMarker ]]; then comp=${comp:endIndex} __%[1]s_debug "ActiveHelp found: $comp" - if [ -n "$comp" ]; then + if [[ -n $comp ]]; then activeHelp+=("$comp") fi else # Not an activeHelp line but a normal completion completions+=("$comp") fi - done < <(printf "%%s\n" "${out}") + done <<<"${out}" } __%[1]s_handle_completion_types() { @@ -254,7 +267,7 @@ __%[1]s_handle_standard_completion_case() { done < <(printf "%%s\n" "${completions[@]}") # If there is a single completion left, remove the description text - if [ ${#COMPREPLY[*]} -eq 1 ]; then + if ((${#COMPREPLY[*]} == 1)); then __%[1]s_debug "COMPREPLY[0]: ${COMPREPLY[0]}" comp="${COMPREPLY[0]%%%%$tab*}" __%[1]s_debug "Removed description from single completion, which is now: ${comp}" @@ -271,8 +284,8 @@ __%[1]s_handle_special_char() if [[ "$comp" == *${char}* && "$COMP_WORDBREAKS" == *${char}* ]]; then local word=${comp%%"${comp##*${char}}"} local idx=${#COMPREPLY[*]} - while [[ $((--idx)) -ge 0 ]]; do - COMPREPLY[$idx]=${COMPREPLY[$idx]#"$word"} + while ((--idx >= 0)); do + COMPREPLY[idx]=${COMPREPLY[idx]#"$word"} done fi } @@ -298,7 +311,7 @@ __%[1]s_format_comp_descriptions() # Make sure we can fit a description of at least 8 characters # if we are to align the descriptions. - if [[ $maxdesclength -gt 8 ]]; then + if ((maxdesclength > 8)); then # Add the proper number of spaces to align the descriptions for ((i = ${#comp} ; i < longest ; i++)); do comp+=" " @@ -310,8 +323,8 @@ __%[1]s_format_comp_descriptions() # If there is enough space for any description text, # truncate the descriptions that are too long for the shell width - if [ $maxdesclength -gt 0 ]; then - if [ ${#desc} -gt $maxdesclength ]; then + if ((maxdesclength > 0)); then + if ((${#desc} > maxdesclength)); then desc=${desc:0:$(( maxdesclength - 1 ))} desc+="…" fi @@ -332,9 +345,9 @@ __start_%[1]s() # Call _init_completion from the bash-completion package # to prepare the arguments properly if declare -F _init_completion >/dev/null 2>&1; then - _init_completion -n "=:" || return + _init_completion -n =: || return else - __%[1]s_init_completion -n "=:" || return + __%[1]s_init_completion -n =: || return fi __%[1]s_debug @@ -361,7 +374,7 @@ fi # ex: ts=4 sw=4 et filetype=sh `, name, compCmd, ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp, - ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs, + ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs, ShellCompDirectiveKeepOrder, activeHelpMarker)) } diff --git a/tools/vendor/github.com/spf13/cobra/cobra.go b/tools/vendor/github.com/spf13/cobra/cobra.go index fe44bc8a07..b07b44a0ce 100644 --- a/tools/vendor/github.com/spf13/cobra/cobra.go +++ b/tools/vendor/github.com/spf13/cobra/cobra.go @@ -1,4 +1,4 @@ -// Copyright 2013-2022 The Cobra Authors +// Copyright 2013-2023 The Cobra Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -167,8 +167,8 @@ func appendIfNotPresent(s, stringToAppend string) string { // rpad adds padding to the right of a string. 
func rpad(s string, padding int) string { - template := fmt.Sprintf("%%-%ds", padding) - return fmt.Sprintf(template, s) + formattedString := fmt.Sprintf("%%-%ds", padding) + return fmt.Sprintf(formattedString, s) } // tmpl executes the given template text on data, writing the result to w. diff --git a/tools/vendor/github.com/spf13/cobra/command.go b/tools/vendor/github.com/spf13/cobra/command.go index 6ff47dd5c3..01f7c6f1c5 100644 --- a/tools/vendor/github.com/spf13/cobra/command.go +++ b/tools/vendor/github.com/spf13/cobra/command.go @@ -1,4 +1,4 @@ -// Copyright 2013-2022 The Cobra Authors +// Copyright 2013-2023 The Cobra Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -35,7 +35,7 @@ const FlagSetByCobraAnnotation = "cobra_annotation_flag_set_by_cobra" // FParseErrWhitelist configures Flag parse errors to be ignored type FParseErrWhitelist flag.ParseErrorsWhitelist -// Structure to manage groups for commands +// Group Structure to manage groups for commands type Group struct { ID string Title string @@ -47,7 +47,7 @@ type Group struct { // definition to ensure usability. type Command struct { // Use is the one-line usage message. - // Recommended syntax is as follow: + // Recommended syntax is as follows: // [ ] identifies an optional argument. Arguments that are not enclosed in brackets are required. // ... indicates that you can specify multiple values for the previous argument. // | indicates mutually exclusive information. You can use the argument to the left of the separator or the @@ -321,7 +321,7 @@ func (c *Command) SetHelpCommand(cmd *Command) { c.helpCommand = cmd } -// SetHelpCommandGroup sets the group id of the help command. +// SetHelpCommandGroupID sets the group id of the help command. func (c *Command) SetHelpCommandGroupID(groupID string) { if c.helpCommand != nil { c.helpCommand.GroupID = groupID @@ -330,7 +330,7 @@ func (c *Command) SetHelpCommandGroupID(groupID string) { c.helpCommandGroupID = groupID } -// SetCompletionCommandGroup sets the group id of the completion command. +// SetCompletionCommandGroupID sets the group id of the completion command. func (c *Command) SetCompletionCommandGroupID(groupID string) { // completionCommandGroupID is used if no completion command is defined by the user c.Root().completionCommandGroupID = groupID @@ -655,20 +655,44 @@ Loop: // argsMinusFirstX removes only the first x from args. Otherwise, commands that look like // openshift admin policy add-role-to-user admin my-user, lose the admin argument (arg[4]). -func argsMinusFirstX(args []string, x string) []string { - for i, y := range args { - if x == y { - ret := []string{} - ret = append(ret, args[:i]...) - ret = append(ret, args[i+1:]...) - return ret +// Special care needs to be taken not to remove a flag value. +func (c *Command) argsMinusFirstX(args []string, x string) []string { + if len(args) == 0 { + return args + } + c.mergePersistentFlags() + flags := c.Flags() + +Loop: + for pos := 0; pos < len(args); pos++ { + s := args[pos] + switch { + case s == "--": + // -- means we have reached the end of the parseable args. Break out of the loop now. + break Loop + case strings.HasPrefix(s, "--") && !strings.Contains(s, "=") && !hasNoOptDefVal(s[2:], flags): + fallthrough + case strings.HasPrefix(s, "-") && !strings.Contains(s, "=") && len(s) == 2 && !shortHasNoOptDefVal(s[1:], flags): + // This is a flag without a default value, and an equal sign is not used. 
Increment pos in order to skip + // over the next arg, because that is the value of this flag. + pos++ + continue + case !strings.HasPrefix(s, "-"): + // This is not a flag or a flag value. Check to see if it matches what we're looking for, and if so, + // return the args, excluding the one at this position. + if s == x { + ret := []string{} + ret = append(ret, args[:pos]...) + ret = append(ret, args[pos+1:]...) + return ret + } } } return args } func isFlagArg(arg string) bool { - return ((len(arg) >= 3 && arg[1] == '-') || + return ((len(arg) >= 3 && arg[0:2] == "--") || (len(arg) >= 2 && arg[0] == '-' && arg[1] != '-')) } @@ -686,7 +710,7 @@ func (c *Command) Find(args []string) (*Command, []string, error) { cmd := c.findNext(nextSubCmd) if cmd != nil { - return innerfind(cmd, argsMinusFirstX(innerArgs, nextSubCmd)) + return innerfind(cmd, c.argsMinusFirstX(innerArgs, nextSubCmd)) } return c, innerArgs } @@ -1272,7 +1296,7 @@ func (c *Command) AllChildCommandsHaveGroup() bool { return true } -// ContainGroups return if groupID exists in the list of command groups. +// ContainsGroup return if groupID exists in the list of command groups. func (c *Command) ContainsGroup(groupID string) bool { for _, x := range c.commandgroups { if x.ID == groupID { diff --git a/tools/vendor/github.com/spf13/cobra/command_notwin.go b/tools/vendor/github.com/spf13/cobra/command_notwin.go index 2b77f8f019..307f0c127f 100644 --- a/tools/vendor/github.com/spf13/cobra/command_notwin.go +++ b/tools/vendor/github.com/spf13/cobra/command_notwin.go @@ -1,4 +1,4 @@ -// Copyright 2013-2022 The Cobra Authors +// Copyright 2013-2023 The Cobra Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tools/vendor/github.com/spf13/cobra/command_win.go b/tools/vendor/github.com/spf13/cobra/command_win.go index 520f23abf0..adbef395c2 100644 --- a/tools/vendor/github.com/spf13/cobra/command_win.go +++ b/tools/vendor/github.com/spf13/cobra/command_win.go @@ -1,4 +1,4 @@ -// Copyright 2013-2022 The Cobra Authors +// Copyright 2013-2023 The Cobra Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tools/vendor/github.com/spf13/cobra/completions.go b/tools/vendor/github.com/spf13/cobra/completions.go index e8a0206db1..ee38c4d0b8 100644 --- a/tools/vendor/github.com/spf13/cobra/completions.go +++ b/tools/vendor/github.com/spf13/cobra/completions.go @@ -1,4 +1,4 @@ -// Copyright 2013-2022 The Cobra Authors +// Copyright 2013-2023 The Cobra Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -77,6 +77,10 @@ const ( // obtain the same behavior but only for flags. ShellCompDirectiveFilterDirs + // ShellCompDirectiveKeepOrder indicates that the shell should preserve the order + // in which the completions are provided + ShellCompDirectiveKeepOrder + // =========================================================================== // All directives using iota should be above this one. 
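(Editor's note, not part of the vendored patch: the hunk above introduces the new `ShellCompDirectiveKeepOrder` directive. Below is a minimal, hypothetical sketch of how a command built against this cobra version might return it from a `ValidArgsFunction` so the generated shell scripts preserve completion order; the command and stage names are made up, while the `ValidArgsFunction` signature and directive constants are taken from the vendored code itself.)

```go
package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

func main() {
	rootCmd := &cobra.Command{Use: "pipeline"}

	runCmd := &cobra.Command{
		Use:   "run [stage]",
		Short: "Run a pipeline stage",
		// Complete stage names in execution order; ShellCompDirectiveKeepOrder
		// (added in this cobra bump) asks the shell not to re-sort them, and
		// ShellCompDirectiveNoFileComp suppresses fallback file completion.
		ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
			stages := []string{"checkout", "build", "test", "deploy"}
			return stages, cobra.ShellCompDirectiveKeepOrder | cobra.ShellCompDirectiveNoFileComp
		},
		RunE: func(cmd *cobra.Command, args []string) error {
			fmt.Println("running", args)
			return nil
		},
	}

	rootCmd.AddCommand(runCmd)
	_ = rootCmd.Execute()
}
```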
@@ -159,6 +163,9 @@ func (d ShellCompDirective) string() string { if d&ShellCompDirectiveFilterDirs != 0 { directives = append(directives, "ShellCompDirectiveFilterDirs") } + if d&ShellCompDirectiveKeepOrder != 0 { + directives = append(directives, "ShellCompDirectiveKeepOrder") + } if len(directives) == 0 { directives = append(directives, "ShellCompDirectiveDefault") } @@ -169,7 +176,7 @@ func (d ShellCompDirective) string() string { return strings.Join(directives, ", ") } -// Adds a special hidden command that can be used to request custom completions. +// initCompleteCmd adds a special hidden command that can be used to request custom completions. func (c *Command) initCompleteCmd(args []string) { completeCmd := &Command{ Use: fmt.Sprintf("%s [command-line]", ShellCompRequestCmd), @@ -727,7 +734,7 @@ to enable it. You can execute the following once: To load completions in your current shell session: - source <(%[1]s completion zsh); compdef _%[1]s %[1]s + source <(%[1]s completion zsh) To load completions for every new session, execute once: diff --git a/tools/vendor/github.com/spf13/cobra/fish_completions.go b/tools/vendor/github.com/spf13/cobra/fish_completions.go index 97112a17b2..12ca0d2b11 100644 --- a/tools/vendor/github.com/spf13/cobra/fish_completions.go +++ b/tools/vendor/github.com/spf13/cobra/fish_completions.go @@ -1,4 +1,4 @@ -// Copyright 2013-2022 The Cobra Authors +// Copyright 2013-2023 The Cobra Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -53,7 +53,7 @@ function __%[1]s_perform_completion __%[1]s_debug "last arg: $lastArg" # Disable ActiveHelp which is not supported for fish shell - set -l requestComp "%[9]s=0 $args[1] %[3]s $args[2..-1] $lastArg" + set -l requestComp "%[10]s=0 $args[1] %[3]s $args[2..-1] $lastArg" __%[1]s_debug "Calling $requestComp" set -l results (eval $requestComp 2> /dev/null) @@ -89,6 +89,60 @@ function __%[1]s_perform_completion printf "%%s\n" "$directiveLine" end +# this function limits calls to __%[1]s_perform_completion, by caching the result behind $__%[1]s_perform_completion_once_result +function __%[1]s_perform_completion_once + __%[1]s_debug "Starting __%[1]s_perform_completion_once" + + if test -n "$__%[1]s_perform_completion_once_result" + __%[1]s_debug "Seems like a valid result already exists, skipping __%[1]s_perform_completion" + return 0 + end + + set --global __%[1]s_perform_completion_once_result (__%[1]s_perform_completion) + if test -z "$__%[1]s_perform_completion_once_result" + __%[1]s_debug "No completions, probably due to a failure" + return 1 + end + + __%[1]s_debug "Performed completions and set __%[1]s_perform_completion_once_result" + return 0 +end + +# this function is used to clear the $__%[1]s_perform_completion_once_result variable after completions are run +function __%[1]s_clear_perform_completion_once_result + __%[1]s_debug "" + __%[1]s_debug "========= clearing previously set __%[1]s_perform_completion_once_result variable ==========" + set --erase __%[1]s_perform_completion_once_result + __%[1]s_debug "Succesfully erased the variable __%[1]s_perform_completion_once_result" +end + +function __%[1]s_requires_order_preservation + __%[1]s_debug "" + __%[1]s_debug "========= checking if order preservation is required ==========" + + __%[1]s_perform_completion_once + if test -z "$__%[1]s_perform_completion_once_result" + __%[1]s_debug "Error determining if order preservation is required" + return 1 + end + + set -l 
directive (string sub --start 2 $__%[1]s_perform_completion_once_result[-1]) + __%[1]s_debug "Directive is: $directive" + + set -l shellCompDirectiveKeepOrder %[9]d + set -l keeporder (math (math --scale 0 $directive / $shellCompDirectiveKeepOrder) %% 2) + __%[1]s_debug "Keeporder is: $keeporder" + + if test $keeporder -ne 0 + __%[1]s_debug "This does require order preservation" + return 0 + end + + __%[1]s_debug "This doesn't require order preservation" + return 1 +end + + # This function does two things: # - Obtain the completions and store them in the global __%[1]s_comp_results # - Return false if file completion should be performed @@ -99,17 +153,17 @@ function __%[1]s_prepare_completions # Start fresh set --erase __%[1]s_comp_results - set -l results (__%[1]s_perform_completion) - __%[1]s_debug "Completion results: $results" + __%[1]s_perform_completion_once + __%[1]s_debug "Completion results: $__%[1]s_perform_completion_once_result" - if test -z "$results" + if test -z "$__%[1]s_perform_completion_once_result" __%[1]s_debug "No completion, probably due to a failure" # Might as well do file completion, in case it helps return 1 end - set -l directive (string sub --start 2 $results[-1]) - set --global __%[1]s_comp_results $results[1..-2] + set -l directive (string sub --start 2 $__%[1]s_perform_completion_once_result[-1]) + set --global __%[1]s_comp_results $__%[1]s_perform_completion_once_result[1..-2] __%[1]s_debug "Completions are: $__%[1]s_comp_results" __%[1]s_debug "Directive is: $directive" @@ -205,13 +259,17 @@ end # Remove any pre-existing completions for the program since we will be handling all of them. complete -c %[2]s -e +# this will get called after the two calls below and clear the $__%[1]s_perform_completion_once_result global +complete -c %[2]s -n '__%[1]s_clear_perform_completion_once_result' # The call to __%[1]s_prepare_completions will setup __%[1]s_comp_results # which provides the program's completion choices. -complete -c %[2]s -n '__%[1]s_prepare_completions' -f -a '$__%[1]s_comp_results' - +# If this doesn't require order preservation, we don't use the -k flag +complete -c %[2]s -n 'not __%[1]s_requires_order_preservation && __%[1]s_prepare_completions' -f -a '$__%[1]s_comp_results' +# otherwise we use the -k flag +complete -k -c %[2]s -n '__%[1]s_requires_order_preservation && __%[1]s_prepare_completions' -f -a '$__%[1]s_comp_results' `, nameForVar, name, compCmd, ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp, - ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs, activeHelpEnvVar(name))) + ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs, ShellCompDirectiveKeepOrder, activeHelpEnvVar(name))) } // GenFishCompletion generates fish completion file and writes to the passed writer. diff --git a/tools/vendor/github.com/spf13/cobra/flag_groups.go b/tools/vendor/github.com/spf13/cobra/flag_groups.go index 9c377aaf9c..b35fde1554 100644 --- a/tools/vendor/github.com/spf13/cobra/flag_groups.go +++ b/tools/vendor/github.com/spf13/cobra/flag_groups.go @@ -1,4 +1,4 @@ -// Copyright 2013-2022 The Cobra Authors +// Copyright 2013-2023 The Cobra Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/tools/vendor/github.com/spf13/cobra/powershell_completions.go b/tools/vendor/github.com/spf13/cobra/powershell_completions.go index 004de42e41..177d2755f2 100644 --- a/tools/vendor/github.com/spf13/cobra/powershell_completions.go +++ b/tools/vendor/github.com/spf13/cobra/powershell_completions.go @@ -1,4 +1,4 @@ -// Copyright 2013-2022 The Cobra Authors +// Copyright 2013-2023 The Cobra Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -77,6 +77,7 @@ filter __%[1]s_escapeStringWithSpecialChars { $ShellCompDirectiveNoFileComp=%[6]d $ShellCompDirectiveFilterFileExt=%[7]d $ShellCompDirectiveFilterDirs=%[8]d + $ShellCompDirectiveKeepOrder=%[9]d # Prepare the command to request completions for the program. # Split the command at the first space to separate the program and arguments. @@ -106,13 +107,22 @@ filter __%[1]s_escapeStringWithSpecialChars { # If the last parameter is complete (there is a space following it) # We add an extra empty parameter so we can indicate this to the go method. __%[1]s_debug "Adding extra empty parameter" -`+" # We need to use `\"`\" to pass an empty argument a \"\" or '' does not work!!!"+` -`+" $RequestComp=\"$RequestComp\" + ' `\"`\"'"+` + # PowerShell 7.2+ changed the way how the arguments are passed to executables, + # so for pre-7.2 or when Legacy argument passing is enabled we need to use +`+" # `\"`\" to pass an empty argument, a \"\" or '' does not work!!!"+` + if ($PSVersionTable.PsVersion -lt [version]'7.2.0' -or + ($PSVersionTable.PsVersion -lt [version]'7.3.0' -and -not [ExperimentalFeature]::IsEnabled("PSNativeCommandArgumentPassing")) -or + (($PSVersionTable.PsVersion -ge [version]'7.3.0' -or [ExperimentalFeature]::IsEnabled("PSNativeCommandArgumentPassing")) -and + $PSNativeCommandArgumentPassing -eq 'Legacy')) { +`+" $RequestComp=\"$RequestComp\" + ' `\"`\"'"+` + } else { + $RequestComp="$RequestComp" + ' ""' + } } __%[1]s_debug "Calling $RequestComp" # First disable ActiveHelp which is not supported for Powershell - $env:%[9]s=0 + $env:%[10]s=0 #call the command store the output in $out and redirect stderr and stdout to null # $Out is an array contains each line per element @@ -137,7 +147,7 @@ filter __%[1]s_escapeStringWithSpecialChars { } $Longest = 0 - $Values = $Out | ForEach-Object { + [Array]$Values = $Out | ForEach-Object { #Split the output in name and description `+" $Name, $Description = $_.Split(\"`t\",2)"+` __%[1]s_debug "Name: $Name Description: $Description" @@ -182,6 +192,11 @@ filter __%[1]s_escapeStringWithSpecialChars { } } + # we sort the values in ascending order by name if keep order isn't passed + if (($Directive -band $ShellCompDirectiveKeepOrder) -eq 0 ) { + $Values = $Values | Sort-Object -Property Name + } + if (($Directive -band $ShellCompDirectiveNoFileComp) -ne 0 ) { __%[1]s_debug "ShellCompDirectiveNoFileComp is called" @@ -267,7 +282,7 @@ filter __%[1]s_escapeStringWithSpecialChars { Register-ArgumentCompleter -CommandName '%[1]s' -ScriptBlock $__%[2]sCompleterBlock `, name, nameForVar, compCmd, ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp, - ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs, activeHelpEnvVar(name))) + ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs, ShellCompDirectiveKeepOrder, activeHelpEnvVar(name))) } func (c *Command) genPowerShellCompletion(w io.Writer, includeDesc bool) error { diff --git 
a/tools/vendor/github.com/spf13/cobra/projects_using_cobra.md b/tools/vendor/github.com/spf13/cobra/projects_using_cobra.md index 6865f88e79..8a291eb20e 100644 --- a/tools/vendor/github.com/spf13/cobra/projects_using_cobra.md +++ b/tools/vendor/github.com/spf13/cobra/projects_using_cobra.md @@ -1,11 +1,13 @@ ## Projects using Cobra - [Allero](https://github.com/allero-io/allero) +- [Arewefastyet](https://benchmark.vitess.io) - [Arduino CLI](https://github.com/arduino/arduino-cli) - [Bleve](https://blevesearch.com/) - [Cilium](https://cilium.io/) - [CloudQuery](https://github.com/cloudquery/cloudquery) - [CockroachDB](https://www.cockroachlabs.com/) +- [Constellation](https://github.com/edgelesssys/constellation) - [Cosmos SDK](https://github.com/cosmos/cosmos-sdk) - [Datree](https://github.com/datreeio/datree) - [Delve](https://github.com/derekparker/delve) @@ -25,7 +27,7 @@ - [Istio](https://istio.io) - [Kool](https://github.com/kool-dev/kool) - [Kubernetes](https://kubernetes.io/) -- [Kubescape](https://github.com/armosec/kubescape) +- [Kubescape](https://github.com/kubescape/kubescape) - [KubeVirt](https://github.com/kubevirt/kubevirt) - [Linkerd](https://linkerd.io/) - [Mattermost-server](https://github.com/mattermost/mattermost-server) @@ -51,10 +53,12 @@ - [Random](https://github.com/erdaltsksn/random) - [Rclone](https://rclone.org/) - [Scaleway CLI](https://github.com/scaleway/scaleway-cli) +- [Sia](https://github.com/SiaFoundation/siad) - [Skaffold](https://skaffold.dev/) - [Tendermint](https://github.com/tendermint/tendermint) - [Twitch CLI](https://github.com/twitchdev/twitch-cli) - [UpCloud CLI (`upctl`)](https://github.com/UpCloudLtd/upcloud-cli) +- [Vitess](https://vitess.io) - VMware's [Tanzu Community Edition](https://github.com/vmware-tanzu/community-edition) & [Tanzu Framework](https://github.com/vmware-tanzu/tanzu-framework) - [Werf](https://werf.io/) - [ZITADEL](https://github.com/zitadel/zitadel) diff --git a/tools/vendor/github.com/spf13/cobra/shell_completions.go b/tools/vendor/github.com/spf13/cobra/shell_completions.go index 126e83c307..b035742d39 100644 --- a/tools/vendor/github.com/spf13/cobra/shell_completions.go +++ b/tools/vendor/github.com/spf13/cobra/shell_completions.go @@ -1,4 +1,4 @@ -// Copyright 2013-2022 The Cobra Authors +// Copyright 2013-2023 The Cobra Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/tools/vendor/github.com/spf13/cobra/shell_completions.md b/tools/vendor/github.com/spf13/cobra/shell_completions.md index 553ee5df8a..065c0621d4 100644 --- a/tools/vendor/github.com/spf13/cobra/shell_completions.md +++ b/tools/vendor/github.com/spf13/cobra/shell_completions.md @@ -71,7 +71,7 @@ PowerShell: `,cmd.Root().Name()), DisableFlagsInUseLine: true, ValidArgs: []string{"bash", "zsh", "fish", "powershell"}, - Args: cobra.ExactValidArgs(1), + Args: cobra.MatchAll(cobra.ExactArgs(1), cobra.OnlyValidArgs), Run: func(cmd *cobra.Command, args []string) { switch args[0] { case "bash": @@ -162,16 +162,7 @@ cmd := &cobra.Command{ } ``` -The aliases are not shown to the user on tab completion, but they are accepted as valid nouns by -the completion algorithm if entered manually, e.g. in: - -```bash -$ kubectl get rc [tab][tab] -backend frontend database -``` - -Note that without declaring `rc` as an alias, the completion algorithm would not know to show the list of -replication controllers following `rc`. 
+The aliases are shown to the user on tab completion only if no completions were found within sub-commands or `ValidArgs`. ### Dynamic completion of nouns @@ -237,6 +228,10 @@ ShellCompDirectiveFilterFileExt // return []string{"themes"}, ShellCompDirectiveFilterDirs // ShellCompDirectiveFilterDirs + +// ShellCompDirectiveKeepOrder indicates that the shell should preserve the order +// in which the completions are provided +ShellCompDirectiveKeepOrder ``` ***Note***: When using the `ValidArgsFunction`, Cobra will call your registered function after having parsed all flags and arguments provided in the command-line. You therefore don't need to do this parsing yourself. For example, when a user calls `helm status --namespace my-rook-ns [tab][tab]`, Cobra will call your registered `ValidArgsFunction` after having parsed the `--namespace` flag, as it would have done when calling the `RunE` function. @@ -385,6 +380,19 @@ or ```go ValidArgs: []string{"bash\tCompletions for bash", "zsh\tCompletions for zsh"} ``` + +If you don't want to show descriptions in the completions, you can add `--no-descriptions` to the default `completion` command to disable them, like: + +```bash +$ source <(helm completion bash) +$ helm completion [tab][tab] +bash (generate autocompletion script for bash) powershell (generate autocompletion script for powershell) +fish (generate autocompletion script for fish) zsh (generate autocompletion script for zsh) + +$ source <(helm completion bash --no-descriptions) +$ helm completion [tab][tab] +bash fish powershell zsh +``` ## Bash completions ### Dependencies diff --git a/tools/vendor/github.com/spf13/cobra/user_guide.md b/tools/vendor/github.com/spf13/cobra/user_guide.md index e55367e853..85201d840c 100644 --- a/tools/vendor/github.com/spf13/cobra/user_guide.md +++ b/tools/vendor/github.com/spf13/cobra/user_guide.md @@ -188,6 +188,37 @@ var versionCmd = &cobra.Command{ } ``` +### Organizing subcommands + +A command may have subcommands which in turn may have other subcommands. This is achieved by using +`AddCommand`. In some cases, especially in larger applications, each subcommand may be defined in +its own go package. + +The suggested approach is for the parent command to use `AddCommand` to add its most immediate +subcommands. For example, consider the following directory structure: + +```text +├── cmd +│   ├── root.go +│   └── sub1 +│   ├── sub1.go +│   └── sub2 +│   ├── leafA.go +│   ├── leafB.go +│   └── sub2.go +└── main.go +``` + +In this case: + +* The `init` function of `root.go` adds the command defined in `sub1.go` to the root command. +* The `init` function of `sub1.go` adds the command defined in `sub2.go` to the sub1 command. +* The `init` function of `sub2.go` adds the commands defined in `leafA.go` and `leafB.go` to the + sub2 command. + +This approach ensures the subcommands are always included at compile time while avoiding cyclic +references. + ### Returning and handling errors If you wish to return an error to the caller of a command, `RunE` can be used. 
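(Editor's note, not part of the vendored patch: the user-guide hunk above describes wiring subcommands together via `init` and `AddCommand` across packages. Here is a minimal sketch of that wiring, collapsed into a single file for brevity; in the layout the guide describes, `sub1Cmd` would live in its own package, e.g. `cmd/sub1`, and expose an exported `*cobra.Command`.)

```go
package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

var rootCmd = &cobra.Command{Use: "app"}

// In the multi-package layout, this command would be defined in cmd/sub1 and
// would itself attach any sub2-level commands in its own init function.
var sub1Cmd = &cobra.Command{
	Use:   "sub1",
	Short: "First-level subcommand",
	Run: func(cmd *cobra.Command, args []string) {
		fmt.Println("sub1 called")
	},
}

// Each parent adds only its immediate children, so the full command tree is
// assembled at compile time without cyclic imports between packages.
func init() {
	rootCmd.AddCommand(sub1Cmd)
}

func main() {
	_ = rootCmd.Execute()
}
```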
@@ -313,8 +344,8 @@ rootCmd.MarkFlagsRequiredTogether("username", "password") You can also prevent different flags from being provided together if they represent mutually exclusive options such as specifying an output format as either `--json` or `--yaml` but never both: ```go -rootCmd.Flags().BoolVar(&u, "json", false, "Output in JSON") -rootCmd.Flags().BoolVar(&pw, "yaml", false, "Output in YAML") +rootCmd.Flags().BoolVar(&ofJson, "json", false, "Output in JSON") +rootCmd.Flags().BoolVar(&ofYaml, "yaml", false, "Output in YAML") rootCmd.MarkFlagsMutuallyExclusive("json", "yaml") ``` @@ -349,7 +380,7 @@ shown below: ```go var cmd = &cobra.Command{ Short: "hello", - Args: MatchAll(ExactArgs(2), OnlyValidArgs), + Args: cobra.MatchAll(cobra.ExactArgs(2), cobra.OnlyValidArgs), Run: func(cmd *cobra.Command, args []string) { fmt.Println("Hello, World!") }, diff --git a/tools/vendor/github.com/spf13/cobra/zsh_completions.go b/tools/vendor/github.com/spf13/cobra/zsh_completions.go index 84cec76fde..1856e4c7f6 100644 --- a/tools/vendor/github.com/spf13/cobra/zsh_completions.go +++ b/tools/vendor/github.com/spf13/cobra/zsh_completions.go @@ -1,4 +1,4 @@ -// Copyright 2013-2022 The Cobra Authors +// Copyright 2013-2023 The Cobra Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -90,6 +90,7 @@ func genZshComp(buf io.StringWriter, name string, includeDesc bool) { compCmd = ShellCompNoDescRequestCmd } WriteStringAndCheck(buf, fmt.Sprintf(`#compdef %[1]s +compdef _%[1]s %[1]s # zsh completion for %-36[1]s -*- shell-script -*- @@ -108,8 +109,9 @@ _%[1]s() local shellCompDirectiveNoFileComp=%[5]d local shellCompDirectiveFilterFileExt=%[6]d local shellCompDirectiveFilterDirs=%[7]d + local shellCompDirectiveKeepOrder=%[8]d - local lastParam lastChar flagPrefix requestComp out directive comp lastComp noSpace + local lastParam lastChar flagPrefix requestComp out directive comp lastComp noSpace keepOrder local -a completions __%[1]s_debug "\n========= starting completion logic ==========" @@ -177,7 +179,7 @@ _%[1]s() return fi - local activeHelpMarker="%[8]s" + local activeHelpMarker="%[9]s" local endIndex=${#activeHelpMarker} local startIndex=$((${#activeHelpMarker}+1)) local hasActiveHelp=0 @@ -227,6 +229,11 @@ _%[1]s() noSpace="-S ''" fi + if [ $((directive & shellCompDirectiveKeepOrder)) -ne 0 ]; then + __%[1]s_debug "Activating keep order." 
+ keepOrder="-V" + fi + if [ $((directive & shellCompDirectiveFilterFileExt)) -ne 0 ]; then # File extension filtering local filteringCmd @@ -262,7 +269,7 @@ _%[1]s() return $result else __%[1]s_debug "Calling _describe" - if eval _describe "completions" completions $flagPrefix $noSpace; then + if eval _describe $keepOrder "completions" completions $flagPrefix $noSpace; then __%[1]s_debug "_describe found some completions" # Return the success of having called _describe @@ -296,6 +303,6 @@ if [ "$funcstack[1]" = "_%[1]s" ]; then fi `, name, compCmd, ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp, - ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs, + ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs, ShellCompDirectiveKeepOrder, activeHelpMarker)) } diff --git a/tools/vendor/github.com/stretchr/testify/assert/assertion_compare.go b/tools/vendor/github.com/stretchr/testify/assert/assertion_compare.go index 95d8e59da6..b774da88d8 100644 --- a/tools/vendor/github.com/stretchr/testify/assert/assertion_compare.go +++ b/tools/vendor/github.com/stretchr/testify/assert/assertion_compare.go @@ -352,9 +352,9 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { // Greater asserts that the first element is greater than the second // -// assert.Greater(t, 2, 1) -// assert.Greater(t, float64(2), float64(1)) -// assert.Greater(t, "b", "a") +// assert.Greater(t, 2, 1) +// assert.Greater(t, float64(2), float64(1)) +// assert.Greater(t, "b", "a") func Greater(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -364,10 +364,10 @@ func Greater(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface // GreaterOrEqual asserts that the first element is greater than or equal to the second // -// assert.GreaterOrEqual(t, 2, 1) -// assert.GreaterOrEqual(t, 2, 2) -// assert.GreaterOrEqual(t, "b", "a") -// assert.GreaterOrEqual(t, "b", "b") +// assert.GreaterOrEqual(t, 2, 1) +// assert.GreaterOrEqual(t, 2, 2) +// assert.GreaterOrEqual(t, "b", "a") +// assert.GreaterOrEqual(t, "b", "b") func GreaterOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -377,9 +377,9 @@ func GreaterOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...in // Less asserts that the first element is less than the second // -// assert.Less(t, 1, 2) -// assert.Less(t, float64(1), float64(2)) -// assert.Less(t, "a", "b") +// assert.Less(t, 1, 2) +// assert.Less(t, float64(1), float64(2)) +// assert.Less(t, "a", "b") func Less(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -389,10 +389,10 @@ func Less(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) // LessOrEqual asserts that the first element is less than or equal to the second // -// assert.LessOrEqual(t, 1, 2) -// assert.LessOrEqual(t, 2, 2) -// assert.LessOrEqual(t, "a", "b") -// assert.LessOrEqual(t, "b", "b") +// assert.LessOrEqual(t, 1, 2) +// assert.LessOrEqual(t, 2, 2) +// assert.LessOrEqual(t, "a", "b") +// assert.LessOrEqual(t, "b", "b") func LessOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -402,8 +402,8 @@ func LessOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...inter // Positive asserts that the specified element is positive // -// 
assert.Positive(t, 1) -// assert.Positive(t, 1.23) +// assert.Positive(t, 1) +// assert.Positive(t, 1.23) func Positive(t TestingT, e interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -414,8 +414,8 @@ func Positive(t TestingT, e interface{}, msgAndArgs ...interface{}) bool { // Negative asserts that the specified element is negative // -// assert.Negative(t, -1) -// assert.Negative(t, -1.23) +// assert.Negative(t, -1) +// assert.Negative(t, -1.23) func Negative(t TestingT, e interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() diff --git a/tools/vendor/github.com/stretchr/testify/assert/assertion_format.go b/tools/vendor/github.com/stretchr/testify/assert/assertion_format.go index 7880b8f943..84dbd6c790 100644 --- a/tools/vendor/github.com/stretchr/testify/assert/assertion_format.go +++ b/tools/vendor/github.com/stretchr/testify/assert/assertion_format.go @@ -22,9 +22,9 @@ func Conditionf(t TestingT, comp Comparison, msg string, args ...interface{}) bo // Containsf asserts that the specified string, list(array, slice...) or map contains the // specified substring or element. // -// assert.Containsf(t, "Hello World", "World", "error message %s", "formatted") -// assert.Containsf(t, ["Hello", "World"], "World", "error message %s", "formatted") -// assert.Containsf(t, {"Hello": "World"}, "Hello", "error message %s", "formatted") +// assert.Containsf(t, "Hello World", "World", "error message %s", "formatted") +// assert.Containsf(t, ["Hello", "World"], "World", "error message %s", "formatted") +// assert.Containsf(t, {"Hello": "World"}, "Hello", "error message %s", "formatted") func Containsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -56,7 +56,7 @@ func ElementsMatchf(t TestingT, listA interface{}, listB interface{}, msg string // Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either // a slice or a channel with len == 0. // -// assert.Emptyf(t, obj, "error message %s", "formatted") +// assert.Emptyf(t, obj, "error message %s", "formatted") func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -66,7 +66,7 @@ func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) boo // Equalf asserts that two objects are equal. // -// assert.Equalf(t, 123, 123, "error message %s", "formatted") +// assert.Equalf(t, 123, 123, "error message %s", "formatted") // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). Function equality @@ -81,8 +81,8 @@ func Equalf(t TestingT, expected interface{}, actual interface{}, msg string, ar // EqualErrorf asserts that a function returned an error (i.e. not `nil`) // and that it is equal to the provided error. // -// actualObj, err := SomeFunction() -// assert.EqualErrorf(t, err, expectedErrorString, "error message %s", "formatted") +// actualObj, err := SomeFunction() +// assert.EqualErrorf(t, err, expectedErrorString, "error message %s", "formatted") func EqualErrorf(t TestingT, theError error, errString string, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -90,10 +90,27 @@ func EqualErrorf(t TestingT, theError error, errString string, msg string, args return EqualError(t, theError, errString, append([]interface{}{msg}, args...)...) 
} +// EqualExportedValuesf asserts that the types of two objects are equal and their public +// fields are also equal. This is useful for comparing structs that have private fields +// that could potentially differ. +// +// type S struct { +// Exported int +// notExported int +// } +// assert.EqualExportedValuesf(t, S{1, 2}, S{1, 3}, "error message %s", "formatted") => true +// assert.EqualExportedValuesf(t, S{1, 2}, S{2, 3}, "error message %s", "formatted") => false +func EqualExportedValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return EqualExportedValues(t, expected, actual, append([]interface{}{msg}, args...)...) +} + // EqualValuesf asserts that two objects are equal or convertable to the same types // and equal. // -// assert.EqualValuesf(t, uint32(123), int32(123), "error message %s", "formatted") +// assert.EqualValuesf(t, uint32(123), int32(123), "error message %s", "formatted") func EqualValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -103,10 +120,10 @@ func EqualValuesf(t TestingT, expected interface{}, actual interface{}, msg stri // Errorf asserts that a function returned an error (i.e. not `nil`). // -// actualObj, err := SomeFunction() -// if assert.Errorf(t, err, "error message %s", "formatted") { -// assert.Equal(t, expectedErrorf, err) -// } +// actualObj, err := SomeFunction() +// if assert.Errorf(t, err, "error message %s", "formatted") { +// assert.Equal(t, expectedErrorf, err) +// } func Errorf(t TestingT, err error, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -126,8 +143,8 @@ func ErrorAsf(t TestingT, err error, target interface{}, msg string, args ...int // ErrorContainsf asserts that a function returned an error (i.e. not `nil`) // and that the error contains the specified substring. // -// actualObj, err := SomeFunction() -// assert.ErrorContainsf(t, err, expectedErrorSubString, "error message %s", "formatted") +// actualObj, err := SomeFunction() +// assert.ErrorContainsf(t, err, expectedErrorSubString, "error message %s", "formatted") func ErrorContainsf(t TestingT, theError error, contains string, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -147,7 +164,7 @@ func ErrorIsf(t TestingT, err error, target error, msg string, args ...interface // Eventuallyf asserts that given condition will be met in waitFor time, // periodically checking target function each tick. // -// assert.Eventuallyf(t, func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +// assert.Eventuallyf(t, func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") func Eventuallyf(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -155,9 +172,34 @@ func Eventuallyf(t TestingT, condition func() bool, waitFor time.Duration, tick return Eventually(t, condition, waitFor, tick, append([]interface{}{msg}, args...)...) } +// EventuallyWithTf asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. In contrast to Eventually, +// it supplies a CollectT to the condition function, so that the condition +// function can use the CollectT to call other assertions. 
+// The condition is considered "met" if no errors are raised in a tick. +// The supplied CollectT collects all errors from one tick (if there are any). +// If the condition is not met before waitFor, the collected errors of +// the last tick are copied to t. +// +// externalValue := false +// go func() { +// time.Sleep(8*time.Second) +// externalValue = true +// }() +// assert.EventuallyWithTf(t, func(c *assert.CollectT, "error message %s", "formatted") { +// // add assertions as needed; any assertion failure will fail the current tick +// assert.True(c, externalValue, "expected 'externalValue' to be true") +// }, 1*time.Second, 10*time.Second, "external state has not changed to 'true'; still false") +func EventuallyWithTf(t TestingT, condition func(collect *CollectT), waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return EventuallyWithT(t, condition, waitFor, tick, append([]interface{}{msg}, args...)...) +} + // Exactlyf asserts that two objects are equal in value and type. // -// assert.Exactlyf(t, int32(123), int64(123), "error message %s", "formatted") +// assert.Exactlyf(t, int32(123), int64(123), "error message %s", "formatted") func Exactlyf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -183,7 +225,7 @@ func FailNowf(t TestingT, failureMessage string, msg string, args ...interface{} // Falsef asserts that the specified value is false. // -// assert.Falsef(t, myBool, "error message %s", "formatted") +// assert.Falsef(t, myBool, "error message %s", "formatted") func Falsef(t TestingT, value bool, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -202,9 +244,9 @@ func FileExistsf(t TestingT, path string, msg string, args ...interface{}) bool // Greaterf asserts that the first element is greater than the second // -// assert.Greaterf(t, 2, 1, "error message %s", "formatted") -// assert.Greaterf(t, float64(2), float64(1), "error message %s", "formatted") -// assert.Greaterf(t, "b", "a", "error message %s", "formatted") +// assert.Greaterf(t, 2, 1, "error message %s", "formatted") +// assert.Greaterf(t, float64(2), float64(1), "error message %s", "formatted") +// assert.Greaterf(t, "b", "a", "error message %s", "formatted") func Greaterf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -214,10 +256,10 @@ func Greaterf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...in // GreaterOrEqualf asserts that the first element is greater than or equal to the second // -// assert.GreaterOrEqualf(t, 2, 1, "error message %s", "formatted") -// assert.GreaterOrEqualf(t, 2, 2, "error message %s", "formatted") -// assert.GreaterOrEqualf(t, "b", "a", "error message %s", "formatted") -// assert.GreaterOrEqualf(t, "b", "b", "error message %s", "formatted") +// assert.GreaterOrEqualf(t, 2, 1, "error message %s", "formatted") +// assert.GreaterOrEqualf(t, 2, 2, "error message %s", "formatted") +// assert.GreaterOrEqualf(t, "b", "a", "error message %s", "formatted") +// assert.GreaterOrEqualf(t, "b", "b", "error message %s", "formatted") func GreaterOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -228,7 +270,7 @@ func GreaterOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, arg // HTTPBodyContainsf asserts 
that a specified handler returns a // body that contains a string. // -// assert.HTTPBodyContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// assert.HTTPBodyContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func HTTPBodyContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool { @@ -241,7 +283,7 @@ func HTTPBodyContainsf(t TestingT, handler http.HandlerFunc, method string, url // HTTPBodyNotContainsf asserts that a specified handler returns a // body that does not contain a string. // -// assert.HTTPBodyNotContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// assert.HTTPBodyNotContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func HTTPBodyNotContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool { @@ -253,7 +295,7 @@ func HTTPBodyNotContainsf(t TestingT, handler http.HandlerFunc, method string, u // HTTPErrorf asserts that a specified handler returns an error status code. // -// assert.HTTPErrorf(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// assert.HTTPErrorf(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func HTTPErrorf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { @@ -265,7 +307,7 @@ func HTTPErrorf(t TestingT, handler http.HandlerFunc, method string, url string, // HTTPRedirectf asserts that a specified handler returns a redirect status code. // -// assert.HTTPRedirectf(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// assert.HTTPRedirectf(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func HTTPRedirectf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { @@ -277,7 +319,7 @@ func HTTPRedirectf(t TestingT, handler http.HandlerFunc, method string, url stri // HTTPStatusCodef asserts that a specified handler returns a specified status code. // -// assert.HTTPStatusCodef(t, myHandler, "GET", "/notImplemented", nil, 501, "error message %s", "formatted") +// assert.HTTPStatusCodef(t, myHandler, "GET", "/notImplemented", nil, 501, "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func HTTPStatusCodef(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, statuscode int, msg string, args ...interface{}) bool { @@ -289,7 +331,7 @@ func HTTPStatusCodef(t TestingT, handler http.HandlerFunc, method string, url st // HTTPSuccessf asserts that a specified handler returns a success status code. // -// assert.HTTPSuccessf(t, myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") +// assert.HTTPSuccessf(t, myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). 
func HTTPSuccessf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { @@ -301,7 +343,7 @@ func HTTPSuccessf(t TestingT, handler http.HandlerFunc, method string, url strin // Implementsf asserts that an object is implemented by the specified interface. // -// assert.Implementsf(t, (*MyInterface)(nil), new(MyObject), "error message %s", "formatted") +// assert.Implementsf(t, (*MyInterface)(nil), new(MyObject), "error message %s", "formatted") func Implementsf(t TestingT, interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -311,7 +353,7 @@ func Implementsf(t TestingT, interfaceObject interface{}, object interface{}, ms // InDeltaf asserts that the two numerals are within delta of each other. // -// assert.InDeltaf(t, math.Pi, 22/7.0, 0.01, "error message %s", "formatted") +// assert.InDeltaf(t, math.Pi, 22/7.0, 0.01, "error message %s", "formatted") func InDeltaf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -353,9 +395,9 @@ func InEpsilonSlicef(t TestingT, expected interface{}, actual interface{}, epsil // IsDecreasingf asserts that the collection is decreasing // -// assert.IsDecreasingf(t, []int{2, 1, 0}, "error message %s", "formatted") -// assert.IsDecreasingf(t, []float{2, 1}, "error message %s", "formatted") -// assert.IsDecreasingf(t, []string{"b", "a"}, "error message %s", "formatted") +// assert.IsDecreasingf(t, []int{2, 1, 0}, "error message %s", "formatted") +// assert.IsDecreasingf(t, []float{2, 1}, "error message %s", "formatted") +// assert.IsDecreasingf(t, []string{"b", "a"}, "error message %s", "formatted") func IsDecreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -365,9 +407,9 @@ func IsDecreasingf(t TestingT, object interface{}, msg string, args ...interface // IsIncreasingf asserts that the collection is increasing // -// assert.IsIncreasingf(t, []int{1, 2, 3}, "error message %s", "formatted") -// assert.IsIncreasingf(t, []float{1, 2}, "error message %s", "formatted") -// assert.IsIncreasingf(t, []string{"a", "b"}, "error message %s", "formatted") +// assert.IsIncreasingf(t, []int{1, 2, 3}, "error message %s", "formatted") +// assert.IsIncreasingf(t, []float{1, 2}, "error message %s", "formatted") +// assert.IsIncreasingf(t, []string{"a", "b"}, "error message %s", "formatted") func IsIncreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -377,9 +419,9 @@ func IsIncreasingf(t TestingT, object interface{}, msg string, args ...interface // IsNonDecreasingf asserts that the collection is not decreasing // -// assert.IsNonDecreasingf(t, []int{1, 1, 2}, "error message %s", "formatted") -// assert.IsNonDecreasingf(t, []float{1, 2}, "error message %s", "formatted") -// assert.IsNonDecreasingf(t, []string{"a", "b"}, "error message %s", "formatted") +// assert.IsNonDecreasingf(t, []int{1, 1, 2}, "error message %s", "formatted") +// assert.IsNonDecreasingf(t, []float{1, 2}, "error message %s", "formatted") +// assert.IsNonDecreasingf(t, []string{"a", "b"}, "error message %s", "formatted") func IsNonDecreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -389,9 +431,9 @@ func IsNonDecreasingf(t TestingT, object 
interface{}, msg string, args ...interf // IsNonIncreasingf asserts that the collection is not increasing // -// assert.IsNonIncreasingf(t, []int{2, 1, 1}, "error message %s", "formatted") -// assert.IsNonIncreasingf(t, []float{2, 1}, "error message %s", "formatted") -// assert.IsNonIncreasingf(t, []string{"b", "a"}, "error message %s", "formatted") +// assert.IsNonIncreasingf(t, []int{2, 1, 1}, "error message %s", "formatted") +// assert.IsNonIncreasingf(t, []float{2, 1}, "error message %s", "formatted") +// assert.IsNonIncreasingf(t, []string{"b", "a"}, "error message %s", "formatted") func IsNonIncreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -409,7 +451,7 @@ func IsTypef(t TestingT, expectedType interface{}, object interface{}, msg strin // JSONEqf asserts that two JSON strings are equivalent. // -// assert.JSONEqf(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") +// assert.JSONEqf(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") func JSONEqf(t TestingT, expected string, actual string, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -420,7 +462,7 @@ func JSONEqf(t TestingT, expected string, actual string, msg string, args ...int // Lenf asserts that the specified object has specific length. // Lenf also fails if the object has a type that len() not accept. // -// assert.Lenf(t, mySlice, 3, "error message %s", "formatted") +// assert.Lenf(t, mySlice, 3, "error message %s", "formatted") func Lenf(t TestingT, object interface{}, length int, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -430,9 +472,9 @@ func Lenf(t TestingT, object interface{}, length int, msg string, args ...interf // Lessf asserts that the first element is less than the second // -// assert.Lessf(t, 1, 2, "error message %s", "formatted") -// assert.Lessf(t, float64(1), float64(2), "error message %s", "formatted") -// assert.Lessf(t, "a", "b", "error message %s", "formatted") +// assert.Lessf(t, 1, 2, "error message %s", "formatted") +// assert.Lessf(t, float64(1), float64(2), "error message %s", "formatted") +// assert.Lessf(t, "a", "b", "error message %s", "formatted") func Lessf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -442,10 +484,10 @@ func Lessf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...inter // LessOrEqualf asserts that the first element is less than or equal to the second // -// assert.LessOrEqualf(t, 1, 2, "error message %s", "formatted") -// assert.LessOrEqualf(t, 2, 2, "error message %s", "formatted") -// assert.LessOrEqualf(t, "a", "b", "error message %s", "formatted") -// assert.LessOrEqualf(t, "b", "b", "error message %s", "formatted") +// assert.LessOrEqualf(t, 1, 2, "error message %s", "formatted") +// assert.LessOrEqualf(t, 2, 2, "error message %s", "formatted") +// assert.LessOrEqualf(t, "a", "b", "error message %s", "formatted") +// assert.LessOrEqualf(t, "b", "b", "error message %s", "formatted") func LessOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -455,8 +497,8 @@ func LessOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args . 
// Negativef asserts that the specified element is negative // -// assert.Negativef(t, -1, "error message %s", "formatted") -// assert.Negativef(t, -1.23, "error message %s", "formatted") +// assert.Negativef(t, -1, "error message %s", "formatted") +// assert.Negativef(t, -1.23, "error message %s", "formatted") func Negativef(t TestingT, e interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -467,7 +509,7 @@ func Negativef(t TestingT, e interface{}, msg string, args ...interface{}) bool // Neverf asserts that the given condition doesn't satisfy in waitFor time, // periodically checking the target function each tick. // -// assert.Neverf(t, func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +// assert.Neverf(t, func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") func Neverf(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -477,7 +519,7 @@ func Neverf(t TestingT, condition func() bool, waitFor time.Duration, tick time. // Nilf asserts that the specified object is nil. // -// assert.Nilf(t, err, "error message %s", "formatted") +// assert.Nilf(t, err, "error message %s", "formatted") func Nilf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -496,10 +538,10 @@ func NoDirExistsf(t TestingT, path string, msg string, args ...interface{}) bool // NoErrorf asserts that a function returned no error (i.e. `nil`). // -// actualObj, err := SomeFunction() -// if assert.NoErrorf(t, err, "error message %s", "formatted") { -// assert.Equal(t, expectedObj, actualObj) -// } +// actualObj, err := SomeFunction() +// if assert.NoErrorf(t, err, "error message %s", "formatted") { +// assert.Equal(t, expectedObj, actualObj) +// } func NoErrorf(t TestingT, err error, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -519,9 +561,9 @@ func NoFileExistsf(t TestingT, path string, msg string, args ...interface{}) boo // NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. // -// assert.NotContainsf(t, "Hello World", "Earth", "error message %s", "formatted") -// assert.NotContainsf(t, ["Hello", "World"], "Earth", "error message %s", "formatted") -// assert.NotContainsf(t, {"Hello": "World"}, "Earth", "error message %s", "formatted") +// assert.NotContainsf(t, "Hello World", "Earth", "error message %s", "formatted") +// assert.NotContainsf(t, ["Hello", "World"], "Earth", "error message %s", "formatted") +// assert.NotContainsf(t, {"Hello": "World"}, "Earth", "error message %s", "formatted") func NotContainsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -532,9 +574,9 @@ func NotContainsf(t TestingT, s interface{}, contains interface{}, msg string, a // NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either // a slice or a channel with len == 0. 
// -// if assert.NotEmptyf(t, obj, "error message %s", "formatted") { -// assert.Equal(t, "two", obj[1]) -// } +// if assert.NotEmptyf(t, obj, "error message %s", "formatted") { +// assert.Equal(t, "two", obj[1]) +// } func NotEmptyf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -544,7 +586,7 @@ func NotEmptyf(t TestingT, object interface{}, msg string, args ...interface{}) // NotEqualf asserts that the specified values are NOT equal. // -// assert.NotEqualf(t, obj1, obj2, "error message %s", "formatted") +// assert.NotEqualf(t, obj1, obj2, "error message %s", "formatted") // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). @@ -557,7 +599,7 @@ func NotEqualf(t TestingT, expected interface{}, actual interface{}, msg string, // NotEqualValuesf asserts that two objects are not equal even when converted to the same type // -// assert.NotEqualValuesf(t, obj1, obj2, "error message %s", "formatted") +// assert.NotEqualValuesf(t, obj1, obj2, "error message %s", "formatted") func NotEqualValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -576,7 +618,7 @@ func NotErrorIsf(t TestingT, err error, target error, msg string, args ...interf // NotNilf asserts that the specified object is not nil. // -// assert.NotNilf(t, err, "error message %s", "formatted") +// assert.NotNilf(t, err, "error message %s", "formatted") func NotNilf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -586,7 +628,7 @@ func NotNilf(t TestingT, object interface{}, msg string, args ...interface{}) bo // NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic. // -// assert.NotPanicsf(t, func(){ RemainCalm() }, "error message %s", "formatted") +// assert.NotPanicsf(t, func(){ RemainCalm() }, "error message %s", "formatted") func NotPanicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -596,8 +638,8 @@ func NotPanicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bo // NotRegexpf asserts that a specified regexp does not match a string. // -// assert.NotRegexpf(t, regexp.MustCompile("starts"), "it's starting", "error message %s", "formatted") -// assert.NotRegexpf(t, "^start", "it's not starting", "error message %s", "formatted") +// assert.NotRegexpf(t, regexp.MustCompile("starts"), "it's starting", "error message %s", "formatted") +// assert.NotRegexpf(t, "^start", "it's not starting", "error message %s", "formatted") func NotRegexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -607,7 +649,7 @@ func NotRegexpf(t TestingT, rx interface{}, str interface{}, msg string, args .. // NotSamef asserts that two pointers do not reference the same object. // -// assert.NotSamef(t, ptr1, ptr2, "error message %s", "formatted") +// assert.NotSamef(t, ptr1, ptr2, "error message %s", "formatted") // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -621,7 +663,7 @@ func NotSamef(t TestingT, expected interface{}, actual interface{}, msg string, // NotSubsetf asserts that the specified list(array, slice...) 
contains not all // elements given in the specified subset(array, slice...). // -// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") +// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -639,7 +681,7 @@ func NotZerof(t TestingT, i interface{}, msg string, args ...interface{}) bool { // Panicsf asserts that the code inside the specified PanicTestFunc panics. // -// assert.Panicsf(t, func(){ GoCrazy() }, "error message %s", "formatted") +// assert.Panicsf(t, func(){ GoCrazy() }, "error message %s", "formatted") func Panicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -651,7 +693,7 @@ func Panicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bool // panics, and that the recovered panic value is an error that satisfies the // EqualError comparison. // -// assert.PanicsWithErrorf(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +// assert.PanicsWithErrorf(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") func PanicsWithErrorf(t TestingT, errString string, f PanicTestFunc, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -662,7 +704,7 @@ func PanicsWithErrorf(t TestingT, errString string, f PanicTestFunc, msg string, // PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that // the recovered panic value equals the expected panic value. // -// assert.PanicsWithValuef(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +// assert.PanicsWithValuef(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") func PanicsWithValuef(t TestingT, expected interface{}, f PanicTestFunc, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -672,8 +714,8 @@ func PanicsWithValuef(t TestingT, expected interface{}, f PanicTestFunc, msg str // Positivef asserts that the specified element is positive // -// assert.Positivef(t, 1, "error message %s", "formatted") -// assert.Positivef(t, 1.23, "error message %s", "formatted") +// assert.Positivef(t, 1, "error message %s", "formatted") +// assert.Positivef(t, 1.23, "error message %s", "formatted") func Positivef(t TestingT, e interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -683,8 +725,8 @@ func Positivef(t TestingT, e interface{}, msg string, args ...interface{}) bool // Regexpf asserts that a specified regexp matches a string. // -// assert.Regexpf(t, regexp.MustCompile("start"), "it's starting", "error message %s", "formatted") -// assert.Regexpf(t, "start...$", "it's not starting", "error message %s", "formatted") +// assert.Regexpf(t, regexp.MustCompile("start"), "it's starting", "error message %s", "formatted") +// assert.Regexpf(t, "start...$", "it's not starting", "error message %s", "formatted") func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -694,7 +736,7 @@ func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...in // Samef asserts that two pointers reference the same object. 
// -// assert.Samef(t, ptr1, ptr2, "error message %s", "formatted") +// assert.Samef(t, ptr1, ptr2, "error message %s", "formatted") // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -708,7 +750,7 @@ func Samef(t TestingT, expected interface{}, actual interface{}, msg string, arg // Subsetf asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). // -// assert.Subsetf(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") +// assert.Subsetf(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -718,7 +760,7 @@ func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args // Truef asserts that the specified value is true. // -// assert.Truef(t, myBool, "error message %s", "formatted") +// assert.Truef(t, myBool, "error message %s", "formatted") func Truef(t TestingT, value bool, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -728,7 +770,7 @@ func Truef(t TestingT, value bool, msg string, args ...interface{}) bool { // WithinDurationf asserts that the two times are within duration delta of each other. // -// assert.WithinDurationf(t, time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") +// assert.WithinDurationf(t, time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") func WithinDurationf(t TestingT, expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -738,7 +780,7 @@ func WithinDurationf(t TestingT, expected time.Time, actual time.Time, delta tim // WithinRangef asserts that a time is within a time range (inclusive). // -// assert.WithinRangef(t, time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second), "error message %s", "formatted") +// assert.WithinRangef(t, time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second), "error message %s", "formatted") func WithinRangef(t TestingT, actual time.Time, start time.Time, end time.Time, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() diff --git a/tools/vendor/github.com/stretchr/testify/assert/assertion_forward.go b/tools/vendor/github.com/stretchr/testify/assert/assertion_forward.go index 339515b8bf..b1d94aec53 100644 --- a/tools/vendor/github.com/stretchr/testify/assert/assertion_forward.go +++ b/tools/vendor/github.com/stretchr/testify/assert/assertion_forward.go @@ -30,9 +30,9 @@ func (a *Assertions) Conditionf(comp Comparison, msg string, args ...interface{} // Contains asserts that the specified string, list(array, slice...) or map contains the // specified substring or element. // -// a.Contains("Hello World", "World") -// a.Contains(["Hello", "World"], "World") -// a.Contains({"Hello": "World"}, "Hello") +// a.Contains("Hello World", "World") +// a.Contains(["Hello", "World"], "World") +// a.Contains({"Hello": "World"}, "Hello") func (a *Assertions) Contains(s interface{}, contains interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -43,9 +43,9 @@ func (a *Assertions) Contains(s interface{}, contains interface{}, msgAndArgs .. 
// Containsf asserts that the specified string, list(array, slice...) or map contains the // specified substring or element. // -// a.Containsf("Hello World", "World", "error message %s", "formatted") -// a.Containsf(["Hello", "World"], "World", "error message %s", "formatted") -// a.Containsf({"Hello": "World"}, "Hello", "error message %s", "formatted") +// a.Containsf("Hello World", "World", "error message %s", "formatted") +// a.Containsf(["Hello", "World"], "World", "error message %s", "formatted") +// a.Containsf({"Hello": "World"}, "Hello", "error message %s", "formatted") func (a *Assertions) Containsf(s interface{}, contains interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -98,7 +98,7 @@ func (a *Assertions) ElementsMatchf(listA interface{}, listB interface{}, msg st // Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either // a slice or a channel with len == 0. // -// a.Empty(obj) +// a.Empty(obj) func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -109,7 +109,7 @@ func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) bool { // Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either // a slice or a channel with len == 0. // -// a.Emptyf(obj, "error message %s", "formatted") +// a.Emptyf(obj, "error message %s", "formatted") func (a *Assertions) Emptyf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -119,7 +119,7 @@ func (a *Assertions) Emptyf(object interface{}, msg string, args ...interface{}) // Equal asserts that two objects are equal. // -// a.Equal(123, 123) +// a.Equal(123, 123) // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). Function equality @@ -134,8 +134,8 @@ func (a *Assertions) Equal(expected interface{}, actual interface{}, msgAndArgs // EqualError asserts that a function returned an error (i.e. not `nil`) // and that it is equal to the provided error. // -// actualObj, err := SomeFunction() -// a.EqualError(err, expectedErrorString) +// actualObj, err := SomeFunction() +// a.EqualError(err, expectedErrorString) func (a *Assertions) EqualError(theError error, errString string, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -146,8 +146,8 @@ func (a *Assertions) EqualError(theError error, errString string, msgAndArgs ... // EqualErrorf asserts that a function returned an error (i.e. not `nil`) // and that it is equal to the provided error. // -// actualObj, err := SomeFunction() -// a.EqualErrorf(err, expectedErrorString, "error message %s", "formatted") +// actualObj, err := SomeFunction() +// a.EqualErrorf(err, expectedErrorString, "error message %s", "formatted") func (a *Assertions) EqualErrorf(theError error, errString string, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -155,10 +155,44 @@ func (a *Assertions) EqualErrorf(theError error, errString string, msg string, a return EqualErrorf(a.t, theError, errString, msg, args...) } +// EqualExportedValues asserts that the types of two objects are equal and their public +// fields are also equal. This is useful for comparing structs that have private fields +// that could potentially differ. 
+// +// type S struct { +// Exported int +// notExported int +// } +// a.EqualExportedValues(S{1, 2}, S{1, 3}) => true +// a.EqualExportedValues(S{1, 2}, S{2, 3}) => false +func (a *Assertions) EqualExportedValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return EqualExportedValues(a.t, expected, actual, msgAndArgs...) +} + +// EqualExportedValuesf asserts that the types of two objects are equal and their public +// fields are also equal. This is useful for comparing structs that have private fields +// that could potentially differ. +// +// type S struct { +// Exported int +// notExported int +// } +// a.EqualExportedValuesf(S{1, 2}, S{1, 3}, "error message %s", "formatted") => true +// a.EqualExportedValuesf(S{1, 2}, S{2, 3}, "error message %s", "formatted") => false +func (a *Assertions) EqualExportedValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return EqualExportedValuesf(a.t, expected, actual, msg, args...) +} + // EqualValues asserts that two objects are equal or convertable to the same types // and equal. // -// a.EqualValues(uint32(123), int32(123)) +// a.EqualValues(uint32(123), int32(123)) func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -169,7 +203,7 @@ func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAn // EqualValuesf asserts that two objects are equal or convertable to the same types // and equal. // -// a.EqualValuesf(uint32(123), int32(123), "error message %s", "formatted") +// a.EqualValuesf(uint32(123), int32(123), "error message %s", "formatted") func (a *Assertions) EqualValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -179,7 +213,7 @@ func (a *Assertions) EqualValuesf(expected interface{}, actual interface{}, msg // Equalf asserts that two objects are equal. // -// a.Equalf(123, 123, "error message %s", "formatted") +// a.Equalf(123, 123, "error message %s", "formatted") // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). Function equality @@ -193,10 +227,10 @@ func (a *Assertions) Equalf(expected interface{}, actual interface{}, msg string // Error asserts that a function returned an error (i.e. not `nil`). // -// actualObj, err := SomeFunction() -// if a.Error(err) { -// assert.Equal(t, expectedError, err) -// } +// actualObj, err := SomeFunction() +// if a.Error(err) { +// assert.Equal(t, expectedError, err) +// } func (a *Assertions) Error(err error, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -225,8 +259,8 @@ func (a *Assertions) ErrorAsf(err error, target interface{}, msg string, args .. // ErrorContains asserts that a function returned an error (i.e. not `nil`) // and that the error contains the specified substring. // -// actualObj, err := SomeFunction() -// a.ErrorContains(err, expectedErrorSubString) +// actualObj, err := SomeFunction() +// a.ErrorContains(err, expectedErrorSubString) func (a *Assertions) ErrorContains(theError error, contains string, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -237,8 +271,8 @@ func (a *Assertions) ErrorContains(theError error, contains string, msgAndArgs . 
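The EqualExportedValues helpers added above compare two values of the same struct type while ignoring unexported fields. A short usage sketch, following the S type from the doc comment; the test scaffolding and field values are illustrative, not part of the diff:

package demo_test

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

type S struct {
	Exported    int
	notExported int
}

func TestEqualExportedValues(t *testing.T) {
	a := assert.New(t)

	// Only Exported is compared, so differing unexported fields still pass.
	a.EqualExportedValues(S{Exported: 1, notExported: 2}, S{Exported: 1, notExported: 3})

	// This would fail, because the exported field differs:
	// a.EqualExportedValues(S{Exported: 1}, S{Exported: 2})
}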
// ErrorContainsf asserts that a function returned an error (i.e. not `nil`) // and that the error contains the specified substring. // -// actualObj, err := SomeFunction() -// a.ErrorContainsf(err, expectedErrorSubString, "error message %s", "formatted") +// actualObj, err := SomeFunction() +// a.ErrorContainsf(err, expectedErrorSubString, "error message %s", "formatted") func (a *Assertions) ErrorContainsf(theError error, contains string, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -266,10 +300,10 @@ func (a *Assertions) ErrorIsf(err error, target error, msg string, args ...inter // Errorf asserts that a function returned an error (i.e. not `nil`). // -// actualObj, err := SomeFunction() -// if a.Errorf(err, "error message %s", "formatted") { -// assert.Equal(t, expectedErrorf, err) -// } +// actualObj, err := SomeFunction() +// if a.Errorf(err, "error message %s", "formatted") { +// assert.Equal(t, expectedErrorf, err) +// } func (a *Assertions) Errorf(err error, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -280,7 +314,7 @@ func (a *Assertions) Errorf(err error, msg string, args ...interface{}) bool { // Eventually asserts that given condition will be met in waitFor time, // periodically checking target function each tick. // -// a.Eventually(func() bool { return true; }, time.Second, 10*time.Millisecond) +// a.Eventually(func() bool { return true; }, time.Second, 10*time.Millisecond) func (a *Assertions) Eventually(condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -288,10 +322,60 @@ func (a *Assertions) Eventually(condition func() bool, waitFor time.Duration, ti return Eventually(a.t, condition, waitFor, tick, msgAndArgs...) } +// EventuallyWithT asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. In contrast to Eventually, +// it supplies a CollectT to the condition function, so that the condition +// function can use the CollectT to call other assertions. +// The condition is considered "met" if no errors are raised in a tick. +// The supplied CollectT collects all errors from one tick (if there are any). +// If the condition is not met before waitFor, the collected errors of +// the last tick are copied to t. +// +// externalValue := false +// go func() { +// time.Sleep(8*time.Second) +// externalValue = true +// }() +// a.EventuallyWithT(func(c *assert.CollectT) { +// // add assertions as needed; any assertion failure will fail the current tick +// assert.True(c, externalValue, "expected 'externalValue' to be true") +// }, 1*time.Second, 10*time.Second, "external state has not changed to 'true'; still false") +func (a *Assertions) EventuallyWithT(condition func(collect *CollectT), waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return EventuallyWithT(a.t, condition, waitFor, tick, msgAndArgs...) +} + +// EventuallyWithTf asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. In contrast to Eventually, +// it supplies a CollectT to the condition function, so that the condition +// function can use the CollectT to call other assertions. +// The condition is considered "met" if no errors are raised in a tick. +// The supplied CollectT collects all errors from one tick (if there are any). 
+// If the condition is not met before waitFor, the collected errors of +// the last tick are copied to t. +// +// externalValue := false +// go func() { +// time.Sleep(8*time.Second) +// externalValue = true +// }() +// a.EventuallyWithTf(func(c *assert.CollectT, "error message %s", "formatted") { +// // add assertions as needed; any assertion failure will fail the current tick +// assert.True(c, externalValue, "expected 'externalValue' to be true") +// }, 1*time.Second, 10*time.Second, "external state has not changed to 'true'; still false") +func (a *Assertions) EventuallyWithTf(condition func(collect *CollectT), waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return EventuallyWithTf(a.t, condition, waitFor, tick, msg, args...) +} + // Eventuallyf asserts that given condition will be met in waitFor time, // periodically checking target function each tick. // -// a.Eventuallyf(func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +// a.Eventuallyf(func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") func (a *Assertions) Eventuallyf(condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -301,7 +385,7 @@ func (a *Assertions) Eventuallyf(condition func() bool, waitFor time.Duration, t // Exactly asserts that two objects are equal in value and type. // -// a.Exactly(int32(123), int64(123)) +// a.Exactly(int32(123), int64(123)) func (a *Assertions) Exactly(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -311,7 +395,7 @@ func (a *Assertions) Exactly(expected interface{}, actual interface{}, msgAndArg // Exactlyf asserts that two objects are equal in value and type. // -// a.Exactlyf(int32(123), int64(123), "error message %s", "formatted") +// a.Exactlyf(int32(123), int64(123), "error message %s", "formatted") func (a *Assertions) Exactlyf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -353,7 +437,7 @@ func (a *Assertions) Failf(failureMessage string, msg string, args ...interface{ // False asserts that the specified value is false. // -// a.False(myBool) +// a.False(myBool) func (a *Assertions) False(value bool, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -363,7 +447,7 @@ func (a *Assertions) False(value bool, msgAndArgs ...interface{}) bool { // Falsef asserts that the specified value is false. 
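EventuallyWithT, also introduced in this testify version, hands the condition a *assert.CollectT so the condition can make assertions of its own; a tick counts as successful only if none of them fail, and the last tick's failures are reported if the deadline passes. A hedged sketch of typical use; the timings and the ready flag are illustrative (an atomic is used here to avoid the data race in the upstream doc example):

package demo_test

import (
	"sync/atomic"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
)

func TestEventuallyWithT(t *testing.T) {
	var ready atomic.Bool

	// Simulate external state that flips to true after a short delay.
	go func() {
		time.Sleep(50 * time.Millisecond)
		ready.Store(true)
	}()

	assert.EventuallyWithT(t, func(c *assert.CollectT) {
		// Any failed assertion on c fails this tick.
		assert.True(c, ready.Load(), "expected 'ready' to become true")
	}, time.Second, 10*time.Millisecond)
}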
// -// a.Falsef(myBool, "error message %s", "formatted") +// a.Falsef(myBool, "error message %s", "formatted") func (a *Assertions) Falsef(value bool, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -391,9 +475,9 @@ func (a *Assertions) FileExistsf(path string, msg string, args ...interface{}) b // Greater asserts that the first element is greater than the second // -// a.Greater(2, 1) -// a.Greater(float64(2), float64(1)) -// a.Greater("b", "a") +// a.Greater(2, 1) +// a.Greater(float64(2), float64(1)) +// a.Greater("b", "a") func (a *Assertions) Greater(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -403,10 +487,10 @@ func (a *Assertions) Greater(e1 interface{}, e2 interface{}, msgAndArgs ...inter // GreaterOrEqual asserts that the first element is greater than or equal to the second // -// a.GreaterOrEqual(2, 1) -// a.GreaterOrEqual(2, 2) -// a.GreaterOrEqual("b", "a") -// a.GreaterOrEqual("b", "b") +// a.GreaterOrEqual(2, 1) +// a.GreaterOrEqual(2, 2) +// a.GreaterOrEqual("b", "a") +// a.GreaterOrEqual("b", "b") func (a *Assertions) GreaterOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -416,10 +500,10 @@ func (a *Assertions) GreaterOrEqual(e1 interface{}, e2 interface{}, msgAndArgs . // GreaterOrEqualf asserts that the first element is greater than or equal to the second // -// a.GreaterOrEqualf(2, 1, "error message %s", "formatted") -// a.GreaterOrEqualf(2, 2, "error message %s", "formatted") -// a.GreaterOrEqualf("b", "a", "error message %s", "formatted") -// a.GreaterOrEqualf("b", "b", "error message %s", "formatted") +// a.GreaterOrEqualf(2, 1, "error message %s", "formatted") +// a.GreaterOrEqualf(2, 2, "error message %s", "formatted") +// a.GreaterOrEqualf("b", "a", "error message %s", "formatted") +// a.GreaterOrEqualf("b", "b", "error message %s", "formatted") func (a *Assertions) GreaterOrEqualf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -429,9 +513,9 @@ func (a *Assertions) GreaterOrEqualf(e1 interface{}, e2 interface{}, msg string, // Greaterf asserts that the first element is greater than the second // -// a.Greaterf(2, 1, "error message %s", "formatted") -// a.Greaterf(float64(2), float64(1), "error message %s", "formatted") -// a.Greaterf("b", "a", "error message %s", "formatted") +// a.Greaterf(2, 1, "error message %s", "formatted") +// a.Greaterf(float64(2), float64(1), "error message %s", "formatted") +// a.Greaterf("b", "a", "error message %s", "formatted") func (a *Assertions) Greaterf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -442,7 +526,7 @@ func (a *Assertions) Greaterf(e1 interface{}, e2 interface{}, msg string, args . // HTTPBodyContains asserts that a specified handler returns a // body that contains a string. // -// a.HTTPBodyContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") +// a.HTTPBodyContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") // // Returns whether the assertion was successful (true) or not (false). 
func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool { @@ -455,7 +539,7 @@ func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, u // HTTPBodyContainsf asserts that a specified handler returns a // body that contains a string. // -// a.HTTPBodyContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// a.HTTPBodyContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPBodyContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool { @@ -468,7 +552,7 @@ func (a *Assertions) HTTPBodyContainsf(handler http.HandlerFunc, method string, // HTTPBodyNotContains asserts that a specified handler returns a // body that does not contain a string. // -// a.HTTPBodyNotContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") +// a.HTTPBodyNotContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool { @@ -481,7 +565,7 @@ func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string // HTTPBodyNotContainsf asserts that a specified handler returns a // body that does not contain a string. // -// a.HTTPBodyNotContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// a.HTTPBodyNotContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPBodyNotContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool { @@ -493,7 +577,7 @@ func (a *Assertions) HTTPBodyNotContainsf(handler http.HandlerFunc, method strin // HTTPError asserts that a specified handler returns an error status code. // -// a.HTTPError(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// a.HTTPError(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) bool { @@ -505,7 +589,7 @@ func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url stri // HTTPErrorf asserts that a specified handler returns an error status code. // -// a.HTTPErrorf(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// a.HTTPErrorf(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPErrorf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { @@ -517,7 +601,7 @@ func (a *Assertions) HTTPErrorf(handler http.HandlerFunc, method string, url str // HTTPRedirect asserts that a specified handler returns a redirect status code. 
// -// a.HTTPRedirect(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// a.HTTPRedirect(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) bool { @@ -529,7 +613,7 @@ func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url s // HTTPRedirectf asserts that a specified handler returns a redirect status code. // -// a.HTTPRedirectf(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// a.HTTPRedirectf(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPRedirectf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { @@ -541,7 +625,7 @@ func (a *Assertions) HTTPRedirectf(handler http.HandlerFunc, method string, url // HTTPStatusCode asserts that a specified handler returns a specified status code. // -// a.HTTPStatusCode(myHandler, "GET", "/notImplemented", nil, 501) +// a.HTTPStatusCode(myHandler, "GET", "/notImplemented", nil, 501) // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPStatusCode(handler http.HandlerFunc, method string, url string, values url.Values, statuscode int, msgAndArgs ...interface{}) bool { @@ -553,7 +637,7 @@ func (a *Assertions) HTTPStatusCode(handler http.HandlerFunc, method string, url // HTTPStatusCodef asserts that a specified handler returns a specified status code. // -// a.HTTPStatusCodef(myHandler, "GET", "/notImplemented", nil, 501, "error message %s", "formatted") +// a.HTTPStatusCodef(myHandler, "GET", "/notImplemented", nil, 501, "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPStatusCodef(handler http.HandlerFunc, method string, url string, values url.Values, statuscode int, msg string, args ...interface{}) bool { @@ -565,7 +649,7 @@ func (a *Assertions) HTTPStatusCodef(handler http.HandlerFunc, method string, ur // HTTPSuccess asserts that a specified handler returns a success status code. // -// a.HTTPSuccess(myHandler, "POST", "http://www.google.com", nil) +// a.HTTPSuccess(myHandler, "POST", "http://www.google.com", nil) // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) bool { @@ -577,7 +661,7 @@ func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url st // HTTPSuccessf asserts that a specified handler returns a success status code. // -// a.HTTPSuccessf(myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") +// a.HTTPSuccessf(myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPSuccessf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { @@ -589,7 +673,7 @@ func (a *Assertions) HTTPSuccessf(handler http.HandlerFunc, method string, url s // Implements asserts that an object is implemented by the specified interface. 
// -// a.Implements((*MyInterface)(nil), new(MyObject)) +// a.Implements((*MyInterface)(nil), new(MyObject)) func (a *Assertions) Implements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -599,7 +683,7 @@ func (a *Assertions) Implements(interfaceObject interface{}, object interface{}, // Implementsf asserts that an object is implemented by the specified interface. // -// a.Implementsf((*MyInterface)(nil), new(MyObject), "error message %s", "formatted") +// a.Implementsf((*MyInterface)(nil), new(MyObject), "error message %s", "formatted") func (a *Assertions) Implementsf(interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -609,7 +693,7 @@ func (a *Assertions) Implementsf(interfaceObject interface{}, object interface{} // InDelta asserts that the two numerals are within delta of each other. // -// a.InDelta(math.Pi, 22/7.0, 0.01) +// a.InDelta(math.Pi, 22/7.0, 0.01) func (a *Assertions) InDelta(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -651,7 +735,7 @@ func (a *Assertions) InDeltaSlicef(expected interface{}, actual interface{}, del // InDeltaf asserts that the two numerals are within delta of each other. // -// a.InDeltaf(math.Pi, 22/7.0, 0.01, "error message %s", "formatted") +// a.InDeltaf(math.Pi, 22/7.0, 0.01, "error message %s", "formatted") func (a *Assertions) InDeltaf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -693,9 +777,9 @@ func (a *Assertions) InEpsilonf(expected interface{}, actual interface{}, epsilo // IsDecreasing asserts that the collection is decreasing // -// a.IsDecreasing([]int{2, 1, 0}) -// a.IsDecreasing([]float{2, 1}) -// a.IsDecreasing([]string{"b", "a"}) +// a.IsDecreasing([]int{2, 1, 0}) +// a.IsDecreasing([]float{2, 1}) +// a.IsDecreasing([]string{"b", "a"}) func (a *Assertions) IsDecreasing(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -705,9 +789,9 @@ func (a *Assertions) IsDecreasing(object interface{}, msgAndArgs ...interface{}) // IsDecreasingf asserts that the collection is decreasing // -// a.IsDecreasingf([]int{2, 1, 0}, "error message %s", "formatted") -// a.IsDecreasingf([]float{2, 1}, "error message %s", "formatted") -// a.IsDecreasingf([]string{"b", "a"}, "error message %s", "formatted") +// a.IsDecreasingf([]int{2, 1, 0}, "error message %s", "formatted") +// a.IsDecreasingf([]float{2, 1}, "error message %s", "formatted") +// a.IsDecreasingf([]string{"b", "a"}, "error message %s", "formatted") func (a *Assertions) IsDecreasingf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -717,9 +801,9 @@ func (a *Assertions) IsDecreasingf(object interface{}, msg string, args ...inter // IsIncreasing asserts that the collection is increasing // -// a.IsIncreasing([]int{1, 2, 3}) -// a.IsIncreasing([]float{1, 2}) -// a.IsIncreasing([]string{"a", "b"}) +// a.IsIncreasing([]int{1, 2, 3}) +// a.IsIncreasing([]float{1, 2}) +// a.IsIncreasing([]string{"a", "b"}) func (a *Assertions) IsIncreasing(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -729,9 +813,9 @@ func (a *Assertions) IsIncreasing(object interface{}, msgAndArgs ...interface{}) // IsIncreasingf 
asserts that the collection is increasing // -// a.IsIncreasingf([]int{1, 2, 3}, "error message %s", "formatted") -// a.IsIncreasingf([]float{1, 2}, "error message %s", "formatted") -// a.IsIncreasingf([]string{"a", "b"}, "error message %s", "formatted") +// a.IsIncreasingf([]int{1, 2, 3}, "error message %s", "formatted") +// a.IsIncreasingf([]float{1, 2}, "error message %s", "formatted") +// a.IsIncreasingf([]string{"a", "b"}, "error message %s", "formatted") func (a *Assertions) IsIncreasingf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -741,9 +825,9 @@ func (a *Assertions) IsIncreasingf(object interface{}, msg string, args ...inter // IsNonDecreasing asserts that the collection is not decreasing // -// a.IsNonDecreasing([]int{1, 1, 2}) -// a.IsNonDecreasing([]float{1, 2}) -// a.IsNonDecreasing([]string{"a", "b"}) +// a.IsNonDecreasing([]int{1, 1, 2}) +// a.IsNonDecreasing([]float{1, 2}) +// a.IsNonDecreasing([]string{"a", "b"}) func (a *Assertions) IsNonDecreasing(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -753,9 +837,9 @@ func (a *Assertions) IsNonDecreasing(object interface{}, msgAndArgs ...interface // IsNonDecreasingf asserts that the collection is not decreasing // -// a.IsNonDecreasingf([]int{1, 1, 2}, "error message %s", "formatted") -// a.IsNonDecreasingf([]float{1, 2}, "error message %s", "formatted") -// a.IsNonDecreasingf([]string{"a", "b"}, "error message %s", "formatted") +// a.IsNonDecreasingf([]int{1, 1, 2}, "error message %s", "formatted") +// a.IsNonDecreasingf([]float{1, 2}, "error message %s", "formatted") +// a.IsNonDecreasingf([]string{"a", "b"}, "error message %s", "formatted") func (a *Assertions) IsNonDecreasingf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -765,9 +849,9 @@ func (a *Assertions) IsNonDecreasingf(object interface{}, msg string, args ...in // IsNonIncreasing asserts that the collection is not increasing // -// a.IsNonIncreasing([]int{2, 1, 1}) -// a.IsNonIncreasing([]float{2, 1}) -// a.IsNonIncreasing([]string{"b", "a"}) +// a.IsNonIncreasing([]int{2, 1, 1}) +// a.IsNonIncreasing([]float{2, 1}) +// a.IsNonIncreasing([]string{"b", "a"}) func (a *Assertions) IsNonIncreasing(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -777,9 +861,9 @@ func (a *Assertions) IsNonIncreasing(object interface{}, msgAndArgs ...interface // IsNonIncreasingf asserts that the collection is not increasing // -// a.IsNonIncreasingf([]int{2, 1, 1}, "error message %s", "formatted") -// a.IsNonIncreasingf([]float{2, 1}, "error message %s", "formatted") -// a.IsNonIncreasingf([]string{"b", "a"}, "error message %s", "formatted") +// a.IsNonIncreasingf([]int{2, 1, 1}, "error message %s", "formatted") +// a.IsNonIncreasingf([]float{2, 1}, "error message %s", "formatted") +// a.IsNonIncreasingf([]string{"b", "a"}, "error message %s", "formatted") func (a *Assertions) IsNonIncreasingf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -805,7 +889,7 @@ func (a *Assertions) IsTypef(expectedType interface{}, object interface{}, msg s // JSONEq asserts that two JSON strings are equivalent. 
// -// a.JSONEq(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) +// a.JSONEq(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) func (a *Assertions) JSONEq(expected string, actual string, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -815,7 +899,7 @@ func (a *Assertions) JSONEq(expected string, actual string, msgAndArgs ...interf // JSONEqf asserts that two JSON strings are equivalent. // -// a.JSONEqf(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") +// a.JSONEqf(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") func (a *Assertions) JSONEqf(expected string, actual string, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -826,7 +910,7 @@ func (a *Assertions) JSONEqf(expected string, actual string, msg string, args .. // Len asserts that the specified object has specific length. // Len also fails if the object has a type that len() not accept. // -// a.Len(mySlice, 3) +// a.Len(mySlice, 3) func (a *Assertions) Len(object interface{}, length int, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -837,7 +921,7 @@ func (a *Assertions) Len(object interface{}, length int, msgAndArgs ...interface // Lenf asserts that the specified object has specific length. // Lenf also fails if the object has a type that len() not accept. // -// a.Lenf(mySlice, 3, "error message %s", "formatted") +// a.Lenf(mySlice, 3, "error message %s", "formatted") func (a *Assertions) Lenf(object interface{}, length int, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -847,9 +931,9 @@ func (a *Assertions) Lenf(object interface{}, length int, msg string, args ...in // Less asserts that the first element is less than the second // -// a.Less(1, 2) -// a.Less(float64(1), float64(2)) -// a.Less("a", "b") +// a.Less(1, 2) +// a.Less(float64(1), float64(2)) +// a.Less("a", "b") func (a *Assertions) Less(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -859,10 +943,10 @@ func (a *Assertions) Less(e1 interface{}, e2 interface{}, msgAndArgs ...interfac // LessOrEqual asserts that the first element is less than or equal to the second // -// a.LessOrEqual(1, 2) -// a.LessOrEqual(2, 2) -// a.LessOrEqual("a", "b") -// a.LessOrEqual("b", "b") +// a.LessOrEqual(1, 2) +// a.LessOrEqual(2, 2) +// a.LessOrEqual("a", "b") +// a.LessOrEqual("b", "b") func (a *Assertions) LessOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -872,10 +956,10 @@ func (a *Assertions) LessOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...i // LessOrEqualf asserts that the first element is less than or equal to the second // -// a.LessOrEqualf(1, 2, "error message %s", "formatted") -// a.LessOrEqualf(2, 2, "error message %s", "formatted") -// a.LessOrEqualf("a", "b", "error message %s", "formatted") -// a.LessOrEqualf("b", "b", "error message %s", "formatted") +// a.LessOrEqualf(1, 2, "error message %s", "formatted") +// a.LessOrEqualf(2, 2, "error message %s", "formatted") +// a.LessOrEqualf("a", "b", "error message %s", "formatted") +// a.LessOrEqualf("b", "b", "error message %s", "formatted") func (a *Assertions) LessOrEqualf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() 
@@ -885,9 +969,9 @@ func (a *Assertions) LessOrEqualf(e1 interface{}, e2 interface{}, msg string, ar // Lessf asserts that the first element is less than the second // -// a.Lessf(1, 2, "error message %s", "formatted") -// a.Lessf(float64(1), float64(2), "error message %s", "formatted") -// a.Lessf("a", "b", "error message %s", "formatted") +// a.Lessf(1, 2, "error message %s", "formatted") +// a.Lessf(float64(1), float64(2), "error message %s", "formatted") +// a.Lessf("a", "b", "error message %s", "formatted") func (a *Assertions) Lessf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -897,8 +981,8 @@ func (a *Assertions) Lessf(e1 interface{}, e2 interface{}, msg string, args ...i // Negative asserts that the specified element is negative // -// a.Negative(-1) -// a.Negative(-1.23) +// a.Negative(-1) +// a.Negative(-1.23) func (a *Assertions) Negative(e interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -908,8 +992,8 @@ func (a *Assertions) Negative(e interface{}, msgAndArgs ...interface{}) bool { // Negativef asserts that the specified element is negative // -// a.Negativef(-1, "error message %s", "formatted") -// a.Negativef(-1.23, "error message %s", "formatted") +// a.Negativef(-1, "error message %s", "formatted") +// a.Negativef(-1.23, "error message %s", "formatted") func (a *Assertions) Negativef(e interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -920,7 +1004,7 @@ func (a *Assertions) Negativef(e interface{}, msg string, args ...interface{}) b // Never asserts that the given condition doesn't satisfy in waitFor time, // periodically checking the target function each tick. // -// a.Never(func() bool { return false; }, time.Second, 10*time.Millisecond) +// a.Never(func() bool { return false; }, time.Second, 10*time.Millisecond) func (a *Assertions) Never(condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -931,7 +1015,7 @@ func (a *Assertions) Never(condition func() bool, waitFor time.Duration, tick ti // Neverf asserts that the given condition doesn't satisfy in waitFor time, // periodically checking the target function each tick. // -// a.Neverf(func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +// a.Neverf(func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") func (a *Assertions) Neverf(condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -941,7 +1025,7 @@ func (a *Assertions) Neverf(condition func() bool, waitFor time.Duration, tick t // Nil asserts that the specified object is nil. // -// a.Nil(err) +// a.Nil(err) func (a *Assertions) Nil(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -951,7 +1035,7 @@ func (a *Assertions) Nil(object interface{}, msgAndArgs ...interface{}) bool { // Nilf asserts that the specified object is nil. 
// -// a.Nilf(err, "error message %s", "formatted") +// a.Nilf(err, "error message %s", "formatted") func (a *Assertions) Nilf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -979,10 +1063,10 @@ func (a *Assertions) NoDirExistsf(path string, msg string, args ...interface{}) // NoError asserts that a function returned no error (i.e. `nil`). // -// actualObj, err := SomeFunction() -// if a.NoError(err) { -// assert.Equal(t, expectedObj, actualObj) -// } +// actualObj, err := SomeFunction() +// if a.NoError(err) { +// assert.Equal(t, expectedObj, actualObj) +// } func (a *Assertions) NoError(err error, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -992,10 +1076,10 @@ func (a *Assertions) NoError(err error, msgAndArgs ...interface{}) bool { // NoErrorf asserts that a function returned no error (i.e. `nil`). // -// actualObj, err := SomeFunction() -// if a.NoErrorf(err, "error message %s", "formatted") { -// assert.Equal(t, expectedObj, actualObj) -// } +// actualObj, err := SomeFunction() +// if a.NoErrorf(err, "error message %s", "formatted") { +// assert.Equal(t, expectedObj, actualObj) +// } func (a *Assertions) NoErrorf(err error, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1024,9 +1108,9 @@ func (a *Assertions) NoFileExistsf(path string, msg string, args ...interface{}) // NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. // -// a.NotContains("Hello World", "Earth") -// a.NotContains(["Hello", "World"], "Earth") -// a.NotContains({"Hello": "World"}, "Earth") +// a.NotContains("Hello World", "Earth") +// a.NotContains(["Hello", "World"], "Earth") +// a.NotContains({"Hello": "World"}, "Earth") func (a *Assertions) NotContains(s interface{}, contains interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1037,9 +1121,9 @@ func (a *Assertions) NotContains(s interface{}, contains interface{}, msgAndArgs // NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. // -// a.NotContainsf("Hello World", "Earth", "error message %s", "formatted") -// a.NotContainsf(["Hello", "World"], "Earth", "error message %s", "formatted") -// a.NotContainsf({"Hello": "World"}, "Earth", "error message %s", "formatted") +// a.NotContainsf("Hello World", "Earth", "error message %s", "formatted") +// a.NotContainsf(["Hello", "World"], "Earth", "error message %s", "formatted") +// a.NotContainsf({"Hello": "World"}, "Earth", "error message %s", "formatted") func (a *Assertions) NotContainsf(s interface{}, contains interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1050,9 +1134,9 @@ func (a *Assertions) NotContainsf(s interface{}, contains interface{}, msg strin // NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either // a slice or a channel with len == 0. // -// if a.NotEmpty(obj) { -// assert.Equal(t, "two", obj[1]) -// } +// if a.NotEmpty(obj) { +// assert.Equal(t, "two", obj[1]) +// } func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1063,9 +1147,9 @@ func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) boo // NotEmptyf asserts that the specified object is NOT empty. I.e. 
not nil, "", false, 0 or either // a slice or a channel with len == 0. // -// if a.NotEmptyf(obj, "error message %s", "formatted") { -// assert.Equal(t, "two", obj[1]) -// } +// if a.NotEmptyf(obj, "error message %s", "formatted") { +// assert.Equal(t, "two", obj[1]) +// } func (a *Assertions) NotEmptyf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1075,7 +1159,7 @@ func (a *Assertions) NotEmptyf(object interface{}, msg string, args ...interface // NotEqual asserts that the specified values are NOT equal. // -// a.NotEqual(obj1, obj2) +// a.NotEqual(obj1, obj2) // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). @@ -1088,7 +1172,7 @@ func (a *Assertions) NotEqual(expected interface{}, actual interface{}, msgAndAr // NotEqualValues asserts that two objects are not equal even when converted to the same type // -// a.NotEqualValues(obj1, obj2) +// a.NotEqualValues(obj1, obj2) func (a *Assertions) NotEqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1098,7 +1182,7 @@ func (a *Assertions) NotEqualValues(expected interface{}, actual interface{}, ms // NotEqualValuesf asserts that two objects are not equal even when converted to the same type // -// a.NotEqualValuesf(obj1, obj2, "error message %s", "formatted") +// a.NotEqualValuesf(obj1, obj2, "error message %s", "formatted") func (a *Assertions) NotEqualValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1108,7 +1192,7 @@ func (a *Assertions) NotEqualValuesf(expected interface{}, actual interface{}, m // NotEqualf asserts that the specified values are NOT equal. // -// a.NotEqualf(obj1, obj2, "error message %s", "formatted") +// a.NotEqualf(obj1, obj2, "error message %s", "formatted") // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). @@ -1139,7 +1223,7 @@ func (a *Assertions) NotErrorIsf(err error, target error, msg string, args ...in // NotNil asserts that the specified object is not nil. // -// a.NotNil(err) +// a.NotNil(err) func (a *Assertions) NotNil(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1149,7 +1233,7 @@ func (a *Assertions) NotNil(object interface{}, msgAndArgs ...interface{}) bool // NotNilf asserts that the specified object is not nil. // -// a.NotNilf(err, "error message %s", "formatted") +// a.NotNilf(err, "error message %s", "formatted") func (a *Assertions) NotNilf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1159,7 +1243,7 @@ func (a *Assertions) NotNilf(object interface{}, msg string, args ...interface{} // NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic. // -// a.NotPanics(func(){ RemainCalm() }) +// a.NotPanics(func(){ RemainCalm() }) func (a *Assertions) NotPanics(f PanicTestFunc, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1169,7 +1253,7 @@ func (a *Assertions) NotPanics(f PanicTestFunc, msgAndArgs ...interface{}) bool // NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic. 
// -// a.NotPanicsf(func(){ RemainCalm() }, "error message %s", "formatted") +// a.NotPanicsf(func(){ RemainCalm() }, "error message %s", "formatted") func (a *Assertions) NotPanicsf(f PanicTestFunc, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1179,8 +1263,8 @@ func (a *Assertions) NotPanicsf(f PanicTestFunc, msg string, args ...interface{} // NotRegexp asserts that a specified regexp does not match a string. // -// a.NotRegexp(regexp.MustCompile("starts"), "it's starting") -// a.NotRegexp("^start", "it's not starting") +// a.NotRegexp(regexp.MustCompile("starts"), "it's starting") +// a.NotRegexp("^start", "it's not starting") func (a *Assertions) NotRegexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1190,8 +1274,8 @@ func (a *Assertions) NotRegexp(rx interface{}, str interface{}, msgAndArgs ...in // NotRegexpf asserts that a specified regexp does not match a string. // -// a.NotRegexpf(regexp.MustCompile("starts"), "it's starting", "error message %s", "formatted") -// a.NotRegexpf("^start", "it's not starting", "error message %s", "formatted") +// a.NotRegexpf(regexp.MustCompile("starts"), "it's starting", "error message %s", "formatted") +// a.NotRegexpf("^start", "it's not starting", "error message %s", "formatted") func (a *Assertions) NotRegexpf(rx interface{}, str interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1201,7 +1285,7 @@ func (a *Assertions) NotRegexpf(rx interface{}, str interface{}, msg string, arg // NotSame asserts that two pointers do not reference the same object. // -// a.NotSame(ptr1, ptr2) +// a.NotSame(ptr1, ptr2) // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -1214,7 +1298,7 @@ func (a *Assertions) NotSame(expected interface{}, actual interface{}, msgAndArg // NotSamef asserts that two pointers do not reference the same object. // -// a.NotSamef(ptr1, ptr2, "error message %s", "formatted") +// a.NotSamef(ptr1, ptr2, "error message %s", "formatted") // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -1228,7 +1312,7 @@ func (a *Assertions) NotSamef(expected interface{}, actual interface{}, msg stri // NotSubset asserts that the specified list(array, slice...) contains not all // elements given in the specified subset(array, slice...). // -// a.NotSubset([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") +// a.NotSubset([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1239,7 +1323,7 @@ func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs // NotSubsetf asserts that the specified list(array, slice...) contains not all // elements given in the specified subset(array, slice...). 
// -// a.NotSubsetf([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") +// a.NotSubsetf([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") func (a *Assertions) NotSubsetf(list interface{}, subset interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1265,7 +1349,7 @@ func (a *Assertions) NotZerof(i interface{}, msg string, args ...interface{}) bo // Panics asserts that the code inside the specified PanicTestFunc panics. // -// a.Panics(func(){ GoCrazy() }) +// a.Panics(func(){ GoCrazy() }) func (a *Assertions) Panics(f PanicTestFunc, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1277,7 +1361,7 @@ func (a *Assertions) Panics(f PanicTestFunc, msgAndArgs ...interface{}) bool { // panics, and that the recovered panic value is an error that satisfies the // EqualError comparison. // -// a.PanicsWithError("crazy error", func(){ GoCrazy() }) +// a.PanicsWithError("crazy error", func(){ GoCrazy() }) func (a *Assertions) PanicsWithError(errString string, f PanicTestFunc, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1289,7 +1373,7 @@ func (a *Assertions) PanicsWithError(errString string, f PanicTestFunc, msgAndAr // panics, and that the recovered panic value is an error that satisfies the // EqualError comparison. // -// a.PanicsWithErrorf("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +// a.PanicsWithErrorf("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") func (a *Assertions) PanicsWithErrorf(errString string, f PanicTestFunc, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1300,7 +1384,7 @@ func (a *Assertions) PanicsWithErrorf(errString string, f PanicTestFunc, msg str // PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that // the recovered panic value equals the expected panic value. // -// a.PanicsWithValue("crazy error", func(){ GoCrazy() }) +// a.PanicsWithValue("crazy error", func(){ GoCrazy() }) func (a *Assertions) PanicsWithValue(expected interface{}, f PanicTestFunc, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1311,7 +1395,7 @@ func (a *Assertions) PanicsWithValue(expected interface{}, f PanicTestFunc, msgA // PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that // the recovered panic value equals the expected panic value. // -// a.PanicsWithValuef("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +// a.PanicsWithValuef("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") func (a *Assertions) PanicsWithValuef(expected interface{}, f PanicTestFunc, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1321,7 +1405,7 @@ func (a *Assertions) PanicsWithValuef(expected interface{}, f PanicTestFunc, msg // Panicsf asserts that the code inside the specified PanicTestFunc panics. 
// -// a.Panicsf(func(){ GoCrazy() }, "error message %s", "formatted") +// a.Panicsf(func(){ GoCrazy() }, "error message %s", "formatted") func (a *Assertions) Panicsf(f PanicTestFunc, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1331,8 +1415,8 @@ func (a *Assertions) Panicsf(f PanicTestFunc, msg string, args ...interface{}) b // Positive asserts that the specified element is positive // -// a.Positive(1) -// a.Positive(1.23) +// a.Positive(1) +// a.Positive(1.23) func (a *Assertions) Positive(e interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1342,8 +1426,8 @@ func (a *Assertions) Positive(e interface{}, msgAndArgs ...interface{}) bool { // Positivef asserts that the specified element is positive // -// a.Positivef(1, "error message %s", "formatted") -// a.Positivef(1.23, "error message %s", "formatted") +// a.Positivef(1, "error message %s", "formatted") +// a.Positivef(1.23, "error message %s", "formatted") func (a *Assertions) Positivef(e interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1353,8 +1437,8 @@ func (a *Assertions) Positivef(e interface{}, msg string, args ...interface{}) b // Regexp asserts that a specified regexp matches a string. // -// a.Regexp(regexp.MustCompile("start"), "it's starting") -// a.Regexp("start...$", "it's not starting") +// a.Regexp(regexp.MustCompile("start"), "it's starting") +// a.Regexp("start...$", "it's not starting") func (a *Assertions) Regexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1364,8 +1448,8 @@ func (a *Assertions) Regexp(rx interface{}, str interface{}, msgAndArgs ...inter // Regexpf asserts that a specified regexp matches a string. // -// a.Regexpf(regexp.MustCompile("start"), "it's starting", "error message %s", "formatted") -// a.Regexpf("start...$", "it's not starting", "error message %s", "formatted") +// a.Regexpf(regexp.MustCompile("start"), "it's starting", "error message %s", "formatted") +// a.Regexpf("start...$", "it's not starting", "error message %s", "formatted") func (a *Assertions) Regexpf(rx interface{}, str interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1375,7 +1459,7 @@ func (a *Assertions) Regexpf(rx interface{}, str interface{}, msg string, args . // Same asserts that two pointers reference the same object. // -// a.Same(ptr1, ptr2) +// a.Same(ptr1, ptr2) // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -1388,7 +1472,7 @@ func (a *Assertions) Same(expected interface{}, actual interface{}, msgAndArgs . // Samef asserts that two pointers reference the same object. // -// a.Samef(ptr1, ptr2, "error message %s", "formatted") +// a.Samef(ptr1, ptr2, "error message %s", "formatted") // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -1402,7 +1486,7 @@ func (a *Assertions) Samef(expected interface{}, actual interface{}, msg string, // Subset asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). 
// -// a.Subset([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") +// a.Subset([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1413,7 +1497,7 @@ func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ... // Subsetf asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). // -// a.Subsetf([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") +// a.Subsetf([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1423,7 +1507,7 @@ func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, a // True asserts that the specified value is true. // -// a.True(myBool) +// a.True(myBool) func (a *Assertions) True(value bool, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1433,7 +1517,7 @@ func (a *Assertions) True(value bool, msgAndArgs ...interface{}) bool { // Truef asserts that the specified value is true. // -// a.Truef(myBool, "error message %s", "formatted") +// a.Truef(myBool, "error message %s", "formatted") func (a *Assertions) Truef(value bool, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1443,7 +1527,7 @@ func (a *Assertions) Truef(value bool, msg string, args ...interface{}) bool { // WithinDuration asserts that the two times are within duration delta of each other. // -// a.WithinDuration(time.Now(), time.Now(), 10*time.Second) +// a.WithinDuration(time.Now(), time.Now(), 10*time.Second) func (a *Assertions) WithinDuration(expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1453,7 +1537,7 @@ func (a *Assertions) WithinDuration(expected time.Time, actual time.Time, delta // WithinDurationf asserts that the two times are within duration delta of each other. // -// a.WithinDurationf(time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") +// a.WithinDurationf(time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") func (a *Assertions) WithinDurationf(expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1463,7 +1547,7 @@ func (a *Assertions) WithinDurationf(expected time.Time, actual time.Time, delta // WithinRange asserts that a time is within a time range (inclusive). // -// a.WithinRange(time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second)) +// a.WithinRange(time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second)) func (a *Assertions) WithinRange(actual time.Time, start time.Time, end time.Time, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1473,7 +1557,7 @@ func (a *Assertions) WithinRange(actual time.Time, start time.Time, end time.Tim // WithinRangef asserts that a time is within a time range (inclusive). 
// -// a.WithinRangef(time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second), "error message %s", "formatted") +// a.WithinRangef(time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second), "error message %s", "formatted") func (a *Assertions) WithinRangef(actual time.Time, start time.Time, end time.Time, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() diff --git a/tools/vendor/github.com/stretchr/testify/assert/assertion_order.go b/tools/vendor/github.com/stretchr/testify/assert/assertion_order.go index 7594487835..00df62a059 100644 --- a/tools/vendor/github.com/stretchr/testify/assert/assertion_order.go +++ b/tools/vendor/github.com/stretchr/testify/assert/assertion_order.go @@ -46,36 +46,36 @@ func isOrdered(t TestingT, object interface{}, allowedComparesResults []CompareT // IsIncreasing asserts that the collection is increasing // -// assert.IsIncreasing(t, []int{1, 2, 3}) -// assert.IsIncreasing(t, []float{1, 2}) -// assert.IsIncreasing(t, []string{"a", "b"}) +// assert.IsIncreasing(t, []int{1, 2, 3}) +// assert.IsIncreasing(t, []float{1, 2}) +// assert.IsIncreasing(t, []string{"a", "b"}) func IsIncreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { return isOrdered(t, object, []CompareType{compareLess}, "\"%v\" is not less than \"%v\"", msgAndArgs...) } // IsNonIncreasing asserts that the collection is not increasing // -// assert.IsNonIncreasing(t, []int{2, 1, 1}) -// assert.IsNonIncreasing(t, []float{2, 1}) -// assert.IsNonIncreasing(t, []string{"b", "a"}) +// assert.IsNonIncreasing(t, []int{2, 1, 1}) +// assert.IsNonIncreasing(t, []float{2, 1}) +// assert.IsNonIncreasing(t, []string{"b", "a"}) func IsNonIncreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { return isOrdered(t, object, []CompareType{compareEqual, compareGreater}, "\"%v\" is not greater than or equal to \"%v\"", msgAndArgs...) } // IsDecreasing asserts that the collection is decreasing // -// assert.IsDecreasing(t, []int{2, 1, 0}) -// assert.IsDecreasing(t, []float{2, 1}) -// assert.IsDecreasing(t, []string{"b", "a"}) +// assert.IsDecreasing(t, []int{2, 1, 0}) +// assert.IsDecreasing(t, []float{2, 1}) +// assert.IsDecreasing(t, []string{"b", "a"}) func IsDecreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { return isOrdered(t, object, []CompareType{compareGreater}, "\"%v\" is not greater than \"%v\"", msgAndArgs...) } // IsNonDecreasing asserts that the collection is not decreasing // -// assert.IsNonDecreasing(t, []int{1, 1, 2}) -// assert.IsNonDecreasing(t, []float{1, 2}) -// assert.IsNonDecreasing(t, []string{"a", "b"}) +// assert.IsNonDecreasing(t, []int{1, 1, 2}) +// assert.IsNonDecreasing(t, []float{1, 2}) +// assert.IsNonDecreasing(t, []string{"a", "b"}) func IsNonDecreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { return isOrdered(t, object, []CompareType{compareLess, compareEqual}, "\"%v\" is not less than or equal to \"%v\"", msgAndArgs...) 
} diff --git a/tools/vendor/github.com/stretchr/testify/assert/assertions.go b/tools/vendor/github.com/stretchr/testify/assert/assertions.go index fa1245b189..a55d1bba92 100644 --- a/tools/vendor/github.com/stretchr/testify/assert/assertions.go +++ b/tools/vendor/github.com/stretchr/testify/assert/assertions.go @@ -8,7 +8,6 @@ import ( "fmt" "math" "os" - "path/filepath" "reflect" "regexp" "runtime" @@ -76,6 +75,77 @@ func ObjectsAreEqual(expected, actual interface{}) bool { return bytes.Equal(exp, act) } +// copyExportedFields iterates downward through nested data structures and creates a copy +// that only contains the exported struct fields. +func copyExportedFields(expected interface{}) interface{} { + if isNil(expected) { + return expected + } + + expectedType := reflect.TypeOf(expected) + expectedKind := expectedType.Kind() + expectedValue := reflect.ValueOf(expected) + + switch expectedKind { + case reflect.Struct: + result := reflect.New(expectedType).Elem() + for i := 0; i < expectedType.NumField(); i++ { + field := expectedType.Field(i) + isExported := field.IsExported() + if isExported { + fieldValue := expectedValue.Field(i) + if isNil(fieldValue) || isNil(fieldValue.Interface()) { + continue + } + newValue := copyExportedFields(fieldValue.Interface()) + result.Field(i).Set(reflect.ValueOf(newValue)) + } + } + return result.Interface() + + case reflect.Ptr: + result := reflect.New(expectedType.Elem()) + unexportedRemoved := copyExportedFields(expectedValue.Elem().Interface()) + result.Elem().Set(reflect.ValueOf(unexportedRemoved)) + return result.Interface() + + case reflect.Array, reflect.Slice: + result := reflect.MakeSlice(expectedType, expectedValue.Len(), expectedValue.Len()) + for i := 0; i < expectedValue.Len(); i++ { + index := expectedValue.Index(i) + if isNil(index) { + continue + } + unexportedRemoved := copyExportedFields(index.Interface()) + result.Index(i).Set(reflect.ValueOf(unexportedRemoved)) + } + return result.Interface() + + case reflect.Map: + result := reflect.MakeMap(expectedType) + for _, k := range expectedValue.MapKeys() { + index := expectedValue.MapIndex(k) + unexportedRemoved := copyExportedFields(index.Interface()) + result.SetMapIndex(k, reflect.ValueOf(unexportedRemoved)) + } + return result.Interface() + + default: + return expected + } +} + +// ObjectsExportedFieldsAreEqual determines if the exported (public) fields of two objects are +// considered equal. This comparison of only exported fields is applied recursively to nested data +// structures. +// +// This function does no assertion of any kind. +func ObjectsExportedFieldsAreEqual(expected, actual interface{}) bool { + expectedCleaned := copyExportedFields(expected) + actualCleaned := copyExportedFields(actual) + return ObjectsAreEqualValues(expectedCleaned, actualCleaned) +} + // ObjectsAreEqualValues gets whether two objects are equal, or if their // values are equal. 
func ObjectsAreEqualValues(expected, actual interface{}) bool { @@ -141,12 +211,11 @@ func CallerInfo() []string { } parts := strings.Split(file, "/") - file = parts[len(parts)-1] if len(parts) > 1 { + filename := parts[len(parts)-1] dir := parts[len(parts)-2] - if (dir != "assert" && dir != "mock" && dir != "require") || file == "mock_test.go" { - path, _ := filepath.Abs(file) - callers = append(callers, fmt.Sprintf("%s:%d", path, line)) + if (dir != "assert" && dir != "mock" && dir != "require") || filename == "mock_test.go" { + callers = append(callers, fmt.Sprintf("%s:%d", file, line)) } } @@ -273,7 +342,7 @@ type labeledContent struct { // labeledOutput returns a string consisting of the provided labeledContent. Each labeled output is appended in the following manner: // -// \t{{label}}:{{align_spaces}}\t{{content}}\n +// \t{{label}}:{{align_spaces}}\t{{content}}\n // // The initial carriage return is required to undo/erase any padding added by testing.T.Errorf. The "\t{{label}}:" is for the label. // If a label is shorter than the longest label provided, padding spaces are added to make all the labels match in length. Once this @@ -296,7 +365,7 @@ func labeledOutput(content ...labeledContent) string { // Implements asserts that an object is implemented by the specified interface. // -// assert.Implements(t, (*MyInterface)(nil), new(MyObject)) +// assert.Implements(t, (*MyInterface)(nil), new(MyObject)) func Implements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -328,7 +397,7 @@ func IsType(t TestingT, expectedType interface{}, object interface{}, msgAndArgs // Equal asserts that two objects are equal. // -// assert.Equal(t, 123, 123) +// assert.Equal(t, 123, 123) // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). Function equality @@ -369,7 +438,7 @@ func validateEqualArgs(expected, actual interface{}) error { // Same asserts that two pointers reference the same object. // -// assert.Same(t, ptr1, ptr2) +// assert.Same(t, ptr1, ptr2) // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -389,7 +458,7 @@ func Same(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) b // NotSame asserts that two pointers do not reference the same object. // -// assert.NotSame(t, ptr1, ptr2) +// assert.NotSame(t, ptr1, ptr2) // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -457,7 +526,7 @@ func truncatingFormat(data interface{}) string { // EqualValues asserts that two objects are equal or convertable to the same types // and equal. // -// assert.EqualValues(t, uint32(123), int32(123)) +// assert.EqualValues(t, uint32(123), int32(123)) func EqualValues(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -475,9 +544,53 @@ func EqualValues(t TestingT, expected, actual interface{}, msgAndArgs ...interfa } +// EqualExportedValues asserts that the types of two objects are equal and their public +// fields are also equal. This is useful for comparing structs that have private fields +// that could potentially differ. 
+// +// type S struct { +// Exported int +// notExported int +// } +// assert.EqualExportedValues(t, S{1, 2}, S{1, 3}) => true +// assert.EqualExportedValues(t, S{1, 2}, S{2, 3}) => false +func EqualExportedValues(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + aType := reflect.TypeOf(expected) + bType := reflect.TypeOf(actual) + + if aType != bType { + return Fail(t, fmt.Sprintf("Types expected to match exactly\n\t%v != %v", aType, bType), msgAndArgs...) + } + + if aType.Kind() != reflect.Struct { + return Fail(t, fmt.Sprintf("Types expected to both be struct \n\t%v != %v", aType.Kind(), reflect.Struct), msgAndArgs...) + } + + if bType.Kind() != reflect.Struct { + return Fail(t, fmt.Sprintf("Types expected to both be struct \n\t%v != %v", bType.Kind(), reflect.Struct), msgAndArgs...) + } + + expected = copyExportedFields(expected) + actual = copyExportedFields(actual) + + if !ObjectsAreEqualValues(expected, actual) { + diff := diff(expected, actual) + expected, actual = formatUnequalValues(expected, actual) + return Fail(t, fmt.Sprintf("Not equal (comparing only exported fields): \n"+ + "expected: %s\n"+ + "actual : %s%s", expected, actual, diff), msgAndArgs...) + } + + return true +} + // Exactly asserts that two objects are equal in value and type. // -// assert.Exactly(t, int32(123), int64(123)) +// assert.Exactly(t, int32(123), int64(123)) func Exactly(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -496,7 +609,7 @@ func Exactly(t TestingT, expected, actual interface{}, msgAndArgs ...interface{} // NotNil asserts that the specified object is not nil. // -// assert.NotNil(t, err) +// assert.NotNil(t, err) func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { if !isNil(object) { return true @@ -530,7 +643,7 @@ func isNil(object interface{}) bool { []reflect.Kind{ reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, - reflect.Ptr, reflect.Slice}, + reflect.Ptr, reflect.Slice, reflect.UnsafePointer}, kind) if isNilableKind && value.IsNil() { @@ -542,7 +655,7 @@ func isNil(object interface{}) bool { // Nil asserts that the specified object is nil. // -// assert.Nil(t, err) +// assert.Nil(t, err) func Nil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { if isNil(object) { return true @@ -585,7 +698,7 @@ func isEmpty(object interface{}) bool { // Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either // a slice or a channel with len == 0. // -// assert.Empty(t, obj) +// assert.Empty(t, obj) func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { pass := isEmpty(object) if !pass { @@ -602,9 +715,9 @@ func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { // NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either // a slice or a channel with len == 0. // -// if assert.NotEmpty(t, obj) { -// assert.Equal(t, "two", obj[1]) -// } +// if assert.NotEmpty(t, obj) { +// assert.Equal(t, "two", obj[1]) +// } func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { pass := !isEmpty(object) if !pass { @@ -633,7 +746,7 @@ func getLen(x interface{}) (ok bool, length int) { // Len asserts that the specified object has specific length. // Len also fails if the object has a type that len() not accept. 
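For orientation, a minimal sketch of how the newly vendored EqualExportedValues assertion can be exercised from a test outside this vendored tree; the user type and test name are hypothetical, not part of the change:

	package example

	import (
		"testing"

		"github.com/stretchr/testify/assert"
	)

	type user struct {
		Name string // exported: compared by EqualExportedValues
		id   int    // unexported: ignored by EqualExportedValues
	}

	func TestUserExportedFields(t *testing.T) {
		// Passes: only the exported Name field takes part in the comparison.
		assert.EqualExportedValues(t, user{Name: "a", id: 1}, user{Name: "a", id: 2})

		// A full-value comparison still sees the unexported difference.
		assert.NotEqualValues(t, user{Name: "a", id: 1}, user{Name: "a", id: 2})
	}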
// -// assert.Len(t, mySlice, 3) +// assert.Len(t, mySlice, 3) func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -651,7 +764,7 @@ func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) // True asserts that the specified value is true. // -// assert.True(t, myBool) +// assert.True(t, myBool) func True(t TestingT, value bool, msgAndArgs ...interface{}) bool { if !value { if h, ok := t.(tHelper); ok { @@ -666,7 +779,7 @@ func True(t TestingT, value bool, msgAndArgs ...interface{}) bool { // False asserts that the specified value is false. // -// assert.False(t, myBool) +// assert.False(t, myBool) func False(t TestingT, value bool, msgAndArgs ...interface{}) bool { if value { if h, ok := t.(tHelper); ok { @@ -681,7 +794,7 @@ func False(t TestingT, value bool, msgAndArgs ...interface{}) bool { // NotEqual asserts that the specified values are NOT equal. // -// assert.NotEqual(t, obj1, obj2) +// assert.NotEqual(t, obj1, obj2) // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). @@ -704,7 +817,7 @@ func NotEqual(t TestingT, expected, actual interface{}, msgAndArgs ...interface{ // NotEqualValues asserts that two objects are not equal even when converted to the same type // -// assert.NotEqualValues(t, obj1, obj2) +// assert.NotEqualValues(t, obj1, obj2) func NotEqualValues(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -763,9 +876,9 @@ func containsElement(list interface{}, element interface{}) (ok, found bool) { // Contains asserts that the specified string, list(array, slice...) or map contains the // specified substring or element. // -// assert.Contains(t, "Hello World", "World") -// assert.Contains(t, ["Hello", "World"], "World") -// assert.Contains(t, {"Hello": "World"}, "Hello") +// assert.Contains(t, "Hello World", "World") +// assert.Contains(t, ["Hello", "World"], "World") +// assert.Contains(t, {"Hello": "World"}, "Hello") func Contains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -786,9 +899,9 @@ func Contains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bo // NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. // -// assert.NotContains(t, "Hello World", "Earth") -// assert.NotContains(t, ["Hello", "World"], "Earth") -// assert.NotContains(t, {"Hello": "World"}, "Earth") +// assert.NotContains(t, "Hello World", "Earth") +// assert.NotContains(t, ["Hello", "World"], "Earth") +// assert.NotContains(t, {"Hello": "World"}, "Earth") func NotContains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -796,10 +909,10 @@ func NotContains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) ok, found := containsElement(s, contains) if !ok { - return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", s), msgAndArgs...) + return Fail(t, fmt.Sprintf("%#v could not be applied builtin len()", s), msgAndArgs...) } if found { - return Fail(t, fmt.Sprintf("\"%s\" should not contain \"%s\"", s, contains), msgAndArgs...) + return Fail(t, fmt.Sprintf("%#v should not contain %#v", s, contains), msgAndArgs...) 
} return true @@ -809,7 +922,7 @@ func NotContains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) // Subset asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). // -// assert.Subset(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") +// assert.Subset(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") func Subset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok bool) { if h, ok := t.(tHelper); ok { h.Helper() @@ -818,49 +931,44 @@ func Subset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok return true // we consider nil to be equal to the nil set } - defer func() { - if e := recover(); e != nil { - ok = false - } - }() - listKind := reflect.TypeOf(list).Kind() - subsetKind := reflect.TypeOf(subset).Kind() - if listKind != reflect.Array && listKind != reflect.Slice && listKind != reflect.Map { return Fail(t, fmt.Sprintf("%q has an unsupported type %s", list, listKind), msgAndArgs...) } + subsetKind := reflect.TypeOf(subset).Kind() if subsetKind != reflect.Array && subsetKind != reflect.Slice && listKind != reflect.Map { return Fail(t, fmt.Sprintf("%q has an unsupported type %s", subset, subsetKind), msgAndArgs...) } - subsetValue := reflect.ValueOf(subset) if subsetKind == reflect.Map && listKind == reflect.Map { - listValue := reflect.ValueOf(list) - subsetKeys := subsetValue.MapKeys() + subsetMap := reflect.ValueOf(subset) + actualMap := reflect.ValueOf(list) - for i := 0; i < len(subsetKeys); i++ { - subsetKey := subsetKeys[i] - subsetElement := subsetValue.MapIndex(subsetKey).Interface() - listElement := listValue.MapIndex(subsetKey).Interface() + for _, k := range subsetMap.MapKeys() { + ev := subsetMap.MapIndex(k) + av := actualMap.MapIndex(k) - if !ObjectsAreEqual(subsetElement, listElement) { - return Fail(t, fmt.Sprintf("\"%s\" does not contain \"%s\"", list, subsetElement), msgAndArgs...) + if !av.IsValid() { + return Fail(t, fmt.Sprintf("%#v does not contain %#v", list, subset), msgAndArgs...) + } + if !ObjectsAreEqual(ev.Interface(), av.Interface()) { + return Fail(t, fmt.Sprintf("%#v does not contain %#v", list, subset), msgAndArgs...) } } return true } - for i := 0; i < subsetValue.Len(); i++ { - element := subsetValue.Index(i).Interface() + subsetList := reflect.ValueOf(subset) + for i := 0; i < subsetList.Len(); i++ { + element := subsetList.Index(i).Interface() ok, found := containsElement(list, element) if !ok { - return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", list), msgAndArgs...) + return Fail(t, fmt.Sprintf("%#v could not be applied builtin len()", list), msgAndArgs...) } if !found { - return Fail(t, fmt.Sprintf("\"%s\" does not contain \"%s\"", list, element), msgAndArgs...) + return Fail(t, fmt.Sprintf("%#v does not contain %#v", list, element), msgAndArgs...) } } @@ -870,7 +978,7 @@ func Subset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok // NotSubset asserts that the specified list(array, slice...) contains not all // elements given in the specified subset(array, slice...). 
// -// assert.NotSubset(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") +// assert.NotSubset(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") func NotSubset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok bool) { if h, ok := t.(tHelper); ok { h.Helper() @@ -879,34 +987,28 @@ func NotSubset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) return Fail(t, "nil is the empty set which is a subset of every set", msgAndArgs...) } - defer func() { - if e := recover(); e != nil { - ok = false - } - }() - listKind := reflect.TypeOf(list).Kind() - subsetKind := reflect.TypeOf(subset).Kind() - if listKind != reflect.Array && listKind != reflect.Slice && listKind != reflect.Map { return Fail(t, fmt.Sprintf("%q has an unsupported type %s", list, listKind), msgAndArgs...) } + subsetKind := reflect.TypeOf(subset).Kind() if subsetKind != reflect.Array && subsetKind != reflect.Slice && listKind != reflect.Map { return Fail(t, fmt.Sprintf("%q has an unsupported type %s", subset, subsetKind), msgAndArgs...) } - subsetValue := reflect.ValueOf(subset) if subsetKind == reflect.Map && listKind == reflect.Map { - listValue := reflect.ValueOf(list) - subsetKeys := subsetValue.MapKeys() + subsetMap := reflect.ValueOf(subset) + actualMap := reflect.ValueOf(list) - for i := 0; i < len(subsetKeys); i++ { - subsetKey := subsetKeys[i] - subsetElement := subsetValue.MapIndex(subsetKey).Interface() - listElement := listValue.MapIndex(subsetKey).Interface() + for _, k := range subsetMap.MapKeys() { + ev := subsetMap.MapIndex(k) + av := actualMap.MapIndex(k) - if !ObjectsAreEqual(subsetElement, listElement) { + if !av.IsValid() { + return true + } + if !ObjectsAreEqual(ev.Interface(), av.Interface()) { return true } } @@ -914,8 +1016,9 @@ func NotSubset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) return Fail(t, fmt.Sprintf("%q is a subset of %q", subset, list), msgAndArgs...) } - for i := 0; i < subsetValue.Len(); i++ { - element := subsetValue.Index(i).Interface() + subsetList := reflect.ValueOf(subset) + for i := 0; i < subsetList.Len(); i++ { + element := subsetList.Index(i).Interface() ok, found := containsElement(list, element) if !ok { return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", list), msgAndArgs...) @@ -1060,7 +1163,7 @@ func didPanic(f PanicTestFunc) (didPanic bool, message interface{}, stack string // Panics asserts that the code inside the specified PanicTestFunc panics. // -// assert.Panics(t, func(){ GoCrazy() }) +// assert.Panics(t, func(){ GoCrazy() }) func Panics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1076,7 +1179,7 @@ func Panics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { // PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that // the recovered panic value equals the expected panic value. // -// assert.PanicsWithValue(t, "crazy error", func(){ GoCrazy() }) +// assert.PanicsWithValue(t, "crazy error", func(){ GoCrazy() }) func PanicsWithValue(t TestingT, expected interface{}, f PanicTestFunc, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1097,7 +1200,7 @@ func PanicsWithValue(t TestingT, expected interface{}, f PanicTestFunc, msgAndAr // panics, and that the recovered panic value is an error that satisfies the // EqualError comparison. 
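As a quick, hedged illustration of the map handling in the reworked Subset and NotSubset above (a map is a subset only if every key is present with an equal value); the test below is invented for illustration:

	package example

	import (
		"testing"

		"github.com/stretchr/testify/assert"
	)

	func TestMapSubset(t *testing.T) {
		full := map[string]string{"a": "1", "b": "2"}

		assert.Subset(t, full, map[string]string{"a": "1"})    // key present with equal value
		assert.NotSubset(t, full, map[string]string{"a": "9"}) // same key, different value
		assert.NotSubset(t, full, map[string]string{"c": "3"}) // key missing entirely
	}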
// -// assert.PanicsWithError(t, "crazy error", func(){ GoCrazy() }) +// assert.PanicsWithError(t, "crazy error", func(){ GoCrazy() }) func PanicsWithError(t TestingT, errString string, f PanicTestFunc, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1117,7 +1220,7 @@ func PanicsWithError(t TestingT, errString string, f PanicTestFunc, msgAndArgs . // NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic. // -// assert.NotPanics(t, func(){ RemainCalm() }) +// assert.NotPanics(t, func(){ RemainCalm() }) func NotPanics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1132,7 +1235,7 @@ func NotPanics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { // WithinDuration asserts that the two times are within duration delta of each other. // -// assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second) +// assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second) func WithinDuration(t TestingT, expected, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1148,7 +1251,7 @@ func WithinDuration(t TestingT, expected, actual time.Time, delta time.Duration, // WithinRange asserts that a time is within a time range (inclusive). // -// assert.WithinRange(t, time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second)) +// assert.WithinRange(t, time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second)) func WithinRange(t TestingT, actual, start, end time.Time, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1207,7 +1310,7 @@ func toFloat(x interface{}) (float64, bool) { // InDelta asserts that the two numerals are within delta of each other. // -// assert.InDelta(t, math.Pi, 22/7.0, 0.01) +// assert.InDelta(t, math.Pi, 22/7.0, 0.01) func InDelta(t TestingT, expected, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1380,10 +1483,10 @@ func InEpsilonSlice(t TestingT, expected, actual interface{}, epsilon float64, m // NoError asserts that a function returned no error (i.e. `nil`). // -// actualObj, err := SomeFunction() -// if assert.NoError(t, err) { -// assert.Equal(t, expectedObj, actualObj) -// } +// actualObj, err := SomeFunction() +// if assert.NoError(t, err) { +// assert.Equal(t, expectedObj, actualObj) +// } func NoError(t TestingT, err error, msgAndArgs ...interface{}) bool { if err != nil { if h, ok := t.(tHelper); ok { @@ -1397,10 +1500,10 @@ func NoError(t TestingT, err error, msgAndArgs ...interface{}) bool { // Error asserts that a function returned an error (i.e. not `nil`). // -// actualObj, err := SomeFunction() -// if assert.Error(t, err) { -// assert.Equal(t, expectedError, err) -// } +// actualObj, err := SomeFunction() +// if assert.Error(t, err) { +// assert.Equal(t, expectedError, err) +// } func Error(t TestingT, err error, msgAndArgs ...interface{}) bool { if err == nil { if h, ok := t.(tHelper); ok { @@ -1415,8 +1518,8 @@ func Error(t TestingT, err error, msgAndArgs ...interface{}) bool { // EqualError asserts that a function returned an error (i.e. not `nil`) // and that it is equal to the provided error. 
// -// actualObj, err := SomeFunction() -// assert.EqualError(t, err, expectedErrorString) +// actualObj, err := SomeFunction() +// assert.EqualError(t, err, expectedErrorString) func EqualError(t TestingT, theError error, errString string, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1438,8 +1541,8 @@ func EqualError(t TestingT, theError error, errString string, msgAndArgs ...inte // ErrorContains asserts that a function returned an error (i.e. not `nil`) // and that the error contains the specified substring. // -// actualObj, err := SomeFunction() -// assert.ErrorContains(t, err, expectedErrorSubString) +// actualObj, err := SomeFunction() +// assert.ErrorContains(t, err, expectedErrorSubString) func ErrorContains(t TestingT, theError error, contains string, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1472,8 +1575,8 @@ func matchRegexp(rx interface{}, str interface{}) bool { // Regexp asserts that a specified regexp matches a string. // -// assert.Regexp(t, regexp.MustCompile("start"), "it's starting") -// assert.Regexp(t, "start...$", "it's not starting") +// assert.Regexp(t, regexp.MustCompile("start"), "it's starting") +// assert.Regexp(t, "start...$", "it's not starting") func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1490,8 +1593,8 @@ func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface // NotRegexp asserts that a specified regexp does not match a string. // -// assert.NotRegexp(t, regexp.MustCompile("starts"), "it's starting") -// assert.NotRegexp(t, "^start", "it's not starting") +// assert.NotRegexp(t, regexp.MustCompile("starts"), "it's starting") +// assert.NotRegexp(t, "^start", "it's not starting") func NotRegexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1603,7 +1706,7 @@ func NoDirExists(t TestingT, path string, msgAndArgs ...interface{}) bool { // JSONEq asserts that two JSON strings are equivalent. // -// assert.JSONEq(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) +// assert.JSONEq(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1726,7 +1829,7 @@ type tHelper interface { // Eventually asserts that given condition will be met in waitFor time, // periodically checking target function each tick. // -// assert.Eventually(t, func() bool { return true; }, time.Second, 10*time.Millisecond) +// assert.Eventually(t, func() bool { return true; }, time.Second, 10*time.Millisecond) func Eventually(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1756,10 +1859,93 @@ func Eventually(t TestingT, condition func() bool, waitFor time.Duration, tick t } } +// CollectT implements the TestingT interface and collects all errors. +type CollectT struct { + errors []error +} + +// Errorf collects the error. +func (c *CollectT) Errorf(format string, args ...interface{}) { + c.errors = append(c.errors, fmt.Errorf(format, args...)) +} + +// FailNow panics. +func (c *CollectT) FailNow() { + panic("Assertion failed") +} + +// Reset clears the collected errors. 
+func (c *CollectT) Reset() { + c.errors = nil +} + +// Copy copies the collected errors to the supplied t. +func (c *CollectT) Copy(t TestingT) { + if tt, ok := t.(tHelper); ok { + tt.Helper() + } + for _, err := range c.errors { + t.Errorf("%v", err) + } +} + +// EventuallyWithT asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. In contrast to Eventually, +// it supplies a CollectT to the condition function, so that the condition +// function can use the CollectT to call other assertions. +// The condition is considered "met" if no errors are raised in a tick. +// The supplied CollectT collects all errors from one tick (if there are any). +// If the condition is not met before waitFor, the collected errors of +// the last tick are copied to t. +// +// externalValue := false +// go func() { +// time.Sleep(8*time.Second) +// externalValue = true +// }() +// assert.EventuallyWithT(t, func(c *assert.CollectT) { +// // add assertions as needed; any assertion failure will fail the current tick +// assert.True(c, externalValue, "expected 'externalValue' to be true") +// }, 1*time.Second, 10*time.Second, "external state has not changed to 'true'; still false") +func EventuallyWithT(t TestingT, condition func(collect *CollectT), waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + collect := new(CollectT) + ch := make(chan bool, 1) + + timer := time.NewTimer(waitFor) + defer timer.Stop() + + ticker := time.NewTicker(tick) + defer ticker.Stop() + + for tick := ticker.C; ; { + select { + case <-timer.C: + collect.Copy(t) + return Fail(t, "Condition never satisfied", msgAndArgs...) + case <-tick: + tick = nil + collect.Reset() + go func() { + condition(collect) + ch <- len(collect.errors) == 0 + }() + case v := <-ch: + if v { + return true + } + tick = ticker.C + } + } +} + // Never asserts that the given condition doesn't satisfy in waitFor time, // periodically checking the target function each tick. // -// assert.Never(t, func() bool { return false; }, time.Second, 10*time.Millisecond) +// assert.Never(t, func() bool { return false; }, time.Second, 10*time.Millisecond) func Never(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() diff --git a/tools/vendor/github.com/stretchr/testify/assert/doc.go b/tools/vendor/github.com/stretchr/testify/assert/doc.go index c9dccc4d6c..4953981d38 100644 --- a/tools/vendor/github.com/stretchr/testify/assert/doc.go +++ b/tools/vendor/github.com/stretchr/testify/assert/doc.go @@ -1,39 +1,40 @@ // Package assert provides a set of comprehensive testing tools for use with the normal Go testing system. 
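To show how the EventuallyWithT helper added above is meant to be driven, a small sketch; the readiness flag and timings are illustrative assumptions, not part of the vendored code:

	package example

	import (
		"sync/atomic"
		"testing"
		"time"

		"github.com/stretchr/testify/assert"
	)

	func TestBecomesReady(t *testing.T) {
		var ready atomic.Bool
		go func() {
			time.Sleep(50 * time.Millisecond)
			ready.Store(true)
		}()

		// The collector is reset each tick; the call succeeds on the first tick
		// in which no assertion made against c records an error.
		assert.EventuallyWithT(t, func(c *assert.CollectT) {
			assert.True(c, ready.Load(), "expected readiness flag to flip")
		}, time.Second, 10*time.Millisecond)
	}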
// -// Example Usage +// # Example Usage // // The following is a complete example using assert in a standard test function: -// import ( -// "testing" -// "github.com/stretchr/testify/assert" -// ) // -// func TestSomething(t *testing.T) { +// import ( +// "testing" +// "github.com/stretchr/testify/assert" +// ) // -// var a string = "Hello" -// var b string = "Hello" +// func TestSomething(t *testing.T) { // -// assert.Equal(t, a, b, "The two words should be the same.") +// var a string = "Hello" +// var b string = "Hello" // -// } +// assert.Equal(t, a, b, "The two words should be the same.") +// +// } // // if you assert many times, use the format below: // -// import ( -// "testing" -// "github.com/stretchr/testify/assert" -// ) +// import ( +// "testing" +// "github.com/stretchr/testify/assert" +// ) // -// func TestSomething(t *testing.T) { -// assert := assert.New(t) +// func TestSomething(t *testing.T) { +// assert := assert.New(t) // -// var a string = "Hello" -// var b string = "Hello" +// var a string = "Hello" +// var b string = "Hello" // -// assert.Equal(a, b, "The two words should be the same.") -// } +// assert.Equal(a, b, "The two words should be the same.") +// } // -// Assertions +// # Assertions // // Assertions allow you to easily write test code, and are global funcs in the `assert` package. // All assertion functions take, as the first argument, the `*testing.T` object provided by the diff --git a/tools/vendor/github.com/stretchr/testify/assert/http_assertions.go b/tools/vendor/github.com/stretchr/testify/assert/http_assertions.go index 4ed341dd28..d8038c28a7 100644 --- a/tools/vendor/github.com/stretchr/testify/assert/http_assertions.go +++ b/tools/vendor/github.com/stretchr/testify/assert/http_assertions.go @@ -23,7 +23,7 @@ func httpCode(handler http.HandlerFunc, method, url string, values url.Values) ( // HTTPSuccess asserts that a specified handler returns a success status code. // -// assert.HTTPSuccess(t, myHandler, "POST", "http://www.google.com", nil) +// assert.HTTPSuccess(t, myHandler, "POST", "http://www.google.com", nil) // // Returns whether the assertion was successful (true) or not (false). func HTTPSuccess(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, msgAndArgs ...interface{}) bool { @@ -45,7 +45,7 @@ func HTTPSuccess(t TestingT, handler http.HandlerFunc, method, url string, value // HTTPRedirect asserts that a specified handler returns a redirect status code. // -// assert.HTTPRedirect(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// assert.HTTPRedirect(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func HTTPRedirect(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, msgAndArgs ...interface{}) bool { @@ -67,7 +67,7 @@ func HTTPRedirect(t TestingT, handler http.HandlerFunc, method, url string, valu // HTTPError asserts that a specified handler returns an error status code. // -// assert.HTTPError(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// assert.HTTPError(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). 
func HTTPError(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, msgAndArgs ...interface{}) bool { @@ -89,7 +89,7 @@ func HTTPError(t TestingT, handler http.HandlerFunc, method, url string, values // HTTPStatusCode asserts that a specified handler returns a specified status code. // -// assert.HTTPStatusCode(t, myHandler, "GET", "/notImplemented", nil, 501) +// assert.HTTPStatusCode(t, myHandler, "GET", "/notImplemented", nil, 501) // // Returns whether the assertion was successful (true) or not (false). func HTTPStatusCode(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, statuscode int, msgAndArgs ...interface{}) bool { @@ -124,7 +124,7 @@ func HTTPBody(handler http.HandlerFunc, method, url string, values url.Values) s // HTTPBodyContains asserts that a specified handler returns a // body that contains a string. // -// assert.HTTPBodyContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") +// assert.HTTPBodyContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") // // Returns whether the assertion was successful (true) or not (false). func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool { @@ -144,7 +144,7 @@ func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method, url string, // HTTPBodyNotContains asserts that a specified handler returns a // body that does not contain a string. // -// assert.HTTPBodyNotContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") +// assert.HTTPBodyNotContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") // // Returns whether the assertion was successful (true) or not (false). func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool { diff --git a/tools/vendor/github.com/stretchr/testify/mock/doc.go b/tools/vendor/github.com/stretchr/testify/mock/doc.go index 7324128ef1..d6b3c844cc 100644 --- a/tools/vendor/github.com/stretchr/testify/mock/doc.go +++ b/tools/vendor/github.com/stretchr/testify/mock/doc.go @@ -1,17 +1,17 @@ // Package mock provides a system by which it is possible to mock your objects // and verify calls are happening as expected. // -// Example Usage +// # Example Usage // // The mock package provides an object, Mock, that tracks activity on another object. It is usually // embedded into a test object as shown below: // -// type MyTestObject struct { -// // add a Mock object instance -// mock.Mock +// type MyTestObject struct { +// // add a Mock object instance +// mock.Mock // -// // other fields go here as normal -// } +// // other fields go here as normal +// } // // When implementing the methods of an interface, you wire your functions up // to call the Mock.Called(args...) method, and return the appropriate values. 
@@ -19,25 +19,25 @@ // For example, to mock a method that saves the name and age of a person and returns // the year of their birth or an error, you might write this: // -// func (o *MyTestObject) SavePersonDetails(firstname, lastname string, age int) (int, error) { -// args := o.Called(firstname, lastname, age) -// return args.Int(0), args.Error(1) -// } +// func (o *MyTestObject) SavePersonDetails(firstname, lastname string, age int) (int, error) { +// args := o.Called(firstname, lastname, age) +// return args.Int(0), args.Error(1) +// } // // The Int, Error and Bool methods are examples of strongly typed getters that take the argument // index position. Given this argument list: // -// (12, true, "Something") +// (12, true, "Something") // // You could read them out strongly typed like this: // -// args.Int(0) -// args.Bool(1) -// args.String(2) +// args.Int(0) +// args.Bool(1) +// args.String(2) // // For objects of your own type, use the generic Arguments.Get(index) method and make a type assertion: // -// return args.Get(0).(*MyObject), args.Get(1).(*AnotherObjectOfMine) +// return args.Get(0).(*MyObject), args.Get(1).(*AnotherObjectOfMine) // // This may cause a panic if the object you are getting is nil (the type assertion will fail), in those // cases you should check for nil first. diff --git a/tools/vendor/github.com/stretchr/testify/mock/mock.go b/tools/vendor/github.com/stretchr/testify/mock/mock.go index f0af8246cf..f4b42e44ff 100644 --- a/tools/vendor/github.com/stretchr/testify/mock/mock.go +++ b/tools/vendor/github.com/stretchr/testify/mock/mock.go @@ -3,6 +3,7 @@ package mock import ( "errors" "fmt" + "path" "reflect" "regexp" "runtime" @@ -13,6 +14,7 @@ import ( "github.com/davecgh/go-spew/spew" "github.com/pmezard/go-difflib/difflib" "github.com/stretchr/objx" + "github.com/stretchr/testify/assert" ) @@ -99,7 +101,7 @@ func (c *Call) unlock() { // Return specifies the return arguments for the expectation. // -// Mock.On("DoSomething").Return(errors.New("failed")) +// Mock.On("DoSomething").Return(errors.New("failed")) func (c *Call) Return(returnArguments ...interface{}) *Call { c.lock() defer c.unlock() @@ -111,7 +113,7 @@ func (c *Call) Return(returnArguments ...interface{}) *Call { // Panic specifies if the functon call should fail and the panic message // -// Mock.On("DoSomething").Panic("test panic") +// Mock.On("DoSomething").Panic("test panic") func (c *Call) Panic(msg string) *Call { c.lock() defer c.unlock() @@ -123,14 +125,14 @@ func (c *Call) Panic(msg string) *Call { // Once indicates that that the mock should only return the value once. // -// Mock.On("MyMethod", arg1, arg2).Return(returnArg1, returnArg2).Once() +// Mock.On("MyMethod", arg1, arg2).Return(returnArg1, returnArg2).Once() func (c *Call) Once() *Call { return c.Times(1) } // Twice indicates that that the mock should only return the value twice. // -// Mock.On("MyMethod", arg1, arg2).Return(returnArg1, returnArg2).Twice() +// Mock.On("MyMethod", arg1, arg2).Return(returnArg1, returnArg2).Twice() func (c *Call) Twice() *Call { return c.Times(2) } @@ -138,7 +140,7 @@ func (c *Call) Twice() *Call { // Times indicates that that the mock should only return the indicated number // of times. 
// -// Mock.On("MyMethod", arg1, arg2).Return(returnArg1, returnArg2).Times(5) +// Mock.On("MyMethod", arg1, arg2).Return(returnArg1, returnArg2).Times(5) func (c *Call) Times(i int) *Call { c.lock() defer c.unlock() @@ -149,7 +151,7 @@ func (c *Call) Times(i int) *Call { // WaitUntil sets the channel that will block the mock's return until its closed // or a message is received. // -// Mock.On("MyMethod", arg1, arg2).WaitUntil(time.After(time.Second)) +// Mock.On("MyMethod", arg1, arg2).WaitUntil(time.After(time.Second)) func (c *Call) WaitUntil(w <-chan time.Time) *Call { c.lock() defer c.unlock() @@ -159,7 +161,7 @@ func (c *Call) WaitUntil(w <-chan time.Time) *Call { // After sets how long to block until the call returns // -// Mock.On("MyMethod", arg1, arg2).After(time.Second) +// Mock.On("MyMethod", arg1, arg2).After(time.Second) func (c *Call) After(d time.Duration) *Call { c.lock() defer c.unlock() @@ -171,10 +173,10 @@ func (c *Call) After(d time.Duration) *Call { // mocking a method (such as an unmarshaler) that takes a pointer to a struct and // sets properties in such struct // -// Mock.On("Unmarshal", AnythingOfType("*map[string]interface{}")).Return().Run(func(args Arguments) { -// arg := args.Get(0).(*map[string]interface{}) -// arg["foo"] = "bar" -// }) +// Mock.On("Unmarshal", AnythingOfType("*map[string]interface{}")).Return().Run(func(args Arguments) { +// arg := args.Get(0).(*map[string]interface{}) +// arg["foo"] = "bar" +// }) func (c *Call) Run(fn func(args Arguments)) *Call { c.lock() defer c.unlock() @@ -194,16 +196,18 @@ func (c *Call) Maybe() *Call { // On chains a new expectation description onto the mocked interface. This // allows syntax like. // -// Mock. -// On("MyMethod", 1).Return(nil). -// On("MyOtherMethod", 'a', 'b', 'c').Return(errors.New("Some Error")) +// Mock. +// On("MyMethod", 1).Return(nil). +// On("MyOtherMethod", 'a', 'b', 'c').Return(errors.New("Some Error")) +// //go:noinline func (c *Call) On(methodName string, arguments ...interface{}) *Call { return c.Parent.On(methodName, arguments...) } // Unset removes a mock handler from being called. -// test.On("func", mock.Anything).Unset() +// +// test.On("func", mock.Anything).Unset() func (c *Call) Unset() *Call { var unlockOnce sync.Once @@ -218,16 +222,22 @@ func (c *Call) Unset() *Call { foundMatchingCall := false - for i, call := range c.Parent.ExpectedCalls { + // in-place filter slice for calls to be removed - iterate from 0'th to last skipping unnecessary ones + var index int // write index + for _, call := range c.Parent.ExpectedCalls { if call.Method == c.Method { _, diffCount := call.Arguments.Diff(c.Arguments) if diffCount == 0 { foundMatchingCall = true - // Remove from ExpectedCalls - c.Parent.ExpectedCalls = append(c.Parent.ExpectedCalls[:i], c.Parent.ExpectedCalls[i+1:]...) + // Remove from ExpectedCalls - just skip it + continue } } + c.Parent.ExpectedCalls[index] = call + index++ } + // trim slice up to last copied index + c.Parent.ExpectedCalls = c.Parent.ExpectedCalls[:index] if !foundMatchingCall { unlockOnce.Do(c.unlock) @@ -243,9 +253,9 @@ func (c *Call) Unset() *Call { // calls have been called as expected. The referenced calls may be from the // same mock instance and/or other mock instances. 
// -// Mock.On("Do").Return(nil).Notbefore( -// Mock.On("Init").Return(nil) -// ) +// Mock.On("Do").Return(nil).Notbefore( +// Mock.On("Init").Return(nil) +// ) func (c *Call) NotBefore(calls ...*Call) *Call { c.lock() defer c.unlock() @@ -328,7 +338,7 @@ func (m *Mock) fail(format string, args ...interface{}) { // On starts a description of an expectation of the specified method // being called. // -// Mock.On("MyMethod", arg1, arg2) +// Mock.On("MyMethod", arg1, arg2) func (m *Mock) On(methodName string, arguments ...interface{}) *Call { for _, arg := range arguments { if v := reflect.ValueOf(arg); v.Kind() == reflect.Func { @@ -418,6 +428,10 @@ func callString(method string, arguments Arguments, includeArgumentValues bool) if includeArgumentValues { var argVals []string for argIndex, arg := range arguments { + if _, ok := arg.(*FunctionalOptionsArgument); ok { + argVals = append(argVals, fmt.Sprintf("%d: %s", argIndex, arg)) + continue + } argVals = append(argVals, fmt.Sprintf("%d: %#v", argIndex, arg)) } argValsString = fmt.Sprintf("\n\t\t%s", strings.Join(argVals, "\n\t\t")) @@ -752,6 +766,7 @@ type AnythingOfTypeArgument string // name of the type to check for. Used in Diff and Assert. // // For example: +// // Assert(t, AnythingOfType("string"), AnythingOfType("int")) func AnythingOfType(t string) AnythingOfTypeArgument { return AnythingOfTypeArgument(t) @@ -774,6 +789,34 @@ func IsType(t interface{}) *IsTypeArgument { return &IsTypeArgument{t: t} } +// FunctionalOptionsArgument is a struct that contains the type and value of an functional option argument +// for use when type checking. +type FunctionalOptionsArgument struct { + value interface{} +} + +// String returns the string representation of FunctionalOptionsArgument +func (f *FunctionalOptionsArgument) String() string { + var name string + tValue := reflect.ValueOf(f.value) + if tValue.Len() > 0 { + name = "[]" + reflect.TypeOf(tValue.Index(0).Interface()).String() + } + + return strings.Replace(fmt.Sprintf("%#v", f.value), "[]interface {}", name, 1) +} + +// FunctionalOptions returns an FunctionalOptionsArgument object containing the functional option type +// and the values to check of +// +// For example: +// Assert(t, FunctionalOptions("[]foo.FunctionalOption", foo.Opt1(), foo.Opt2())) +func FunctionalOptions(value ...interface{}) *FunctionalOptionsArgument { + return &FunctionalOptionsArgument{ + value: value, + } +} + // argumentMatcher performs custom argument matching, returning whether or // not the argument is matched by the expectation fixture function. 
type argumentMatcher struct { @@ -920,6 +963,29 @@ func (args Arguments) Diff(objects []interface{}) (string, int) { differences++ output = fmt.Sprintf("%s\t%d: FAIL: type %s != type %s - %s\n", output, i, reflect.TypeOf(t).Name(), reflect.TypeOf(actual).Name(), actualFmt) } + } else if reflect.TypeOf(expected) == reflect.TypeOf((*FunctionalOptionsArgument)(nil)) { + t := expected.(*FunctionalOptionsArgument).value + + var name string + tValue := reflect.ValueOf(t) + if tValue.Len() > 0 { + name = "[]" + reflect.TypeOf(tValue.Index(0).Interface()).String() + } + + tName := reflect.TypeOf(t).Name() + if name != reflect.TypeOf(actual).String() && tValue.Len() != 0 { + differences++ + output = fmt.Sprintf("%s\t%d: FAIL: type %s != type %s - %s\n", output, i, tName, reflect.TypeOf(actual).Name(), actualFmt) + } else { + if ef, af := assertOpts(t, actual); ef == "" && af == "" { + // match + output = fmt.Sprintf("%s\t%d: PASS: %s == %s\n", output, i, tName, tName) + } else { + // not match + differences++ + output = fmt.Sprintf("%s\t%d: FAIL: %s != %s\n", output, i, af, ef) + } + } } else { // normal checking @@ -1096,3 +1162,65 @@ var spewConfig = spew.ConfigState{ type tHelper interface { Helper() } + +func assertOpts(expected, actual interface{}) (expectedFmt, actualFmt string) { + expectedOpts := reflect.ValueOf(expected) + actualOpts := reflect.ValueOf(actual) + var expectedNames []string + for i := 0; i < expectedOpts.Len(); i++ { + expectedNames = append(expectedNames, funcName(expectedOpts.Index(i).Interface())) + } + var actualNames []string + for i := 0; i < actualOpts.Len(); i++ { + actualNames = append(actualNames, funcName(actualOpts.Index(i).Interface())) + } + if !assert.ObjectsAreEqual(expectedNames, actualNames) { + expectedFmt = fmt.Sprintf("%v", expectedNames) + actualFmt = fmt.Sprintf("%v", actualNames) + return + } + + for i := 0; i < expectedOpts.Len(); i++ { + expectedOpt := expectedOpts.Index(i).Interface() + actualOpt := actualOpts.Index(i).Interface() + + expectedFunc := expectedNames[i] + actualFunc := actualNames[i] + if expectedFunc != actualFunc { + expectedFmt = expectedFunc + actualFmt = actualFunc + return + } + + ot := reflect.TypeOf(expectedOpt) + var expectedValues []reflect.Value + var actualValues []reflect.Value + if ot.NumIn() == 0 { + return + } + + for i := 0; i < ot.NumIn(); i++ { + vt := ot.In(i).Elem() + expectedValues = append(expectedValues, reflect.New(vt)) + actualValues = append(actualValues, reflect.New(vt)) + } + + reflect.ValueOf(expectedOpt).Call(expectedValues) + reflect.ValueOf(actualOpt).Call(actualValues) + + for i := 0; i < ot.NumIn(); i++ { + if !assert.ObjectsAreEqual(expectedValues[i].Interface(), actualValues[i].Interface()) { + expectedFmt = fmt.Sprintf("%s %+v", expectedNames[i], expectedValues[i].Interface()) + actualFmt = fmt.Sprintf("%s %+v", expectedNames[i], actualValues[i].Interface()) + return + } + } + } + + return "", "" +} + +func funcName(opt interface{}) string { + n := runtime.FuncForPC(reflect.ValueOf(opt).Pointer()).Name() + return strings.TrimSuffix(path.Base(n), path.Ext(n)) +} diff --git a/tools/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go b/tools/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go index acf7a03937..21bb485b4e 100644 --- a/tools/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go +++ b/tools/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go @@ -127,7 +127,12 @@ func (r *runner) isopen(b *ssa.BasicBlock, i int) bool { resRefs := 
*val.Referrers() for _, resRef := range resRefs { switch resRef := resRef.(type) { - case *ssa.Store: // Call in Closure function + case *ssa.Store: // Call in Closure function / Response is global variable + if _, ok := resRef.Addr.(*ssa.Global); ok { + // Referrers for globals are always nil, so skip. + return false + } + if len(*resRef.Addr.Referrers()) == 0 { return true } @@ -218,6 +223,10 @@ func (r *runner) getResVal(instr ssa.Instruction) (ssa.Value, bool) { if instr.Type().String() == r.resTyp.String() { return instr, true } + case *ssa.Store: + if instr.Val.Type().String() == r.resTyp.String() { + return instr.Val, true + } } return nil, false } diff --git a/tools/vendor/github.com/timonwong/loggercheck/README.md b/tools/vendor/github.com/timonwong/loggercheck/README.md index 36fe887d7f..14aeca3717 100644 --- a/tools/vendor/github.com/timonwong/loggercheck/README.md +++ b/tools/vendor/github.com/timonwong/loggercheck/README.md @@ -24,7 +24,7 @@ go install github.com/timonwong/loggercheck/cmd/loggercheck ## Usage ``` -loggercheck: Checks key valur pairs for common logger libraries (kitlog,logr,klog,zap). +loggercheck: Checks key value pairs for common logger libraries (kitlog,logr,klog,zap). Usage: loggercheck [-flag] [package] diff --git a/tools/vendor/github.com/timonwong/loggercheck/internal/checkers/common.go b/tools/vendor/github.com/timonwong/loggercheck/internal/checkers/common.go index 259be96c64..42cbd01937 100644 --- a/tools/vendor/github.com/timonwong/loggercheck/internal/checkers/common.go +++ b/tools/vendor/github.com/timonwong/loggercheck/internal/checkers/common.go @@ -6,7 +6,6 @@ import ( "go/printer" "go/token" "go/types" - "strconv" "unicode/utf8" "golang.org/x/tools/go/analysis" @@ -18,21 +17,11 @@ const ( DiagnosticCategory = "logging" ) -// extractValueFromStringArg returns true if the argument is string literal or string constant. +// extractValueFromStringArg returns true if the argument is a string type (literal or constant). func extractValueFromStringArg(pass *analysis.Pass, arg ast.Expr) (value string, ok bool) { - switch arg := arg.(type) { - case *ast.BasicLit: // literals, string literals specifically - if arg.Kind == token.STRING { - if val, err := strconv.Unquote(arg.Value); err == nil { - return val, true - } - } - case *ast.Ident: // identifiers, string constants specifically - if arg.Obj != nil && arg.Obj.Kind == ast.Con { - typeAndValue := pass.TypesInfo.Types[arg] - if typ, ok := typeAndValue.Type.(*types.Basic); ok && typ.Kind() == types.String { - return constant.StringVal(typeAndValue.Value), true - } + if typeAndValue, ok := pass.TypesInfo.Types[arg]; ok { + if typ, ok := typeAndValue.Type.(*types.Basic); ok && typ.Kind() == types.String && typeAndValue.Value != nil { + return constant.StringVal(typeAndValue.Value), true } } diff --git a/tools/vendor/github.com/timonwong/loggercheck/loggercheck.go b/tools/vendor/github.com/timonwong/loggercheck/loggercheck.go index 704f9678ab..8bd10aee80 100644 --- a/tools/vendor/github.com/timonwong/loggercheck/loggercheck.go +++ b/tools/vendor/github.com/timonwong/loggercheck/loggercheck.go @@ -18,7 +18,7 @@ import ( "github.com/timonwong/loggercheck/internal/sets" ) -const Doc = `Checks key valur pairs for common logger libraries (kitlog,klog,logr,zap).` +const Doc = `Checks key value pairs for common logger libraries (kitlog,klog,logr,zap).` func NewAnalyzer(opts ...Option) *analysis.Analyzer { l := newLoggerCheck(opts...) 
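Note on the loggercheck change above: `extractValueFromStringArg` now defers to the type checker's constant folding instead of matching specific AST shapes, so any constant expression of string type (for example a concatenation of constants, or a constant defined elsewhere) is recognized, not just bare literals and simple `const` identifiers. Below is a minimal, standalone sketch of that idea; it is an illustration only, driving `go/types` directly rather than the analyzer's `pass.TypesInfo`, and the sample source and helper names are invented:

```go
// Standalone illustration of constant-string extraction via the type checker.
package main

import (
	"fmt"
	"go/ast"
	"go/constant"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p

const prefix = "user."
const key = prefix + "id" // a constant expression, not a bare literal

func someVar() string { return "x" }

var dynamic = "user." + someVar() // not a constant

var sink = []string{key, "literal", dynamic}
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}

	info := &types.Info{Types: map[ast.Expr]types.TypeAndValue{}}
	var conf types.Config // no Importer needed: the sample has no imports
	if _, err := conf.Check("p", fset, []*ast.File{f}, info); err != nil {
		panic(err)
	}

	// Same shape as the rewritten helper: accept any expression whose type
	// is a basic string and whose constant value is known to the checker.
	extract := func(e ast.Expr) (string, bool) {
		tv, ok := info.Types[e]
		if !ok || tv.Value == nil {
			return "", false
		}
		if b, ok := tv.Type.(*types.Basic); ok && b.Kind() == types.String {
			return constant.StringVal(tv.Value), true
		}
		return "", false
	}

	ast.Inspect(f, func(n ast.Node) bool {
		cl, ok := n.(*ast.CompositeLit)
		if !ok {
			return true
		}
		for _, elt := range cl.Elts {
			if v, ok := extract(elt); ok {
				fmt.Printf("constant string: %q\n", v)
			} else {
				fmt.Println("not a constant string (skipped)")
			}
		}
		return false // elements already handled; don't descend further
	})
}
```

Running this prints the two constant strings (`"user.id"` and `"literal"`) and skips the non-constant one, mirroring the `(value, ok)` contract of the rewritten helper.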
diff --git a/tools/vendor/github.com/xen0n/gosmopolitan/.editorconfig b/tools/vendor/github.com/xen0n/gosmopolitan/.editorconfig new file mode 100644 index 0000000000..0c0f7e7e28 --- /dev/null +++ b/tools/vendor/github.com/xen0n/gosmopolitan/.editorconfig @@ -0,0 +1,23 @@ +# EditorConfig is awesome: http://EditorConfig.org + +root = true + +[*] +indent_style = space +trim_trailing_whitespace = true +end_of_line = lf +insert_final_newline = true +charset = utf-8 + +[{*.sh,*.md}] +indent_size = 4 + +[{*.yaml,*.yml}] +indent_size = 2 + +# hard tabs for Go and Makefile per best practice of file format +[*.go] +indent_style = tab + +[Makefile] +indent_style = tab diff --git a/tools/vendor/github.com/xen0n/gosmopolitan/.gitignore b/tools/vendor/github.com/xen0n/gosmopolitan/.gitignore new file mode 100644 index 0000000000..1a1abaa202 --- /dev/null +++ b/tools/vendor/github.com/xen0n/gosmopolitan/.gitignore @@ -0,0 +1,188 @@ +# ignore the local build artifact +/gosmopolitan + +# and test artifacts +/coverage.txt + +# the following are auto-generated + +# Created by https://www.toptal.com/developers/gitignore/api/go,visualstudiocode,vim,goland +# Edit at https://www.toptal.com/developers/gitignore?templates=go,visualstudiocode,vim,goland + +### Go ### +# If you prefer the allow list template instead of the deny list, see community template: +# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore +# +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +# Go workspace file +go.work + +### GoLand ### +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# AWS User-specific +.idea/**/aws.xml + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. 
+# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# SonarLint plugin +.idea/sonarlint/ + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +### GoLand Patch ### +# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 + +# *.iml +# modules.xml +# .idea/misc.xml +# *.ipr + +# Sonarlint plugin +# https://plugins.jetbrains.com/plugin/7973-sonarlint +.idea/**/sonarlint/ + +# SonarQube Plugin +# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin +.idea/**/sonarIssues.xml + +# Markdown Navigator plugin +# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced +.idea/**/markdown-navigator.xml +.idea/**/markdown-navigator-enh.xml +.idea/**/markdown-navigator/ + +# Cache file creation bug +# See https://youtrack.jetbrains.com/issue/JBR-2257 +.idea/$CACHE_FILE$ + +# CodeStream plugin +# https://plugins.jetbrains.com/plugin/12206-codestream +.idea/codestream.xml + +# Azure Toolkit for IntelliJ plugin +# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij +.idea/**/azureSettings.xml + +### Vim ### +# Swap +[._]*.s[a-v][a-z] +!*.svg # comment out if you don't need vector files +[._]*.sw[a-p] +[._]s[a-rt-v][a-z] +[._]ss[a-gi-z] +[._]sw[a-p] + +# Session +Session.vim +Sessionx.vim + +# Temporary +.netrwhist +*~ +# Auto-generated tag files +tags +# Persistent undo +[._]*.un~ + +### VisualStudioCode ### +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +!.vscode/*.code-snippets + +# Local History for Visual Studio Code +.history/ + +# Built Visual Studio Code Extensions +*.vsix + +### VisualStudioCode Patch ### +# Ignore all local history of files +.history +.ionide + +# End of https://www.toptal.com/developers/gitignore/api/go,visualstudiocode,vim,goland diff --git a/tools/vendor/github.com/xen0n/gosmopolitan/.golangci.yml b/tools/vendor/github.com/xen0n/gosmopolitan/.golangci.yml new file mode 100644 index 0000000000..0bce12501a --- /dev/null +++ b/tools/vendor/github.com/xen0n/gosmopolitan/.golangci.yml @@ -0,0 +1,31 @@ +run: + go: '1.19' + modules-download-mode: readonly + +linters: + enable: + - goheader + - goimports + - gosec + - gosimple + - lll + - nakedret + - revive + - stylecheck + - unused + +linters-settings: + goheader: + template: |- + SPDX-License-Identifier: GPL-3.0-or-later + goimports: + local-prefixes: github.com/xen0n/gosmopolitan + gosimple: + go: '1.19' + lll: + line-length: 120 + tab-width: 4 + nakedret: + max-func-lines: 1 + stylecheck: + go: '1.19' diff --git a/tools/vendor/github.com/xen0n/gosmopolitan/LICENSE b/tools/vendor/github.com/xen0n/gosmopolitan/LICENSE new file mode 100644 index 0000000000..94a9ed024d --- /dev/null +++ b/tools/vendor/github.com/xen0n/gosmopolitan/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. 
+ Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. 
+ + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. 
For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. 
You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. 
+ + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. 
+ + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. 
+ + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". 
+ + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. diff --git a/tools/vendor/github.com/xen0n/gosmopolitan/README.md b/tools/vendor/github.com/xen0n/gosmopolitan/README.md new file mode 100644 index 0000000000..93e3701e5d --- /dev/null +++ b/tools/vendor/github.com/xen0n/gosmopolitan/README.md @@ -0,0 +1,135 @@ +# gosmopolitan + +![GitHub Workflow Status (main branch)](https://img.shields.io/github/actions/workflow/status/xen0n/gosmopolitan/go.yml?branch=main) +![Codecov](https://img.shields.io/codecov/c/gh/xen0n/gosmopolitan) +![GitHub license info](https://img.shields.io/github/license/xen0n/gosmopolitan) +![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/xen0n/gosmopolitan) +[![Go Report Card](https://goreportcard.com/badge/github.com/xen0n/gosmopolitan)](https://goreportcard.com/report/github.com/xen0n/gosmopolitan) +[![Go Reference](https://pkg.go.dev/badge/github.com/xen0n/gosmopolitan.svg)](https://pkg.go.dev/github.com/xen0n/gosmopolitan) + +[简体中文](./README.zh-Hans.md) + +`gosmopolitan` checks your Go codebase for code smells that may prove to be +hindrance to internationalization ("i18n") and/or localization ("l10n"). + +The name is a wordplay on "cosmopolitan". + +## Checks + +Currently `gosmopolitan` checks for the following anti-patterns: + +* Occurrences of string literals containing characters from certain writing + systems. + + Existence of such strings often means the relevant logic is hard to + internationalize, or at least, require special care when doing i18n/l10n. + +* Usages of `time.Local`. + + An internationalized app or library should almost never process time and + date values in the timezone in which it is running; instead one should use + the respective user preference, or the timezone as dictated by the domain + logic. + +Note that local times are produced in a lot more ways than via direct casts to +`time.Local` alone, such as: + +* `time.LoadLocation("Local")` +* received from a `time.Ticker` +* functions explicitly documented to return local times + * `time.Now()` + * `time.Unix()` + * `time.UnixMilli()` + * `time.UnixMicro()` + +Proper identification of these use cases require a fairly complete dataflow +analysis pass, which is not implemented currently. In addition, right now you +have to pay close attention to externally-provided time values (such as from +your framework like Gin or gRPC) as they are not properly tracked either. + +## Caveats + +Note that the checks implemented here are only suitable for codebases with the +following characteristics, and may not suit your particular project's needs: + +* Originally developed for an audience using non-Latin writing system(s), +* Returns bare strings intended for humans containing such non-Latin characters, and +* May occasionally (or frequently) refer to the local timezone. 
+ +For example, the lints may prove valuable if you're revamping a web service +originally targetting the Chinese market (hence producing strings with Chinese +characters all over the place) to be more i18n-aware. Conversely, if you want +to identify some of the i18n-naïve places in an English-only app, the linter +will output nothing. + +## golangci-lint integration + +`gosmopolitan` is not integrated into [`golangci-lint`][gcl-home] yet, but +you can nevertheless run it [as a custom plugin][gcl-plugin]. + +[gcl-home]: https://golangci-lint.run +[gcl-plugin]: https://golangci-lint.run/contributing/new-linters/#how-to-add-a-private-linter-to-golangci-lint + +First make yourself a plugin `.so` file like this: + +```go +// compile this with something like `go build -buildmode=plugin` + +package main + +import ( + "github.com/xen0n/gosmopolitan" + "golang.org/x/tools/go/analysis" +) + +type analyzerPlugin struct{} + +func (analyzerPlugin) GetAnalyzers() []*analysis.Analyzer { + // You can customize the options via gosmopolitan.NewAnalyzerWithConfig + // instead. + return []*analysis.Analyzer{ + gosmopolitan.DefaultAnalyzer, + } +} + +var AnalyzerPlugin analyzerPlugin +``` + +You just need to make sure the `golang.org/x/tools` version used to build the +plugin is consistent with that of your `golangci-lint` binary. (Of course the +`golangci-lint` binary should be built with plugin support enabled too; +notably, [the Homebrew `golangci-lint` is built without plugin support][hb-issue], +so beware of this.) + +[hb-issue]: https://github.com/golangci/golangci-lint/issues/1182 + +|`golangci-lint` version|`gosmopolitan` tag to use| +|-----------------------|-------------------------| +|1.50.x|v1.0.0| + +Then reference it in your `.golangci.yml`, and enable it in the `linters` +section: + +```yaml +linters: + # ... + enable: + # ... + - gosmopolitan + # ... + +linters-settings: + custom: + gosmopolitan: + path: 'path/to/your/plugin.so' + description: 'Report certain i18n/l10n anti-patterns in your Go codebase' + original-url: 'https://github.com/xen0n/gosmopolitan' + # ... +``` + +Then you can `golangci-lint run` and `//nolint:gosmopolitan` as you would +with any other supported linter. + +## License + +`gosmopolitan` is licensed under the GPL license, version 3 or later. 
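To make the checks described in the README above concrete, here is a small illustrative snippet of the kind of code gosmopolitan is designed to flag; the identifiers and the error message are invented for this sketch, and the linter's actual diagnostics may be worded differently:

```go
// Illustrative only: code patterns the gosmopolitan checks target.
package demo

import (
	"errors"
	"time"
)

// A bare string literal in a watched script (Han is the default watch list),
// which usually signals logic that is hard to internationalize as-is.
var errUserNotFound = errors.New("用户不存在")

// Processes time in the process-local timezone rather than a user preference
// or a timezone dictated by domain logic.
func startOfToday() time.Time {
	now := time.Now() // documented to return a local time
	return time.Date(now.Year(), now.Month(), now.Day(), 0, 0, 0, 0, time.Local)
}
```

The watched scripts default to Han and can be changed with the analyzer's `watchforscripts` flag, and `time.Local` reports can be suppressed with `allowtimelocal`, per the flag definitions in `lib.go` further below.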
diff --git a/tools/vendor/github.com/xen0n/gosmopolitan/README.zh-Hans.md b/tools/vendor/github.com/xen0n/gosmopolitan/README.zh-Hans.md new file mode 100644 index 0000000000..7f1b7b7adf --- /dev/null +++ b/tools/vendor/github.com/xen0n/gosmopolitan/README.zh-Hans.md @@ -0,0 +1,121 @@ +# gosmopolitan + +![GitHub Workflow Status (main branch)](https://img.shields.io/github/actions/workflow/status/xen0n/gosmopolitan/go.yml?branch=main) +![Codecov](https://img.shields.io/codecov/c/gh/xen0n/gosmopolitan) +![GitHub license info](https://img.shields.io/github/license/xen0n/gosmopolitan) +![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/xen0n/gosmopolitan) +[![Go Report Card](https://goreportcard.com/badge/github.com/xen0n/gosmopolitan)](https://goreportcard.com/report/github.com/xen0n/gosmopolitan) +[![Go Reference](https://pkg.go.dev/badge/github.com/xen0n/gosmopolitan.svg)](https://pkg.go.dev/github.com/xen0n/gosmopolitan) + +[English](./README.md) + +用 `gosmopolitan` 检查你的 Go 代码库里有没有国际化(“i18n“)或者本地化(”l10n“)的阻碍。 + +项目名字来自“cosmopolitan”的文字游戏。 + +## 检查 + +`gosmopolitan` 目前会检查以下的反模式(anti-patterns): + +* 含有来自特定书写系统字符的字符串字面量(string literals)。 + + 项目中存在这种字符串,通常意味着相关的逻辑不便于国际化,或者至少在国际化/本地化适配过程中会涉及特殊对待。 + +* `time.Local` 的使用。 + + 支持国际化的应用或程序库,几乎永远不应以程序当前运行环境的时区来处理时间、日期数据。 + 相反,在这种场景下,开发者应该使用相应的用户偏好,或者按照领域逻辑确定应该使用的时区。 + +注意:除了直接向 `time.Local` 转换之外,还有很多其他写法会产生本地时区的时刻,例如: + +* `time.LoadLocation("Local")` +* 从 `time.Ticker` 收到的值 +* 文档中明确了会返回本地时刻的函数 + * `time.Now()` + * `time.Unix()` + * `time.UnixMilli()` + * `time.UnixMicro()` + +为了正确识别这些使用场景,需要有一个相当完善的数据流分析 pass,目前还没实现。 +此外,当前您还需要自行密切注意从外部传入的时刻值(例如从您使用的 Gin 或 gRPC +之类框架传来的那些),因为这些值当前也没有被正确跟踪。 + +## 注意事项 + +请注意,本库中实现的检查仅适用于具有以下性质的代码库,因此可能不适用于您的具体场景: + +* 项目原先是为使用非拉丁字母书写系统的受众群体开发的, +* 项目会返回包含这些非拉丁字母字符的裸的字符串(即,未经处理或变换的), +* 项目可能偶尔(或者经常)引用程序当前运行环境的本地时区。 + +举个例子:如果您在翻新一个本来面向中国用户群体(因此到处都在产生含有汉字的字符串)的 +web 服务,以使其更加国际化,这里的 lints 可能会很有价值。 +反之,如果您想在一个仅支持英语的应用里,寻找其中不利于国际化的那部分写法,本 +linter 则什么都不会输出。 + +## 与 golangci-lint 集成 + +`gosmopolitan` 目前没有集成进上游 [`golangci-lint`][gcl-home],但您仍然可以[以自定义插件的方式][gcl-plugin]使用本项目。 + +[gcl-home]: https://golangci-lint.run +[gcl-plugin]: https://golangci-lint.run/contributing/new-linters/#how-to-add-a-private-linter-to-golangci-lint + +首先像这样做一个插件 `.so` 文件: + +```go +// 用类似 `go build -buildmode=plugin` 的方式编译 + +package main + +import ( + "github.com/xen0n/gosmopolitan" + "golang.org/x/tools/go/analysis" +) + +type analyzerPlugin struct{} + +func (analyzerPlugin) GetAnalyzers() []*analysis.Analyzer { + // 你可以用 gosmopolitan.NewAnalyzer 来自定义配置。 + return []*analysis.Analyzer{ + gosmopolitan.DefaultAnalyzer, + } +} + +var AnalyzerPlugin analyzerPlugin +``` + +您只需要保证构建时使用的 `golang.org/x/tools` 模块版本和您的 `golangci-lint` +二进制的相应模块版本一致。(当然,`golangci-lint` 二进制也应该包含插件支持; +[Homebrew 的 `golangci-lint` 没有插件支持][hb-issue],尤其需要注意。) + +[hb-issue]: https://github.com/golangci/golangci-lint/issues/1182 + +|`golangci-lint` 版本|对应可用的 `gosmopolitan` tag| +|--------------------|-----------------------------| +|1.50.x|v1.0.0| + +然后在您的 `.golangci.yml` 中引用它,在 `linters` 一节中启用它: + +```yaml +linters: + # ... + enable: + # ... + - gosmopolitan + # ... + +linters-settings: + custom: + gosmopolitan: + path: 'path/to/your/plugin.so' + description: 'Report certain i18n/l10n anti-patterns in your Go codebase' + original-url: 'https://github.com/xen0n/gosmopolitan' + # ... 
+``` + +这样您就可以像使用其他 linters 一样 `golangci-lint run` 和 +`//nolint:gosmopolitan` 了。 + +## 许可证 + +`gosmopolitan` 以 GPL v3 或更新的版本许可使用。 diff --git a/tools/vendor/github.com/xen0n/gosmopolitan/lib.go b/tools/vendor/github.com/xen0n/gosmopolitan/lib.go new file mode 100644 index 0000000000..67b1151c71 --- /dev/null +++ b/tools/vendor/github.com/xen0n/gosmopolitan/lib.go @@ -0,0 +1,385 @@ +// SPDX-License-Identifier: GPL-3.0-or-later + +package gosmopolitan + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "regexp" + "strings" + "unicode" + + "golang.org/x/text/runes" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const analyzerName = "gosmopolitan" +const analyzerDoc = "Report certain i18n/l10n anti-patterns in your Go codebase" + +type AnalyzerConfig struct { + // LookAtTests is flag controlling whether the lints are going to look at + // test files, despite other config knobs of the Go analysis tooling + // framework telling us otherwise. + // + // By default gosmopolitan does not look at test files, because i18n-aware + // apps most probably have many unmarked strings in test cases, and names + // and descriptions *of* test cases are probably in the program's original + // natural language too. + LookAtTests bool + // EscapeHatches is optionally a list of fully qualified names, in the + // `(full/pkg/path).name` form, to act as "i18n escape hatches". Inside + // call-like expressions to those names, the string literal script check + // is ignored. + // + // With this functionality in place, you can use type aliases like + // `type R = string` as markers, or have explicitly i18n-aware functions + // exempt from the checks. + EscapeHatches []string + // WatchForScripts is optionally a list of Unicode script names to watch + // for any usage in string literals. The range of supported scripts is + // determined by the [unicode.Scripts] map and values are case-sensitive. + WatchForScripts []string + // AllowTimeLocal is flag controlling whether usages of [time.Local] are + // allowed (i.e. not reported). 
+ AllowTimeLocal bool +} + +func NewAnalyzer() *analysis.Analyzer { + var lookAtTests bool + var escapeHatchesStr string + var watchForScriptsStr string + var allowTimeLocal bool + + a := &analysis.Analyzer{ + Name: analyzerName, + Doc: analyzerDoc, + Requires: []*analysis.Analyzer{ + inspect.Analyzer, + }, + Run: func(p *analysis.Pass) (any, error) { + cfg := AnalyzerConfig{ + LookAtTests: lookAtTests, + EscapeHatches: strings.Split(escapeHatchesStr, ","), + WatchForScripts: strings.Split(watchForScriptsStr, ","), + AllowTimeLocal: allowTimeLocal, + } + pctx := processCtx{cfg: &cfg, p: p} + return pctx.run() + }, + RunDespiteErrors: false, + } + + a.Flags.BoolVar(&lookAtTests, + "lookattests", + false, + "also check the test files", + ) + a.Flags.StringVar( + &escapeHatchesStr, + "escapehatches", + "", + "comma-separated list of fully qualified names to act as i18n escape hatches", + ) + a.Flags.StringVar( + &watchForScriptsStr, + "watchforscripts", + "Han", + "comma-separated list of Unicode scripts to watch out for occurrence in string literals", + ) + a.Flags.BoolVar(&allowTimeLocal, + "allowtimelocal", + false, + "allow time.Local usages", + ) + + return a +} + +func NewAnalyzerWithConfig(cfg *AnalyzerConfig) *analysis.Analyzer { + return &analysis.Analyzer{ + Name: analyzerName, + Doc: analyzerDoc, + Requires: []*analysis.Analyzer{ + inspect.Analyzer, + }, + Run: func(p *analysis.Pass) (any, error) { + pctx := processCtx{cfg: cfg, p: p} + return pctx.run() + }, + RunDespiteErrors: false, + } +} + +var DefaultAnalyzer = NewAnalyzer() + +func validateUnicodeScriptName(name string) error { + if _, ok := unicode.Scripts[name]; !ok { + return fmt.Errorf("invalid Unicode script name: %s", name) + } + return nil +} + +// example input: ["Han", "Arabic"] +// example output: `\p{Han}|\p{Arabic}` +// assumes len(scriptNames) > 0 +func makeUnicodeScriptMatcherRegexpString(scriptNames []string) string { + var sb strings.Builder + for i, s := range scriptNames { + if i > 0 { + sb.WriteRune('|') + } + sb.WriteString(`\p{`) + sb.WriteString(s) + sb.WriteRune('}') + } + return sb.String() +} + +func makeUnicodeScriptMatcherRegexp(scriptNames []string) (*regexp.Regexp, error) { + return regexp.Compile(makeUnicodeScriptMatcherRegexpString(scriptNames)) +} + +type processCtx struct { + cfg *AnalyzerConfig + p *analysis.Pass +} + +func mapSlice[T any, U any](x []T, fn func(T) U) []U { + if x == nil { + return nil + } + y := make([]U, len(x)) + for i, v := range x { + y[i] = fn(v) + } + return y +} + +func sliceToSet[T comparable](x []T) map[T]struct{} { + // lo.SliceToMap(x, func(k T) (T, struct{}) { return k, struct{}{} }) + y := make(map[T]struct{}, len(x)) + for _, k := range x { + y[k] = struct{}{} + } + return y +} + +func getFullyQualifiedName(x types.Object) string { + pkg := x.Pkg() + if pkg == nil { + return x.Name() + } + return fmt.Sprintf("%s.%s", pkg.Path(), x.Name()) +} + +// if input is in the "(%s).%s" form, remove the parens, else return the +// unchanged input +// +// this is for maintaining compatibility with the previous FQN notation that +// was born out of my confusion (the previous notation, while commonly seen, +// seems to be only for methods or pointer receiver types; the parens-less +// form is in fact unambiguous, because Go identifiers can't contain periods.) 
+func unquoteInputFQN(x string) string { + if len(x) == 0 || x[0] != '(' { + return x + } + + before, after, found := strings.Cut(x[1:], ")") + if !found { + // malformed input: string in "(xxxxx" form with unclosed parens! + // in this case, only removing the opening parens might be better than + // doing nothing after all + return x[1:] + } + + // at this point, + // input: "(foo).bar" + // before: "foo" + // after: ".bar" + return before + after +} + +func (c *processCtx) run() (any, error) { + escapeHatchesSet := sliceToSet(mapSlice(c.cfg.EscapeHatches, unquoteInputFQN)) + + if len(c.cfg.WatchForScripts) == 0 { + c.cfg.WatchForScripts = []string{"Han"} + } + + for _, s := range c.cfg.WatchForScripts { + if err := validateUnicodeScriptName(s); err != nil { + return nil, err + } + } + + charRE, err := makeUnicodeScriptMatcherRegexp(c.cfg.WatchForScripts) + if err != nil { + return nil, err + } + + usq := newUnicodeScriptQuerier(c.cfg.WatchForScripts) + + insp := c.p.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + // support ignoring the test files, because test files could be full of + // i18n and l10n fixtures, and we want to focus on the actual run-time + // logic + // + // TODO: is there a way to both ignore test files earlier, and make use of + // inspect.Analyzer's cached results? currently Inspector doesn't provide + // a way to selectively travese some files' AST but not others. + isBelongingToTestFiles := func(n ast.Node) bool { + return strings.HasSuffix(c.p.Fset.File(n.Pos()).Name(), "_test.go") + } + + shouldSkipTheContainingFile := func(n ast.Node) bool { + if c.cfg.LookAtTests { + return false + } + return isBelongingToTestFiles(n) + } + + insp.Nodes(nil, func(n ast.Node, push bool) bool { + // we only need to look at each node once + if !push { + return false + } + + if shouldSkipTheContainingFile(n) { + return false + } + + // skip blocks that can contain string literals but are not otherwise + // interesting for us + switch n.(type) { + case *ast.ImportSpec, *ast.TypeSpec: + // import blocks, type declarations + return false + } + + // and don't look inside escape hatches + referentFQN := c.getFullyQualifiedNameOfReferent(n) + if referentFQN != "" { + _, isEscapeHatch := escapeHatchesSet[referentFQN] + // if isEscapeHatch: don't recurse (false) + return !isEscapeHatch + } + + // check only string literals + lit, ok := n.(*ast.BasicLit) + if !ok { + return true + } + if lit.Kind != token.STRING { + return true + } + + // report string literals containing characters of given script (in + // the sense of "writing system") + if charRE.MatchString(lit.Value) { + match := charRE.FindIndex([]byte(lit.Value)) + matchCh := []byte(lit.Value)[match[0]:match[1]] + scriptName := usq.queryScriptForRuneBytes(matchCh) + + c.p.Report(analysis.Diagnostic{ + Pos: lit.Pos() + token.Pos(match[0]), + End: lit.Pos() + token.Pos(match[1]), + Message: fmt.Sprintf("string literal contains rune in %s script", scriptName), + }) + } + + return true + }) + + if !c.cfg.AllowTimeLocal { + // check time.Local usages + insp.Nodes([]ast.Node{(*ast.Ident)(nil)}, func(n ast.Node, push bool) bool { + // we only need to look at each node once + if !push { + return false + } + + if shouldSkipTheContainingFile(n) { + return false + } + + ident := n.(*ast.Ident) + + d := c.p.TypesInfo.ObjectOf(ident) + if d == nil || d.Pkg() == nil { + return true + } + + if d.Pkg().Path() == "time" && d.Name() == "Local" { + c.p.Report(analysis.Diagnostic{ + Pos: n.Pos(), + End: n.End(), + Message: "usage of time.Local", + 
}) + } + + return true + }) + } + + return nil, nil +} + +func (c *processCtx) getFullyQualifiedNameOfReferent(n ast.Node) string { + var ident *ast.Ident + switch e := n.(type) { + case *ast.CallExpr: + ident = getIdentOfTypeOfExpr(e.Fun) + + case *ast.CompositeLit: + ident = getIdentOfTypeOfExpr(e.Type) + + default: + return "" + } + + referent := c.p.TypesInfo.Uses[ident] + if referent == nil { + return "" + } + + return getFullyQualifiedName(referent) +} + +func getIdentOfTypeOfExpr(e ast.Expr) *ast.Ident { + switch x := e.(type) { + case *ast.Ident: + return x + case *ast.SelectorExpr: + return x.Sel + } + return nil +} + +type unicodeScriptQuerier struct { + sets map[string]runes.Set +} + +func newUnicodeScriptQuerier(scriptNames []string) *unicodeScriptQuerier { + sets := make(map[string]runes.Set, len(scriptNames)) + for _, s := range scriptNames { + sets[s] = runes.In(unicode.Scripts[s]) + } + return &unicodeScriptQuerier{ + sets: sets, + } +} + +func (x *unicodeScriptQuerier) queryScriptForRuneBytes(b []byte) string { + r := []rune(string(b))[0] + for s, set := range x.sets { + if set.Contains(r) { + return s + } + } + return "" +} diff --git a/tools/vendor/github.com/ykadowak/zerologlint/.goreleaser.yaml b/tools/vendor/github.com/ykadowak/zerologlint/.goreleaser.yaml new file mode 100644 index 0000000000..c1b23f00e9 --- /dev/null +++ b/tools/vendor/github.com/ykadowak/zerologlint/.goreleaser.yaml @@ -0,0 +1,31 @@ +before: + hooks: + - go mod tidy +builds: + - id: zerologlint + main: ./cmd/zerologlint + binary: zerologlint + env: + - CGO_ENABLED=0 + goos: + - linux + - windows + - darwin +archives: + - replacements: + darwin: Darwin + linux: Linux + windows: Windows + 386: i386 + amd64: x86_64 +checksum: + name_template: 'checksums.txt' +snapshot: + name_template: "{{ incpatch .Version }}-next" +changelog: + sort: asc + filters: + exclude: + - '^docs:' + - '^test:' + - '^ci:' diff --git a/tools/vendor/github.com/ykadowak/zerologlint/LICENSE b/tools/vendor/github.com/ykadowak/zerologlint/LICENSE new file mode 100644 index 0000000000..92a1e3b318 --- /dev/null +++ b/tools/vendor/github.com/ykadowak/zerologlint/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 Yusuke Kadowaki + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
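Stepping back to the `gosmopolitan` analyzer vendored above: its `NewAnalyzer` constructor also wires the configuration knobs to command-line flags (`lookattests`, `escapehatches`, `watchforscripts`, `allowtimelocal`), so the analyzer can be driven outside of `golangci-lint` as well. The following standalone runner is only a sketch and assumes the stock `singlechecker` driver from `golang.org/x/tools`; the upstream module may ship its own command.

```go
// Hypothetical standalone driver for the gosmopolitan analyzer
// (a sketch, not part of the vendored code).
package main

import (
	"github.com/xen0n/gosmopolitan"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	// singlechecker exposes the analyzer's own flags on the command line,
	// e.g. -watchforscripts=Han,Arabic or -allowtimelocal.
	singlechecker.Main(gosmopolitan.DefaultAnalyzer)
}
```

The exact flag spelling can vary between analysis drivers, so treat the invocation as illustrative.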
diff --git a/tools/vendor/github.com/ykadowak/zerologlint/README.md b/tools/vendor/github.com/ykadowak/zerologlint/README.md
new file mode 100644
index 0000000000..14443e2e3c
--- /dev/null
+++ b/tools/vendor/github.com/ykadowak/zerologlint/README.md
@@ -0,0 +1,51 @@
+# zerologlint
+![build](https://github.com/ykadowak/zerologlint/actions/workflows/testing.yaml/badge.svg)
+
+`zerologlint` is a linter for [zerolog](https://github.com/rs/zerolog) that can be run with `go vet` or through [golangci-lint](https://golangci-lint.run/) since `v1.53.0`.
+It detects incorrect usage of `zerolog` where a `zerolog.Event` is never dispatched with a `Send` or `Msg`-like function, in which case nothing is logged. For more detailed explanations of the cases it detects, see [Examples](#examples).
+
+## Install
+
+```bash
+go install github.com/ykadowak/zerologlint/cmd/zerologlint@latest
+```
+
+## Usage
+```bash
+go vet -vettool=`which zerologlint` ./...
+```
+
+Alternatively, you can use it with [golangci-lint](https://golangci-lint.run/) since `v1.53.0`.
+
+## Examples
+```go
+package main
+
+import (
+	"github.com/rs/zerolog"
+	"github.com/rs/zerolog/log"
+)
+
+func main() {
+	// 1. Basic case
+	log.Info() // "must be dispatched by Msg or Send method"
+
+	// 2. Nested case
+	log.Info(). // "must be dispatched by Msg or Send method"
+		Str("foo", "bar").
+		Dict("dict", zerolog.Dict().
+			Str("bar", "baz").
+			Int("n", 1),
+		)
+
+	// 3. Reassignment case (assume err is declared earlier)
+	logger := log.Info() // "must be dispatched by Msg or Send method"
+	if err != nil {
+		logger = log.Error() // "must be dispatched by Msg or Send method"
+	}
+	logger.Str("foo", "bar")
+
+	// 4. Deferred case
+	defer log.Info() // "must be dispatched by Msg or Send method"
+}
+```
diff --git a/tools/vendor/github.com/ykadowak/zerologlint/zerologlint.go b/tools/vendor/github.com/ykadowak/zerologlint/zerologlint.go
new file mode 100644
index 0000000000..bd588f996a
--- /dev/null
+++ b/tools/vendor/github.com/ykadowak/zerologlint/zerologlint.go
@@ -0,0 +1,141 @@
+package zerologlint
+
+import (
+	"go/token"
+	"strings"
+
+	"golang.org/x/tools/go/analysis"
+	"golang.org/x/tools/go/analysis/passes/buildssa"
+	"golang.org/x/tools/go/ssa"
+
+	"github.com/gostaticanalysis/comment/passes/commentmap"
+)
+
+var Analyzer = &analysis.Analyzer{
+	Name: "zerologlinter",
+	Doc:  "finds cases where zerolog methods are not followed by Msg or Send",
+	Run:  run,
+	Requires: []*analysis.Analyzer{
+		buildssa.Analyzer,
+		commentmap.Analyzer,
+	},
+}
+
+type posser interface {
+	Pos() token.Pos
+}
+
+// callDefer is an interface to hold both ssa.Call and ssa.Defer in our set
+type callDefer interface {
+	Common() *ssa.CallCommon
+	Pos() token.Pos
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+	srcFuncs := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs
+
+	// This set holds every SSA value that is a zerolog.Event instance
+	// and still needs to be dispatched.
+	// Every time a zerolog.Event is dispatched with Msg() or Send(),
+	// the corresponding value is deleted from this set.
+	// At the end, anything left in the set is reported as not dispatched.
+	set := make(map[posser]struct{})
+
+	for _, sf := range srcFuncs {
+		for _, b := range sf.Blocks {
+			for _, instr := range b.Instrs {
+				if c, ok := instr.(*ssa.Call); ok {
+					inspect(c, &set)
+				} else if c, ok := instr.(*ssa.Defer); ok {
+					inspect(c, &set)
+				}
+			}
+		}
+	}
+	// At the end, if the set is empty, every zerolog.Event was dispatched -> ok.
+	// Otherwise, report each zerolog.Event value that was never dispatched.
+ for k := range set { + pass.Reportf(k.Pos(), "must be dispatched by Msg or Send method") + } + return nil, nil +} + +func inspect(cd callDefer, set *map[posser]struct{}) { + c := cd.Common() + + // check if it's in github.com/rs/zerolog/log since there's some + // functions in github.com/rs/zerolog that returns zerolog.Event + // which should not be included + if isInLogPkg(*c) { + if isZerologEvent(c.Value) { + // check if this is a new instance of zerolog.Event like logger := log.Error() + // which should be dispatched afterwards at some point + if len(c.Args) == 0 { + (*set)[cd] = struct{}{} + } + return + } + } + + // if the call does not return zerolog.Event, + // check if the base is zerolog.Event. + // if so, check if the StaticCallee is Send() or Msg(). + // if so, remove the arg[0] from the set. + for _, arg := range c.Args { + if isZerologEvent(arg) { + if isDispatchMethod(*c) { + val := getRootSsaValue(arg) + // if there's branch, remove both ways from the set + if phi, ok := val.(*ssa.Phi); ok { + for _, edge := range phi.Edges { + delete(*set, edge) + } + } else { + delete(*set, val) + } + } + } + } +} + +func isInLogPkg(c ssa.CallCommon) bool { + switch v := c.Value.(type) { + case ssa.Member: + p := v.Package() + if p == nil { + return false + } + return strings.HasSuffix(p.Pkg.Path(), "github.com/rs/zerolog/log") + default: + return false + } +} + +func isZerologEvent(v ssa.Value) bool { + ts := v.Type().String() + return strings.HasSuffix(ts, "github.com/rs/zerolog.Event") +} + +func isDispatchMethod(c ssa.CallCommon) bool { + m := c.StaticCallee().Name() + if m == "Send" || m == "Msg" || m == "Msgf" || m == "MsgFunc" { + return true + } + return false +} + +func getRootSsaValue(v ssa.Value) ssa.Value { + if c, ok := v.(*ssa.Call); ok { + v := c.Value() + // When there is no receiver, that's the block of zerolog.Event + // eg. Error() method in log.Error().Str("foo", "bar"). Send() + if len(v.Call.Args) == 0 { + return v + } + + // Ok to just return the receiver because all the method in this + // chain is zerolog.Event at this point. + return getRootSsaValue(v.Call.Args[0]) + } + return v +} diff --git a/tools/vendor/github.com/junk1tm/musttag/.golangci.yml b/tools/vendor/go.tmz.dev/musttag/.golangci.yml similarity index 100% rename from tools/vendor/github.com/junk1tm/musttag/.golangci.yml rename to tools/vendor/go.tmz.dev/musttag/.golangci.yml diff --git a/tools/vendor/go.tmz.dev/musttag/.goreleaser.yml b/tools/vendor/go.tmz.dev/musttag/.goreleaser.yml new file mode 100644 index 0000000000..259080886f --- /dev/null +++ b/tools/vendor/go.tmz.dev/musttag/.goreleaser.yml @@ -0,0 +1,27 @@ +builds: + - main: ./cmd/musttag + env: + - CGO_ENABLED=0 + flags: + - -trimpath + ldflags: + - -s -w -X main.version={{ .Version }} + targets: + - darwin_amd64 + - darwin_arm64 + - linux_amd64 + - windows_amd64 + +archives: + - format_overrides: + - goos: windows + format: zip + +brews: + - tap: + owner: tmzane + name: homebrew-tap + token: '{{ .Env.HOMEBREW_TAP_TOKEN }}' + homepage: https://github.com/tmzane/musttag + description: A Go linter that enforces field tags in (un)marshaled structs + license: MPL-2.0 diff --git a/tools/vendor/go.tmz.dev/musttag/LICENSE b/tools/vendor/go.tmz.dev/musttag/LICENSE new file mode 100644 index 0000000000..a612ad9813 --- /dev/null +++ b/tools/vendor/go.tmz.dev/musttag/LICENSE @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. 
"Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. 
Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. 
Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. 
However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. 
* +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. 
diff --git a/tools/vendor/go.tmz.dev/musttag/README.md b/tools/vendor/go.tmz.dev/musttag/README.md
new file mode 100644
index 0000000000..621c7a9a3d
--- /dev/null
+++ b/tools/vendor/go.tmz.dev/musttag/README.md
@@ -0,0 +1,106 @@
+# musttag
+
+[![checks](https://github.com/tmzane/musttag/actions/workflows/checks.yml/badge.svg)](https://github.com/tmzane/musttag/actions/workflows/checks.yml)
+[![pkg.go.dev](https://pkg.go.dev/badge/go.tmz.dev/musttag.svg)](https://pkg.go.dev/go.tmz.dev/musttag)
+[![goreportcard](https://goreportcard.com/badge/go.tmz.dev/musttag)](https://goreportcard.com/report/go.tmz.dev/musttag)
+[![codecov](https://codecov.io/gh/tmzane/musttag/branch/main/graph/badge.svg)](https://codecov.io/gh/tmzane/musttag)
+
+A Go linter that enforces field tags in (un)marshaled structs
+
+## 📌 About
+
+`musttag` checks that exported fields of a struct passed to a `Marshal`-like function are annotated with the relevant tag:
+
+```go
+// BAD:
+var user struct {
+	Name string
+}
+data, err := json.Marshal(user)
+
+// GOOD:
+var user struct {
+	Name string `json:"name"`
+}
+data, err := json.Marshal(user)
+```
+
+The rationale from [Uber Style Guide][1]:
+
+> The serialized form of the structure is a contract between different systems.
+> Changes to the structure of the serialized form, including field names, break this contract.
+> Specifying field names inside tags makes the contract explicit,
+> and it guards against accidentally breaking the contract by refactoring or renaming fields.
+
+## 🚀 Features
+
+The following packages are supported out of the box:
+
+* [`encoding/json`][2]
+* [`encoding/xml`][3]
+* [`gopkg.in/yaml.v3`][4]
+* [`github.com/BurntSushi/toml`][5]
+* [`github.com/mitchellh/mapstructure`][6]
+* [`github.com/jmoiron/sqlx`][7]
+
+In addition, any [custom package](#custom-packages) can be added to the list.
+
+## 📋 Usage
+
+`musttag` is already integrated into `golangci-lint`, and this is the recommended way to use it.
+
+To enable the linter, add the following lines to `.golangci.yml`:
+
+```yaml
+linters:
+  enable:
+    - musttag
+```
+
+If you'd rather use `musttag` standalone, you can install it via `brew`...
+
+```shell
+brew install tmzane/tap/musttag
+```
+
+...or download a prebuilt binary from the [Releases][9] page.
+
+Then run it either directly or as a `go vet` tool:
+
+```shell
+go vet -vettool=$(which musttag) ./...
+```
+
+### Custom packages
+
+To enable reporting a custom function, you need to add its description to `.golangci.yml`.
+
+The following is an example of adding support for the `hclsimple.DecodeFile` function from [`github.com/hashicorp/hcl`][8]:
+
+```yaml
+linters-settings:
+  musttag:
+    functions:
+      # The full name of the function, including the package.
+      - name: github.com/hashicorp/hcl/v2/hclsimple.DecodeFile
+        # The struct tag whose presence should be ensured.
+        tag: hcl
+        # The position of the argument to check.
+        arg-pos: 2
+```
+
+The same can be done via the `-fn=name:tag:arg-pos` flag when using `musttag` standalone:
+
+```shell
+musttag -fn="github.com/hashicorp/hcl/v2/hclsimple.DecodeFile:hcl:2" ./...
+``` + +[1]: https://github.com/uber-go/guide/blob/master/style.md#use-field-tags-in-marshaled-structs +[2]: https://pkg.go.dev/encoding/json +[3]: https://pkg.go.dev/encoding/xml +[4]: https://github.com/go-yaml/yaml +[5]: https://github.com/BurntSushi/toml +[6]: https://github.com/mitchellh/mapstructure +[7]: https://github.com/jmoiron/sqlx +[8]: https://github.com/hashicorp/hcl +[9]: https://github.com/tmzane/musttag/releases diff --git a/tools/vendor/go.tmz.dev/musttag/builtins.go b/tools/vendor/go.tmz.dev/musttag/builtins.go new file mode 100644 index 0000000000..66914fa9e8 --- /dev/null +++ b/tools/vendor/go.tmz.dev/musttag/builtins.go @@ -0,0 +1,67 @@ +package musttag + +// builtins is a set of functions supported out of the box. +var builtins = []Func{ + // https://pkg.go.dev/encoding/json + {Name: "encoding/json.Marshal", Tag: "json", ArgPos: 0}, + {Name: "encoding/json.MarshalIndent", Tag: "json", ArgPos: 0}, + {Name: "encoding/json.Unmarshal", Tag: "json", ArgPos: 1}, + {Name: "(*encoding/json.Encoder).Encode", Tag: "json", ArgPos: 0}, + {Name: "(*encoding/json.Decoder).Decode", Tag: "json", ArgPos: 0}, + + // https://pkg.go.dev/encoding/xml + {Name: "encoding/xml.Marshal", Tag: "xml", ArgPos: 0}, + {Name: "encoding/xml.MarshalIndent", Tag: "xml", ArgPos: 0}, + {Name: "encoding/xml.Unmarshal", Tag: "xml", ArgPos: 1}, + {Name: "(*encoding/xml.Encoder).Encode", Tag: "xml", ArgPos: 0}, + {Name: "(*encoding/xml.Decoder).Decode", Tag: "xml", ArgPos: 0}, + {Name: "(*encoding/xml.Encoder).EncodeElement", Tag: "xml", ArgPos: 0}, + {Name: "(*encoding/xml.Decoder).DecodeElement", Tag: "xml", ArgPos: 0}, + + // https://github.com/go-yaml/yaml + {Name: "gopkg.in/yaml.v3.Marshal", Tag: "yaml", ArgPos: 0}, + {Name: "gopkg.in/yaml.v3.Unmarshal", Tag: "yaml", ArgPos: 1}, + {Name: "(*gopkg.in/yaml.v3.Encoder).Encode", Tag: "yaml", ArgPos: 0}, + {Name: "(*gopkg.in/yaml.v3.Decoder).Decode", Tag: "yaml", ArgPos: 0}, + + // https://github.com/BurntSushi/toml + {Name: "github.com/BurntSushi/toml.Unmarshal", Tag: "toml", ArgPos: 1}, + {Name: "github.com/BurntSushi/toml.Decode", Tag: "toml", ArgPos: 1}, + {Name: "github.com/BurntSushi/toml.DecodeFS", Tag: "toml", ArgPos: 2}, + {Name: "github.com/BurntSushi/toml.DecodeFile", Tag: "toml", ArgPos: 1}, + {Name: "(*github.com/BurntSushi/toml.Encoder).Encode", Tag: "toml", ArgPos: 0}, + {Name: "(*github.com/BurntSushi/toml.Decoder).Decode", Tag: "toml", ArgPos: 0}, + + // https://github.com/mitchellh/mapstructure + {Name: "github.com/mitchellh/mapstructure.Decode", Tag: "mapstructure", ArgPos: 1}, + {Name: "github.com/mitchellh/mapstructure.DecodeMetadata", Tag: "mapstructure", ArgPos: 1}, + {Name: "github.com/mitchellh/mapstructure.WeakDecode", Tag: "mapstructure", ArgPos: 1}, + {Name: "github.com/mitchellh/mapstructure.WeakDecodeMetadata", Tag: "mapstructure", ArgPos: 1}, + + // https://github.com/jmoiron/sqlx + {Name: "github.com/jmoiron/sqlx.Get", Tag: "db", ArgPos: 1}, + {Name: "github.com/jmoiron/sqlx.GetContext", Tag: "db", ArgPos: 2}, + {Name: "github.com/jmoiron/sqlx.Select", Tag: "db", ArgPos: 1}, + {Name: "github.com/jmoiron/sqlx.SelectContext", Tag: "db", ArgPos: 2}, + {Name: "github.com/jmoiron/sqlx.StructScan", Tag: "db", ArgPos: 1}, + {Name: "(*github.com/jmoiron/sqlx.Conn).GetContext", Tag: "db", ArgPos: 1}, + {Name: "(*github.com/jmoiron/sqlx.Conn).SelectContext", Tag: "db", ArgPos: 1}, + {Name: "(*github.com/jmoiron/sqlx.DB).Get", Tag: "db", ArgPos: 0}, + {Name: "(*github.com/jmoiron/sqlx.DB).GetContext", Tag: "db", ArgPos: 1}, + {Name: 
"(*github.com/jmoiron/sqlx.DB).Select", Tag: "db", ArgPos: 0}, + {Name: "(*github.com/jmoiron/sqlx.DB).SelectContext", Tag: "db", ArgPos: 1}, + {Name: "(*github.com/jmoiron/sqlx.NamedStmt).Get", Tag: "db", ArgPos: 0}, + {Name: "(*github.com/jmoiron/sqlx.NamedStmt).GetContext", Tag: "db", ArgPos: 1}, + {Name: "(*github.com/jmoiron/sqlx.NamedStmt).Select", Tag: "db", ArgPos: 0}, + {Name: "(*github.com/jmoiron/sqlx.NamedStmt).SelectContext", Tag: "db", ArgPos: 1}, + {Name: "(*github.com/jmoiron/sqlx.Row).StructScan", Tag: "db", ArgPos: 0}, + {Name: "(*github.com/jmoiron/sqlx.Rows).StructScan", Tag: "db", ArgPos: 0}, + {Name: "(*github.com/jmoiron/sqlx.Stmt).Get", Tag: "db", ArgPos: 0}, + {Name: "(*github.com/jmoiron/sqlx.Stmt).GetContext", Tag: "db", ArgPos: 1}, + {Name: "(*github.com/jmoiron/sqlx.Stmt).Select", Tag: "db", ArgPos: 0}, + {Name: "(*github.com/jmoiron/sqlx.Stmt).SelectContext", Tag: "db", ArgPos: 1}, + {Name: "(*github.com/jmoiron/sqlx.Tx).Get", Tag: "db", ArgPos: 0}, + {Name: "(*github.com/jmoiron/sqlx.Tx).GetContext", Tag: "db", ArgPos: 1}, + {Name: "(*github.com/jmoiron/sqlx.Tx).Select", Tag: "db", ArgPos: 0}, + {Name: "(*github.com/jmoiron/sqlx.Tx).SelectContext", Tag: "db", ArgPos: 1}, +} diff --git a/tools/vendor/go.tmz.dev/musttag/musttag.go b/tools/vendor/go.tmz.dev/musttag/musttag.go new file mode 100644 index 0000000000..200163f264 --- /dev/null +++ b/tools/vendor/go.tmz.dev/musttag/musttag.go @@ -0,0 +1,260 @@ +// Package musttag implements the musttag analyzer. +package musttag + +import ( + "flag" + "fmt" + "go/ast" + "go/token" + "go/types" + "path" + "path/filepath" + "reflect" + "strconv" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" +) + +// Func describes a function call to look for, e.g. json.Marshal. +type Func struct { + Name string // Name is the full name of the function, including the package. + Tag string // Tag is the struct tag whose presence should be ensured. + ArgPos int // ArgPos is the position of the argument to check. +} + +func (fn Func) shortName() string { + name := strings.NewReplacer("*", "", "(", "", ")", "").Replace(fn.Name) + return path.Base(name) +} + +// New creates a new musttag analyzer. +// To report a custom function provide its description via Func, +// it will be added to the builtin ones. +func New(funcs ...Func) *analysis.Analyzer { + var flagFuncs []Func + return &analysis.Analyzer{ + Name: "musttag", + Doc: "enforce field tags in (un)marshaled structs", + Flags: flags(&flagFuncs), + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: func(pass *analysis.Pass) (any, error) { + l := len(builtins) + len(funcs) + len(flagFuncs) + m := make(map[string]Func, l) + toMap := func(slice []Func) { + for _, fn := range slice { + m[fn.Name] = fn + } + } + toMap(builtins) + toMap(funcs) + toMap(flagFuncs) + return run(pass, m) + }, + } +} + +// flags creates a flag set for the analyzer. +// The funcs slice will be filled with custom functions passed via CLI flags. 
+func flags(funcs *[]Func) flag.FlagSet { + fs := flag.NewFlagSet("musttag", flag.ContinueOnError) + fs.Func("fn", "report custom function (name:tag:argpos)", func(s string) error { + parts := strings.Split(s, ":") + if len(parts) != 3 || parts[0] == "" || parts[1] == "" { + return strconv.ErrSyntax + } + pos, err := strconv.Atoi(parts[2]) + if err != nil { + return err + } + *funcs = append(*funcs, Func{ + Name: parts[0], + Tag: parts[1], + ArgPos: pos, + }) + return nil + }) + return *fs +} + +// for tests only. +var ( + report = func(pass *analysis.Pass, st *structType, fn Func, fnPos token.Position) { + const format = "`%s` should be annotated with the `%s` tag as it is passed to `%s` at %s" + pass.Reportf(st.Pos, format, st.Name, fn.Tag, fn.shortName(), fnPos) + } + + // HACK: mainModulePackages() does not return packages from `testdata`, + // because it is ignored by the go tool, and thus, by the `go list` command. + // For tests to pass we need to add the packages with tests to the main module manually. + testPackages []string +) + +// run starts the analysis. +func run(pass *analysis.Pass, funcs map[string]Func) (any, error) { + moduleDir, modulePackages, err := mainModule() + if err != nil { + return nil, err + } + for _, pkg := range testPackages { + modulePackages[pkg] = struct{}{} + } + + walk := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + filter := []ast.Node{(*ast.CallExpr)(nil)} + + walk.Preorder(filter, func(n ast.Node) { + if err != nil { + return // there is already an error. + } + + call, ok := n.(*ast.CallExpr) + if !ok { + return // not a function call. + } + + caller := typeutil.StaticCallee(pass.TypesInfo, call) + if caller == nil { + return // not a static call. + } + + fn, ok := funcs[caller.FullName()] + if !ok { + return // the function is not supported. + } + + if len(call.Args) <= fn.ArgPos { + err = fmt.Errorf("Func.ArgPos cannot be %d: %s accepts only %d argument(s)", fn.ArgPos, fn.Name, len(call.Args)) + return + } + + arg := call.Args[fn.ArgPos] + if unary, ok := arg.(*ast.UnaryExpr); ok { + arg = unary.X // e.g. json.Marshal(&foo) + } + + initialPos := token.NoPos + switch arg := arg.(type) { + case *ast.Ident: // e.g. json.Marshal(foo) + if arg.Obj == nil { + return // e.g. json.Marshal(nil) + } + initialPos = arg.Obj.Pos() + case *ast.CompositeLit: // e.g. json.Marshal(struct{}{}) + initialPos = arg.Pos() + } + + checker := checker{ + mainModule: modulePackages, + seenTypes: make(map[string]struct{}), + } + + t := pass.TypesInfo.TypeOf(arg) + st, ok := checker.parseStructType(t, initialPos) + if !ok { + return // not a struct argument. + } + + result, ok := checker.checkStructType(st, fn.Tag) + if ok { + return // nothing to report. + } + + p := pass.Fset.Position(call.Pos()) + p.Filename, _ = filepath.Rel(moduleDir, p.Filename) + report(pass, result, fn, p) + }) + + return nil, err +} + +// structType is an extension for types.Struct. +// The content of the fields depends on whether the type is named or not. +type structType struct { + *types.Struct + Name string // for types.Named: the type's name; for anonymous: a placeholder string. + Pos token.Pos // for types.Named: the type's position; for anonymous: the corresponding identifier's position. +} + +// checker parses and checks struct types. +type checker struct { + mainModule map[string]struct{} // do not check types outside of the main module; see issue #17. + seenTypes map[string]struct{} // prevent panic on recursive types; see issue #16. 
+} + +// parseStructType parses the given types.Type, returning the underlying struct type. +func (c *checker) parseStructType(t types.Type, pos token.Pos) (*structType, bool) { + for { + // unwrap pointers (if any) first. + ptr, ok := t.(*types.Pointer) + if !ok { + break + } + t = ptr.Elem() + } + + switch t := t.(type) { + case *types.Named: // a struct of the named type. + pkg := t.Obj().Pkg() // may be nil; see issue #38. + if pkg == nil { + return nil, false + } + if _, ok := c.mainModule[pkg.Path()]; !ok { + return nil, false + } + s, ok := t.Underlying().(*types.Struct) + if !ok { + return nil, false + } + return &structType{ + Struct: s, + Pos: t.Obj().Pos(), + Name: t.Obj().Name(), + }, true + + case *types.Struct: // an anonymous struct. + return &structType{ + Struct: t, + Pos: pos, + Name: "anonymous struct", + }, true + } + + return nil, false +} + +// checkStructType recursively checks whether the given struct type is annotated with the tag. +// The result is the type of the first nested struct which fields are not properly annotated. +func (c *checker) checkStructType(st *structType, tag string) (*structType, bool) { + c.seenTypes[st.String()] = struct{}{} + + for i := 0; i < st.NumFields(); i++ { + field := st.Field(i) + if !field.Exported() { + continue + } + + if _, ok := reflect.StructTag(st.Tag(i)).Lookup(tag); !ok { + // tag is not required for embedded types; see issue #12. + if !field.Embedded() { + return st, false + } + } + + nested, ok := c.parseStructType(field.Type(), st.Pos) // TODO: or field.Pos()? + if !ok { + continue + } + if _, ok := c.seenTypes[nested.String()]; ok { + continue + } + if result, ok := c.checkStructType(nested, tag); !ok { + return result, false + } + } + + return nil, true +} diff --git a/tools/vendor/go.tmz.dev/musttag/utils.go b/tools/vendor/go.tmz.dev/musttag/utils.go new file mode 100644 index 0000000000..73e21310a4 --- /dev/null +++ b/tools/vendor/go.tmz.dev/musttag/utils.go @@ -0,0 +1,41 @@ +package musttag + +import ( + "fmt" + "os/exec" + "strings" +) + +// mainModule returns the directory and the set of packages of the main module. +func mainModule() (dir string, packages map[string]struct{}, _ error) { + // https://pkg.go.dev/cmd/go#hdr-Package_lists_and_patterns + // > When using modules, "all" expands to all packages in the main module + // > and their dependencies, including dependencies needed by tests of any of those. + + // NOTE: the command may run out of file descriptors if go version <= 1.18, + // especially on macOS, which has the default soft limit set to 256 (ulimit -nS). + // Since go1.19 the limit is automatically increased to the maximum allowed value; + // see https://github.com/golang/go/issues/46279 for details. + cmd := [...]string{"go", "list", "-f={{if and (not .Standard) .Module.Main}}{{.ImportPath}}{{end}}", "all"} + + out, err := exec.Command(cmd[0], cmd[1:]...).Output() + if err != nil { + return "", nil, fmt.Errorf("running `go list all`: %w", err) + } + + list := strings.Split(strings.TrimSpace(string(out)), "\n") + packages = make(map[string]struct{}, len(list)*2) + + for _, pkg := range list { + packages[pkg] = struct{}{} + packages[pkg+"_test"] = struct{}{} // `*_test` packages belong to the main module, see issue #24. 
+ } + + out, err = exec.Command("go", "list", "-m", "-f={{.Dir}}").Output() + if err != nil { + return "", nil, fmt.Errorf("running `go list -m`: %w", err) + } + + dir = strings.TrimSpace(string(out)) + return dir, packages, nil +} diff --git a/tools/vendor/go.uber.org/zap/.readme.tmpl b/tools/vendor/go.uber.org/zap/.readme.tmpl index 3154a1e64c..92aa65d660 100644 --- a/tools/vendor/go.uber.org/zap/.readme.tmpl +++ b/tools/vendor/go.uber.org/zap/.readme.tmpl @@ -96,14 +96,14 @@ Released under the [MIT License](LICENSE.txt). 1 In particular, keep in mind that we may be benchmarking against slightly older versions of other packages. Versions are -pinned in zap's [glide.lock][] file. [↩](#anchor-versions) +pinned in the [benchmarks/go.mod][] file. [↩](#anchor-versions) -[doc-img]: https://godoc.org/go.uber.org/zap?status.svg -[doc]: https://godoc.org/go.uber.org/zap -[ci-img]: https://travis-ci.com/uber-go/zap.svg?branch=master -[ci]: https://travis-ci.com/uber-go/zap +[doc-img]: https://pkg.go.dev/badge/go.uber.org/zap +[doc]: https://pkg.go.dev/go.uber.org/zap +[ci-img]: https://github.com/uber-go/zap/actions/workflows/go.yml/badge.svg +[ci]: https://github.com/uber-go/zap/actions/workflows/go.yml [cov-img]: https://codecov.io/gh/uber-go/zap/branch/master/graph/badge.svg [cov]: https://codecov.io/gh/uber-go/zap [benchmarking suite]: https://github.com/uber-go/zap/tree/master/benchmarks -[glide.lock]: https://github.com/uber-go/zap/blob/master/glide.lock +[benchmarks/go.mod]: https://github.com/uber-go/zap/blob/master/benchmarks/go.mod diff --git a/tools/vendor/go.uber.org/zap/CHANGELOG.md b/tools/vendor/go.uber.org/zap/CHANGELOG.md index 3b99bf0ac8..0db1f9f15f 100644 --- a/tools/vendor/go.uber.org/zap/CHANGELOG.md +++ b/tools/vendor/go.uber.org/zap/CHANGELOG.md @@ -1,4 +1,161 @@ # Changelog +All notable changes to this project will be documented in this file. + +This project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). + +## 1.24.0 (30 Nov 2022) + +Enhancements: +* [#1148][]: Add `Level` to both `Logger` and `SugaredLogger` that reports the + current minimum enabled log level. +* [#1185][]: `SugaredLogger` turns errors to zap.Error automatically. + +Thanks to @Abirdcfly, @craigpastro, @nnnkkk7, and @sashamelentyev for their +contributions to this release. + +[#1148]: https://github.coml/uber-go/zap/pull/1148 +[#1185]: https://github.coml/uber-go/zap/pull/1185 + +## 1.23.0 (24 Aug 2022) + +Enhancements: +* [#1147][]: Add a `zapcore.LevelOf` function to determine the level of a + `LevelEnabler` or `Core`. +* [#1155][]: Add `zap.Stringers` field constructor to log arrays of objects + that implement `String() string`. + +[#1147]: https://github.com/uber-go/zap/pull/1147 +[#1155]: https://github.com/uber-go/zap/pull/1155 + + +## 1.22.0 (8 Aug 2022) + +Enhancements: +* [#1071][]: Add `zap.Objects` and `zap.ObjectValues` field constructors to log + arrays of objects. With these two constructors, you don't need to implement + `zapcore.ArrayMarshaler` for use with `zap.Array` if those objects implement + `zapcore.ObjectMarshaler`. +* [#1079][]: Add `SugaredLogger.WithOptions` to build a copy of an existing + `SugaredLogger` with the provided options applied. +* [#1080][]: Add `*ln` variants to `SugaredLogger` for each log level. + These functions provide a string joining behavior similar to `fmt.Println`. +* [#1088][]: Add `zap.WithFatalHook` option to control the behavior of the + logger for `Fatal`-level log entries. This defaults to exiting the program. 
+* [#1108][]: Add a `zap.Must` function that you can use with `NewProduction` or + `NewDevelopment` to panic if the system was unable to build the logger. +* [#1118][]: Add a `Logger.Log` method that allows specifying the log level for + a statement dynamically. + +Thanks to @cardil, @craigpastro, @sashamelentyev, @shota3506, and @zhupeijun +for their contributions to this release. + +[#1071]: https://github.com/uber-go/zap/pull/1071 +[#1079]: https://github.com/uber-go/zap/pull/1079 +[#1080]: https://github.com/uber-go/zap/pull/1080 +[#1088]: https://github.com/uber-go/zap/pull/1088 +[#1108]: https://github.com/uber-go/zap/pull/1108 +[#1118]: https://github.com/uber-go/zap/pull/1118 + +## 1.21.0 (7 Feb 2022) + +Enhancements: +* [#1047][]: Add `zapcore.ParseLevel` to parse a `Level` from a string. +* [#1048][]: Add `zap.ParseAtomicLevel` to parse an `AtomicLevel` from a + string. + +Bugfixes: +* [#1058][]: Fix panic in JSON encoder when `EncodeLevel` is unset. + +Other changes: +* [#1052][]: Improve encoding performance when the `AddCaller` and + `AddStacktrace` options are used together. + +[#1047]: https://github.com/uber-go/zap/pull/1047 +[#1048]: https://github.com/uber-go/zap/pull/1048 +[#1052]: https://github.com/uber-go/zap/pull/1052 +[#1058]: https://github.com/uber-go/zap/pull/1058 + +Thanks to @aerosol and @Techassi for their contributions to this release. + +## 1.20.0 (4 Jan 2022) + +Enhancements: +* [#989][]: Add `EncoderConfig.SkipLineEnding` flag to disable adding newline + characters between log statements. +* [#1039][]: Add `EncoderConfig.NewReflectedEncoder` field to customize JSON + encoding of reflected log fields. + +Bugfixes: +* [#1011][]: Fix inaccurate precision when encoding complex64 as JSON. +* [#554][], [#1017][]: Close JSON namespaces opened in `MarshalLogObject` + methods when the methods return. +* [#1033][]: Avoid panicking in Sampler core if `thereafter` is zero. + +Other changes: +* [#1028][]: Drop support for Go < 1.15. + +[#554]: https://github.com/uber-go/zap/pull/554 +[#989]: https://github.com/uber-go/zap/pull/989 +[#1011]: https://github.com/uber-go/zap/pull/1011 +[#1017]: https://github.com/uber-go/zap/pull/1017 +[#1028]: https://github.com/uber-go/zap/pull/1028 +[#1033]: https://github.com/uber-go/zap/pull/1033 +[#1039]: https://github.com/uber-go/zap/pull/1039 + +Thanks to @psrajat, @lruggieri, @sammyrnycreal for their contributions to this release. + +## 1.19.1 (8 Sep 2021) + +Bugfixes: +* [#1001][]: JSON: Fix complex number encoding with negative imaginary part. Thanks to @hemantjadon. +* [#1003][]: JSON: Fix inaccurate precision when encoding float32. + +[#1001]: https://github.com/uber-go/zap/pull/1001 +[#1003]: https://github.com/uber-go/zap/pull/1003 + +## 1.19.0 (9 Aug 2021) + +Enhancements: +* [#975][]: Avoid panicking in Sampler core if the level is out of bounds. +* [#984][]: Reduce the size of BufferedWriteSyncer by aligning the fields + better. + +[#975]: https://github.com/uber-go/zap/pull/975 +[#984]: https://github.com/uber-go/zap/pull/984 + +Thanks to @lancoLiu and @thockin for their contributions to this release. + +## 1.18.1 (28 Jun 2021) + +Bugfixes: +* [#974][]: Fix nil dereference in logger constructed by `zap.NewNop`. + +[#974]: https://github.com/uber-go/zap/pull/974 + +## 1.18.0 (28 Jun 2021) + +Enhancements: +* [#961][]: Add `zapcore.BufferedWriteSyncer`, a new `WriteSyncer` that buffers + messages in-memory and flushes them periodically. +* [#971][]: Add `zapio.Writer` to use a Zap logger as an `io.Writer`. 
+* [#897][]: Add `zap.WithClock` option to control the source of time via the + new `zapcore.Clock` interface. +* [#949][]: Avoid panicking in `zap.SugaredLogger` when arguments of `*w` + methods don't match expectations. +* [#943][]: Add support for filtering by level or arbitrary matcher function to + `zaptest/observer`. +* [#691][]: Comply with `io.StringWriter` and `io.ByteWriter` in Zap's + `buffer.Buffer`. + +Thanks to @atrn0, @ernado, @heyanfu, @hnlq715, @zchee +for their contributions to this release. + +[#691]: https://github.com/uber-go/zap/pull/691 +[#897]: https://github.com/uber-go/zap/pull/897 +[#943]: https://github.com/uber-go/zap/pull/943 +[#949]: https://github.com/uber-go/zap/pull/949 +[#961]: https://github.com/uber-go/zap/pull/961 +[#971]: https://github.com/uber-go/zap/pull/971 ## 1.17.0 (25 May 2021) diff --git a/tools/vendor/go.uber.org/zap/CONTRIBUTING.md b/tools/vendor/go.uber.org/zap/CONTRIBUTING.md index 5cd9656871..ea02f3cae2 100644 --- a/tools/vendor/go.uber.org/zap/CONTRIBUTING.md +++ b/tools/vendor/go.uber.org/zap/CONTRIBUTING.md @@ -16,7 +16,7 @@ you to accept the CLA when you open your pull request. [Fork][fork], then clone the repository: -``` +```bash mkdir -p $GOPATH/src/go.uber.org cd $GOPATH/src/go.uber.org git clone git@github.com:your_github_username/zap.git @@ -27,21 +27,16 @@ git fetch upstream Make sure that the tests and the linters pass: -``` +```bash make test make lint ``` -If you're not using the minor version of Go specified in the Makefile's -`LINTABLE_MINOR_VERSIONS` variable, `make lint` doesn't do anything. This is -fine, but it means that you'll only discover lint failures after you open your -pull request. - ## Making Changes Start by creating a new branch for your changes: -``` +```bash cd $GOPATH/src/go.uber.org/zap git checkout master git fetch upstream @@ -52,22 +47,22 @@ git checkout -b cool_new_feature Make your changes, then ensure that `make lint` and `make test` still pass. If you're satisfied with your changes, push them to your fork. -``` +```bash git push origin cool_new_feature ``` Then use the GitHub UI to open a pull request. -At this point, you're waiting on us to review your changes. We *try* to respond +At this point, you're waiting on us to review your changes. We _try_ to respond to issues and pull requests within a few business days, and we may suggest some improvements or alternatives. Once your changes are approved, one of the project maintainers will merge them. We're much more likely to approve your changes if you: -* Add tests for new functionality. -* Write a [good commit message][commit-message]. -* Maintain backward compatibility. +- Add tests for new functionality. +- Write a [good commit message][commit-message]. +- Maintain backward compatibility. [fork]: https://github.com/uber-go/zap/fork [open-issue]: https://github.com/uber-go/zap/issues/new diff --git a/tools/vendor/go.uber.org/zap/README.md b/tools/vendor/go.uber.org/zap/README.md index 1e64d6cffc..a553a428c8 100644 --- a/tools/vendor/go.uber.org/zap/README.md +++ b/tools/vendor/go.uber.org/zap/README.md @@ -54,7 +54,7 @@ and make many small allocations. Put differently, using `encoding/json` and Zap takes a different approach. It includes a reflection-free, zero-allocation JSON encoder, and the base `Logger` strives to avoid serialization overhead and allocations wherever possible. 
By building the high-level `SugaredLogger` -on that foundation, zap lets users *choose* when they need to count every +on that foundation, zap lets users _choose_ when they need to count every allocation and when they'd prefer a more familiar, loosely typed API. As measured by its own [benchmarking suite][], not only is zap more performant @@ -64,40 +64,40 @@ id="anchor-versions">[1](#footnote-versions) Log a message and 10 fields: -| Package | Time | Time % to zap | Objects Allocated | -| :------ | :--: | :-----------: | :---------------: | -| :zap: zap | 862 ns/op | +0% | 5 allocs/op -| :zap: zap (sugared) | 1250 ns/op | +45% | 11 allocs/op -| zerolog | 4021 ns/op | +366% | 76 allocs/op -| go-kit | 4542 ns/op | +427% | 105 allocs/op -| apex/log | 26785 ns/op | +3007% | 115 allocs/op -| logrus | 29501 ns/op | +3322% | 125 allocs/op -| log15 | 29906 ns/op | +3369% | 122 allocs/op +| Package | Time | Time % to zap | Objects Allocated | +| :------------------ | :---------: | :-----------: | :---------------: | +| :zap: zap | 2900 ns/op | +0% | 5 allocs/op | +| :zap: zap (sugared) | 3475 ns/op | +20% | 10 allocs/op | +| zerolog | 10639 ns/op | +267% | 32 allocs/op | +| go-kit | 14434 ns/op | +398% | 59 allocs/op | +| logrus | 17104 ns/op | +490% | 81 allocs/op | +| apex/log | 32424 ns/op | +1018% | 66 allocs/op | +| log15 | 33579 ns/op | +1058% | 76 allocs/op | Log a message with a logger that already has 10 fields of context: -| Package | Time | Time % to zap | Objects Allocated | -| :------ | :--: | :-----------: | :---------------: | -| :zap: zap | 126 ns/op | +0% | 0 allocs/op -| :zap: zap (sugared) | 187 ns/op | +48% | 2 allocs/op -| zerolog | 88 ns/op | -30% | 0 allocs/op -| go-kit | 5087 ns/op | +3937% | 103 allocs/op -| log15 | 18548 ns/op | +14621% | 73 allocs/op -| apex/log | 26012 ns/op | +20544% | 104 allocs/op -| logrus | 27236 ns/op | +21516% | 113 allocs/op +| Package | Time | Time % to zap | Objects Allocated | +| :------------------ | :---------: | :-----------: | :---------------: | +| :zap: zap | 373 ns/op | +0% | 0 allocs/op | +| :zap: zap (sugared) | 452 ns/op | +21% | 1 allocs/op | +| zerolog | 288 ns/op | -23% | 0 allocs/op | +| go-kit | 11785 ns/op | +3060% | 58 allocs/op | +| logrus | 19629 ns/op | +5162% | 70 allocs/op | +| log15 | 21866 ns/op | +5762% | 72 allocs/op | +| apex/log | 30890 ns/op | +8182% | 55 allocs/op | Log a static string, without any context or `printf`-style templating: -| Package | Time | Time % to zap | Objects Allocated | -| :------ | :--: | :-----------: | :---------------: | -| :zap: zap | 118 ns/op | +0% | 0 allocs/op -| :zap: zap (sugared) | 191 ns/op | +62% | 2 allocs/op -| zerolog | 93 ns/op | -21% | 0 allocs/op -| go-kit | 280 ns/op | +137% | 11 allocs/op -| standard library | 499 ns/op | +323% | 2 allocs/op -| apex/log | 1990 ns/op | +1586% | 10 allocs/op -| logrus | 3129 ns/op | +2552% | 24 allocs/op -| log15 | 3887 ns/op | +3194% | 23 allocs/op +| Package | Time | Time % to zap | Objects Allocated | +| :------------------ | :--------: | :-----------: | :---------------: | +| :zap: zap | 381 ns/op | +0% | 0 allocs/op | +| :zap: zap (sugared) | 410 ns/op | +8% | 1 allocs/op | +| zerolog | 369 ns/op | -3% | 0 allocs/op | +| standard library | 385 ns/op | +1% | 2 allocs/op | +| go-kit | 606 ns/op | +59% | 11 allocs/op | +| logrus | 1730 ns/op | +354% | 25 allocs/op | +| apex/log | 1998 ns/op | +424% | 7 allocs/op | +| log15 | 4546 ns/op | +1093% | 22 allocs/op | ## Development Status: Stable @@ -131,4 +131,3 @@ pinned in the 
[benchmarks/go.mod][] file. [↩](#anchor-versions) [cov]: https://codecov.io/gh/uber-go/zap [benchmarking suite]: https://github.com/uber-go/zap/tree/master/benchmarks [benchmarks/go.mod]: https://github.com/uber-go/zap/blob/master/benchmarks/go.mod - diff --git a/tools/vendor/go.uber.org/zap/array_go118.go b/tools/vendor/go.uber.org/zap/array_go118.go new file mode 100644 index 0000000000..d0d2c49d69 --- /dev/null +++ b/tools/vendor/go.uber.org/zap/array_go118.go @@ -0,0 +1,156 @@ +// Copyright (c) 2022 Uber Technologies, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +//go:build go1.18 +// +build go1.18 + +package zap + +import ( + "fmt" + + "go.uber.org/zap/zapcore" +) + +// Objects constructs a field with the given key, holding a list of the +// provided objects that can be marshaled by Zap. +// +// Note that these objects must implement zapcore.ObjectMarshaler directly. +// That is, if you're trying to marshal a []Request, the MarshalLogObject +// method must be declared on the Request type, not its pointer (*Request). +// If it's on the pointer, use ObjectValues. +// +// Given an object that implements MarshalLogObject on the value receiver, you +// can log a slice of those objects with Objects like so: +// +// type Author struct{ ... } +// func (a Author) MarshalLogObject(enc zapcore.ObjectEncoder) error +// +// var authors []Author = ... +// logger.Info("loading article", zap.Objects("authors", authors)) +// +// Similarly, given a type that implements MarshalLogObject on its pointer +// receiver, you can log a slice of pointers to that object with Objects like +// so: +// +// type Request struct{ ... } +// func (r *Request) MarshalLogObject(enc zapcore.ObjectEncoder) error +// +// var requests []*Request = ... +// logger.Info("sending requests", zap.Objects("requests", requests)) +// +// If instead, you have a slice of values of such an object, use the +// ObjectValues constructor. +// +// var requests []Request = ... 
+// logger.Info("sending requests", zap.ObjectValues("requests", requests)) +func Objects[T zapcore.ObjectMarshaler](key string, values []T) Field { + return Array(key, objects[T](values)) +} + +type objects[T zapcore.ObjectMarshaler] []T + +func (os objects[T]) MarshalLogArray(arr zapcore.ArrayEncoder) error { + for _, o := range os { + if err := arr.AppendObject(o); err != nil { + return err + } + } + return nil +} + +// ObjectMarshalerPtr is a constraint that specifies that the given type +// implements zapcore.ObjectMarshaler on a pointer receiver. +type ObjectMarshalerPtr[T any] interface { + *T + zapcore.ObjectMarshaler +} + +// ObjectValues constructs a field with the given key, holding a list of the +// provided objects, where pointers to these objects can be marshaled by Zap. +// +// Note that pointers to these objects must implement zapcore.ObjectMarshaler. +// That is, if you're trying to marshal a []Request, the MarshalLogObject +// method must be declared on the *Request type, not the value (Request). +// If it's on the value, use Objects. +// +// Given an object that implements MarshalLogObject on the pointer receiver, +// you can log a slice of those objects with ObjectValues like so: +// +// type Request struct{ ... } +// func (r *Request) MarshalLogObject(enc zapcore.ObjectEncoder) error +// +// var requests []Request = ... +// logger.Info("sending requests", zap.ObjectValues("requests", requests)) +// +// If instead, you have a slice of pointers of such an object, use the Objects +// field constructor. +// +// var requests []*Request = ... +// logger.Info("sending requests", zap.Objects("requests", requests)) +func ObjectValues[T any, P ObjectMarshalerPtr[T]](key string, values []T) Field { + return Array(key, objectValues[T, P](values)) +} + +type objectValues[T any, P ObjectMarshalerPtr[T]] []T + +func (os objectValues[T, P]) MarshalLogArray(arr zapcore.ArrayEncoder) error { + for i := range os { + // It is necessary for us to explicitly reference the "P" type. + // We cannot simply pass "&os[i]" to AppendObject because its type + // is "*T", which the type system does not consider as + // implementing ObjectMarshaler. + // Only the type "P" satisfies ObjectMarshaler, which we have + // to convert "*T" to explicitly. + var p P = &os[i] + if err := arr.AppendObject(p); err != nil { + return err + } + } + return nil +} + +// Stringers constructs a field with the given key, holding a list of the +// output provided by the value's String method +// +// Given an object that implements String on the value receiver, you +// can log a slice of those objects with Objects like so: +// +// type Request struct{ ... } +// func (a Request) String() string +// +// var requests []Request = ... +// logger.Info("sending requests", zap.Stringers("requests", requests)) +// +// Note that these objects must implement fmt.Stringer directly. +// That is, if you're trying to marshal a []Request, the String method +// must be declared on the Request type, not its pointer (*Request). 
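Editor's note: to make the distinction between `Objects`, `ObjectValues`, and the `Stringers` constructor declared just below concrete, here is a small self-contained sketch (not part of this diff; the `Author`, `Request`, and `Status` types are invented for illustration):

```go
package main

import (
	"fmt"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

// Author marshals itself on the value receiver, so []Author works with zap.Objects.
type Author struct{ Name string }

func (a Author) MarshalLogObject(enc zapcore.ObjectEncoder) error {
	enc.AddString("name", a.Name)
	return nil
}

// Request marshals itself on the pointer receiver, so []Request needs zap.ObjectValues.
type Request struct{ ID int }

func (r *Request) MarshalLogObject(enc zapcore.ObjectEncoder) error {
	enc.AddInt("id", r.ID)
	return nil
}

// Status satisfies fmt.Stringer, which is all zap.Stringers needs.
type Status int

func (s Status) String() string { return fmt.Sprintf("status(%d)", int(s)) }

func main() {
	logger := zap.NewExample()
	defer logger.Sync()

	logger.Info("loading article",
		zap.Objects("authors", []Author{{Name: "alice"}, {Name: "bob"}}),
		zap.ObjectValues("requests", []Request{{ID: 1}, {ID: 2}}),
		zap.Stringers("statuses", []Status{200, 404}),
	)
}
```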
+func Stringers[T fmt.Stringer](key string, values []T) Field { + return Array(key, stringers[T](values)) +} + +type stringers[T fmt.Stringer] []T + +func (os stringers[T]) MarshalLogArray(arr zapcore.ArrayEncoder) error { + for _, o := range os { + arr.AppendString(o.String()) + } + return nil +} diff --git a/tools/vendor/go.uber.org/zap/buffer/buffer.go b/tools/vendor/go.uber.org/zap/buffer/buffer.go index 3f4b86e081..9e929cd98e 100644 --- a/tools/vendor/go.uber.org/zap/buffer/buffer.go +++ b/tools/vendor/go.uber.org/zap/buffer/buffer.go @@ -106,6 +106,24 @@ func (b *Buffer) Write(bs []byte) (int, error) { return len(bs), nil } +// WriteByte writes a single byte to the Buffer. +// +// Error returned is always nil, function signature is compatible +// with bytes.Buffer and bufio.Writer +func (b *Buffer) WriteByte(v byte) error { + b.AppendByte(v) + return nil +} + +// WriteString writes a string to the Buffer. +// +// Error returned is always nil, function signature is compatible +// with bytes.Buffer and bufio.Writer +func (b *Buffer) WriteString(s string) (int, error) { + b.AppendString(s) + return len(s), nil +} + // TrimNewline trims any final "\n" byte from the end of the buffer. func (b *Buffer) TrimNewline() { if i := len(b.bs) - 1; i >= 0 { diff --git a/tools/vendor/go.uber.org/zap/config.go b/tools/vendor/go.uber.org/zap/config.go index 55637fb0b4..ee6096766a 100644 --- a/tools/vendor/go.uber.org/zap/config.go +++ b/tools/vendor/go.uber.org/zap/config.go @@ -21,7 +21,7 @@ package zap import ( - "fmt" + "errors" "sort" "time" @@ -182,7 +182,7 @@ func (cfg Config) Build(opts ...Option) (*Logger, error) { } if cfg.Level == (AtomicLevel{}) { - return nil, fmt.Errorf("missing Level") + return nil, errors.New("missing Level") } log := New( diff --git a/tools/vendor/go.uber.org/zap/doc.go b/tools/vendor/go.uber.org/zap/doc.go index 8638dd1b96..3c50d7b4d3 100644 --- a/tools/vendor/go.uber.org/zap/doc.go +++ b/tools/vendor/go.uber.org/zap/doc.go @@ -32,7 +32,7 @@ // they need to count every allocation and when they'd prefer a more familiar, // loosely typed API. // -// Choosing a Logger +// # Choosing a Logger // // In contexts where performance is nice, but not critical, use the // SugaredLogger. It's 4-10x faster than other structured logging packages and @@ -41,14 +41,15 @@ // variadic number of key-value pairs. (For more advanced use cases, they also // accept strongly typed fields - see the SugaredLogger.With documentation for // details.) -// sugar := zap.NewExample().Sugar() -// defer sugar.Sync() -// sugar.Infow("failed to fetch URL", -// "url", "http://example.com", -// "attempt", 3, -// "backoff", time.Second, -// ) -// sugar.Infof("failed to fetch URL: %s", "http://example.com") +// +// sugar := zap.NewExample().Sugar() +// defer sugar.Sync() +// sugar.Infow("failed to fetch URL", +// "url", "http://example.com", +// "attempt", 3, +// "backoff", time.Second, +// ) +// sugar.Infof("failed to fetch URL: %s", "http://example.com") // // By default, loggers are unbuffered. However, since zap's low-level APIs // allow buffering, calling Sync before letting your process exit is a good @@ -57,32 +58,35 @@ // In the rare contexts where every microsecond and every allocation matter, // use the Logger. It's even faster than the SugaredLogger and allocates far // less, but it only supports strongly-typed, structured logging. 
-// logger := zap.NewExample() -// defer logger.Sync() -// logger.Info("failed to fetch URL", -// zap.String("url", "http://example.com"), -// zap.Int("attempt", 3), -// zap.Duration("backoff", time.Second), -// ) +// +// logger := zap.NewExample() +// defer logger.Sync() +// logger.Info("failed to fetch URL", +// zap.String("url", "http://example.com"), +// zap.Int("attempt", 3), +// zap.Duration("backoff", time.Second), +// ) // // Choosing between the Logger and SugaredLogger doesn't need to be an // application-wide decision: converting between the two is simple and // inexpensive. -// logger := zap.NewExample() -// defer logger.Sync() -// sugar := logger.Sugar() -// plain := sugar.Desugar() // -// Configuring Zap +// logger := zap.NewExample() +// defer logger.Sync() +// sugar := logger.Sugar() +// plain := sugar.Desugar() +// +// # Configuring Zap // // The simplest way to build a Logger is to use zap's opinionated presets: // NewExample, NewProduction, and NewDevelopment. These presets build a logger // with a single function call: -// logger, err := zap.NewProduction() -// if err != nil { -// log.Fatalf("can't initialize zap logger: %v", err) -// } -// defer logger.Sync() +// +// logger, err := zap.NewProduction() +// if err != nil { +// log.Fatalf("can't initialize zap logger: %v", err) +// } +// defer logger.Sync() // // Presets are fine for small projects, but larger projects and organizations // naturally require a bit more customization. For most users, zap's Config @@ -94,7 +98,7 @@ // go.uber.org/zap/zapcore. See the package-level AdvancedConfiguration // example for sample code. // -// Extending Zap +// # Extending Zap // // The zap package itself is a relatively thin wrapper around the interfaces // in go.uber.org/zap/zapcore. Extending zap to support a new encoding (e.g., @@ -106,7 +110,7 @@ // Similarly, package authors can use the high-performance Encoder and Core // implementations in the zapcore package to build their own loggers. // -// Frequently Asked Questions +// # Frequently Asked Questions // // An FAQ covering everything from installation errors to design decisions is // available at https://github.com/uber-go/zap/blob/master/FAQ.md. diff --git a/tools/vendor/go.uber.org/zap/encoder.go b/tools/vendor/go.uber.org/zap/encoder.go index 08ed833543..caa04ceefd 100644 --- a/tools/vendor/go.uber.org/zap/encoder.go +++ b/tools/vendor/go.uber.org/zap/encoder.go @@ -63,7 +63,7 @@ func RegisterEncoder(name string, constructor func(zapcore.EncoderConfig) (zapco func newEncoder(name string, encoderConfig zapcore.EncoderConfig) (zapcore.Encoder, error) { if encoderConfig.TimeKey != "" && encoderConfig.EncodeTime == nil { - return nil, fmt.Errorf("missing EncodeTime in EncoderConfig") + return nil, errors.New("missing EncodeTime in EncoderConfig") } _encoderMutex.RLock() diff --git a/tools/vendor/go.uber.org/zap/global.go b/tools/vendor/go.uber.org/zap/global.go index c1ac0507cd..3cb46c9e0a 100644 --- a/tools/vendor/go.uber.org/zap/global.go +++ b/tools/vendor/go.uber.org/zap/global.go @@ -31,6 +31,7 @@ import ( ) const ( + _stdLogDefaultDepth = 1 _loggerWriterDepth = 2 _programmerErrorTemplate = "You've found a bug in zap! 
Please file a bug at " + "https://github.com/uber-go/zap/issues/new and reference this error: %v" diff --git a/tools/vendor/go.uber.org/zap/http_handler.go b/tools/vendor/go.uber.org/zap/http_handler.go index 1297c33b32..632b6831a8 100644 --- a/tools/vendor/go.uber.org/zap/http_handler.go +++ b/tools/vendor/go.uber.org/zap/http_handler.go @@ -22,6 +22,7 @@ package zap import ( "encoding/json" + "errors" "fmt" "io" "net/http" @@ -32,22 +33,23 @@ import ( // ServeHTTP is a simple JSON endpoint that can report on or change the current // logging level. // -// GET +// # GET // // The GET request returns a JSON description of the current logging level like: -// {"level":"info"} // -// PUT +// {"level":"info"} +// +// # PUT // // The PUT request changes the logging level. It is perfectly safe to change the // logging level while a program is running. Two content types are supported: // -// Content-Type: application/x-www-form-urlencoded +// Content-Type: application/x-www-form-urlencoded // // With this content type, the level can be provided through the request body or // a query parameter. The log level is URL encoded like: // -// level=debug +// level=debug // // The request body takes precedence over the query parameter, if both are // specified. @@ -55,18 +57,17 @@ import ( // This content type is the default for a curl PUT request. Following are two // example curl requests that both set the logging level to debug. // -// curl -X PUT localhost:8080/log/level?level=debug -// curl -X PUT localhost:8080/log/level -d level=debug +// curl -X PUT localhost:8080/log/level?level=debug +// curl -X PUT localhost:8080/log/level -d level=debug // // For any other content type, the payload is expected to be JSON encoded and // look like: // -// {"level":"info"} +// {"level":"info"} // // An example curl request could look like this: // -// curl -X PUT localhost:8080/log/level -H "Content-Type: application/json" -d '{"level":"debug"}' -// +// curl -X PUT localhost:8080/log/level -H "Content-Type: application/json" -d '{"level":"debug"}' func (lvl AtomicLevel) ServeHTTP(w http.ResponseWriter, r *http.Request) { type errorResponse struct { Error string `json:"error"` @@ -108,7 +109,7 @@ func decodePutRequest(contentType string, r *http.Request) (zapcore.Level, error func decodePutURL(r *http.Request) (zapcore.Level, error) { lvl := r.FormValue("level") if lvl == "" { - return 0, fmt.Errorf("must specify logging level") + return 0, errors.New("must specify logging level") } var l zapcore.Level if err := l.UnmarshalText([]byte(lvl)); err != nil { @@ -125,7 +126,7 @@ func decodePutJSON(body io.Reader) (zapcore.Level, error) { return 0, fmt.Errorf("malformed request body: %v", err) } if pld.Level == nil { - return 0, fmt.Errorf("must specify logging level") + return 0, errors.New("must specify logging level") } return *pld.Level, nil diff --git a/tools/vendor/go.uber.org/zap/internal/exit/exit.go b/tools/vendor/go.uber.org/zap/internal/exit/exit.go index dfc5b05feb..f673f9947b 100644 --- a/tools/vendor/go.uber.org/zap/internal/exit/exit.go +++ b/tools/vendor/go.uber.org/zap/internal/exit/exit.go @@ -24,24 +24,25 @@ package exit import "os" -var real = func() { os.Exit(1) } +var _exit = os.Exit -// Exit normally terminates the process by calling os.Exit(1). If the package -// is stubbed, it instead records a call in the testing spy. -func Exit() { - real() +// With terminates the process by calling os.Exit(code). If the package is +// stubbed, it instead records a call in the testing spy. 
+func With(code int) { + _exit(code) } // A StubbedExit is a testing fake for os.Exit. type StubbedExit struct { Exited bool - prev func() + Code int + prev func(code int) } // Stub substitutes a fake for the call to os.Exit(1). func Stub() *StubbedExit { - s := &StubbedExit{prev: real} - real = s.exit + s := &StubbedExit{prev: _exit} + _exit = s.exit return s } @@ -56,9 +57,10 @@ func WithStub(f func()) *StubbedExit { // Unstub restores the previous exit function. func (se *StubbedExit) Unstub() { - real = se.prev + _exit = se.prev } -func (se *StubbedExit) exit() { +func (se *StubbedExit) exit(code int) { se.Exited = true + se.Code = code } diff --git a/tools/vendor/go.uber.org/zap/global_go112.go b/tools/vendor/go.uber.org/zap/internal/level_enabler.go similarity index 70% rename from tools/vendor/go.uber.org/zap/global_go112.go rename to tools/vendor/go.uber.org/zap/internal/level_enabler.go index 6b5dbda807..5f3e3f1b92 100644 --- a/tools/vendor/go.uber.org/zap/global_go112.go +++ b/tools/vendor/go.uber.org/zap/internal/level_enabler.go @@ -1,4 +1,4 @@ -// Copyright (c) 2019 Uber Technologies, Inc. +// Copyright (c) 2022 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -18,9 +18,18 @@ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. -// See #682 for more information. -// +build go1.12 +package internal -package zap +import "go.uber.org/zap/zapcore" -const _stdLogDefaultDepth = 1 +// LeveledEnabler is an interface satisfied by LevelEnablers that are able to +// report their own level. +// +// This interface is defined to use more conveniently in tests and non-zapcore +// packages. +// This cannot be imported from zapcore because of the cyclic dependency. +type LeveledEnabler interface { + zapcore.LevelEnabler + + Level() zapcore.Level +} diff --git a/tools/vendor/go.uber.org/zap/level.go b/tools/vendor/go.uber.org/zap/level.go index 3567a9a1e6..db951e19a5 100644 --- a/tools/vendor/go.uber.org/zap/level.go +++ b/tools/vendor/go.uber.org/zap/level.go @@ -22,6 +22,7 @@ package zap import ( "go.uber.org/atomic" + "go.uber.org/zap/internal" "go.uber.org/zap/zapcore" ) @@ -70,6 +71,8 @@ type AtomicLevel struct { l *atomic.Int32 } +var _ internal.LeveledEnabler = AtomicLevel{} + // NewAtomicLevel creates an AtomicLevel with InfoLevel and above logging // enabled. func NewAtomicLevel() AtomicLevel { @@ -86,6 +89,23 @@ func NewAtomicLevelAt(l zapcore.Level) AtomicLevel { return a } +// ParseAtomicLevel parses an AtomicLevel based on a lowercase or all-caps ASCII +// representation of the log level. If the provided ASCII representation is +// invalid an error is returned. +// +// This is particularly useful when dealing with text input to configure log +// levels. +func ParseAtomicLevel(text string) (AtomicLevel, error) { + a := NewAtomicLevel() + l, err := zapcore.ParseLevel(text) + if err != nil { + return a, err + } + + a.SetLevel(l) + return a, nil +} + // Enabled implements the zapcore.LevelEnabler interface, which allows the // AtomicLevel to be used in place of traditional static levels. 
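Editor's note: a sketch of how the `AtomicLevel` HTTP handler and `ParseAtomicLevel` documented above are typically wired up (not part of this diff; the route and port are arbitrary choices):

```go
package main

import (
	"net/http"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

func main() {
	// Share one AtomicLevel between the logger core and an HTTP endpoint.
	// The initial level could also come from configuration text via
	// zap.ParseAtomicLevel("debug"); here it is fixed at Info.
	lvl := zap.NewAtomicLevelAt(zapcore.InfoLevel)

	cfg := zap.NewProductionConfig()
	cfg.Level = lvl
	logger := zap.Must(cfg.Build())
	defer logger.Sync()

	// PUT /log/level with {"level":"debug"} (or level=debug) changes the level,
	// GET returns the current one, as described in the handler docs above.
	mux := http.NewServeMux()
	mux.Handle("/log/level", lvl)

	logger.Info("serving level endpoint", zap.String("addr", ":8080"))
	logger.Fatal("server exited", zap.Error(http.ListenAndServe(":8080", mux)))
}
```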
func (lvl AtomicLevel) Enabled(l zapcore.Level) bool { diff --git a/tools/vendor/go.uber.org/zap/logger.go b/tools/vendor/go.uber.org/zap/logger.go index 553f258e74..cd44030d13 100644 --- a/tools/vendor/go.uber.org/zap/logger.go +++ b/tools/vendor/go.uber.org/zap/logger.go @@ -22,12 +22,11 @@ package zap import ( "fmt" - "io/ioutil" + "io" "os" - "runtime" "strings" - "time" + "go.uber.org/zap/internal/bufferpool" "go.uber.org/zap/zapcore" ) @@ -43,7 +42,7 @@ type Logger struct { development bool addCaller bool - onFatal zapcore.CheckWriteAction // default is WriteThenFatal + onFatal zapcore.CheckWriteHook // default is WriteThenFatal name string errorOutput zapcore.WriteSyncer @@ -51,6 +50,8 @@ type Logger struct { addStack zapcore.LevelEnabler callerSkip int + + clock zapcore.Clock } // New constructs a new Logger from the provided zapcore.Core and Options. If @@ -71,6 +72,7 @@ func New(core zapcore.Core, options ...Option) *Logger { core: core, errorOutput: zapcore.Lock(os.Stderr), addStack: zapcore.FatalLevel + 1, + clock: zapcore.DefaultClock, } return log.WithOptions(options...) } @@ -83,8 +85,9 @@ func New(core zapcore.Core, options ...Option) *Logger { func NewNop() *Logger { return &Logger{ core: zapcore.NewNopCore(), - errorOutput: zapcore.AddSync(ioutil.Discard), + errorOutput: zapcore.AddSync(io.Discard), addStack: zapcore.FatalLevel + 1, + clock: zapcore.DefaultClock, } } @@ -104,6 +107,19 @@ func NewDevelopment(options ...Option) (*Logger, error) { return NewDevelopmentConfig().Build(options...) } +// Must is a helper that wraps a call to a function returning (*Logger, error) +// and panics if the error is non-nil. It is intended for use in variable +// initialization such as: +// +// var logger = zap.Must(zap.NewProduction()) +func Must(logger *Logger, err error) *Logger { + if err != nil { + panic(err) + } + + return logger +} + // NewExample builds a Logger that's designed for use in zap's testable // examples. It writes DebugLevel and above logs to standard out as JSON, but // omits the timestamp and calling function to keep example output @@ -167,6 +183,13 @@ func (log *Logger) With(fields ...Field) *Logger { return l } +// Level reports the minimum enabled level for this logger. +// +// For NopLoggers, this is [zapcore.InvalidLevel]. +func (log *Logger) Level() zapcore.Level { + return zapcore.LevelOf(log.core) +} + // Check returns a CheckedEntry if logging a message at the specified level // is enabled. It's a completely optional optimization; in high-performance // applications, Check can help avoid allocating a slice to hold fields. @@ -174,6 +197,14 @@ func (log *Logger) Check(lvl zapcore.Level, msg string) *zapcore.CheckedEntry { return log.check(lvl, msg) } +// Log logs a message at the specified level. The message includes any fields +// passed at the log site, as well as any fields accumulated on the logger. +func (log *Logger) Log(lvl zapcore.Level, msg string, fields ...Field) { + if ce := log.check(lvl, msg); ce != nil { + ce.Write(fields...) + } +} + // Debug logs a message at DebugLevel. The message includes any fields passed // at the log site, as well as any fields accumulated on the logger. func (log *Logger) Debug(msg string, fields ...Field) { @@ -256,8 +287,10 @@ func (log *Logger) clone() *Logger { } func (log *Logger) check(lvl zapcore.Level, msg string) *zapcore.CheckedEntry { - // check must always be called directly by a method in the Logger interface - // (e.g., Check, Info, Fatal). 
+ // Logger.check must always be called directly by a method in the + // Logger interface (e.g., Check, Info, Fatal). + // This skips Logger.check and the Info/Fatal/Check/etc. method that + // called it. const callerSkipOffset = 2 // Check the level first to reduce the cost of disabled log calls. @@ -270,7 +303,7 @@ func (log *Logger) check(lvl zapcore.Level, msg string) *zapcore.CheckedEntry { // log message will actually be written somewhere. ent := zapcore.Entry{ LoggerName: log.name, - Time: time.Now(), + Time: log.clock.Now(), Level: lvl, Message: msg, } @@ -280,18 +313,27 @@ func (log *Logger) check(lvl zapcore.Level, msg string) *zapcore.CheckedEntry { // Set up any required terminal behavior. switch ent.Level { case zapcore.PanicLevel: - ce = ce.Should(ent, zapcore.WriteThenPanic) + ce = ce.After(ent, zapcore.WriteThenPanic) case zapcore.FatalLevel: onFatal := log.onFatal - // Noop is the default value for CheckWriteAction, and it leads to - // continued execution after a Fatal which is unexpected. - if onFatal == zapcore.WriteThenNoop { + // nil or WriteThenNoop will lead to continued execution after + // a Fatal log entry, which is unexpected. For example, + // + // f, err := os.Open(..) + // if err != nil { + // log.Fatal("cannot open", zap.Error(err)) + // } + // fmt.Println(f.Name()) + // + // The f.Name() will panic if we continue execution after the + // log.Fatal. + if onFatal == nil || onFatal == zapcore.WriteThenNoop { onFatal = zapcore.WriteThenFatal } - ce = ce.Should(ent, onFatal) + ce = ce.After(ent, onFatal) case zapcore.DPanicLevel: if log.development { - ce = ce.Should(ent, zapcore.WriteThenPanic) + ce = ce.After(ent, zapcore.WriteThenPanic) } } @@ -304,42 +346,55 @@ func (log *Logger) check(lvl zapcore.Level, msg string) *zapcore.CheckedEntry { // Thread the error output through to the CheckedEntry. ce.ErrorOutput = log.errorOutput - if log.addCaller { - frame, defined := getCallerFrame(log.callerSkip + callerSkipOffset) - if !defined { - fmt.Fprintf(log.errorOutput, "%v Logger.check error: failed to get caller\n", time.Now().UTC()) + + addStack := log.addStack.Enabled(ce.Level) + if !log.addCaller && !addStack { + return ce + } + + // Adding the caller or stack trace requires capturing the callers of + // this function. We'll share information between these two. + stackDepth := stacktraceFirst + if addStack { + stackDepth = stacktraceFull + } + stack := captureStacktrace(log.callerSkip+callerSkipOffset, stackDepth) + defer stack.Free() + + if stack.Count() == 0 { + if log.addCaller { + fmt.Fprintf(log.errorOutput, "%v Logger.check error: failed to get caller\n", ent.Time.UTC()) log.errorOutput.Sync() } + return ce + } + + frame, more := stack.Next() - ce.Entry.Caller = zapcore.EntryCaller{ - Defined: defined, + if log.addCaller { + ce.Caller = zapcore.EntryCaller{ + Defined: frame.PC != 0, PC: frame.PC, File: frame.File, Line: frame.Line, Function: frame.Function, } } - if log.addStack.Enabled(ce.Entry.Level) { - ce.Entry.Stack = StackSkip("", log.callerSkip+callerSkipOffset).String - } - return ce -} + if addStack { + buffer := bufferpool.Get() + defer buffer.Free() -// getCallerFrame gets caller frame. The argument skip is the number of stack -// frames to ascend, with 0 identifying the caller of getCallerFrame. The -// boolean ok is false if it was not possible to recover the information. -// -// Note: This implementation is similar to runtime.Caller, but it returns the whole frame. 
-func getCallerFrame(skip int) (frame runtime.Frame, ok bool) { - const skipOffset = 2 // skip getCallerFrame and Callers - - pc := make([]uintptr, 1) - numFrames := runtime.Callers(skip+skipOffset, pc) - if numFrames < 1 { - return + stackfmt := newStackFormatter(buffer) + + // We've already extracted the first frame, so format that + // separately and defer to stackfmt for the rest. + stackfmt.FormatFrame(frame) + if more { + stackfmt.FormatStack(stack) + } + ce.Stack = buffer.String() } - frame, _ = runtime.CallersFrames(pc).Next() - return frame, frame.PC != 0 + return ce } diff --git a/tools/vendor/go.uber.org/zap/options.go b/tools/vendor/go.uber.org/zap/options.go index 0135c20923..c4f3bca3d2 100644 --- a/tools/vendor/go.uber.org/zap/options.go +++ b/tools/vendor/go.uber.org/zap/options.go @@ -133,8 +133,35 @@ func IncreaseLevel(lvl zapcore.LevelEnabler) Option { } // OnFatal sets the action to take on fatal logs. +// +// Deprecated: Use [WithFatalHook] instead. func OnFatal(action zapcore.CheckWriteAction) Option { + return WithFatalHook(action) +} + +// WithFatalHook sets a CheckWriteHook to run on fatal logs. +// Zap will call this hook after writing a log statement with a Fatal level. +// +// For example, the following builds a logger that will exit the current +// goroutine after writing a fatal log message, but it will not exit the +// program. +// +// zap.New(core, zap.WithFatalHook(zapcore.WriteThenGoexit)) +// +// It is important that the provided CheckWriteHook stops the control flow at +// the current statement to meet expectations of callers of the logger. +// We recommend calling os.Exit or runtime.Goexit inside custom hooks at +// minimum. +func WithFatalHook(hook zapcore.CheckWriteHook) Option { + return optionFunc(func(log *Logger) { + log.onFatal = hook + }) +} + +// WithClock specifies the clock used by the logger to determine the current +// time for logged entries. Defaults to the system clock with time.Now. +func WithClock(clock zapcore.Clock) Option { return optionFunc(func(log *Logger) { - log.onFatal = action + log.clock = clock }) } diff --git a/tools/vendor/go.uber.org/zap/sink.go b/tools/vendor/go.uber.org/zap/sink.go index df46fa87a7..478c9a10ff 100644 --- a/tools/vendor/go.uber.org/zap/sink.go +++ b/tools/vendor/go.uber.org/zap/sink.go @@ -1,4 +1,4 @@ -// Copyright (c) 2016 Uber Technologies, Inc. +// Copyright (c) 2016-2022 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -26,6 +26,7 @@ import ( "io" "net/url" "os" + "path/filepath" "strings" "sync" @@ -34,23 +35,7 @@ import ( const schemeFile = "file" -var ( - _sinkMutex sync.RWMutex - _sinkFactories map[string]func(*url.URL) (Sink, error) // keyed by scheme -) - -func init() { - resetSinkRegistry() -} - -func resetSinkRegistry() { - _sinkMutex.Lock() - defer _sinkMutex.Unlock() - - _sinkFactories = map[string]func(*url.URL) (Sink, error){ - schemeFile: newFileSink, - } -} +var _sinkRegistry = newSinkRegistry() // Sink defines the interface to write to and close logger destinations. 
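Editor's note: the new `WithFatalHook` and `WithClock` options shown above compose with `zap.New` like any other option; a sketch (not part of this diff; `fixedClock` is an invented helper type):

```go
package main

import (
	"os"
	"time"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

// fixedClock is a toy zapcore.Clock that always reports the same instant;
// useful for reproducible output in examples and tests.
type fixedClock struct{ t time.Time }

func (c fixedClock) Now() time.Time                         { return c.t }
func (c fixedClock) NewTicker(d time.Duration) *time.Ticker { return time.NewTicker(d) }

func main() {
	enc := zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig())
	core := zapcore.NewCore(enc, zapcore.Lock(os.Stderr), zapcore.InfoLevel)

	logger := zap.New(core,
		// WithClock: entries are stamped by the supplied clock instead of time.Now.
		zap.WithClock(fixedClock{t: time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC)}),
		// WithFatalHook: Fatal writes the entry and then calls runtime.Goexit
		// instead of os.Exit, so deferred functions in this goroutine still run.
		zap.WithFatalHook(zapcore.WriteThenGoexit),
	)
	defer logger.Sync()

	logger.Info("timestamp comes from the custom clock")
}
```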
type Sink interface { @@ -58,10 +43,6 @@ type Sink interface { io.Closer } -type nopCloserSink struct{ zapcore.WriteSyncer } - -func (nopCloserSink) Close() error { return nil } - type errSinkNotFound struct { scheme string } @@ -70,16 +51,29 @@ func (e *errSinkNotFound) Error() string { return fmt.Sprintf("no sink found for scheme %q", e.scheme) } -// RegisterSink registers a user-supplied factory for all sinks with a -// particular scheme. -// -// All schemes must be ASCII, valid under section 3.1 of RFC 3986 -// (https://tools.ietf.org/html/rfc3986#section-3.1), and must not already -// have a factory registered. Zap automatically registers a factory for the -// "file" scheme. -func RegisterSink(scheme string, factory func(*url.URL) (Sink, error)) error { - _sinkMutex.Lock() - defer _sinkMutex.Unlock() +type nopCloserSink struct{ zapcore.WriteSyncer } + +func (nopCloserSink) Close() error { return nil } + +type sinkRegistry struct { + mu sync.Mutex + factories map[string]func(*url.URL) (Sink, error) // keyed by scheme + openFile func(string, int, os.FileMode) (*os.File, error) // type matches os.OpenFile +} + +func newSinkRegistry() *sinkRegistry { + sr := &sinkRegistry{ + factories: make(map[string]func(*url.URL) (Sink, error)), + openFile: os.OpenFile, + } + sr.RegisterSink(schemeFile, sr.newFileSinkFromURL) + return sr +} + +// RegisterScheme registers the given factory for the specific scheme. +func (sr *sinkRegistry) RegisterSink(scheme string, factory func(*url.URL) (Sink, error)) error { + sr.mu.Lock() + defer sr.mu.Unlock() if scheme == "" { return errors.New("can't register a sink factory for empty string") @@ -88,14 +82,22 @@ func RegisterSink(scheme string, factory func(*url.URL) (Sink, error)) error { if err != nil { return fmt.Errorf("%q is not a valid scheme: %v", scheme, err) } - if _, ok := _sinkFactories[normalized]; ok { + if _, ok := sr.factories[normalized]; ok { return fmt.Errorf("sink factory already registered for scheme %q", normalized) } - _sinkFactories[normalized] = factory + sr.factories[normalized] = factory return nil } -func newSink(rawURL string) (Sink, error) { +func (sr *sinkRegistry) newSink(rawURL string) (Sink, error) { + // URL parsing doesn't work well for Windows paths such as `c:\log.txt`, as scheme is set to + // the drive, and path is unset unless `c:/log.txt` is used. + // To avoid Windows-specific URL handling, we instead check IsAbs to open as a file. + // filepath.IsAbs is OS-specific, so IsAbs('c:/log.txt') is false outside of Windows. + if filepath.IsAbs(rawURL) { + return sr.newFileSinkFromPath(rawURL) + } + u, err := url.Parse(rawURL) if err != nil { return nil, fmt.Errorf("can't parse %q as a URL: %v", rawURL, err) @@ -104,16 +106,27 @@ func newSink(rawURL string) (Sink, error) { u.Scheme = schemeFile } - _sinkMutex.RLock() - factory, ok := _sinkFactories[u.Scheme] - _sinkMutex.RUnlock() + sr.mu.Lock() + factory, ok := sr.factories[u.Scheme] + sr.mu.Unlock() if !ok { return nil, &errSinkNotFound{u.Scheme} } return factory(u) } -func newFileSink(u *url.URL) (Sink, error) { +// RegisterSink registers a user-supplied factory for all sinks with a +// particular scheme. +// +// All schemes must be ASCII, valid under section 0.1 of RFC 3986 +// (https://tools.ietf.org/html/rfc3983#section-3.1), and must not already +// have a factory registered. Zap automatically registers a factory for the +// "file" scheme. 
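Editor's note: registering a custom sink against the refactored registry still goes through the same public `RegisterSink` call; a sketch (not part of this diff; the `demo` scheme and `stderrSink` type are made up):

```go
package main

import (
	"net/url"
	"os"

	"go.uber.org/zap"
)

// stderrSink is a trivial Sink used only to illustrate registration; it
// forwards everything to stderr and has nothing to close.
type stderrSink struct{ *os.File }

func (stderrSink) Close() error { return nil }

func main() {
	// Register a factory for the made-up "demo" scheme. The refactored
	// registry still rejects duplicate schemes and keeps "file" built in.
	if err := zap.RegisterSink("demo", func(*url.URL) (zap.Sink, error) {
		return stderrSink{os.Stderr}, nil
	}); err != nil {
		panic(err)
	}

	cfg := zap.NewProductionConfig()
	cfg.OutputPaths = []string{"demo://whatever"}

	logger := zap.Must(cfg.Build())
	defer logger.Sync()
	logger.Info("written through the demo sink")
}
```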
+func RegisterSink(scheme string, factory func(*url.URL) (Sink, error)) error { + return _sinkRegistry.RegisterSink(scheme, factory) +} + +func (sr *sinkRegistry) newFileSinkFromURL(u *url.URL) (Sink, error) { if u.User != nil { return nil, fmt.Errorf("user and password not allowed with file URLs: got %v", u) } @@ -130,13 +143,18 @@ func newFileSink(u *url.URL) (Sink, error) { if hn := u.Hostname(); hn != "" && hn != "localhost" { return nil, fmt.Errorf("file URLs must leave host empty or use localhost: got %v", u) } - switch u.Path { + + return sr.newFileSinkFromPath(u.Path) +} + +func (sr *sinkRegistry) newFileSinkFromPath(path string) (Sink, error) { + switch path { case "stdout": return nopCloserSink{os.Stdout}, nil case "stderr": return nopCloserSink{os.Stderr}, nil } - return os.OpenFile(u.Path, os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0666) + return sr.openFile(path, os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0666) } func normalizeScheme(s string) (string, error) { diff --git a/tools/vendor/go.uber.org/zap/stacktrace.go b/tools/vendor/go.uber.org/zap/stacktrace.go index 0cf8c1ddff..817a3bde8b 100644 --- a/tools/vendor/go.uber.org/zap/stacktrace.go +++ b/tools/vendor/go.uber.org/zap/stacktrace.go @@ -24,62 +24,153 @@ import ( "runtime" "sync" + "go.uber.org/zap/buffer" "go.uber.org/zap/internal/bufferpool" ) -var ( - _stacktracePool = sync.Pool{ - New: func() interface{} { - return newProgramCounters(64) - }, - } +var _stacktracePool = sync.Pool{ + New: func() interface{} { + return &stacktrace{ + storage: make([]uintptr, 64), + } + }, +} + +type stacktrace struct { + pcs []uintptr // program counters; always a subslice of storage + frames *runtime.Frames + + // The size of pcs varies depending on requirements: + // it will be one if the only the first frame was requested, + // and otherwise it will reflect the depth of the call stack. + // + // storage decouples the slice we need (pcs) from the slice we pool. + // We will always allocate a reasonably large storage, but we'll use + // only as much of it as we need. + storage []uintptr +} + +// stacktraceDepth specifies how deep of a stack trace should be captured. +type stacktraceDepth int + +const ( + // stacktraceFirst captures only the first frame. + stacktraceFirst stacktraceDepth = iota + + // stacktraceFull captures the entire call stack, allocating more + // storage for it if needed. + stacktraceFull ) -func takeStacktrace(skip int) string { - buffer := bufferpool.Get() - defer buffer.Free() - programCounters := _stacktracePool.Get().(*programCounters) - defer _stacktracePool.Put(programCounters) - - var numFrames int - for { - // Skip the call to runtime.Callers and takeStacktrace so that the - // program counters start at the caller of takeStacktrace. - numFrames = runtime.Callers(skip+2, programCounters.pcs) - if numFrames < len(programCounters.pcs) { - break - } - // Don't put the too-short counter slice back into the pool; this lets - // the pool adjust if we consistently take deep stacktraces. - programCounters = newProgramCounters(len(programCounters.pcs) * 2) +// captureStacktrace captures a stack trace of the specified depth, skipping +// the provided number of frames. skip=0 identifies the caller of +// captureStacktrace. +// +// The caller must call Free on the returned stacktrace after using it. 
+func captureStacktrace(skip int, depth stacktraceDepth) *stacktrace { + stack := _stacktracePool.Get().(*stacktrace) + + switch depth { + case stacktraceFirst: + stack.pcs = stack.storage[:1] + case stacktraceFull: + stack.pcs = stack.storage } - i := 0 - frames := runtime.CallersFrames(programCounters.pcs[:numFrames]) + // Unlike other "skip"-based APIs, skip=0 identifies runtime.Callers + // itself. +2 to skip captureStacktrace and runtime.Callers. + numFrames := runtime.Callers( + skip+2, + stack.pcs, + ) - // Note: On the last iteration, frames.Next() returns false, with a valid - // frame, but we ignore this frame. The last frame is a a runtime frame which - // adds noise, since it's only either runtime.main or runtime.goexit. - for frame, more := frames.Next(); more; frame, more = frames.Next() { - if i != 0 { - buffer.AppendByte('\n') + // runtime.Callers truncates the recorded stacktrace if there is no + // room in the provided slice. For the full stack trace, keep expanding + // storage until there are fewer frames than there is room. + if depth == stacktraceFull { + pcs := stack.pcs + for numFrames == len(pcs) { + pcs = make([]uintptr, len(pcs)*2) + numFrames = runtime.Callers(skip+2, pcs) } - i++ - buffer.AppendString(frame.Function) - buffer.AppendByte('\n') - buffer.AppendByte('\t') - buffer.AppendString(frame.File) - buffer.AppendByte(':') - buffer.AppendInt(int64(frame.Line)) + + // Discard old storage instead of returning it to the pool. + // This will adjust the pool size over time if stack traces are + // consistently very deep. + stack.storage = pcs + stack.pcs = pcs[:numFrames] + } else { + stack.pcs = stack.pcs[:numFrames] } + stack.frames = runtime.CallersFrames(stack.pcs) + return stack +} + +// Free releases resources associated with this stacktrace +// and returns it back to the pool. +func (st *stacktrace) Free() { + st.frames = nil + st.pcs = nil + _stacktracePool.Put(st) +} + +// Count reports the total number of frames in this stacktrace. +// Count DOES NOT change as Next is called. +func (st *stacktrace) Count() int { + return len(st.pcs) +} + +// Next returns the next frame in the stack trace, +// and a boolean indicating whether there are more after it. +func (st *stacktrace) Next() (_ runtime.Frame, more bool) { + return st.frames.Next() +} + +func takeStacktrace(skip int) string { + stack := captureStacktrace(skip+1, stacktraceFull) + defer stack.Free() + + buffer := bufferpool.Get() + defer buffer.Free() + + stackfmt := newStackFormatter(buffer) + stackfmt.FormatStack(stack) return buffer.String() } -type programCounters struct { - pcs []uintptr +// stackFormatter formats a stack trace into a readable string representation. +type stackFormatter struct { + b *buffer.Buffer + nonEmpty bool // whehther we've written at least one frame already +} + +// newStackFormatter builds a new stackFormatter. +func newStackFormatter(b *buffer.Buffer) stackFormatter { + return stackFormatter{b: b} } -func newProgramCounters(size int) *programCounters { - return &programCounters{make([]uintptr, size)} +// FormatStack formats all remaining frames in the provided stacktrace -- minus +// the final runtime.main/runtime.goexit frame. +func (sf *stackFormatter) FormatStack(stack *stacktrace) { + // Note: On the last iteration, frames.Next() returns false, with a valid + // frame, but we ignore this frame. The last frame is a runtime frame which + // adds noise, since it's only either runtime.main or runtime.goexit. 
+ for frame, more := stack.Next(); more; frame, more = stack.Next() { + sf.FormatFrame(frame) + } +} + +// FormatFrame formats the given frame. +func (sf *stackFormatter) FormatFrame(frame runtime.Frame) { + if sf.nonEmpty { + sf.b.AppendByte('\n') + } + sf.nonEmpty = true + sf.b.AppendString(frame.Function) + sf.b.AppendByte('\n') + sf.b.AppendByte('\t') + sf.b.AppendString(frame.File) + sf.b.AppendByte(':') + sf.b.AppendInt(int64(frame.Line)) } diff --git a/tools/vendor/go.uber.org/zap/sugar.go b/tools/vendor/go.uber.org/zap/sugar.go index 4084dada79..ac387b3e47 100644 --- a/tools/vendor/go.uber.org/zap/sugar.go +++ b/tools/vendor/go.uber.org/zap/sugar.go @@ -31,6 +31,7 @@ import ( const ( _oddNumberErrMsg = "Ignored key without a value." _nonStringKeyErrMsg = "Ignored key-value pairs with non-string keys." + _multipleErrMsg = "Multiple errors without a key." ) // A SugaredLogger wraps the base Logger functionality in a slower, but less @@ -38,10 +39,19 @@ const ( // method. // // Unlike the Logger, the SugaredLogger doesn't insist on structured logging. -// For each log level, it exposes three methods: one for loosely-typed -// structured logging, one for println-style formatting, and one for -// printf-style formatting. For example, SugaredLoggers can produce InfoLevel -// output with Infow ("info with" structured context), Info, or Infof. +// For each log level, it exposes four methods: +// +// - methods named after the log level for log.Print-style logging +// - methods ending in "w" for loosely-typed structured logging +// - methods ending in "f" for log.Printf-style logging +// - methods ending in "ln" for log.Println-style logging +// +// For example, the methods for InfoLevel are: +// +// Info(...any) Print-style logging +// Infow(...any) Structured logging (read as "info with") +// Infof(string, ...any) Printf-style logging +// Infoln(...any) Println-style logging type SugaredLogger struct { base *Logger } @@ -61,27 +71,40 @@ func (s *SugaredLogger) Named(name string) *SugaredLogger { return &SugaredLogger{base: s.base.Named(name)} } +// WithOptions clones the current SugaredLogger, applies the supplied Options, +// and returns the result. It's safe to use concurrently. +func (s *SugaredLogger) WithOptions(opts ...Option) *SugaredLogger { + base := s.base.clone() + for _, opt := range opts { + opt.apply(base) + } + return &SugaredLogger{base: base} +} + // With adds a variadic number of fields to the logging context. It accepts a // mix of strongly-typed Field objects and loosely-typed key-value pairs. When // processing pairs, the first element of the pair is used as the field key // and the second as the field value. // // For example, -// sugaredLogger.With( -// "hello", "world", -// "failure", errors.New("oh no"), -// Stack(), -// "count", 42, -// "user", User{Name: "alice"}, -// ) +// +// sugaredLogger.With( +// "hello", "world", +// "failure", errors.New("oh no"), +// Stack(), +// "count", 42, +// "user", User{Name: "alice"}, +// ) +// // is the equivalent of -// unsugared.With( -// String("hello", "world"), -// String("failure", "oh no"), -// Stack(), -// Int("count", 42), -// Object("user", User{Name: "alice"}), -// ) +// +// unsugared.With( +// String("hello", "world"), +// String("failure", "oh no"), +// Stack(), +// Int("count", 42), +// Object("user", User{Name: "alice"}), +// ) // // Note that the keys in key-value pairs should be strings. In development, // passing a non-string key panics. 
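Editor's note: the four method flavours described in the `SugaredLogger` doc comment above, together with the bare-error handling this release adds further down in `sweetenFields`, could be exercised like this (a sketch, not part of this diff):

```go
package main

import (
	"errors"

	"go.uber.org/zap"
)

func main() {
	sugar := zap.Must(zap.NewDevelopment()).Sugar()
	defer sugar.Sync()

	err := errors.New("connection refused")

	sugar.Info("plain Print-style ", 42)                  // Info
	sugar.Infof("Printf-style: attempt %d", 3)            // Infof
	sugar.Infoln("Println-style", "joins", "with spaces") // Infoln (new in 1.22.0)
	sugar.Errorw("structured context",                    // Errorw
		"attempt", 3,
		err, // a bare error is now attached as the "error" field (1.24.0)
	)
}
```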
In production, the logger is more @@ -92,6 +115,13 @@ func (s *SugaredLogger) With(args ...interface{}) *SugaredLogger { return &SugaredLogger{base: s.base.With(s.sweetenFields(args)...)} } +// Level reports the minimum enabled level for this logger. +// +// For NopLoggers, this is [zapcore.InvalidLevel]. +func (s *SugaredLogger) Level() zapcore.Level { + return zapcore.LevelOf(s.base.core) +} + // Debug uses fmt.Sprint to construct and log a message. func (s *SugaredLogger) Debug(args ...interface{}) { s.log(DebugLevel, "", args, nil) @@ -168,7 +198,8 @@ func (s *SugaredLogger) Fatalf(template string, args ...interface{}) { // pairs are treated as they are in With. // // When debug-level logging is disabled, this is much faster than -// s.With(keysAndValues).Debug(msg) +// +// s.With(keysAndValues).Debug(msg) func (s *SugaredLogger) Debugw(msg string, keysAndValues ...interface{}) { s.log(DebugLevel, msg, nil, keysAndValues) } @@ -210,11 +241,48 @@ func (s *SugaredLogger) Fatalw(msg string, keysAndValues ...interface{}) { s.log(FatalLevel, msg, nil, keysAndValues) } +// Debugln uses fmt.Sprintln to construct and log a message. +func (s *SugaredLogger) Debugln(args ...interface{}) { + s.logln(DebugLevel, args, nil) +} + +// Infoln uses fmt.Sprintln to construct and log a message. +func (s *SugaredLogger) Infoln(args ...interface{}) { + s.logln(InfoLevel, args, nil) +} + +// Warnln uses fmt.Sprintln to construct and log a message. +func (s *SugaredLogger) Warnln(args ...interface{}) { + s.logln(WarnLevel, args, nil) +} + +// Errorln uses fmt.Sprintln to construct and log a message. +func (s *SugaredLogger) Errorln(args ...interface{}) { + s.logln(ErrorLevel, args, nil) +} + +// DPanicln uses fmt.Sprintln to construct and log a message. In development, the +// logger then panics. (See DPanicLevel for details.) +func (s *SugaredLogger) DPanicln(args ...interface{}) { + s.logln(DPanicLevel, args, nil) +} + +// Panicln uses fmt.Sprintln to construct and log a message, then panics. +func (s *SugaredLogger) Panicln(args ...interface{}) { + s.logln(PanicLevel, args, nil) +} + +// Fatalln uses fmt.Sprintln to construct and log a message, then calls os.Exit. +func (s *SugaredLogger) Fatalln(args ...interface{}) { + s.logln(FatalLevel, args, nil) +} + // Sync flushes any buffered log entries. func (s *SugaredLogger) Sync() error { return s.base.Sync() } +// log message with Sprint, Sprintf, or neither. func (s *SugaredLogger) log(lvl zapcore.Level, template string, fmtArgs []interface{}, context []interface{}) { // If logging at this level is completely disabled, skip the overhead of // string formatting. @@ -228,6 +296,18 @@ func (s *SugaredLogger) log(lvl zapcore.Level, template string, fmtArgs []interf } } +// logln message with Sprintln +func (s *SugaredLogger) logln(lvl zapcore.Level, fmtArgs []interface{}, context []interface{}) { + if lvl < DPanicLevel && !s.base.Core().Enabled(lvl) { + return + } + + msg := getMessageln(fmtArgs) + if ce := s.base.Check(lvl, msg); ce != nil { + ce.Write(s.sweetenFields(context)...) + } +} + // getMessage format with Sprint, Sprintf, or neither. func getMessage(template string, fmtArgs []interface{}) string { if len(fmtArgs) == 0 { @@ -246,15 +326,24 @@ func getMessage(template string, fmtArgs []interface{}) string { return fmt.Sprint(fmtArgs...) } +// getMessageln format with Sprintln. +func getMessageln(fmtArgs []interface{}) string { + msg := fmt.Sprintln(fmtArgs...) 
+ return msg[:len(msg)-1] +} + func (s *SugaredLogger) sweetenFields(args []interface{}) []Field { if len(args) == 0 { return nil } - // Allocate enough space for the worst case; if users pass only structured - // fields, we shouldn't penalize them with extra allocations. - fields := make([]Field, 0, len(args)) - var invalid invalidPairs + var ( + // Allocate enough space for the worst case; if users pass only structured + // fields, we shouldn't penalize them with extra allocations. + fields = make([]Field, 0, len(args)) + invalid invalidPairs + seenError bool + ) for i := 0; i < len(args); { // This is a strongly-typed field. Consume it and move on. @@ -264,9 +353,21 @@ func (s *SugaredLogger) sweetenFields(args []interface{}) []Field { continue } + // If it is an error, consume it and move on. + if err, ok := args[i].(error); ok { + if !seenError { + seenError = true + fields = append(fields, Error(err)) + } else { + s.base.Error(_multipleErrMsg, Error(err)) + } + i++ + continue + } + // Make sure this element isn't a dangling key. if i == len(args)-1 { - s.base.DPanic(_oddNumberErrMsg, Any("ignored", args[i])) + s.base.Error(_oddNumberErrMsg, Any("ignored", args[i])) break } @@ -287,7 +388,7 @@ func (s *SugaredLogger) sweetenFields(args []interface{}) []Field { // If we encountered any invalid key-value pairs, log an error. if len(invalid) > 0 { - s.base.DPanic(_nonStringKeyErrMsg, Array("invalid", invalid)) + s.base.Error(_nonStringKeyErrMsg, Array("invalid", invalid)) } return fields } diff --git a/tools/vendor/go.uber.org/zap/writer.go b/tools/vendor/go.uber.org/zap/writer.go index 86a709ab0b..f08728e1ec 100644 --- a/tools/vendor/go.uber.org/zap/writer.go +++ b/tools/vendor/go.uber.org/zap/writer.go @@ -1,4 +1,4 @@ -// Copyright (c) 2016 Uber Technologies, Inc. +// Copyright (c) 2016-2022 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -23,7 +23,6 @@ package zap import ( "fmt" "io" - "io/ioutil" "go.uber.org/zap/zapcore" @@ -69,9 +68,9 @@ func open(paths []string) ([]zapcore.WriteSyncer, func(), error) { var openErr error for _, path := range paths { - sink, err := newSink(path) + sink, err := _sinkRegistry.newSink(path) if err != nil { - openErr = multierr.Append(openErr, fmt.Errorf("couldn't open sink %q: %v", path, err)) + openErr = multierr.Append(openErr, fmt.Errorf("open sink %q: %w", path, err)) continue } writers = append(writers, sink) @@ -79,7 +78,7 @@ func open(paths []string) ([]zapcore.WriteSyncer, func(), error) { } if openErr != nil { close() - return writers, nil, openErr + return nil, nil, openErr } return writers, close, nil @@ -93,7 +92,7 @@ func open(paths []string) ([]zapcore.WriteSyncer, func(), error) { // using zapcore.NewMultiWriteSyncer and zapcore.Lock individually. func CombineWriteSyncers(writers ...zapcore.WriteSyncer) zapcore.WriteSyncer { if len(writers) == 0 { - return zapcore.AddSync(ioutil.Discard) + return zapcore.AddSync(io.Discard) } return zapcore.Lock(zapcore.NewMultiWriteSyncer(writers...)) } diff --git a/tools/vendor/go.uber.org/zap/zapcore/buffered_write_syncer.go b/tools/vendor/go.uber.org/zap/zapcore/buffered_write_syncer.go new file mode 100644 index 0000000000..a40e93b3ec --- /dev/null +++ b/tools/vendor/go.uber.org/zap/zapcore/buffered_write_syncer.go @@ -0,0 +1,219 @@ +// Copyright (c) 2021 Uber Technologies, Inc. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +package zapcore + +import ( + "bufio" + "sync" + "time" + + "go.uber.org/multierr" +) + +const ( + // _defaultBufferSize specifies the default size used by Buffer. + _defaultBufferSize = 256 * 1024 // 256 kB + + // _defaultFlushInterval specifies the default flush interval for + // Buffer. + _defaultFlushInterval = 30 * time.Second +) + +// A BufferedWriteSyncer is a WriteSyncer that buffers writes in-memory before +// flushing them to a wrapped WriteSyncer after reaching some limit, or at some +// fixed interval--whichever comes first. +// +// BufferedWriteSyncer is safe for concurrent use. You don't need to use +// zapcore.Lock for WriteSyncers with BufferedWriteSyncer. +// +// To set up a BufferedWriteSyncer, construct a WriteSyncer for your log +// destination (*os.File is a valid WriteSyncer), wrap it with +// BufferedWriteSyncer, and defer a Stop() call for when you no longer need the +// object. +// +// func main() { +// ws := ... // your log destination +// bws := &zapcore.BufferedWriteSyncer{WS: ws} +// defer bws.Stop() +// +// // ... +// core := zapcore.NewCore(enc, bws, lvl) +// logger := zap.New(core) +// +// // ... +// } +// +// By default, a BufferedWriteSyncer will buffer up to 256 kilobytes of logs, +// waiting at most 30 seconds between flushes. +// You can customize these parameters by setting the Size or FlushInterval +// fields. +// For example, the following buffers up to 512 kB of logs before flushing them +// to Stderr, with a maximum of one minute between each flush. +// +// ws := &BufferedWriteSyncer{ +// WS: os.Stderr, +// Size: 512 * 1024, // 512 kB +// FlushInterval: time.Minute, +// } +// defer ws.Stop() +type BufferedWriteSyncer struct { + // WS is the WriteSyncer around which BufferedWriteSyncer will buffer + // writes. + // + // This field is required. + WS WriteSyncer + + // Size specifies the maximum amount of data the writer will buffered + // before flushing. + // + // Defaults to 256 kB if unspecified. + Size int + + // FlushInterval specifies how often the writer should flush data if + // there have been no writes. + // + // Defaults to 30 seconds if unspecified. + FlushInterval time.Duration + + // Clock, if specified, provides control of the source of time for the + // writer. + // + // Defaults to the system clock. 
+ Clock Clock + + // unexported fields for state + mu sync.Mutex + initialized bool // whether initialize() has run + stopped bool // whether Stop() has run + writer *bufio.Writer + ticker *time.Ticker + stop chan struct{} // closed when flushLoop should stop + done chan struct{} // closed when flushLoop has stopped +} + +func (s *BufferedWriteSyncer) initialize() { + size := s.Size + if size == 0 { + size = _defaultBufferSize + } + + flushInterval := s.FlushInterval + if flushInterval == 0 { + flushInterval = _defaultFlushInterval + } + + if s.Clock == nil { + s.Clock = DefaultClock + } + + s.ticker = s.Clock.NewTicker(flushInterval) + s.writer = bufio.NewWriterSize(s.WS, size) + s.stop = make(chan struct{}) + s.done = make(chan struct{}) + s.initialized = true + go s.flushLoop() +} + +// Write writes log data into buffer syncer directly, multiple Write calls will be batched, +// and log data will be flushed to disk when the buffer is full or periodically. +func (s *BufferedWriteSyncer) Write(bs []byte) (int, error) { + s.mu.Lock() + defer s.mu.Unlock() + + if !s.initialized { + s.initialize() + } + + // To avoid partial writes from being flushed, we manually flush the existing buffer if: + // * The current write doesn't fit into the buffer fully, and + // * The buffer is not empty (since bufio will not split large writes when the buffer is empty) + if len(bs) > s.writer.Available() && s.writer.Buffered() > 0 { + if err := s.writer.Flush(); err != nil { + return 0, err + } + } + + return s.writer.Write(bs) +} + +// Sync flushes buffered log data into disk directly. +func (s *BufferedWriteSyncer) Sync() error { + s.mu.Lock() + defer s.mu.Unlock() + + var err error + if s.initialized { + err = s.writer.Flush() + } + + return multierr.Append(err, s.WS.Sync()) +} + +// flushLoop flushes the buffer at the configured interval until Stop is +// called. +func (s *BufferedWriteSyncer) flushLoop() { + defer close(s.done) + + for { + select { + case <-s.ticker.C: + // we just simply ignore error here + // because the underlying bufio writer stores any errors + // and we return any error from Sync() as part of the close + _ = s.Sync() + case <-s.stop: + return + } + } +} + +// Stop closes the buffer, cleans up background goroutines, and flushes +// remaining unwritten data. +func (s *BufferedWriteSyncer) Stop() (err error) { + var stopped bool + + // Critical section. + func() { + s.mu.Lock() + defer s.mu.Unlock() + + if !s.initialized { + return + } + + stopped = s.stopped + if stopped { + return + } + s.stopped = true + + s.ticker.Stop() + close(s.stop) // tell flushLoop to stop + <-s.done // and wait until it has + }() + + // Don't call Sync on consecutive Stops. + if !stopped { + err = s.Sync() + } + + return err +} diff --git a/tools/vendor/go.uber.org/zap/zapcore/clock.go b/tools/vendor/go.uber.org/zap/zapcore/clock.go new file mode 100644 index 0000000000..422fd82a6b --- /dev/null +++ b/tools/vendor/go.uber.org/zap/zapcore/clock.go @@ -0,0 +1,48 @@ +// Copyright (c) 2021 Uber Technologies, Inc. 
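// A minimal usage sketch of the new BufferedWriteSyncer: wrap any WriteSyncer,
// optionally tune Size and FlushInterval, and defer Stop so the remaining buffer
// is flushed on shutdown. The destination, size, and interval below are arbitrary.
package main

import (
	"os"
	"time"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

func main() {
	bws := &zapcore.BufferedWriteSyncer{
		WS:            zapcore.AddSync(os.Stderr), // *os.File is itself a valid WriteSyncer
		Size:          512 * 1024,                 // flush once 512 kB is buffered
		FlushInterval: time.Minute,                // ...or at least once a minute
	}
	defer bws.Stop() // flushes whatever is still buffered

	core := zapcore.NewCore(
		zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
		bws, // already safe for concurrent use; no zapcore.Lock needed
		zapcore.InfoLevel,
	)
	zap.New(core).Info("buffered logging enabled")
}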
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +package zapcore + +import "time" + +// DefaultClock is the default clock used by Zap in operations that require +// time. This clock uses the system clock for all operations. +var DefaultClock = systemClock{} + +// Clock is a source of time for logged entries. +type Clock interface { + // Now returns the current local time. + Now() time.Time + + // NewTicker returns *time.Ticker that holds a channel + // that delivers "ticks" of a clock. + NewTicker(time.Duration) *time.Ticker +} + +// systemClock implements default Clock that uses system time. +type systemClock struct{} + +func (systemClock) Now() time.Time { + return time.Now() +} + +func (systemClock) NewTicker(duration time.Duration) *time.Ticker { + return time.NewTicker(duration) +} diff --git a/tools/vendor/go.uber.org/zap/zapcore/console_encoder.go b/tools/vendor/go.uber.org/zap/zapcore/console_encoder.go index 2307af404c..1aa5dc3646 100644 --- a/tools/vendor/go.uber.org/zap/zapcore/console_encoder.go +++ b/tools/vendor/go.uber.org/zap/zapcore/console_encoder.go @@ -125,11 +125,7 @@ func (c consoleEncoder) EncodeEntry(ent Entry, fields []Field) (*buffer.Buffer, line.AppendString(ent.Stack) } - if c.LineEnding != "" { - line.AppendString(c.LineEnding) - } else { - line.AppendString(DefaultLineEnding) - } + line.AppendString(c.LineEnding) return line, nil } diff --git a/tools/vendor/go.uber.org/zap/zapcore/core.go b/tools/vendor/go.uber.org/zap/zapcore/core.go index a1ef8b034b..9dfd64051f 100644 --- a/tools/vendor/go.uber.org/zap/zapcore/core.go +++ b/tools/vendor/go.uber.org/zap/zapcore/core.go @@ -69,6 +69,15 @@ type ioCore struct { out WriteSyncer } +var ( + _ Core = (*ioCore)(nil) + _ leveledEnabler = (*ioCore)(nil) +) + +func (c *ioCore) Level() Level { + return LevelOf(c.LevelEnabler) +} + func (c *ioCore) With(fields []Field) Core { clone := c.clone() addFields(clone.enc, fields) diff --git a/tools/vendor/go.uber.org/zap/zapcore/encoder.go b/tools/vendor/go.uber.org/zap/zapcore/encoder.go index 6601ca166c..5769ff3e4e 100644 --- a/tools/vendor/go.uber.org/zap/zapcore/encoder.go +++ b/tools/vendor/go.uber.org/zap/zapcore/encoder.go @@ -22,6 +22,7 @@ package zapcore import ( "encoding/json" + "io" "time" "go.uber.org/zap/buffer" @@ -187,10 +188,13 @@ func (e *TimeEncoder) UnmarshalText(text []byte) error { // UnmarshalYAML unmarshals YAML to a TimeEncoder. 
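// A small sketch of a custom zapcore.Clock. The offsetClock type is hypothetical
// (e.g. for exercising flush timing in tests); the only consumer of Clock shown in
// this patch is BufferedWriteSyncer.Clock, which is what it is plugged into here.
package main

import (
	"os"
	"time"

	"go.uber.org/zap/zapcore"
)

// offsetClock shifts the system clock by a fixed duration but keeps real tickers.
type offsetClock struct{ offset time.Duration }

func (c offsetClock) Now() time.Time                         { return time.Now().Add(c.offset) }
func (c offsetClock) NewTicker(d time.Duration) *time.Ticker { return time.NewTicker(d) }

var _ zapcore.Clock = offsetClock{} // compile-time interface check

func main() {
	bws := &zapcore.BufferedWriteSyncer{
		WS:    zapcore.AddSync(os.Stderr),
		Clock: offsetClock{offset: time.Hour},
	}
	defer bws.Stop()
	bws.Write([]byte("hello\n"))
}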
// If value is an object with a "layout" field, it will be unmarshaled to TimeEncoder with given layout. -// timeEncoder: -// layout: 06/01/02 03:04pm +// +// timeEncoder: +// layout: 06/01/02 03:04pm +// // If value is string, it uses UnmarshalText. -// timeEncoder: iso8601 +// +// timeEncoder: iso8601 func (e *TimeEncoder) UnmarshalYAML(unmarshal func(interface{}) error) error { var o struct { Layout string `json:"layout" yaml:"layout"` @@ -312,14 +316,15 @@ func (e *NameEncoder) UnmarshalText(text []byte) error { type EncoderConfig struct { // Set the keys used for each log entry. If any key is empty, that portion // of the entry is omitted. - MessageKey string `json:"messageKey" yaml:"messageKey"` - LevelKey string `json:"levelKey" yaml:"levelKey"` - TimeKey string `json:"timeKey" yaml:"timeKey"` - NameKey string `json:"nameKey" yaml:"nameKey"` - CallerKey string `json:"callerKey" yaml:"callerKey"` - FunctionKey string `json:"functionKey" yaml:"functionKey"` - StacktraceKey string `json:"stacktraceKey" yaml:"stacktraceKey"` - LineEnding string `json:"lineEnding" yaml:"lineEnding"` + MessageKey string `json:"messageKey" yaml:"messageKey"` + LevelKey string `json:"levelKey" yaml:"levelKey"` + TimeKey string `json:"timeKey" yaml:"timeKey"` + NameKey string `json:"nameKey" yaml:"nameKey"` + CallerKey string `json:"callerKey" yaml:"callerKey"` + FunctionKey string `json:"functionKey" yaml:"functionKey"` + StacktraceKey string `json:"stacktraceKey" yaml:"stacktraceKey"` + SkipLineEnding bool `json:"skipLineEnding" yaml:"skipLineEnding"` + LineEnding string `json:"lineEnding" yaml:"lineEnding"` // Configure the primitive representations of common complex types. For // example, some users may want all time.Times serialized as floating-point // seconds since epoch, while others may prefer ISO8601 strings. @@ -330,6 +335,9 @@ type EncoderConfig struct { // Unlike the other primitive type encoders, EncodeName is optional. The // zero value falls back to FullNameEncoder. EncodeName NameEncoder `json:"nameEncoder" yaml:"nameEncoder"` + // Configure the encoder for interface{} type objects. + // If not provided, objects are encoded using json.Encoder + NewReflectedEncoder func(io.Writer) ReflectedEncoder `json:"-" yaml:"-"` // Configures the field separator used by the console encoder. Defaults // to tab. ConsoleSeparator string `json:"consoleSeparator" yaml:"consoleSeparator"` diff --git a/tools/vendor/go.uber.org/zap/zapcore/entry.go b/tools/vendor/go.uber.org/zap/zapcore/entry.go index 4aa8b4f90b..9d326e95ea 100644 --- a/tools/vendor/go.uber.org/zap/zapcore/entry.go +++ b/tools/vendor/go.uber.org/zap/zapcore/entry.go @@ -27,10 +27,9 @@ import ( "sync" "time" + "go.uber.org/multierr" "go.uber.org/zap/internal/bufferpool" "go.uber.org/zap/internal/exit" - - "go.uber.org/multierr" ) var ( @@ -152,6 +151,27 @@ type Entry struct { Stack string } +// CheckWriteHook is a custom action that may be executed after an entry is +// written. +// +// Register one on a CheckedEntry with the After method. +// +// if ce := logger.Check(...); ce != nil { +// ce = ce.After(hook) +// ce.Write(...) +// } +// +// You can configure the hook for Fatal log statements at the logger level with +// the zap.WithFatalHook option. +type CheckWriteHook interface { + // OnWrite is invoked with the CheckedEntry that was written and a list + // of fields added with that entry. + // + // The list of fields DOES NOT include fields that were already added + // to the logger with the With method. 
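// A sketch of the two EncoderConfig knobs added above. SkipLineEnding suppresses
// the entry terminator entirely when set; NewReflectedEncoder swaps the encoder
// used for zap.Reflect-style fields. encoding/json's Encoder already satisfies
// zapcore.ReflectedEncoder, so only the indentation below deviates from the
// default; the field name and payload are arbitrary.
package main

import (
	"encoding/json"
	"io"
	"os"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

func main() {
	cfg := zap.NewProductionEncoderConfig()
	// cfg.SkipLineEnding = true // would drop the trailing "\n" from every entry
	cfg.NewReflectedEncoder = func(w io.Writer) zapcore.ReflectedEncoder {
		enc := json.NewEncoder(w)
		enc.SetEscapeHTML(false) // matches the default reflected encoder
		enc.SetIndent("", "  ")  // but pretty-prints reflected values
		return enc
	}

	core := zapcore.NewCore(zapcore.NewJSONEncoder(cfg), zapcore.AddSync(os.Stdout), zapcore.DebugLevel)
	zap.New(core).Info("reflected field", zap.Reflect("payload", map[string]int{"answer": 42}))
}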
+ OnWrite(*CheckedEntry, []Field) +} + // CheckWriteAction indicates what action to take after a log entry is // processed. Actions are ordered in increasing severity. type CheckWriteAction uint8 @@ -164,21 +184,36 @@ const ( WriteThenGoexit // WriteThenPanic causes a panic after Write. WriteThenPanic - // WriteThenFatal causes a fatal os.Exit after Write. + // WriteThenFatal causes an os.Exit(1) after Write. WriteThenFatal ) +// OnWrite implements the OnWrite method to keep CheckWriteAction compatible +// with the new CheckWriteHook interface which deprecates CheckWriteAction. +func (a CheckWriteAction) OnWrite(ce *CheckedEntry, _ []Field) { + switch a { + case WriteThenGoexit: + runtime.Goexit() + case WriteThenPanic: + panic(ce.Message) + case WriteThenFatal: + exit.With(1) + } +} + +var _ CheckWriteHook = CheckWriteAction(0) + // CheckedEntry is an Entry together with a collection of Cores that have // already agreed to log it. // -// CheckedEntry references should be created by calling AddCore or Should on a +// CheckedEntry references should be created by calling AddCore or After on a // nil *CheckedEntry. References are returned to a pool after Write, and MUST // NOT be retained after calling their Write method. type CheckedEntry struct { Entry ErrorOutput WriteSyncer dirty bool // best-effort detection of pool misuse - should CheckWriteAction + after CheckWriteHook cores []Core } @@ -186,7 +221,7 @@ func (ce *CheckedEntry) reset() { ce.Entry = Entry{} ce.ErrorOutput = nil ce.dirty = false - ce.should = WriteThenNoop + ce.after = nil for i := range ce.cores { // don't keep references to cores ce.cores[i] = nil @@ -208,7 +243,7 @@ func (ce *CheckedEntry) Write(fields ...Field) { // If the entry is dirty, log an internal error; because the // CheckedEntry is being used after it was returned to the pool, // the message may be an amalgamation from multiple call sites. - fmt.Fprintf(ce.ErrorOutput, "%v Unsafe CheckedEntry re-use near Entry %+v.\n", time.Now(), ce.Entry) + fmt.Fprintf(ce.ErrorOutput, "%v Unsafe CheckedEntry re-use near Entry %+v.\n", ce.Time, ce.Entry) ce.ErrorOutput.Sync() } return @@ -219,24 +254,16 @@ func (ce *CheckedEntry) Write(fields ...Field) { for i := range ce.cores { err = multierr.Append(err, ce.cores[i].Write(ce.Entry, fields)) } - if ce.ErrorOutput != nil { - if err != nil { - fmt.Fprintf(ce.ErrorOutput, "%v write error: %v\n", time.Now(), err) - ce.ErrorOutput.Sync() - } + if err != nil && ce.ErrorOutput != nil { + fmt.Fprintf(ce.ErrorOutput, "%v write error: %v\n", ce.Time, err) + ce.ErrorOutput.Sync() } - should, msg := ce.should, ce.Message - putCheckedEntry(ce) - - switch should { - case WriteThenPanic: - panic(msg) - case WriteThenFatal: - exit.Exit() - case WriteThenGoexit: - runtime.Goexit() + hook := ce.after + if hook != nil { + hook.OnWrite(ce, fields) } + putCheckedEntry(ce) } // AddCore adds a Core that has agreed to log this CheckedEntry. It's intended to be @@ -254,11 +281,20 @@ func (ce *CheckedEntry) AddCore(ent Entry, core Core) *CheckedEntry { // Should sets this CheckedEntry's CheckWriteAction, which controls whether a // Core will panic or fatal after writing this log entry. Like AddCore, it's // safe to call on nil CheckedEntry references. +// +// Deprecated: Use [CheckedEntry.After] instead. func (ce *CheckedEntry) Should(ent Entry, should CheckWriteAction) *CheckedEntry { + return ce.After(ent, should) +} + +// After sets this CheckEntry's CheckWriteHook, which will be called after this +// log entry has been written. 
It's safe to call this on nil CheckedEntry +// references. +func (ce *CheckedEntry) After(ent Entry, hook CheckWriteHook) *CheckedEntry { if ce == nil { ce = getCheckedEntry() ce.Entry = ent } - ce.should = should + ce.after = hook return ce } diff --git a/tools/vendor/go.uber.org/zap/zapcore/error.go b/tools/vendor/go.uber.org/zap/zapcore/error.go index f2a07d7864..06359907af 100644 --- a/tools/vendor/go.uber.org/zap/zapcore/error.go +++ b/tools/vendor/go.uber.org/zap/zapcore/error.go @@ -36,13 +36,13 @@ import ( // causer (from github.com/pkg/errors), a ${key}Causes field is added with an // array of objects containing the errors this error was comprised of. // -// { -// "error": err.Error(), -// "errorVerbose": fmt.Sprintf("%+v", err), -// "errorCauses": [ -// ... -// ], -// } +// { +// "error": err.Error(), +// "errorVerbose": fmt.Sprintf("%+v", err), +// "errorCauses": [ +// ... +// ], +// } func encodeError(key string, err error, enc ObjectEncoder) (retErr error) { // Try to capture panics (from nil references or otherwise) when calling // the Error() method @@ -83,7 +83,7 @@ type errorGroup interface { Errors() []error } -// Note that errArry and errArrayElem are very similar to the version +// Note that errArray and errArrayElem are very similar to the version // implemented in the top-level error.go file. We can't re-use this because // that would require exporting errArray as part of the zapcore API. diff --git a/tools/vendor/go.uber.org/zap/zapcore/hook.go b/tools/vendor/go.uber.org/zap/zapcore/hook.go index 5db4afb302..198def9917 100644 --- a/tools/vendor/go.uber.org/zap/zapcore/hook.go +++ b/tools/vendor/go.uber.org/zap/zapcore/hook.go @@ -27,6 +27,11 @@ type hooked struct { funcs []func(Entry) error } +var ( + _ Core = (*hooked)(nil) + _ leveledEnabler = (*hooked)(nil) +) + // RegisterHooks wraps a Core and runs a collection of user-defined callback // hooks each time a message is logged. Execution of the callbacks is blocking. // @@ -40,6 +45,10 @@ func RegisterHooks(core Core, hooks ...func(Entry) error) Core { } } +func (h *hooked) Level() Level { + return LevelOf(h.Core) +} + func (h *hooked) Check(ent Entry, ce *CheckedEntry) *CheckedEntry { // Let the wrapped Core decide whether to log this message or not. This // also gives the downstream a chance to register itself directly with the diff --git a/tools/vendor/go.uber.org/zap/zapcore/increase_level.go b/tools/vendor/go.uber.org/zap/zapcore/increase_level.go index 5a1749261a..7a11237ae9 100644 --- a/tools/vendor/go.uber.org/zap/zapcore/increase_level.go +++ b/tools/vendor/go.uber.org/zap/zapcore/increase_level.go @@ -27,6 +27,11 @@ type levelFilterCore struct { level LevelEnabler } +var ( + _ Core = (*levelFilterCore)(nil) + _ leveledEnabler = (*levelFilterCore)(nil) +) + // NewIncreaseLevelCore creates a core that can be used to increase the level of // an existing Core. It cannot be used to decrease the logging level, as it acts // as a filter before calling the underlying core. 
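// A sketch of the CheckWriteHook migration above: the old CheckWriteAction values
// still work anywhere a hook is accepted, and a custom hook only needs OnWrite.
// zap.WithFatalHook is the option named in the doc comment; its exact signature is
// assumed here. The noticeHook type and messages are illustrative only.
package main

import (
	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

// noticeHook records that a checked entry was written.
type noticeHook struct{ fired bool }

func (h *noticeHook) OnWrite(ce *zapcore.CheckedEntry, _ []zapcore.Field) {
	h.fired = true
	_ = ce.Message // the full Entry (message, level, time, ...) is available here
}

func main() {
	// In tests, the predefined action keeps Fatal from calling os.Exit:
	testLogger := zap.NewExample(zap.WithFatalHook(zapcore.WriteThenGoexit))
	_ = testLogger // testLogger.Fatal(...) would now end the goroutine, not the process

	// A custom hook attached to a single checked entry via After:
	base := zap.NewExample()
	hook := &noticeHook{}
	if ce := base.Check(zap.WarnLevel, "disk almost full"); ce != nil {
		ce = ce.After(ce.Entry, hook)
		ce.Write(zap.String("mount", "/var"))
	}
}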
If level decreases the log level, @@ -45,6 +50,10 @@ func (c *levelFilterCore) Enabled(lvl Level) bool { return c.level.Enabled(lvl) } +func (c *levelFilterCore) Level() Level { + return LevelOf(c.level) +} + func (c *levelFilterCore) With(fields []Field) Core { return &levelFilterCore{c.core.With(fields), c.level} } diff --git a/tools/vendor/go.uber.org/zap/zapcore/json_encoder.go b/tools/vendor/go.uber.org/zap/zapcore/json_encoder.go index 5cf7d917e9..3921c5cd33 100644 --- a/tools/vendor/go.uber.org/zap/zapcore/json_encoder.go +++ b/tools/vendor/go.uber.org/zap/zapcore/json_encoder.go @@ -22,7 +22,6 @@ package zapcore import ( "encoding/base64" - "encoding/json" "math" "sync" "time" @@ -64,7 +63,7 @@ type jsonEncoder struct { // for encoding generic values by reflection reflectBuf *buffer.Buffer - reflectEnc *json.Encoder + reflectEnc ReflectedEncoder } // NewJSONEncoder creates a fast, low-allocation JSON encoder. The encoder @@ -72,7 +71,9 @@ type jsonEncoder struct { // // Note that the encoder doesn't deduplicate keys, so it's possible to produce // a message like -// {"foo":"bar","foo":"baz"} +// +// {"foo":"bar","foo":"baz"} +// // This is permitted by the JSON specification, but not encouraged. Many // libraries will ignore duplicate key-value pairs (typically keeping the last // pair) when unmarshaling, but users should attempt to avoid adding duplicate @@ -82,6 +83,17 @@ func NewJSONEncoder(cfg EncoderConfig) Encoder { } func newJSONEncoder(cfg EncoderConfig, spaced bool) *jsonEncoder { + if cfg.SkipLineEnding { + cfg.LineEnding = "" + } else if cfg.LineEnding == "" { + cfg.LineEnding = DefaultLineEnding + } + + // If no EncoderConfig.NewReflectedEncoder is provided by the user, then use default + if cfg.NewReflectedEncoder == nil { + cfg.NewReflectedEncoder = defaultReflectedEncoder + } + return &jsonEncoder{ EncoderConfig: &cfg, buf: bufferpool.Get(), @@ -118,6 +130,11 @@ func (enc *jsonEncoder) AddComplex128(key string, val complex128) { enc.AppendComplex128(val) } +func (enc *jsonEncoder) AddComplex64(key string, val complex64) { + enc.addKey(key) + enc.AppendComplex64(val) +} + func (enc *jsonEncoder) AddDuration(key string, val time.Duration) { enc.addKey(key) enc.AppendDuration(val) @@ -128,6 +145,11 @@ func (enc *jsonEncoder) AddFloat64(key string, val float64) { enc.AppendFloat64(val) } +func (enc *jsonEncoder) AddFloat32(key string, val float32) { + enc.addKey(key) + enc.AppendFloat32(val) +} + func (enc *jsonEncoder) AddInt64(key string, val int64) { enc.addKey(key) enc.AppendInt64(val) @@ -136,10 +158,7 @@ func (enc *jsonEncoder) AddInt64(key string, val int64) { func (enc *jsonEncoder) resetReflectBuf() { if enc.reflectBuf == nil { enc.reflectBuf = bufferpool.Get() - enc.reflectEnc = json.NewEncoder(enc.reflectBuf) - - // For consistency with our custom JSON encoder. - enc.reflectEnc.SetEscapeHTML(false) + enc.reflectEnc = enc.NewReflectedEncoder(enc.reflectBuf) } else { enc.reflectBuf.Reset() } @@ -201,10 +220,16 @@ func (enc *jsonEncoder) AppendArray(arr ArrayMarshaler) error { } func (enc *jsonEncoder) AppendObject(obj ObjectMarshaler) error { + // Close ONLY new openNamespaces that are created during + // AppendObject(). 
+ old := enc.openNamespaces + enc.openNamespaces = 0 enc.addElementSeparator() enc.buf.AppendByte('{') err := obj.MarshalLogObject(enc) enc.buf.AppendByte('}') + enc.closeOpenNamespaces() + enc.openNamespaces = old return err } @@ -220,16 +245,23 @@ func (enc *jsonEncoder) AppendByteString(val []byte) { enc.buf.AppendByte('"') } -func (enc *jsonEncoder) AppendComplex128(val complex128) { +// appendComplex appends the encoded form of the provided complex128 value. +// precision specifies the encoding precision for the real and imaginary +// components of the complex number. +func (enc *jsonEncoder) appendComplex(val complex128, precision int) { enc.addElementSeparator() // Cast to a platform-independent, fixed-size type. r, i := float64(real(val)), float64(imag(val)) enc.buf.AppendByte('"') // Because we're always in a quoted string, we can use strconv without // special-casing NaN and +/-Inf. - enc.buf.AppendFloat(r, 64) - enc.buf.AppendByte('+') - enc.buf.AppendFloat(i, 64) + enc.buf.AppendFloat(r, precision) + // If imaginary part is less than 0, minus (-) sign is added by default + // by AppendFloat. + if i >= 0 { + enc.buf.AppendByte('+') + } + enc.buf.AppendFloat(i, precision) enc.buf.AppendByte('i') enc.buf.AppendByte('"') } @@ -292,29 +324,28 @@ func (enc *jsonEncoder) AppendUint64(val uint64) { enc.buf.AppendUint(val) } -func (enc *jsonEncoder) AddComplex64(k string, v complex64) { enc.AddComplex128(k, complex128(v)) } -func (enc *jsonEncoder) AddFloat32(k string, v float32) { enc.AddFloat64(k, float64(v)) } -func (enc *jsonEncoder) AddInt(k string, v int) { enc.AddInt64(k, int64(v)) } -func (enc *jsonEncoder) AddInt32(k string, v int32) { enc.AddInt64(k, int64(v)) } -func (enc *jsonEncoder) AddInt16(k string, v int16) { enc.AddInt64(k, int64(v)) } -func (enc *jsonEncoder) AddInt8(k string, v int8) { enc.AddInt64(k, int64(v)) } -func (enc *jsonEncoder) AddUint(k string, v uint) { enc.AddUint64(k, uint64(v)) } -func (enc *jsonEncoder) AddUint32(k string, v uint32) { enc.AddUint64(k, uint64(v)) } -func (enc *jsonEncoder) AddUint16(k string, v uint16) { enc.AddUint64(k, uint64(v)) } -func (enc *jsonEncoder) AddUint8(k string, v uint8) { enc.AddUint64(k, uint64(v)) } -func (enc *jsonEncoder) AddUintptr(k string, v uintptr) { enc.AddUint64(k, uint64(v)) } -func (enc *jsonEncoder) AppendComplex64(v complex64) { enc.AppendComplex128(complex128(v)) } -func (enc *jsonEncoder) AppendFloat64(v float64) { enc.appendFloat(v, 64) } -func (enc *jsonEncoder) AppendFloat32(v float32) { enc.appendFloat(float64(v), 32) } -func (enc *jsonEncoder) AppendInt(v int) { enc.AppendInt64(int64(v)) } -func (enc *jsonEncoder) AppendInt32(v int32) { enc.AppendInt64(int64(v)) } -func (enc *jsonEncoder) AppendInt16(v int16) { enc.AppendInt64(int64(v)) } -func (enc *jsonEncoder) AppendInt8(v int8) { enc.AppendInt64(int64(v)) } -func (enc *jsonEncoder) AppendUint(v uint) { enc.AppendUint64(uint64(v)) } -func (enc *jsonEncoder) AppendUint32(v uint32) { enc.AppendUint64(uint64(v)) } -func (enc *jsonEncoder) AppendUint16(v uint16) { enc.AppendUint64(uint64(v)) } -func (enc *jsonEncoder) AppendUint8(v uint8) { enc.AppendUint64(uint64(v)) } -func (enc *jsonEncoder) AppendUintptr(v uintptr) { enc.AppendUint64(uint64(v)) } +func (enc *jsonEncoder) AddInt(k string, v int) { enc.AddInt64(k, int64(v)) } +func (enc *jsonEncoder) AddInt32(k string, v int32) { enc.AddInt64(k, int64(v)) } +func (enc *jsonEncoder) AddInt16(k string, v int16) { enc.AddInt64(k, int64(v)) } +func (enc *jsonEncoder) AddInt8(k string, v int8) { 
enc.AddInt64(k, int64(v)) } +func (enc *jsonEncoder) AddUint(k string, v uint) { enc.AddUint64(k, uint64(v)) } +func (enc *jsonEncoder) AddUint32(k string, v uint32) { enc.AddUint64(k, uint64(v)) } +func (enc *jsonEncoder) AddUint16(k string, v uint16) { enc.AddUint64(k, uint64(v)) } +func (enc *jsonEncoder) AddUint8(k string, v uint8) { enc.AddUint64(k, uint64(v)) } +func (enc *jsonEncoder) AddUintptr(k string, v uintptr) { enc.AddUint64(k, uint64(v)) } +func (enc *jsonEncoder) AppendComplex64(v complex64) { enc.appendComplex(complex128(v), 32) } +func (enc *jsonEncoder) AppendComplex128(v complex128) { enc.appendComplex(complex128(v), 64) } +func (enc *jsonEncoder) AppendFloat64(v float64) { enc.appendFloat(v, 64) } +func (enc *jsonEncoder) AppendFloat32(v float32) { enc.appendFloat(float64(v), 32) } +func (enc *jsonEncoder) AppendInt(v int) { enc.AppendInt64(int64(v)) } +func (enc *jsonEncoder) AppendInt32(v int32) { enc.AppendInt64(int64(v)) } +func (enc *jsonEncoder) AppendInt16(v int16) { enc.AppendInt64(int64(v)) } +func (enc *jsonEncoder) AppendInt8(v int8) { enc.AppendInt64(int64(v)) } +func (enc *jsonEncoder) AppendUint(v uint) { enc.AppendUint64(uint64(v)) } +func (enc *jsonEncoder) AppendUint32(v uint32) { enc.AppendUint64(uint64(v)) } +func (enc *jsonEncoder) AppendUint16(v uint16) { enc.AppendUint64(uint64(v)) } +func (enc *jsonEncoder) AppendUint8(v uint8) { enc.AppendUint64(uint64(v)) } +func (enc *jsonEncoder) AppendUintptr(v uintptr) { enc.AppendUint64(uint64(v)) } func (enc *jsonEncoder) Clone() Encoder { clone := enc.clone() @@ -335,7 +366,7 @@ func (enc *jsonEncoder) EncodeEntry(ent Entry, fields []Field) (*buffer.Buffer, final := enc.clone() final.buf.AppendByte('{') - if final.LevelKey != "" { + if final.LevelKey != "" && final.EncodeLevel != nil { final.addKey(final.LevelKey) cur := final.buf.Len() final.EncodeLevel(ent.Level, final) @@ -396,11 +427,7 @@ func (enc *jsonEncoder) EncodeEntry(ent Entry, fields []Field) (*buffer.Buffer, final.AddString(final.StacktraceKey, ent.Stack) } final.buf.AppendByte('}') - if final.LineEnding != "" { - final.buf.AppendString(final.LineEnding) - } else { - final.buf.AppendString(DefaultLineEnding) - } + final.buf.AppendString(final.LineEnding) ret := final.buf putJSONEncoder(final) @@ -415,6 +442,7 @@ func (enc *jsonEncoder) closeOpenNamespaces() { for i := 0; i < enc.openNamespaces; i++ { enc.buf.AppendByte('}') } + enc.openNamespaces = 0 } func (enc *jsonEncoder) addKey(key string) { diff --git a/tools/vendor/go.uber.org/zap/zapcore/level.go b/tools/vendor/go.uber.org/zap/zapcore/level.go index e575c9f432..e01a241316 100644 --- a/tools/vendor/go.uber.org/zap/zapcore/level.go +++ b/tools/vendor/go.uber.org/zap/zapcore/level.go @@ -53,8 +53,62 @@ const ( _minLevel = DebugLevel _maxLevel = FatalLevel + + // InvalidLevel is an invalid value for Level. + // + // Core implementations may panic if they see messages of this level. + InvalidLevel = _maxLevel + 1 ) +// ParseLevel parses a level based on the lower-case or all-caps ASCII +// representation of the log level. If the provided ASCII representation is +// invalid an error is returned. +// +// This is particularly useful when dealing with text input to configure log +// levels. 
+func ParseLevel(text string) (Level, error) { + var level Level + err := level.UnmarshalText([]byte(text)) + return level, err +} + +type leveledEnabler interface { + LevelEnabler + + Level() Level +} + +// LevelOf reports the minimum enabled log level for the given LevelEnabler +// from Zap's supported log levels, or [InvalidLevel] if none of them are +// enabled. +// +// A LevelEnabler may implement a 'Level() Level' method to override the +// behavior of this function. +// +// func (c *core) Level() Level { +// return c.currentLevel +// } +// +// It is recommended that [Core] implementations that wrap other cores use +// LevelOf to retrieve the level of the wrapped core. For example, +// +// func (c *coreWrapper) Level() Level { +// return zapcore.LevelOf(c.wrappedCore) +// } +func LevelOf(enab LevelEnabler) Level { + if lvler, ok := enab.(leveledEnabler); ok { + return lvler.Level() + } + + for lvl := _minLevel; lvl <= _maxLevel; lvl++ { + if enab.Enabled(lvl) { + return lvl + } + } + + return InvalidLevel +} + // String returns a lower-case ASCII representation of the log level. func (l Level) String() string { switch l { diff --git a/tools/vendor/go.uber.org/zap/global_prego112.go b/tools/vendor/go.uber.org/zap/zapcore/reflected_encoder.go similarity index 64% rename from tools/vendor/go.uber.org/zap/global_prego112.go rename to tools/vendor/go.uber.org/zap/zapcore/reflected_encoder.go index d3ab9af933..8746360eca 100644 --- a/tools/vendor/go.uber.org/zap/global_prego112.go +++ b/tools/vendor/go.uber.org/zap/zapcore/reflected_encoder.go @@ -1,4 +1,4 @@ -// Copyright (c) 2019 Uber Technologies, Inc. +// Copyright (c) 2016 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -18,9 +18,24 @@ // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. -// See #682 for more information. -// +build !go1.12 +package zapcore -package zap +import ( + "encoding/json" + "io" +) -const _stdLogDefaultDepth = 2 +// ReflectedEncoder serializes log fields that can't be serialized with Zap's +// JSON encoder. These have the ReflectType field type. +// Use EncoderConfig.NewReflectedEncoder to set this. +type ReflectedEncoder interface { + // Encode encodes and writes to the underlying data stream. + Encode(interface{}) error +} + +func defaultReflectedEncoder(w io.Writer) ReflectedEncoder { + enc := json.NewEncoder(w) + // For consistency with our custom JSON encoder. + enc.SetEscapeHTML(false) + return enc +} diff --git a/tools/vendor/go.uber.org/zap/zapcore/sampler.go b/tools/vendor/go.uber.org/zap/zapcore/sampler.go index 25f10ca1d7..dc518055a4 100644 --- a/tools/vendor/go.uber.org/zap/zapcore/sampler.go +++ b/tools/vendor/go.uber.org/zap/zapcore/sampler.go @@ -1,4 +1,4 @@ -// Copyright (c) 2016 Uber Technologies, Inc. +// Copyright (c) 2016-2022 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -113,12 +113,12 @@ func nopSamplingHook(Entry, SamplingDecision) {} // This hook may be used to get visibility into the performance of the sampler. // For example, use it to track metrics of dropped versus sampled logs. 
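// A sketch of the new level helpers above: ParseLevel turns text configuration into
// a Level, and LevelOf reports the minimum enabled level of any LevelEnabler
// (InvalidLevel when nothing is enabled). The LOG_LEVEL variable name is arbitrary.
package main

import (
	"fmt"
	"os"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

func main() {
	lvl, err := zapcore.ParseLevel(os.Getenv("LOG_LEVEL")) // e.g. "debug", "INFO"
	if err != nil {
		lvl = zapcore.InfoLevel // fall back when the text isn't a recognized level
	}

	core := zapcore.NewCore(
		zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
		zapcore.AddSync(os.Stderr),
		lvl,
	)
	fmt.Println("core level:", zapcore.LevelOf(core))       // ioCore implements Level()
	fmt.Println("nop level:", zap.NewNop().Sugar().Level()) // the invalid level
}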
// -// var dropped atomic.Int64 -// zapcore.SamplerHook(func(ent zapcore.Entry, dec zapcore.SamplingDecision) { -// if dec&zapcore.LogDropped > 0 { -// dropped.Inc() -// } -// }) +// var dropped atomic.Int64 +// zapcore.SamplerHook(func(ent zapcore.Entry, dec zapcore.SamplingDecision) { +// if dec&zapcore.LogDropped > 0 { +// dropped.Inc() +// } +// }) func SamplerHook(hook func(entry Entry, dec SamplingDecision)) SamplerOption { return optionFunc(func(s *sampler) { s.hook = hook @@ -133,10 +133,21 @@ func SamplerHook(hook func(entry Entry, dec SamplingDecision)) SamplerOption { // each tick. If more Entries with the same level and message are seen during // the same interval, every Mth message is logged and the rest are dropped. // +// For example, +// +// core = NewSamplerWithOptions(core, time.Second, 10, 5) +// +// This will log the first 10 log entries with the same level and message +// in a one second interval as-is. Following that, it will allow through +// every 5th log entry with the same level and message in that interval. +// +// If thereafter is zero, the Core will drop all log entries after the first N +// in that interval. +// // Sampler can be configured to report sampling decisions with the SamplerHook // option. // -// Keep in mind that zap's sampling implementation is optimized for speed over +// Keep in mind that Zap's sampling implementation is optimized for speed over // absolute precision; under load, each tick may be slightly over- or // under-sampled. func NewSamplerWithOptions(core Core, tick time.Duration, first, thereafter int, opts ...SamplerOption) Core { @@ -164,6 +175,11 @@ type sampler struct { hook func(Entry, SamplingDecision) } +var ( + _ Core = (*sampler)(nil) + _ leveledEnabler = (*sampler)(nil) +) + // NewSampler creates a Core that samples incoming entries, which // caps the CPU and I/O load of logging while attempting to preserve a // representative subset of your logs. @@ -181,6 +197,10 @@ func NewSampler(core Core, tick time.Duration, first, thereafter int) Core { return NewSamplerWithOptions(core, tick, first, thereafter) } +func (s *sampler) Level() Level { + return LevelOf(s.Core) +} + func (s *sampler) With(fields []Field) Core { return &sampler{ Core: s.Core.With(fields), @@ -197,12 +217,14 @@ func (s *sampler) Check(ent Entry, ce *CheckedEntry) *CheckedEntry { return ce } - counter := s.counts.get(ent.Level, ent.Message) - n := counter.IncCheckReset(ent.Time, s.tick) - if n > s.first && (n-s.first)%s.thereafter != 0 { - s.hook(ent, LogDropped) - return ce + if ent.Level >= _minLevel && ent.Level <= _maxLevel { + counter := s.counts.get(ent.Level, ent.Message) + n := counter.IncCheckReset(ent.Time, s.tick) + if n > s.first && (s.thereafter == 0 || (n-s.first)%s.thereafter != 0) { + s.hook(ent, LogDropped) + return ce + } + s.hook(ent, LogSampled) } - s.hook(ent, LogSampled) return s.Core.Check(ent, ce) } diff --git a/tools/vendor/go.uber.org/zap/zapcore/tee.go b/tools/vendor/go.uber.org/zap/zapcore/tee.go index 07a32eef9a..9bb32f0557 100644 --- a/tools/vendor/go.uber.org/zap/zapcore/tee.go +++ b/tools/vendor/go.uber.org/zap/zapcore/tee.go @@ -1,4 +1,4 @@ -// Copyright (c) 2016 Uber Technologies, Inc. +// Copyright (c) 2016-2022 Uber Technologies, Inc. 
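// A sketch of NewSamplerWithOptions and SamplerHook as documented above: the first
// 10 entries per second for a given (level, message) pass through, then every 5th,
// and the hook counts what was dropped. Encoder, sink, and counts are arbitrary.
package main

import (
	"fmt"
	"os"
	"sync/atomic"
	"time"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

func main() {
	var dropped int64

	base := zapcore.NewCore(
		zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig()),
		zapcore.AddSync(os.Stderr),
		zapcore.InfoLevel,
	)
	sampled := zapcore.NewSamplerWithOptions(base, time.Second, 10, 5,
		zapcore.SamplerHook(func(_ zapcore.Entry, dec zapcore.SamplingDecision) {
			if dec&zapcore.LogDropped > 0 {
				atomic.AddInt64(&dropped, 1)
			}
		}),
	)

	logger := zap.New(sampled)
	for i := 0; i < 100; i++ {
		logger.Info("hot loop") // identical level and message, so sampling kicks in
	}
	fmt.Fprintln(os.Stderr, "dropped:", atomic.LoadInt64(&dropped))
}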
// // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal @@ -24,6 +24,11 @@ import "go.uber.org/multierr" type multiCore []Core +var ( + _ leveledEnabler = multiCore(nil) + _ Core = multiCore(nil) +) + // NewTee creates a Core that duplicates log entries into two or more // underlying Cores. // @@ -48,6 +53,16 @@ func (mc multiCore) With(fields []Field) Core { return clone } +func (mc multiCore) Level() Level { + minLvl := _maxLevel // mc is never empty + for i := range mc { + if lvl := LevelOf(mc[i]); lvl < minLvl { + minLvl = lvl + } + } + return minLvl +} + func (mc multiCore) Enabled(lvl Level) bool { for i := range mc { if mc[i].Enabled(lvl) { diff --git a/tools/vendor/golang.org/x/exp/slices/slices.go b/tools/vendor/golang.org/x/exp/slices/slices.go index 8a237c5d61..2540bd6825 100644 --- a/tools/vendor/golang.org/x/exp/slices/slices.go +++ b/tools/vendor/golang.org/x/exp/slices/slices.go @@ -104,8 +104,8 @@ func CompareFunc[E1, E2 any](s1 []E1, s2 []E2, cmp func(E1, E2) int) int { // Index returns the index of the first occurrence of v in s, // or -1 if not present. func Index[E comparable](s []E, v E) int { - for i, vs := range s { - if v == vs { + for i := range s { + if v == s[i] { return i } } @@ -115,8 +115,8 @@ func Index[E comparable](s []E, v E) int { // IndexFunc returns the first index i satisfying f(s[i]), // or -1 if none do. func IndexFunc[E any](s []E, f func(E) bool) int { - for i, v := range s { - if f(v) { + for i := range s { + if f(s[i]) { return i } } @@ -128,6 +128,12 @@ func Contains[E comparable](s []E, v E) bool { return Index(s, v) >= 0 } +// ContainsFunc reports whether at least one +// element e of s satisfies f(e). +func ContainsFunc[E any](s []E, f func(E) bool) bool { + return IndexFunc(s, f) >= 0 +} + // Insert inserts the values v... into s at index i, // returning the modified slice. // In the returned slice r, r[i] == v[0]. @@ -151,12 +157,35 @@ func Insert[S ~[]E, E any](s S, i int, v ...E) S { // Delete removes the elements s[i:j] from s, returning the modified slice. // Delete panics if s[i:j] is not a valid slice of s. // Delete modifies the contents of the slice s; it does not create a new slice. -// Delete is O(len(s)-(j-i)), so if many items must be deleted, it is better to +// Delete is O(len(s)-j), so if many items must be deleted, it is better to // make a single call deleting them all together than to delete one at a time. +// Delete might not modify the elements s[len(s)-(j-i):len(s)]. If those +// elements contain pointers you might consider zeroing those elements so that +// objects they reference can be garbage collected. func Delete[S ~[]E, E any](s S, i, j int) S { + _ = s[i:j] // bounds check + return append(s[:i], s[j:]...) } +// Replace replaces the elements s[i:j] by the given v, and returns the +// modified slice. Replace panics if s[i:j] is not a valid slice of s. +func Replace[S ~[]E, E any](s S, i, j int, v ...E) S { + _ = s[i:j] // verify that i:j is a valid subslice + tot := len(s[:i]) + len(v) + len(s[j:]) + if tot <= cap(s) { + s2 := s[:tot] + copy(s2[i+len(v):], s[j:]) + copy(s2[i:], v) + return s2 + } + s2 := make(S, tot) + copy(s2, s[:i]) + copy(s2[i:], v) + copy(s2[i+len(v):], s[j:]) + return s2 +} + // Clone returns a copy of the slice. // The elements are copied using assignment, so this is a shallow clone. 
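// A sketch of the effect of the new multiCore.Level above: zapcore.LevelOf on a Tee
// reports the most permissive (minimum) level among its children. The sinks and
// levels below are arbitrary.
package main

import (
	"fmt"
	"os"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

func main() {
	enc := zapcore.NewJSONEncoder(zap.NewProductionEncoderConfig())
	debugCore := zapcore.NewCore(enc, zapcore.AddSync(os.Stdout), zapcore.DebugLevel)
	errorCore := zapcore.NewCore(enc, zapcore.AddSync(os.Stderr), zapcore.ErrorLevel)

	tee := zapcore.NewTee(debugCore, errorCore)
	fmt.Println(zapcore.LevelOf(tee)) // "debug": the minimum across both children
}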
func Clone[S ~[]E, E any](s S) S { @@ -170,17 +199,20 @@ func Clone[S ~[]E, E any](s S) S { // Compact replaces consecutive runs of equal elements with a single copy. // This is like the uniq command found on Unix. // Compact modifies the contents of the slice s; it does not create a new slice. +// When Compact discards m elements in total, it might not modify the elements +// s[len(s)-m:len(s)]. If those elements contain pointers you might consider +// zeroing those elements so that objects they reference can be garbage collected. func Compact[S ~[]E, E comparable](s S) S { - if len(s) == 0 { + if len(s) < 2 { return s } i := 1 - last := s[0] - for _, v := range s[1:] { - if v != last { - s[i] = v + for k := 1; k < len(s); k++ { + if s[k] != s[k-1] { + if i != k { + s[i] = s[k] + } i++ - last = v } } return s[:i] @@ -188,16 +220,16 @@ func Compact[S ~[]E, E comparable](s S) S { // CompactFunc is like Compact but uses a comparison function. func CompactFunc[S ~[]E, E any](s S, eq func(E, E) bool) S { - if len(s) == 0 { + if len(s) < 2 { return s } i := 1 - last := s[0] - for _, v := range s[1:] { - if !eq(v, last) { - s[i] = v + for k := 1; k < len(s); k++ { + if !eq(s[k], s[k-1]) { + if i != k { + s[i] = s[k] + } i++ - last = v } } return s[:i] @@ -205,11 +237,19 @@ func CompactFunc[S ~[]E, E any](s S, eq func(E, E) bool) S { // Grow increases the slice's capacity, if necessary, to guarantee space for // another n elements. After Grow(n), at least n elements can be appended -// to the slice without another allocation. Grow may modify elements of the -// slice between the length and the capacity. If n is negative or too large to +// to the slice without another allocation. If n is negative or too large to // allocate the memory, Grow panics. func Grow[S ~[]E, E any](s S, n int) S { - return append(s, make(S, n)...)[:len(s)] + if n < 0 { + panic("cannot be negative") + } + if n -= cap(s) - len(s); n > 0 { + // TODO(https://go.dev/issue/53888): Make using []E instead of S + // to workaround a compiler bug where the runtime.growslice optimization + // does not take effect. Revert when the compiler is fixed. + s = append([]E(s)[:cap(s)], make([]E, n)...)[:len(s)] + } + return s } // Clip removes unused capacity from the slice, returning s[:len(s):len(s)]. diff --git a/tools/vendor/golang.org/x/exp/slices/sort.go b/tools/vendor/golang.org/x/exp/slices/sort.go index c22e74bd10..231b6448ac 100644 --- a/tools/vendor/golang.org/x/exp/slices/sort.go +++ b/tools/vendor/golang.org/x/exp/slices/sort.go @@ -30,7 +30,7 @@ func SortFunc[E any](x []E, less func(a, b E) bool) { pdqsortLessFunc(x, 0, n, bits.Len(uint(n)), less) } -// SortStable sorts the slice x while keeping the original order of equal +// SortStableFunc sorts the slice x while keeping the original order of equal // elements, using less to compare elements. func SortStableFunc[E any](x []E, less func(a, b E) bool) { stableLessFunc(x, len(x), less) @@ -62,46 +62,47 @@ func IsSortedFunc[E any](x []E, less func(a, b E) bool) bool { // sort order; it also returns a bool saying whether the target is really found // in the slice. The slice must be sorted in increasing order. func BinarySearch[E constraints.Ordered](x []E, target E) (int, bool) { - // search returns the leftmost position where f returns true, or len(x) if f - // returns false for all x. This is the insertion position for target in x, - // and could point to an element that's either == target or not. 
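// A sketch of the x/exp/slices additions and changes above: ContainsFunc, the new
// Replace, and the rewritten Compact. The sample data is arbitrary.
package main

import (
	"fmt"

	"golang.org/x/exp/slices"
)

func main() {
	s := []int{1, 1, 2, 3, 3, 3, 4}

	hasEven := slices.ContainsFunc(s, func(v int) bool { return v%2 == 0 })
	fmt.Println(hasEven) // true

	s = slices.Compact(s) // consecutive duplicates collapsed in place
	fmt.Println(s)        // [1 2 3 4]

	s = slices.Replace(s, 1, 3, 7, 8, 9) // replace s[1:3] with 7, 8, 9
	fmt.Println(s)                       // [1 7 8 9 4]
}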
- pos := search(len(x), func(i int) bool { return x[i] >= target }) - if pos >= len(x) || x[pos] != target { - return pos, false - } else { - return pos, true + // Inlining is faster than calling BinarySearchFunc with a lambda. + n := len(x) + // Define x[-1] < target and x[n] >= target. + // Invariant: x[i-1] < target, x[j] >= target. + i, j := 0, n + for i < j { + h := int(uint(i+j) >> 1) // avoid overflow when computing h + // i ≤ h < j + if x[h] < target { + i = h + 1 // preserves x[i-1] < target + } else { + j = h // preserves x[j] >= target + } } + // i == j, x[i-1] < target, and x[j] (= x[i]) >= target => answer is i. + return i, i < n && x[i] == target } // BinarySearchFunc works like BinarySearch, but uses a custom comparison -// function. The slice must be sorted in increasing order, where "increasing" is -// defined by cmp. cmp(a, b) is expected to return an integer comparing the two -// parameters: 0 if a == b, a negative number if a < b and a positive number if -// a > b. -func BinarySearchFunc[E any](x []E, target E, cmp func(E, E) int) (int, bool) { - pos := search(len(x), func(i int) bool { return cmp(x[i], target) >= 0 }) - if pos >= len(x) || cmp(x[pos], target) != 0 { - return pos, false - } else { - return pos, true - } -} - -func search(n int, f func(int) bool) int { - // Define f(-1) == false and f(n) == true. - // Invariant: f(i-1) == false, f(j) == true. +// function. The slice must be sorted in increasing order, where "increasing" +// is defined by cmp. cmp should return 0 if the slice element matches +// the target, a negative number if the slice element precedes the target, +// or a positive number if the slice element follows the target. +// cmp must implement the same ordering as the slice, such that if +// cmp(a, t) < 0 and cmp(b, t) >= 0, then a must precede b in the slice. +func BinarySearchFunc[E, T any](x []E, target T, cmp func(E, T) int) (int, bool) { + n := len(x) + // Define cmp(x[-1], target) < 0 and cmp(x[n], target) >= 0 . + // Invariant: cmp(x[i - 1], target) < 0, cmp(x[j], target) >= 0. i, j := 0, n for i < j { h := int(uint(i+j) >> 1) // avoid overflow when computing h // i ≤ h < j - if !f(h) { - i = h + 1 // preserves f(i-1) == false + if cmp(x[h], target) < 0 { + i = h + 1 // preserves cmp(x[i - 1], target) < 0 } else { - j = h // preserves f(j) == true + j = h // preserves cmp(x[j], target) >= 0 } } - // i == j, f(i-1) == false, and f(j) (= f(i)) == true => answer is i. - return i + // i == j, cmp(x[i-1], target) < 0, and cmp(x[j], target) (= cmp(x[i], target)) >= 0 => answer is i. + return i, i < n && cmp(x[i], target) == 0 } type sortedHint int // hint for pdqsort when choosing the pivot diff --git a/tools/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go b/tools/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go index 2681af35af..150f887e7a 100644 --- a/tools/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go +++ b/tools/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go @@ -13,7 +13,7 @@ import ( "sync" ) -// Regexp is a wrapper around regexp.Regexp, where the underlying regexp will be +// Regexp is a wrapper around [regexp.Regexp], where the underlying regexp will be // compiled the first time it is needed. 
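// A sketch of the reworked BinarySearchFunc above: the element and target types may
// now differ, and cmp must agree with the slice's ordering. The user type and data
// below are illustrative.
package main

import (
	"fmt"
	"strings"

	"golang.org/x/exp/slices"
)

type user struct {
	Name string
	Age  int
}

func main() {
	users := []user{{"alice", 30}, {"bob", 25}, {"carol", 41}} // sorted by Name

	i, found := slices.BinarySearchFunc(users, "bob", func(u user, name string) int {
		return strings.Compare(u.Name, name)
	})
	fmt.Println(i, found) // 1 true
}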
type Regexp struct { str string diff --git a/tools/vendor/golang.org/x/mod/modfile/print.go b/tools/vendor/golang.org/x/mod/modfile/print.go index 524f93022a..2a0123d4b9 100644 --- a/tools/vendor/golang.org/x/mod/modfile/print.go +++ b/tools/vendor/golang.org/x/mod/modfile/print.go @@ -16,7 +16,13 @@ import ( func Format(f *FileSyntax) []byte { pr := &printer{} pr.file(f) - return pr.Bytes() + + // remove trailing blank lines + b := pr.Bytes() + for len(b) > 0 && b[len(b)-1] == '\n' && (len(b) == 1 || b[len(b)-2] == '\n') { + b = b[:len(b)-1] + } + return b } // A printer collects the state during printing of a file or expression. @@ -59,7 +65,11 @@ func (p *printer) newline() { } p.trim() - p.printf("\n") + if b := p.Bytes(); len(b) == 0 || (len(b) >= 2 && b[len(b)-1] == '\n' && b[len(b)-2] == '\n') { + // skip the blank line at top of file or after a blank line + } else { + p.printf("\n") + } for i := 0; i < p.margin; i++ { p.printf("\t") } diff --git a/tools/vendor/golang.org/x/mod/modfile/read.go b/tools/vendor/golang.org/x/mod/modfile/read.go index a503bc2105..5b5bb5e115 100644 --- a/tools/vendor/golang.org/x/mod/modfile/read.go +++ b/tools/vendor/golang.org/x/mod/modfile/read.go @@ -65,7 +65,7 @@ type Comments struct { } // Comment returns the receiver. This isn't useful by itself, but -// a Comments struct is embedded into all the expression +// a [Comments] struct is embedded into all the expression // implementation types, and this gives each of those a Comment // method to satisfy the Expr interface. func (c *Comments) Comment() *Comments { diff --git a/tools/vendor/golang.org/x/mod/modfile/rule.go b/tools/vendor/golang.org/x/mod/modfile/rule.go index 6bcde8fabe..930b6c59bc 100644 --- a/tools/vendor/golang.org/x/mod/modfile/rule.go +++ b/tools/vendor/golang.org/x/mod/modfile/rule.go @@ -5,17 +5,17 @@ // Package modfile implements a parser and formatter for go.mod files. // // The go.mod syntax is described in -// https://golang.org/cmd/go/#hdr-The_go_mod_file. +// https://pkg.go.dev/cmd/go/#hdr-The_go_mod_file. // -// The Parse and ParseLax functions both parse a go.mod file and return an +// The [Parse] and [ParseLax] functions both parse a go.mod file and return an // abstract syntax tree. ParseLax ignores unknown statements and may be used to // parse go.mod files that may have been developed with newer versions of Go. // -// The File struct returned by Parse and ParseLax represent an abstract -// go.mod file. File has several methods like AddNewRequire and DropReplace -// that can be used to programmatically edit a file. +// The [File] struct returned by Parse and ParseLax represent an abstract +// go.mod file. File has several methods like [File.AddNewRequire] and +// [File.DropReplace] that can be used to programmatically edit a file. // -// The Format function formats a File back to a byte slice which can be +// The [Format] function formats a File back to a byte slice which can be // written to a file. package modfile @@ -35,12 +35,13 @@ import ( // A File is the parsed, interpreted form of a go.mod file. type File struct { - Module *Module - Go *Go - Require []*Require - Exclude []*Exclude - Replace []*Replace - Retract []*Retract + Module *Module + Go *Go + Toolchain *Toolchain + Require []*Require + Exclude []*Exclude + Replace []*Replace + Retract []*Retract Syntax *FileSyntax } @@ -58,6 +59,12 @@ type Go struct { Syntax *Line } +// A Toolchain is the toolchain statement. 
+type Toolchain struct { + Name string // "go1.21rc1" + Syntax *Line +} + // An Exclude is a single exclude statement. type Exclude struct { Mod module.Version @@ -219,7 +226,7 @@ var dontFixRetract VersionFixer = func(_, vers string) (string, error) { // data is the content of the file. // // fix is an optional function that canonicalizes module versions. -// If fix is nil, all module versions must be canonical (module.CanonicalVersion +// If fix is nil, all module versions must be canonical ([module.CanonicalVersion] // must return the same string). func Parse(file string, data []byte, fix VersionFixer) (*File, error) { return parseToFile(file, data, fix, true) @@ -296,9 +303,13 @@ func parseToFile(file string, data []byte, fix VersionFixer, strict bool) (parse return f, nil } -var GoVersionRE = lazyregexp.New(`^([1-9][0-9]*)\.(0|[1-9][0-9]*)$`) +var GoVersionRE = lazyregexp.New(`^([1-9][0-9]*)\.(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))?([a-z]+[0-9]+)?$`) var laxGoVersionRE = lazyregexp.New(`^v?(([1-9][0-9]*)\.(0|[1-9][0-9]*))([^0-9].*)$`) +// Toolchains must be named beginning with `go1`, +// like "go1.20.3" or "go1.20.3-gccgo". As a special case, "default" is also permitted. +var ToolchainRE = lazyregexp.New(`^default$|^go1($|\.)`) + func (f *File) add(errs *ErrorList, block *LineBlock, line *Line, verb string, args []string, fix VersionFixer, strict bool) { // If strict is false, this module is a dependency. // We ignore all unknown directives as well as main-module-only @@ -364,6 +375,21 @@ func (f *File) add(errs *ErrorList, block *LineBlock, line *Line, verb string, a f.Go = &Go{Syntax: line} f.Go.Version = args[0] + case "toolchain": + if f.Toolchain != nil { + errorf("repeated toolchain statement") + return + } + if len(args) != 1 { + errorf("toolchain directive expects exactly one argument") + return + } else if strict && !ToolchainRE.MatchString(args[0]) { + errorf("invalid toolchain version '%s': must match format go1.23 or local", args[0]) + return + } + f.Toolchain = &Toolchain{Syntax: line} + f.Toolchain.Name = args[0] + case "module": if f.Module != nil { errorf("repeated module statement") @@ -612,6 +638,22 @@ func (f *WorkFile) add(errs *ErrorList, line *Line, verb string, args []string, f.Go = &Go{Syntax: line} f.Go.Version = args[0] + case "toolchain": + if f.Toolchain != nil { + errorf("repeated toolchain statement") + return + } + if len(args) != 1 { + errorf("toolchain directive expects exactly one argument") + return + } else if !ToolchainRE.MatchString(args[0]) { + errorf("invalid toolchain version '%s': must match format go1.23 or local", args[0]) + return + } + + f.Toolchain = &Toolchain{Syntax: line} + f.Toolchain.Name = args[0] + case "use": if len(args) != 1 { errorf("usage: %s local/dir", verb) @@ -881,7 +923,7 @@ func (f *File) Format() ([]byte, error) { } // Cleanup cleans up the file f after any edit operations. -// To avoid quadratic behavior, modifications like DropRequire +// To avoid quadratic behavior, modifications like [File.DropRequire] // clear the entry but do not remove it from the slice. // Cleanup cleans out all the cleared entries. 
func (f *File) Cleanup() { @@ -926,7 +968,7 @@ func (f *File) Cleanup() { func (f *File) AddGoStmt(version string) error { if !GoVersionRE.MatchString(version) { - return fmt.Errorf("invalid language version string %q", version) + return fmt.Errorf("invalid language version %q", version) } if f.Go == nil { var hint Expr @@ -944,6 +986,44 @@ func (f *File) AddGoStmt(version string) error { return nil } +// DropGoStmt deletes the go statement from the file. +func (f *File) DropGoStmt() { + if f.Go != nil { + f.Go.Syntax.markRemoved() + f.Go = nil + } +} + +// DropToolchainStmt deletes the toolchain statement from the file. +func (f *File) DropToolchainStmt() { + if f.Toolchain != nil { + f.Toolchain.Syntax.markRemoved() + f.Toolchain = nil + } +} + +func (f *File) AddToolchainStmt(name string) error { + if !ToolchainRE.MatchString(name) { + return fmt.Errorf("invalid toolchain name %q", name) + } + if f.Toolchain == nil { + var hint Expr + if f.Go != nil && f.Go.Syntax != nil { + hint = f.Go.Syntax + } else if f.Module != nil && f.Module.Syntax != nil { + hint = f.Module.Syntax + } + f.Toolchain = &Toolchain{ + Name: name, + Syntax: f.Syntax.addLine(hint, "toolchain", name), + } + } else { + f.Toolchain.Name = name + f.Syntax.updateLine(f.Toolchain.Syntax, "toolchain", name) + } + return nil +} + // AddRequire sets the first require line for path to version vers, // preserving any existing comments for that line and removing all // other lines for path. @@ -995,8 +1075,8 @@ func (f *File) AddNewRequire(path, vers string, indirect bool) { // The requirements in req must specify at most one distinct version for each // module path. // -// If any existing requirements may be removed, the caller should call Cleanup -// after all edits are complete. +// If any existing requirements may be removed, the caller should call +// [File.Cleanup] after all edits are complete. func (f *File) SetRequire(req []*Require) { type elem struct { version string @@ -1387,13 +1467,21 @@ func (f *File) DropRetract(vi VersionInterval) error { func (f *File) SortBlocks() { f.removeDups() // otherwise sorting is unsafe + // semanticSortForExcludeVersionV is the Go version (plus leading "v") at which + // lines in exclude blocks start to use semantic sort instead of lexicographic sort. + // See go.dev/issue/60028. + const semanticSortForExcludeVersionV = "v1.21" + useSemanticSortForExclude := f.Go != nil && semver.Compare("v"+f.Go.Version, semanticSortForExcludeVersionV) >= 0 + for _, stmt := range f.Syntax.Stmt { block, ok := stmt.(*LineBlock) if !ok { continue } less := lineLess - if block.Token[0] == "retract" { + if block.Token[0] == "exclude" && useSemanticSortForExclude { + less = lineExcludeLess + } else if block.Token[0] == "retract" { less = lineRetractLess } sort.SliceStable(block.Line, func(i, j int) bool { @@ -1496,6 +1584,22 @@ func lineLess(li, lj *Line) bool { return len(li.Token) < len(lj.Token) } +// lineExcludeLess reports whether li should be sorted before lj for lines in +// an "exclude" block. +func lineExcludeLess(li, lj *Line) bool { + if len(li.Token) != 2 || len(lj.Token) != 2 { + // Not a known exclude specification. + // Fall back to sorting lexicographically. + return lineLess(li, lj) + } + // An exclude specification has two tokens: ModulePath and Version. + // Compare module path by string order and version by semver rules. 
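// A sketch of the new toolchain support in x/mod/modfile: Parse now accepts a
// toolchain directive and three-part go versions, and AddToolchainStmt /
// DropToolchainStmt edit it programmatically. The module path and versions below
// are arbitrary.
package main

import (
	"fmt"
	"log"

	"golang.org/x/mod/modfile"
)

func main() {
	data := []byte("module example.com/m\n\ngo 1.21.0\n\ntoolchain go1.21.3\n")

	f, err := modfile.Parse("go.mod", data, nil)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(f.Go.Version, f.Toolchain.Name) // 1.21.0 go1.21.3

	if err := f.AddToolchainStmt("go1.22rc1"); err != nil { // must match ToolchainRE
		log.Fatal(err)
	}
	out, err := f.Format()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s", out)
}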
+ if pi, pj := li.Token[0], lj.Token[0]; pi != pj { + return pi < pj + } + return semver.Compare(li.Token[1], lj.Token[1]) < 0 +} + // lineRetractLess returns whether li should be sorted before lj for lines in // a "retract" block. It treats each line as a version interval. Single versions // are compared as if they were intervals with the same low and high version. diff --git a/tools/vendor/golang.org/x/mod/modfile/work.go b/tools/vendor/golang.org/x/mod/modfile/work.go index 0c0e521525..d7b99376eb 100644 --- a/tools/vendor/golang.org/x/mod/modfile/work.go +++ b/tools/vendor/golang.org/x/mod/modfile/work.go @@ -12,9 +12,10 @@ import ( // A WorkFile is the parsed, interpreted form of a go.work file. type WorkFile struct { - Go *Go - Use []*Use - Replace []*Replace + Go *Go + Toolchain *Toolchain + Use []*Use + Replace []*Replace Syntax *FileSyntax } @@ -33,7 +34,7 @@ type Use struct { // data is the content of the file. // // fix is an optional function that canonicalizes module versions. -// If fix is nil, all module versions must be canonical (module.CanonicalVersion +// If fix is nil, all module versions must be canonical ([module.CanonicalVersion] // must return the same string). func ParseWork(file string, data []byte, fix VersionFixer) (*WorkFile, error) { fs, err := parse(file, data) @@ -82,7 +83,7 @@ func ParseWork(file string, data []byte, fix VersionFixer) (*WorkFile, error) { } // Cleanup cleans up the file f after any edit operations. -// To avoid quadratic behavior, modifications like DropRequire +// To avoid quadratic behavior, modifications like [WorkFile.DropRequire] // clear the entry but do not remove it from the slice. // Cleanup cleans out all the cleared entries. func (f *WorkFile) Cleanup() { @@ -109,7 +110,7 @@ func (f *WorkFile) Cleanup() { func (f *WorkFile) AddGoStmt(version string) error { if !GoVersionRE.MatchString(version) { - return fmt.Errorf("invalid language version string %q", version) + return fmt.Errorf("invalid language version %q", version) } if f.Go == nil { stmt := &Line{Token: []string{"go", version}} @@ -117,7 +118,7 @@ func (f *WorkFile) AddGoStmt(version string) error { Version: version, Syntax: stmt, } - // Find the first non-comment-only block that's and add + // Find the first non-comment-only block and add // the go statement before it. That will keep file comments at the top. i := 0 for i = 0; i < len(f.Syntax.Stmt); i++ { @@ -133,6 +134,56 @@ func (f *WorkFile) AddGoStmt(version string) error { return nil } +func (f *WorkFile) AddToolchainStmt(name string) error { + if !ToolchainRE.MatchString(name) { + return fmt.Errorf("invalid toolchain name %q", name) + } + if f.Toolchain == nil { + stmt := &Line{Token: []string{"toolchain", name}} + f.Toolchain = &Toolchain{ + Name: name, + Syntax: stmt, + } + // Find the go line and add the toolchain line after it. + // Or else find the first non-comment-only block and add + // the toolchain line before it. That will keep file comments at the top. + i := 0 + for i = 0; i < len(f.Syntax.Stmt); i++ { + if line, ok := f.Syntax.Stmt[i].(*Line); ok && len(line.Token) > 0 && line.Token[0] == "go" { + i++ + goto Found + } + } + for i = 0; i < len(f.Syntax.Stmt); i++ { + if _, ok := f.Syntax.Stmt[i].(*CommentBlock); !ok { + break + } + } + Found: + f.Syntax.Stmt = append(append(f.Syntax.Stmt[:i:i], stmt), f.Syntax.Stmt[i:]...) + } else { + f.Toolchain.Name = name + f.Syntax.updateLine(f.Toolchain.Syntax, "toolchain", name) + } + return nil +} + +// DropGoStmt deletes the go statement from the file. 
+func (f *WorkFile) DropGoStmt() { + if f.Go != nil { + f.Go.Syntax.markRemoved() + f.Go = nil + } +} + +// DropToolchainStmt deletes the toolchain statement from the file. +func (f *WorkFile) DropToolchainStmt() { + if f.Toolchain != nil { + f.Toolchain.Syntax.markRemoved() + f.Toolchain = nil + } +} + func (f *WorkFile) AddUse(diskPath, modulePath string) error { need := true for _, d := range f.Use { diff --git a/tools/vendor/golang.org/x/mod/module/module.go b/tools/vendor/golang.org/x/mod/module/module.go index e9dec6e614..2a364b229b 100644 --- a/tools/vendor/golang.org/x/mod/module/module.go +++ b/tools/vendor/golang.org/x/mod/module/module.go @@ -4,7 +4,7 @@ // Package module defines the module.Version type along with support code. // -// The module.Version type is a simple Path, Version pair: +// The [module.Version] type is a simple Path, Version pair: // // type Version struct { // Path string @@ -12,7 +12,7 @@ // } // // There are no restrictions imposed directly by use of this structure, -// but additional checking functions, most notably Check, verify that +// but additional checking functions, most notably [Check], verify that // a particular path, version pair is valid. // // # Escaped Paths @@ -140,7 +140,7 @@ type ModuleError struct { Err error } -// VersionError returns a ModuleError derived from a Version and error, +// VersionError returns a [ModuleError] derived from a [Version] and error, // or err itself if it is already such an error. func VersionError(v Version, err error) error { var mErr *ModuleError @@ -169,7 +169,7 @@ func (e *ModuleError) Unwrap() error { return e.Err } // An InvalidVersionError indicates an error specific to a version, with the // module path unknown or specified externally. // -// A ModuleError may wrap an InvalidVersionError, but an InvalidVersionError +// A [ModuleError] may wrap an InvalidVersionError, but an InvalidVersionError // must not wrap a ModuleError. type InvalidVersionError struct { Version string @@ -193,8 +193,8 @@ func (e *InvalidVersionError) Error() string { func (e *InvalidVersionError) Unwrap() error { return e.Err } // An InvalidPathError indicates a module, import, or file path doesn't -// satisfy all naming constraints. See CheckPath, CheckImportPath, -// and CheckFilePath for specific restrictions. +// satisfy all naming constraints. See [CheckPath], [CheckImportPath], +// and [CheckFilePath] for specific restrictions. type InvalidPathError struct { Kind string // "module", "import", or "file" Path string @@ -294,7 +294,7 @@ func fileNameOK(r rune) bool { } // CheckPath checks that a module path is valid. -// A valid module path is a valid import path, as checked by CheckImportPath, +// A valid module path is a valid import path, as checked by [CheckImportPath], // with three additional constraints. // First, the leading path element (up to the first slash, if any), // by convention a domain name, must contain only lower-case ASCII letters, @@ -380,7 +380,7 @@ const ( // checkPath returns an error describing why the path is not valid. // Because these checks apply to module, import, and file paths, // and because other checks may be applied, the caller is expected to wrap -// this error with InvalidPathError. +// this error with [InvalidPathError]. func checkPath(path string, kind pathKind) error { if !utf8.ValidString(path) { return fmt.Errorf("invalid UTF-8") @@ -532,7 +532,7 @@ var badWindowsNames = []string{ // they require ".vN" instead of "/vN", and for all N, not just N >= 2. 
// SplitPathVersion returns with ok = false when presented with // a path whose last path element does not satisfy the constraints -// applied by CheckPath, such as "example.com/pkg/v1" or "example.com/pkg/v1.2". +// applied by [CheckPath], such as "example.com/pkg/v1" or "example.com/pkg/v1.2". func SplitPathVersion(path string) (prefix, pathMajor string, ok bool) { if strings.HasPrefix(path, "gopkg.in/") { return splitGopkgIn(path) @@ -582,7 +582,7 @@ func splitGopkgIn(path string) (prefix, pathMajor string, ok bool) { // MatchPathMajor reports whether the semantic version v // matches the path major version pathMajor. // -// MatchPathMajor returns true if and only if CheckPathMajor returns nil. +// MatchPathMajor returns true if and only if [CheckPathMajor] returns nil. func MatchPathMajor(v, pathMajor string) bool { return CheckPathMajor(v, pathMajor) == nil } @@ -622,7 +622,7 @@ func CheckPathMajor(v, pathMajor string) error { // PathMajorPrefix returns the major-version tag prefix implied by pathMajor. // An empty PathMajorPrefix allows either v0 or v1. // -// Note that MatchPathMajor may accept some versions that do not actually begin +// Note that [MatchPathMajor] may accept some versions that do not actually begin // with this prefix: namely, it accepts a 'v0.0.0-' prefix for a '.v1' // pathMajor, even though that pathMajor implies 'v1' tagging. func PathMajorPrefix(pathMajor string) string { @@ -643,7 +643,7 @@ func PathMajorPrefix(pathMajor string) string { } // CanonicalVersion returns the canonical form of the version string v. -// It is the same as semver.Canonical(v) except that it preserves the special build suffix "+incompatible". +// It is the same as [semver.Canonical] except that it preserves the special build suffix "+incompatible". func CanonicalVersion(v string) string { cv := semver.Canonical(v) if semver.Build(v) == "+incompatible" { @@ -652,8 +652,8 @@ func CanonicalVersion(v string) string { return cv } -// Sort sorts the list by Path, breaking ties by comparing Version fields. -// The Version fields are interpreted as semantic versions (using semver.Compare) +// Sort sorts the list by Path, breaking ties by comparing [Version] fields. +// The Version fields are interpreted as semantic versions (using [semver.Compare]) // optionally followed by a tie-breaking suffix introduced by a slash character, // like in "v0.0.1/go.mod". func Sort(list []Version) { @@ -793,7 +793,7 @@ func unescapeString(escaped string) (string, bool) { } // MatchPrefixPatterns reports whether any path prefix of target matches one of -// the glob patterns (as defined by path.Match) in the comma-separated globs +// the glob patterns (as defined by [path.Match]) in the comma-separated globs // list. This implements the algorithm used when matching a module path to the // GOPRIVATE environment variable, as described by 'go help module-private'. // diff --git a/tools/vendor/golang.org/x/mod/module/pseudo.go b/tools/vendor/golang.org/x/mod/module/pseudo.go index f04ad37886..9cf19d3254 100644 --- a/tools/vendor/golang.org/x/mod/module/pseudo.go +++ b/tools/vendor/golang.org/x/mod/module/pseudo.go @@ -125,7 +125,7 @@ func IsPseudoVersion(v string) bool { } // IsZeroPseudoVersion returns whether v is a pseudo-version with a zero base, -// timestamp, and revision, as returned by ZeroPseudoVersion. +// timestamp, and revision, as returned by [ZeroPseudoVersion]. 
func IsZeroPseudoVersion(v string) bool { return v == ZeroPseudoVersion(semver.Major(v)) } diff --git a/tools/vendor/golang.org/x/mod/semver/semver.go b/tools/vendor/golang.org/x/mod/semver/semver.go index a30a22bf20..9a2dfd33a7 100644 --- a/tools/vendor/golang.org/x/mod/semver/semver.go +++ b/tools/vendor/golang.org/x/mod/semver/semver.go @@ -140,7 +140,7 @@ func Compare(v, w string) int { // Max canonicalizes its arguments and then returns the version string // that compares greater. // -// Deprecated: use Compare instead. In most cases, returning a canonicalized +// Deprecated: use [Compare] instead. In most cases, returning a canonicalized // version is not expected or desired. func Max(v, w string) string { v = Canonical(v) @@ -151,7 +151,7 @@ func Max(v, w string) string { return w } -// ByVersion implements sort.Interface for sorting semantic version strings. +// ByVersion implements [sort.Interface] for sorting semantic version strings. type ByVersion []string func (vs ByVersion) Len() int { return len(vs) } @@ -164,7 +164,7 @@ func (vs ByVersion) Less(i, j int) bool { return vs[i] < vs[j] } -// Sort sorts a list of semantic version strings using ByVersion. +// Sort sorts a list of semantic version strings using [ByVersion]. func Sort(list []string) { sort.Sort(ByVersion(list)) } diff --git a/tools/vendor/golang.org/x/sync/errgroup/errgroup.go b/tools/vendor/golang.org/x/sync/errgroup/errgroup.go index cbee7a4e23..b18efb743f 100644 --- a/tools/vendor/golang.org/x/sync/errgroup/errgroup.go +++ b/tools/vendor/golang.org/x/sync/errgroup/errgroup.go @@ -20,7 +20,7 @@ type token struct{} // A zero Group is valid, has no limit on the number of active goroutines, // and does not cancel on error. type Group struct { - cancel func() + cancel func(error) wg sync.WaitGroup @@ -43,7 +43,7 @@ func (g *Group) done() { // returns a non-nil error or the first time Wait returns, whichever occurs // first. func WithContext(ctx context.Context) (*Group, context.Context) { - ctx, cancel := context.WithCancel(ctx) + ctx, cancel := withCancelCause(ctx) return &Group{cancel: cancel}, ctx } @@ -52,7 +52,7 @@ func WithContext(ctx context.Context) (*Group, context.Context) { func (g *Group) Wait() error { g.wg.Wait() if g.cancel != nil { - g.cancel() + g.cancel(g.err) } return g.err } @@ -76,7 +76,7 @@ func (g *Group) Go(f func() error) { g.errOnce.Do(func() { g.err = err if g.cancel != nil { - g.cancel() + g.cancel(g.err) } }) } @@ -105,7 +105,7 @@ func (g *Group) TryGo(f func() error) bool { g.errOnce.Do(func() { g.err = err if g.cancel != nil { - g.cancel() + g.cancel(g.err) } }) } diff --git a/tools/vendor/golang.org/x/sync/errgroup/go120.go b/tools/vendor/golang.org/x/sync/errgroup/go120.go new file mode 100644 index 0000000000..7d419d3760 --- /dev/null +++ b/tools/vendor/golang.org/x/sync/errgroup/go120.go @@ -0,0 +1,14 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.20 +// +build go1.20 + +package errgroup + +import "context" + +func withCancelCause(parent context.Context) (context.Context, func(error)) { + return context.WithCancelCause(parent) +} diff --git a/tools/vendor/golang.org/x/sync/errgroup/pre_go120.go b/tools/vendor/golang.org/x/sync/errgroup/pre_go120.go new file mode 100644 index 0000000000..1795c18ace --- /dev/null +++ b/tools/vendor/golang.org/x/sync/errgroup/pre_go120.go @@ -0,0 +1,15 @@ +// Copyright 2023 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !go1.20 +// +build !go1.20 + +package errgroup + +import "context" + +func withCancelCause(parent context.Context) (context.Context, func(error)) { + ctx, cancel := context.WithCancel(parent) + return ctx, func(error) { cancel() } +} diff --git a/tools/vendor/golang.org/x/sys/execabs/execabs.go b/tools/vendor/golang.org/x/sys/execabs/execabs.go index b981cfbb4a..3bf40fdfec 100644 --- a/tools/vendor/golang.org/x/sys/execabs/execabs.go +++ b/tools/vendor/golang.org/x/sys/execabs/execabs.go @@ -63,7 +63,7 @@ func LookPath(file string) (string, error) { } func fixCmd(name string, cmd *exec.Cmd) { - if filepath.Base(name) == name && !filepath.IsAbs(cmd.Path) { + if filepath.Base(name) == name && !filepath.IsAbs(cmd.Path) && !isGo119ErrFieldSet(cmd) { // exec.Command was called with a bare binary name and // exec.LookPath returned a path which is not absolute. // Set cmd.lookPathErr and clear cmd.Path so that it diff --git a/tools/vendor/golang.org/x/sys/execabs/execabs_go118.go b/tools/vendor/golang.org/x/sys/execabs/execabs_go118.go index 6ab5f50894..2000064a81 100644 --- a/tools/vendor/golang.org/x/sys/execabs/execabs_go118.go +++ b/tools/vendor/golang.org/x/sys/execabs/execabs_go118.go @@ -7,6 +7,12 @@ package execabs +import "os/exec" + func isGo119ErrDot(err error) bool { return false } + +func isGo119ErrFieldSet(cmd *exec.Cmd) bool { + return false +} diff --git a/tools/vendor/golang.org/x/sys/execabs/execabs_go119.go b/tools/vendor/golang.org/x/sys/execabs/execabs_go119.go index 46c5b525e7..f364b34189 100644 --- a/tools/vendor/golang.org/x/sys/execabs/execabs_go119.go +++ b/tools/vendor/golang.org/x/sys/execabs/execabs_go119.go @@ -15,3 +15,7 @@ import ( func isGo119ErrDot(err error) bool { return errors.Is(err, exec.ErrDot) } + +func isGo119ErrFieldSet(cmd *exec.Cmd) bool { + return cmd.Err != nil +} diff --git a/tools/vendor/golang.org/x/sys/unix/ioctl_signed.go b/tools/vendor/golang.org/x/sys/unix/ioctl_signed.go new file mode 100644 index 0000000000..7def9580e6 --- /dev/null +++ b/tools/vendor/golang.org/x/sys/unix/ioctl_signed.go @@ -0,0 +1,70 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build aix || solaris +// +build aix solaris + +package unix + +import ( + "unsafe" +) + +// ioctl itself should not be exposed directly, but additional get/set +// functions for specific types are permissible. + +// IoctlSetInt performs an ioctl operation which sets an integer value +// on fd, using the specified request number. +func IoctlSetInt(fd int, req int, value int) error { + return ioctl(fd, req, uintptr(value)) +} + +// IoctlSetPointerInt performs an ioctl operation which sets an +// integer value on fd, using the specified request number. The ioctl +// argument is called with a pointer to the integer value, rather than +// passing the integer value directly. +func IoctlSetPointerInt(fd int, req int, value int) error { + v := int32(value) + return ioctlPtr(fd, req, unsafe.Pointer(&v)) +} + +// IoctlSetWinsize performs an ioctl on fd with a *Winsize argument. +// +// To change fd's window size, the req argument should be TIOCSWINSZ. +func IoctlSetWinsize(fd int, req int, value *Winsize) error { + // TODO: if we get the chance, remove the req parameter and + // hardcode TIOCSWINSZ. 
+ return ioctlPtr(fd, req, unsafe.Pointer(value)) +} + +// IoctlSetTermios performs an ioctl on fd with a *Termios. +// +// The req value will usually be TCSETA or TIOCSETA. +func IoctlSetTermios(fd int, req int, value *Termios) error { + // TODO: if we get the chance, remove the req parameter. + return ioctlPtr(fd, req, unsafe.Pointer(value)) +} + +// IoctlGetInt performs an ioctl operation which gets an integer value +// from fd, using the specified request number. +// +// A few ioctl requests use the return value as an output parameter; +// for those, IoctlRetInt should be used instead of this function. +func IoctlGetInt(fd int, req int) (int, error) { + var value int + err := ioctlPtr(fd, req, unsafe.Pointer(&value)) + return value, err +} + +func IoctlGetWinsize(fd int, req int) (*Winsize, error) { + var value Winsize + err := ioctlPtr(fd, req, unsafe.Pointer(&value)) + return &value, err +} + +func IoctlGetTermios(fd int, req int) (*Termios, error) { + var value Termios + err := ioctlPtr(fd, req, unsafe.Pointer(&value)) + return &value, err +} diff --git a/tools/vendor/golang.org/x/sys/unix/ioctl.go b/tools/vendor/golang.org/x/sys/unix/ioctl_unsigned.go similarity index 76% rename from tools/vendor/golang.org/x/sys/unix/ioctl.go rename to tools/vendor/golang.org/x/sys/unix/ioctl_unsigned.go index 1c51b0ec2b..649913d1ea 100644 --- a/tools/vendor/golang.org/x/sys/unix/ioctl.go +++ b/tools/vendor/golang.org/x/sys/unix/ioctl_unsigned.go @@ -2,13 +2,12 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build aix || darwin || dragonfly || freebsd || hurd || linux || netbsd || openbsd || solaris -// +build aix darwin dragonfly freebsd hurd linux netbsd openbsd solaris +//go:build darwin || dragonfly || freebsd || hurd || linux || netbsd || openbsd +// +build darwin dragonfly freebsd hurd linux netbsd openbsd package unix import ( - "runtime" "unsafe" ) @@ -27,7 +26,7 @@ func IoctlSetInt(fd int, req uint, value int) error { // passing the integer value directly. func IoctlSetPointerInt(fd int, req uint, value int) error { v := int32(value) - return ioctl(fd, req, uintptr(unsafe.Pointer(&v))) + return ioctlPtr(fd, req, unsafe.Pointer(&v)) } // IoctlSetWinsize performs an ioctl on fd with a *Winsize argument. @@ -36,9 +35,7 @@ func IoctlSetPointerInt(fd int, req uint, value int) error { func IoctlSetWinsize(fd int, req uint, value *Winsize) error { // TODO: if we get the chance, remove the req parameter and // hardcode TIOCSWINSZ. - err := ioctl(fd, req, uintptr(unsafe.Pointer(value))) - runtime.KeepAlive(value) - return err + return ioctlPtr(fd, req, unsafe.Pointer(value)) } // IoctlSetTermios performs an ioctl on fd with a *Termios. @@ -46,9 +43,7 @@ func IoctlSetWinsize(fd int, req uint, value *Winsize) error { // The req value will usually be TCSETA or TIOCSETA. func IoctlSetTermios(fd int, req uint, value *Termios) error { // TODO: if we get the chance, remove the req parameter. - err := ioctl(fd, req, uintptr(unsafe.Pointer(value))) - runtime.KeepAlive(value) - return err + return ioctlPtr(fd, req, unsafe.Pointer(value)) } // IoctlGetInt performs an ioctl operation which gets an integer value @@ -58,18 +53,18 @@ func IoctlSetTermios(fd int, req uint, value *Termios) error { // for those, IoctlRetInt should be used instead of this function. 
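For context on the wrappers being rerouted through ioctlPtr, a Linux-only usage sketch of the exported getter (illustrative only, not part of the vendored diff; assumes stdout is a terminal):

//go:build linux

package main

import (
	"fmt"
	"log"
	"os"

	"golang.org/x/sys/unix"
)

func main() {
	// IoctlGetWinsize now reaches the kernel via ioctlPtr rather than a raw uintptr cast.
	ws, err := unix.IoctlGetWinsize(int(os.Stdout.Fd()), unix.TIOCGWINSZ)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("terminal: %d rows x %d cols\n", ws.Row, ws.Col)
}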
func IoctlGetInt(fd int, req uint) (int, error) { var value int - err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) + err := ioctlPtr(fd, req, unsafe.Pointer(&value)) return value, err } func IoctlGetWinsize(fd int, req uint) (*Winsize, error) { var value Winsize - err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) + err := ioctlPtr(fd, req, unsafe.Pointer(&value)) return &value, err } func IoctlGetTermios(fd int, req uint) (*Termios, error) { var value Termios - err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) + err := ioctlPtr(fd, req, unsafe.Pointer(&value)) return &value, err } diff --git a/tools/vendor/golang.org/x/sys/unix/ioctl_zos.go b/tools/vendor/golang.org/x/sys/unix/ioctl_zos.go index 5384e7d91d..cdc21bf76d 100644 --- a/tools/vendor/golang.org/x/sys/unix/ioctl_zos.go +++ b/tools/vendor/golang.org/x/sys/unix/ioctl_zos.go @@ -17,25 +17,23 @@ import ( // IoctlSetInt performs an ioctl operation which sets an integer value // on fd, using the specified request number. -func IoctlSetInt(fd int, req uint, value int) error { +func IoctlSetInt(fd int, req int, value int) error { return ioctl(fd, req, uintptr(value)) } // IoctlSetWinsize performs an ioctl on fd with a *Winsize argument. // // To change fd's window size, the req argument should be TIOCSWINSZ. -func IoctlSetWinsize(fd int, req uint, value *Winsize) error { +func IoctlSetWinsize(fd int, req int, value *Winsize) error { // TODO: if we get the chance, remove the req parameter and // hardcode TIOCSWINSZ. - err := ioctl(fd, req, uintptr(unsafe.Pointer(value))) - runtime.KeepAlive(value) - return err + return ioctlPtr(fd, req, unsafe.Pointer(value)) } // IoctlSetTermios performs an ioctl on fd with a *Termios. // // The req value is expected to be TCSETS, TCSETSW, or TCSETSF -func IoctlSetTermios(fd int, req uint, value *Termios) error { +func IoctlSetTermios(fd int, req int, value *Termios) error { if (req != TCSETS) && (req != TCSETSW) && (req != TCSETSF) { return ENOSYS } @@ -49,22 +47,22 @@ func IoctlSetTermios(fd int, req uint, value *Termios) error { // // A few ioctl requests use the return value as an output parameter; // for those, IoctlRetInt should be used instead of this function. -func IoctlGetInt(fd int, req uint) (int, error) { +func IoctlGetInt(fd int, req int) (int, error) { var value int - err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) + err := ioctlPtr(fd, req, unsafe.Pointer(&value)) return value, err } -func IoctlGetWinsize(fd int, req uint) (*Winsize, error) { +func IoctlGetWinsize(fd int, req int) (*Winsize, error) { var value Winsize - err := ioctl(fd, req, uintptr(unsafe.Pointer(&value))) + err := ioctlPtr(fd, req, unsafe.Pointer(&value)) return &value, err } // IoctlGetTermios performs an ioctl on fd with a *Termios. // // The req value is expected to be TCGETS -func IoctlGetTermios(fd int, req uint) (*Termios, error) { +func IoctlGetTermios(fd int, req int) (*Termios, error) { var value Termios if req != TCGETS { return &value, ENOSYS diff --git a/tools/vendor/golang.org/x/sys/unix/mkall.sh b/tools/vendor/golang.org/x/sys/unix/mkall.sh index 8e3947c368..e6f31d374d 100644 --- a/tools/vendor/golang.org/x/sys/unix/mkall.sh +++ b/tools/vendor/golang.org/x/sys/unix/mkall.sh @@ -50,7 +50,7 @@ if [[ "$GOOS" = "linux" ]]; then # Use the Docker-based build system # Files generated through docker (use $cmd so you can Ctl-C the build or run) $cmd docker build --tag generate:$GOOS $GOOS - $cmd docker run --interactive --tty --volume $(cd -- "$(dirname -- "$0")/.." 
&& /bin/pwd):/build generate:$GOOS + $cmd docker run --interactive --tty --volume $(cd -- "$(dirname -- "$0")/.." && pwd):/build generate:$GOOS exit fi diff --git a/tools/vendor/golang.org/x/sys/unix/mkerrors.sh b/tools/vendor/golang.org/x/sys/unix/mkerrors.sh index 7456d9ddde..0c4d14929a 100644 --- a/tools/vendor/golang.org/x/sys/unix/mkerrors.sh +++ b/tools/vendor/golang.org/x/sys/unix/mkerrors.sh @@ -66,6 +66,7 @@ includes_Darwin=' #include #include #include +#include #include #include #include @@ -203,6 +204,7 @@ struct ltchars { #include #include #include +#include #include #include #include @@ -517,10 +519,11 @@ ccflags="$@" $2 ~ /^LOCK_(SH|EX|NB|UN)$/ || $2 ~ /^LO_(KEY|NAME)_SIZE$/ || $2 ~ /^LOOP_(CLR|CTL|GET|SET)_/ || - $2 ~ /^(AF|SOCK|SO|SOL|IPPROTO|IP|IPV6|TCP|MCAST|EVFILT|NOTE|SHUT|PROT|MAP|MFD|T?PACKET|MSG|SCM|MCL|DT|MADV|PR|LOCAL|TCPOPT)_/ || + $2 ~ /^(AF|SOCK|SO|SOL|IPPROTO|IP|IPV6|TCP|MCAST|EVFILT|NOTE|SHUT|PROT|MAP|MREMAP|MFD|T?PACKET|MSG|SCM|MCL|DT|MADV|PR|LOCAL|TCPOPT|UDP)_/ || $2 ~ /^NFC_(GENL|PROTO|COMM|RF|SE|DIRECTION|LLCP|SOCKPROTO)_/ || $2 ~ /^NFC_.*_(MAX)?SIZE$/ || $2 ~ /^RAW_PAYLOAD_/ || + $2 ~ /^[US]F_/ || $2 ~ /^TP_STATUS_/ || $2 ~ /^FALLOC_/ || $2 ~ /^ICMPV?6?_(FILTER|SEC)/ || @@ -738,7 +741,8 @@ main(void) e = errors[i].num; if(i > 0 && errors[i-1].num == e) continue; - strcpy(buf, strerror(e)); + strncpy(buf, strerror(e), sizeof(buf) - 1); + buf[sizeof(buf) - 1] = '\0'; // lowercase first letter: Bad -> bad, but STREAM -> STREAM. if(A <= buf[0] && buf[0] <= Z && a <= buf[1] && buf[1] <= z) buf[0] += a - A; @@ -757,7 +761,8 @@ main(void) e = signals[i].num; if(i > 0 && signals[i-1].num == e) continue; - strcpy(buf, strsignal(e)); + strncpy(buf, strsignal(e), sizeof(buf) - 1); + buf[sizeof(buf) - 1] = '\0'; // lowercase first letter: Bad -> bad, but STREAM -> STREAM. if(A <= buf[0] && buf[0] <= Z && a <= buf[1] && buf[1] <= z) buf[0] += a - A; diff --git a/tools/vendor/golang.org/x/sys/unix/mremap.go b/tools/vendor/golang.org/x/sys/unix/mremap.go new file mode 100644 index 0000000000..86213c05d6 --- /dev/null +++ b/tools/vendor/golang.org/x/sys/unix/mremap.go @@ -0,0 +1,40 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +//go:build linux +// +build linux + +package unix + +import "unsafe" + +type mremapMmapper struct { + mmapper + mremap func(oldaddr uintptr, oldlength uintptr, newlength uintptr, flags int, newaddr uintptr) (xaddr uintptr, err error) +} + +func (m *mremapMmapper) Mremap(oldData []byte, newLength int, flags int) (data []byte, err error) { + if newLength <= 0 || len(oldData) == 0 || len(oldData) != cap(oldData) || flags&MREMAP_FIXED != 0 { + return nil, EINVAL + } + + pOld := &oldData[cap(oldData)-1] + m.Lock() + defer m.Unlock() + bOld := m.active[pOld] + if bOld == nil || &bOld[0] != &oldData[0] { + return nil, EINVAL + } + newAddr, errno := m.mremap(uintptr(unsafe.Pointer(&bOld[0])), uintptr(len(bOld)), uintptr(newLength), flags, 0) + if errno != nil { + return nil, errno + } + bNew := unsafe.Slice((*byte)(unsafe.Pointer(newAddr)), newLength) + pNew := &bNew[cap(bNew)-1] + if flags&MREMAP_DONTUNMAP == 0 { + delete(m.active, pOld) + } + m.active[pNew] = bNew + return bNew, nil +} diff --git a/tools/vendor/golang.org/x/sys/unix/ptrace_darwin.go b/tools/vendor/golang.org/x/sys/unix/ptrace_darwin.go index 463c3eff7f..39dba6ca6a 100644 --- a/tools/vendor/golang.org/x/sys/unix/ptrace_darwin.go +++ b/tools/vendor/golang.org/x/sys/unix/ptrace_darwin.go @@ -7,6 +7,12 @@ package unix +import "unsafe" + func ptrace(request int, pid int, addr uintptr, data uintptr) error { return ptrace1(request, pid, addr, data) } + +func ptracePtr(request int, pid int, addr uintptr, data unsafe.Pointer) error { + return ptrace1Ptr(request, pid, addr, data) +} diff --git a/tools/vendor/golang.org/x/sys/unix/ptrace_ios.go b/tools/vendor/golang.org/x/sys/unix/ptrace_ios.go index ed0509a011..9ea66330a9 100644 --- a/tools/vendor/golang.org/x/sys/unix/ptrace_ios.go +++ b/tools/vendor/golang.org/x/sys/unix/ptrace_ios.go @@ -7,6 +7,12 @@ package unix +import "unsafe" + func ptrace(request int, pid int, addr uintptr, data uintptr) (err error) { return ENOTSUP } + +func ptracePtr(request int, pid int, addr uintptr, data unsafe.Pointer) (err error) { + return ENOTSUP +} diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_aix.go b/tools/vendor/golang.org/x/sys/unix/syscall_aix.go index 2db1b51e99..c406ae00f4 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_aix.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_aix.go @@ -292,9 +292,7 @@ func anyToSockaddr(fd int, rsa *RawSockaddrAny) (Sockaddr, error) { break } } - - bytes := (*[len(pp.Path)]byte)(unsafe.Pointer(&pp.Path[0]))[0:n] - sa.Name = string(bytes) + sa.Name = string(unsafe.Slice((*byte)(unsafe.Pointer(&pp.Path[0])), n)) return sa, nil case AF_INET: @@ -410,7 +408,8 @@ func (w WaitStatus) CoreDump() bool { return w&0x80 == 0x80 } func (w WaitStatus) TrapCause() int { return -1 } -//sys ioctl(fd int, req uint, arg uintptr) (err error) +//sys ioctl(fd int, req int, arg uintptr) (err error) +//sys ioctlPtr(fd int, req int, arg unsafe.Pointer) (err error) = ioctl // fcntl must never be called with cmd=F_DUP2FD because it doesn't work on AIX // There is no way to create a custom fcntl and to keep //sys fcntl easily, diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_aix_ppc.go b/tools/vendor/golang.org/x/sys/unix/syscall_aix_ppc.go index e92a0be163..f2871fa953 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_aix_ppc.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_aix_ppc.go @@ -8,7 +8,6 @@ package unix //sysnb Getrlimit(resource int, rlim *Rlimit) (err error) = getrlimit64 -//sysnb Setrlimit(resource int, rlim *Rlimit) (err error) = 
setrlimit64 //sys Seek(fd int, offset int64, whence int) (off int64, err error) = lseek64 //sys mmap(addr uintptr, length uintptr, prot int, flags int, fd int, offset int64) (xaddr uintptr, err error) diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_aix_ppc64.go b/tools/vendor/golang.org/x/sys/unix/syscall_aix_ppc64.go index 16eed17098..75718ec0f1 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_aix_ppc64.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_aix_ppc64.go @@ -8,7 +8,6 @@ package unix //sysnb Getrlimit(resource int, rlim *Rlimit) (err error) -//sysnb Setrlimit(resource int, rlim *Rlimit) (err error) //sys Seek(fd int, offset int64, whence int) (off int64, err error) = lseek //sys mmap(addr uintptr, length uintptr, prot int, flags int, fd int, offset int64) (xaddr uintptr, err error) = mmap64 diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_bsd.go b/tools/vendor/golang.org/x/sys/unix/syscall_bsd.go index eda42671f1..7705c3270b 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_bsd.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_bsd.go @@ -245,8 +245,7 @@ func anyToSockaddr(fd int, rsa *RawSockaddrAny) (Sockaddr, error) { break } } - bytes := (*[len(pp.Path)]byte)(unsafe.Pointer(&pp.Path[0]))[0:n] - sa.Name = string(bytes) + sa.Name = string(unsafe.Slice((*byte)(unsafe.Pointer(&pp.Path[0])), n)) return sa, nil case AF_INET: diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_darwin.go b/tools/vendor/golang.org/x/sys/unix/syscall_darwin.go index 192b071b3d..206921504c 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_darwin.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_darwin.go @@ -14,7 +14,6 @@ package unix import ( "fmt" - "runtime" "syscall" "unsafe" ) @@ -376,11 +375,10 @@ func Flistxattr(fd int, dest []byte) (sz int, err error) { func Kill(pid int, signum syscall.Signal) (err error) { return kill(pid, int(signum), 1) } //sys ioctl(fd int, req uint, arg uintptr) (err error) +//sys ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) = SYS_IOCTL func IoctlCtlInfo(fd int, ctlInfo *CtlInfo) error { - err := ioctl(fd, CTLIOCGINFO, uintptr(unsafe.Pointer(ctlInfo))) - runtime.KeepAlive(ctlInfo) - return err + return ioctlPtr(fd, CTLIOCGINFO, unsafe.Pointer(ctlInfo)) } // IfreqMTU is struct ifreq used to get or set a network device's MTU. @@ -394,16 +392,14 @@ type IfreqMTU struct { func IoctlGetIfreqMTU(fd int, ifname string) (*IfreqMTU, error) { var ifreq IfreqMTU copy(ifreq.Name[:], ifname) - err := ioctl(fd, SIOCGIFMTU, uintptr(unsafe.Pointer(&ifreq))) + err := ioctlPtr(fd, SIOCGIFMTU, unsafe.Pointer(&ifreq)) return &ifreq, err } // IoctlSetIfreqMTU performs the SIOCSIFMTU ioctl operation on fd to set the MTU // of the network device specified by ifreq.Name. 
func IoctlSetIfreqMTU(fd int, ifreq *IfreqMTU) error { - err := ioctl(fd, SIOCSIFMTU, uintptr(unsafe.Pointer(ifreq))) - runtime.KeepAlive(ifreq) - return err + return ioctlPtr(fd, SIOCSIFMTU, unsafe.Pointer(ifreq)) } //sys sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) = SYS_SYSCTL @@ -617,6 +613,7 @@ func SysctlKinfoProcSlice(name string, args ...int) ([]KinfoProc, error) { //sys Rmdir(path string) (err error) //sys Seek(fd int, offset int64, whence int) (newoffset int64, err error) = SYS_LSEEK //sys Select(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (n int, err error) +//sys Setattrlist(path string, attrlist *Attrlist, attrBuf []byte, options int) (err error) //sys Setegid(egid int) (err error) //sysnb Seteuid(euid int) (err error) //sysnb Setgid(gid int) (err error) @@ -626,7 +623,6 @@ func SysctlKinfoProcSlice(name string, args ...int) ([]KinfoProc, error) { //sys Setprivexec(flag int) (err error) //sysnb Setregid(rgid int, egid int) (err error) //sysnb Setreuid(ruid int, euid int) (err error) -//sysnb Setrlimit(which int, lim *Rlimit) (err error) //sysnb Setsid() (pid int, err error) //sysnb Settimeofday(tp *Timeval) (err error) //sysnb Setuid(uid int) (err error) @@ -680,7 +676,6 @@ func SysctlKinfoProcSlice(name string, args ...int) ([]KinfoProc, error) { // Kqueue_from_portset_np // Kqueue_portset // Getattrlist -// Setattrlist // Getdirentriesattr // Searchfs // Delete diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_darwin_amd64.go b/tools/vendor/golang.org/x/sys/unix/syscall_darwin_amd64.go index b37310ce9b..9fa879806b 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_darwin_amd64.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_darwin_amd64.go @@ -47,5 +47,6 @@ func Syscall9(num, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r1, r2 uintptr, //sys getfsstat(buf unsafe.Pointer, size uintptr, flags int) (n int, err error) = SYS_GETFSSTAT64 //sys Lstat(path string, stat *Stat_t) (err error) = SYS_LSTAT64 //sys ptrace1(request int, pid int, addr uintptr, data uintptr) (err error) = SYS_ptrace +//sys ptrace1Ptr(request int, pid int, addr unsafe.Pointer, data uintptr) (err error) = SYS_ptrace //sys Stat(path string, stat *Stat_t) (err error) = SYS_STAT64 //sys Statfs(path string, stat *Statfs_t) (err error) = SYS_STATFS64 diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_darwin_arm64.go b/tools/vendor/golang.org/x/sys/unix/syscall_darwin_arm64.go index d51ec99630..f17b8c526a 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_darwin_arm64.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_darwin_arm64.go @@ -47,5 +47,6 @@ func Syscall9(num, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r1, r2 uintptr, //sys getfsstat(buf unsafe.Pointer, size uintptr, flags int) (n int, err error) = SYS_GETFSSTAT //sys Lstat(path string, stat *Stat_t) (err error) //sys ptrace1(request int, pid int, addr uintptr, data uintptr) (err error) = SYS_ptrace +//sys ptrace1Ptr(request int, pid int, addr unsafe.Pointer, data uintptr) (err error) = SYS_ptrace //sys Stat(path string, stat *Stat_t) (err error) //sys Statfs(path string, stat *Statfs_t) (err error) diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_dragonfly.go b/tools/vendor/golang.org/x/sys/unix/syscall_dragonfly.go index a41111a794..d4ce988e72 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_dragonfly.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_dragonfly.go @@ -172,6 +172,7 @@ func Getfsstat(buf []Statfs_t, flags int) (n int, err error) { } //sys 
ioctl(fd int, req uint, arg uintptr) (err error) +//sys ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) = SYS_IOCTL //sys sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) = SYS___SYSCTL @@ -325,7 +326,6 @@ func Sendfile(outfd int, infd int, offset *int64, count int) (written int, err e //sysnb Setreuid(ruid int, euid int) (err error) //sysnb Setresgid(rgid int, egid int, sgid int) (err error) //sysnb Setresuid(ruid int, euid int, suid int) (err error) -//sysnb Setrlimit(which int, lim *Rlimit) (err error) //sysnb Setsid() (pid int, err error) //sysnb Settimeofday(tp *Timeval) (err error) //sysnb Setuid(uid int) (err error) diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_freebsd.go b/tools/vendor/golang.org/x/sys/unix/syscall_freebsd.go index d50b9dc250..afb10106f6 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_freebsd.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_freebsd.go @@ -161,7 +161,8 @@ func Getfsstat(buf []Statfs_t, flags int) (n int, err error) { return } -//sys ioctl(fd int, req uint, arg uintptr) (err error) +//sys ioctl(fd int, req uint, arg uintptr) (err error) = SYS_IOCTL +//sys ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) = SYS_IOCTL //sys sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) = SYS___SYSCTL @@ -253,6 +254,7 @@ func Sendfile(outfd int, infd int, offset *int64, count int) (written int, err e } //sys ptrace(request int, pid int, addr uintptr, data int) (err error) +//sys ptracePtr(request int, pid int, addr unsafe.Pointer, data int) (err error) = SYS_PTRACE func PtraceAttach(pid int) (err error) { return ptrace(PT_ATTACH, pid, 0, 0) @@ -267,19 +269,36 @@ func PtraceDetach(pid int) (err error) { } func PtraceGetFpRegs(pid int, fpregsout *FpReg) (err error) { - return ptrace(PT_GETFPREGS, pid, uintptr(unsafe.Pointer(fpregsout)), 0) + return ptracePtr(PT_GETFPREGS, pid, unsafe.Pointer(fpregsout), 0) } func PtraceGetRegs(pid int, regsout *Reg) (err error) { - return ptrace(PT_GETREGS, pid, uintptr(unsafe.Pointer(regsout)), 0) + return ptracePtr(PT_GETREGS, pid, unsafe.Pointer(regsout), 0) +} + +func PtraceIO(req int, pid int, offs uintptr, out []byte, countin int) (count int, err error) { + ioDesc := PtraceIoDesc{ + Op: int32(req), + Offs: offs, + } + if countin > 0 { + _ = out[:countin] // check bounds + ioDesc.Addr = &out[0] + } else if out != nil { + ioDesc.Addr = (*byte)(unsafe.Pointer(&_zero)) + } + ioDesc.SetLen(countin) + + err = ptracePtr(PT_IO, pid, unsafe.Pointer(&ioDesc), 0) + return int(ioDesc.Len), err } func PtraceLwpEvents(pid int, enable int) (err error) { return ptrace(PT_LWP_EVENTS, pid, 0, enable) } -func PtraceLwpInfo(pid int, info uintptr) (err error) { - return ptrace(PT_LWPINFO, pid, info, int(unsafe.Sizeof(PtraceLwpInfoStruct{}))) +func PtraceLwpInfo(pid int, info *PtraceLwpInfoStruct) (err error) { + return ptracePtr(PT_LWPINFO, pid, unsafe.Pointer(info), int(unsafe.Sizeof(*info))) } func PtracePeekData(pid int, addr uintptr, out []byte) (count int, err error) { @@ -299,13 +318,25 @@ func PtracePokeText(pid int, addr uintptr, data []byte) (count int, err error) { } func PtraceSetRegs(pid int, regs *Reg) (err error) { - return ptrace(PT_SETREGS, pid, uintptr(unsafe.Pointer(regs)), 0) + return ptracePtr(PT_SETREGS, pid, unsafe.Pointer(regs), 0) } func PtraceSingleStep(pid int) (err error) { return ptrace(PT_STEP, pid, 1, 0) } +func Dup3(oldfd, newfd, flags int) error { + if oldfd == newfd || flags&^O_CLOEXEC != 0 { + return 
EINVAL + } + how := F_DUP2FD + if flags&O_CLOEXEC != 0 { + how = F_DUP2FD_CLOEXEC + } + _, err := fcntl(oldfd, how, newfd) + return err +} + /* * Exposed directly */ @@ -402,7 +433,6 @@ func PtraceSingleStep(pid int) (err error) { //sysnb Setreuid(ruid int, euid int) (err error) //sysnb Setresgid(rgid int, egid int, sgid int) (err error) //sysnb Setresuid(ruid int, euid int, suid int) (err error) -//sysnb Setrlimit(which int, lim *Rlimit) (err error) //sysnb Setsid() (pid int, err error) //sysnb Settimeofday(tp *Timeval) (err error) //sysnb Setuid(uid int) (err error) diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_386.go b/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_386.go index 6a91d471d0..b8da510043 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_386.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_386.go @@ -42,6 +42,10 @@ func (cmsg *Cmsghdr) SetLen(length int) { cmsg.Len = uint32(length) } +func (d *PtraceIoDesc) SetLen(length int) { + d.Len = uint32(length) +} + func sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) { var writtenOut uint64 = 0 _, _, e1 := Syscall9(SYS_SENDFILE, uintptr(infd), uintptr(outfd), uintptr(*offset), uintptr((*offset)>>32), uintptr(count), 0, uintptr(unsafe.Pointer(&writtenOut)), 0, 0) @@ -57,16 +61,5 @@ func sendfile(outfd int, infd int, offset *int64, count int) (written int, err e func Syscall9(num, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r1, r2 uintptr, err syscall.Errno) func PtraceGetFsBase(pid int, fsbase *int64) (err error) { - return ptrace(PT_GETFSBASE, pid, uintptr(unsafe.Pointer(fsbase)), 0) -} - -func PtraceIO(req int, pid int, offs uintptr, out []byte, countin int) (count int, err error) { - ioDesc := PtraceIoDesc{ - Op: int32(req), - Offs: offs, - Addr: uintptr(unsafe.Pointer(&out[0])), // TODO(#58351): this is not safe. - Len: uint32(countin), - } - err = ptrace(PT_IO, pid, uintptr(unsafe.Pointer(&ioDesc)), 0) - return int(ioDesc.Len), err + return ptracePtr(PT_GETFSBASE, pid, unsafe.Pointer(fsbase), 0) } diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_amd64.go b/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_amd64.go index 48110a0abb..47155c4839 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_amd64.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_amd64.go @@ -42,6 +42,10 @@ func (cmsg *Cmsghdr) SetLen(length int) { cmsg.Len = uint32(length) } +func (d *PtraceIoDesc) SetLen(length int) { + d.Len = uint64(length) +} + func sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) { var writtenOut uint64 = 0 _, _, e1 := Syscall9(SYS_SENDFILE, uintptr(infd), uintptr(outfd), uintptr(*offset), uintptr(count), 0, uintptr(unsafe.Pointer(&writtenOut)), 0, 0, 0) @@ -57,16 +61,5 @@ func sendfile(outfd int, infd int, offset *int64, count int) (written int, err e func Syscall9(num, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r1, r2 uintptr, err syscall.Errno) func PtraceGetFsBase(pid int, fsbase *int64) (err error) { - return ptrace(PT_GETFSBASE, pid, uintptr(unsafe.Pointer(fsbase)), 0) -} - -func PtraceIO(req int, pid int, offs uintptr, out []byte, countin int) (count int, err error) { - ioDesc := PtraceIoDesc{ - Op: int32(req), - Offs: offs, - Addr: uintptr(unsafe.Pointer(&out[0])), // TODO(#58351): this is not safe. 
- Len: uint64(countin), - } - err = ptrace(PT_IO, pid, uintptr(unsafe.Pointer(&ioDesc)), 0) - return int(ioDesc.Len), err + return ptracePtr(PT_GETFSBASE, pid, unsafe.Pointer(fsbase), 0) } diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_arm.go b/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_arm.go index 52f1d4b75a..08932093fa 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_arm.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_arm.go @@ -42,6 +42,10 @@ func (cmsg *Cmsghdr) SetLen(length int) { cmsg.Len = uint32(length) } +func (d *PtraceIoDesc) SetLen(length int) { + d.Len = uint32(length) +} + func sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) { var writtenOut uint64 = 0 _, _, e1 := Syscall9(SYS_SENDFILE, uintptr(infd), uintptr(outfd), uintptr(*offset), uintptr((*offset)>>32), uintptr(count), 0, uintptr(unsafe.Pointer(&writtenOut)), 0, 0) @@ -55,14 +59,3 @@ func sendfile(outfd int, infd int, offset *int64, count int) (written int, err e } func Syscall9(num, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r1, r2 uintptr, err syscall.Errno) - -func PtraceIO(req int, pid int, offs uintptr, out []byte, countin int) (count int, err error) { - ioDesc := PtraceIoDesc{ - Op: int32(req), - Offs: offs, - Addr: uintptr(unsafe.Pointer(&out[0])), // TODO(#58351): this is not safe. - Len: uint32(countin), - } - err = ptrace(PT_IO, pid, uintptr(unsafe.Pointer(&ioDesc)), 0) - return int(ioDesc.Len), err -} diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_arm64.go b/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_arm64.go index 5537ee4f2e..d151a0d0e5 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_arm64.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_arm64.go @@ -42,6 +42,10 @@ func (cmsg *Cmsghdr) SetLen(length int) { cmsg.Len = uint32(length) } +func (d *PtraceIoDesc) SetLen(length int) { + d.Len = uint64(length) +} + func sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) { var writtenOut uint64 = 0 _, _, e1 := Syscall9(SYS_SENDFILE, uintptr(infd), uintptr(outfd), uintptr(*offset), uintptr(count), 0, uintptr(unsafe.Pointer(&writtenOut)), 0, 0, 0) @@ -55,14 +59,3 @@ func sendfile(outfd int, infd int, offset *int64, count int) (written int, err e } func Syscall9(num, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r1, r2 uintptr, err syscall.Errno) - -func PtraceIO(req int, pid int, offs uintptr, out []byte, countin int) (count int, err error) { - ioDesc := PtraceIoDesc{ - Op: int32(req), - Offs: offs, - Addr: uintptr(unsafe.Pointer(&out[0])), // TODO(#58351): this is not safe. 
- Len: uint64(countin), - } - err = ptrace(PT_IO, pid, uintptr(unsafe.Pointer(&ioDesc)), 0) - return int(ioDesc.Len), err -} diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_riscv64.go b/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_riscv64.go index 164abd5d21..d5cd64b378 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_riscv64.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_freebsd_riscv64.go @@ -42,6 +42,10 @@ func (cmsg *Cmsghdr) SetLen(length int) { cmsg.Len = uint32(length) } +func (d *PtraceIoDesc) SetLen(length int) { + d.Len = uint64(length) +} + func sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) { var writtenOut uint64 = 0 _, _, e1 := Syscall9(SYS_SENDFILE, uintptr(infd), uintptr(outfd), uintptr(*offset), uintptr(count), 0, uintptr(unsafe.Pointer(&writtenOut)), 0, 0, 0) @@ -55,14 +59,3 @@ func sendfile(outfd int, infd int, offset *int64, count int) (written int, err e } func Syscall9(num, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r1, r2 uintptr, err syscall.Errno) - -func PtraceIO(req int, pid int, offs uintptr, out []byte, countin int) (count int, err error) { - ioDesc := PtraceIoDesc{ - Op: int32(req), - Offs: offs, - Addr: uintptr(unsafe.Pointer(&out[0])), // TODO(#58351): this is not safe. - Len: uint64(countin), - } - err = ptrace(PT_IO, pid, uintptr(unsafe.Pointer(&ioDesc)), 0) - return int(ioDesc.Len), err -} diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_hurd.go b/tools/vendor/golang.org/x/sys/unix/syscall_hurd.go index 4ffb64808d..381fd4673b 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_hurd.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_hurd.go @@ -20,3 +20,11 @@ func ioctl(fd int, req uint, arg uintptr) (err error) { } return } + +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + r0, er := C.ioctl(C.int(fd), C.ulong(req), C.uintptr_t(uintptr(arg))) + if r0 == -1 && er != nil { + err = er + } + return +} diff --git a/tools/vendor/golang.org/x/sys/unix/syscall_linux.go b/tools/vendor/golang.org/x/sys/unix/syscall_linux.go index 5443dddd48..39de5f1430 100644 --- a/tools/vendor/golang.org/x/sys/unix/syscall_linux.go +++ b/tools/vendor/golang.org/x/sys/unix/syscall_linux.go @@ -1015,8 +1015,7 @@ func anyToSockaddr(fd int, rsa *RawSockaddrAny) (Sockaddr, error) { for n < len(pp.Path) && pp.Path[n] != 0 { n++ } - bytes := (*[len(pp.Path)]byte)(unsafe.Pointer(&pp.Path[0]))[0:n] - sa.Name = string(bytes) + sa.Name = string(unsafe.Slice((*byte)(unsafe.Pointer(&pp.Path[0])), n)) return sa, nil case AF_INET: @@ -1365,6 +1364,10 @@ func SetsockoptTCPRepairOpt(fd, level, opt int, o []TCPRepairOpt) (err error) { return setsockopt(fd, level, opt, unsafe.Pointer(&o[0]), uintptr(SizeofTCPRepairOpt*len(o))) } +func SetsockoptTCPMD5Sig(fd, level, opt int, s *TCPMD5Sig) error { + return setsockopt(fd, level, opt, unsafe.Pointer(s), unsafe.Sizeof(*s)) +} + // Keyctl Commands (http://man7.org/linux/man-pages/man2/keyctl.2.html) // KeyctlInt calls keyctl commands in which each argument is an int. 
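A small self-contained sketch (not part of the vendored diff) of the unsafe.Slice rewrite applied in the hunks above; the array length and socket path are made up, mirroring a RawSockaddrUnix-style path field:

package main

import (
	"fmt"
	"unsafe"
)

func main() {
	var path [108]int8 // not a byte array, so a plain string(path[:n]) would not compile
	raw := []byte("/tmp/example.sock")
	for i, b := range raw {
		path[i] = int8(b)
	}
	n := len(raw)

	// Old pattern: cast to a fixed-size byte array, then slice.
	oldStyle := string((*[108]byte)(unsafe.Pointer(&path[0]))[0:n])
	// New pattern: unsafe.Slice builds the byte view without naming the length.
	newStyle := string(unsafe.Slice((*byte)(unsafe.Pointer(&path[0])), n))

	fmt.Println(oldStyle == newStyle, newStyle) // true /tmp/example.sock
}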
@@ -1579,6 +1582,7 @@ func BindToDevice(fd int, device string) (err error) { } //sys ptrace(request int, pid int, addr uintptr, data uintptr) (err error) +//sys ptracePtr(request int, pid int, addr uintptr, data unsafe.Pointer) (err error) = SYS_PTRACE func ptracePeek(req int, pid int, addr uintptr, out []byte) (count int, err error) { // The peek requests are machine-size oriented, so we wrap it @@ -1596,7 +1600,7 @@ func ptracePeek(req int, pid int, addr uintptr, out []byte) (count int, err erro // boundary. n := 0 if addr%SizeofPtr != 0 { - err = ptrace(req, pid, addr-addr%SizeofPtr, uintptr(unsafe.Pointer(&buf[0]))) + err = ptracePtr(req, pid, addr-addr%SizeofPtr, unsafe.Pointer(&buf[0])) if err != nil { return 0, err } @@ -1608,7 +1612,7 @@ func ptracePeek(req int, pid int, addr uintptr, out []byte) (count int, err erro for len(out) > 0 { // We use an internal buffer to guarantee alignment. // It's not documented if this is necessary, but we're paranoid. - err = ptrace(req, pid, addr+uintptr(n), uintptr(unsafe.Pointer(&buf[0]))) + err = ptracePtr(req, pid, addr+uintptr(n), unsafe.Pointer(&buf[0])) if err != nil { return n, err } @@ -1640,7 +1644,7 @@ func ptracePoke(pokeReq int, peekReq int, pid int, addr uintptr, data []byte) (c n := 0 if addr%SizeofPtr != 0 { var buf [SizeofPtr]byte - err = ptrace(peekReq, pid, addr-addr%SizeofPtr, uintptr(unsafe.Pointer(&buf[0]))) + err = ptracePtr(peekReq, pid, addr-addr%SizeofPtr, unsafe.Pointer(&buf[0])) if err != nil { return 0, err } @@ -1667,7 +1671,7 @@ func ptracePoke(pokeReq int, peekReq int, pid int, addr uintptr, data []byte) (c // Trailing edge. if len(data) > 0 { var buf [SizeofPtr]byte - err = ptrace(peekReq, pid, addr+uintptr(n), uintptr(unsafe.Pointer(&buf[0]))) + err = ptracePtr(peekReq, pid, addr+uintptr(n), unsafe.Pointer(&buf[0])) if err != nil { return n, err } @@ -1695,12 +1699,23 @@ func PtracePokeUser(pid int, addr uintptr, data []byte) (count int, err error) { return ptracePoke(PTRACE_POKEUSR, PTRACE_PEEKUSR, pid, addr, data) } +// elfNT_PRSTATUS is a copy of the debug/elf.NT_PRSTATUS constant so +// x/sys/unix doesn't need to depend on debug/elf and thus +// compress/zlib, debug/dwarf, and other packages. 
+const elfNT_PRSTATUS = 1 + func PtraceGetRegs(pid int, regsout *PtraceRegs) (err error) { - return ptrace(PTRACE_GETREGS, pid, 0, uintptr(unsafe.Pointer(regsout))) + var iov Iovec + iov.Base = (*byte)(unsafe.Pointer(regsout)) + iov.SetLen(int(unsafe.Sizeof(*regsout))) + return ptracePtr(PTRACE_GETREGSET, pid, uintptr(elfNT_PRSTATUS), unsafe.Pointer(&iov)) } func PtraceSetRegs(pid int, regs *PtraceRegs) (err error) { - return ptrace(PTRACE_SETREGS, pid, 0, uintptr(unsafe.Pointer(regs))) + var iov Iovec + iov.Base = (*byte)(unsafe.Pointer(regs)) + iov.SetLen(int(unsafe.Sizeof(*regs))) + return ptracePtr(PTRACE_SETREGSET, pid, uintptr(elfNT_PRSTATUS), unsafe.Pointer(&iov)) } func PtraceSetOptions(pid int, options int) (err error) { @@ -1709,7 +1724,7 @@ func PtraceSetOptions(pid int, options int) (err error) { func PtraceGetEventMsg(pid int) (msg uint, err error) { var data _C_long - err = ptrace(PTRACE_GETEVENTMSG, pid, 0, uintptr(unsafe.Pointer(&data))) + err = ptracePtr(PTRACE_GETEVENTMSG, pid, 0, unsafe.Pointer(&data)) msg = uint(data) return } @@ -1869,7 +1884,6 @@ func Getpgrp() (pid int) { //sys OpenTree(dfd int, fileName string, flags uint) (r int, err error) //sys PerfEventOpen(attr *PerfEventAttr, pid int, cpu int, groupFd int, flags int) (fd int, err error) //sys PivotRoot(newroot string, putold string) (err error) = SYS_PIVOT_ROOT -//sysnb Prlimit(pid int, resource int, newlimit *Rlimit, old *Rlimit) (err error) = SYS_PRLIMIT64 //sys Prctl(option int, arg2 uintptr, arg3 uintptr, arg4 uintptr, arg5 uintptr) (err error) //sys Pselect(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timespec, sigmask *Sigset_t) (n int, err error) = SYS_PSELECT6 //sys read(fd int, p []byte) (n int, err error) @@ -1883,6 +1897,15 @@ func Getpgrp() (pid int) { //sysnb Settimeofday(tv *Timeval) (err error) //sys Setns(fd int, nstype int) (err error) +//go:linkname syscall_prlimit syscall.prlimit +func syscall_prlimit(pid, resource int, newlimit, old *syscall.Rlimit) error + +func Prlimit(pid, resource int, newlimit, old *Rlimit) error { + // Just call the syscall version, because as of Go 1.21 + // it will affect starting a new process. + return syscall_prlimit(pid, resource, (*syscall.Rlimit)(newlimit), (*syscall.Rlimit)(old)) +} + // PrctlRetInt performs a prctl operation specified by option and further // optional arguments arg2 through arg5 depending on option. It returns a // non-negative integer that is returned by the prctl syscall. @@ -2101,11 +2124,15 @@ func writevRacedetect(iovecs []Iovec, n int) { // mmap varies by architecture; see syscall_linux_*.go. 
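A Linux-only usage sketch of Prlimit after the reroute through syscall.prlimit (illustrative only, not part of the vendored diff); it queries RLIMIT_NOFILE for the calling process without changing it:

//go:build linux

package main

import (
	"fmt"
	"log"

	"golang.org/x/sys/unix"
)

func main() {
	var lim unix.Rlimit
	// pid 0 means the calling process; a nil new limit only queries.
	if err := unix.Prlimit(0, unix.RLIMIT_NOFILE, nil, &lim); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("RLIMIT_NOFILE: cur=%d max=%d\n", lim.Cur, lim.Max)
}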
//sys munmap(addr uintptr, length uintptr) (err error) +//sys mremap(oldaddr uintptr, oldlength uintptr, newlength uintptr, flags int, newaddr uintptr) (xaddr uintptr, err error) -var mapper = &mmapper{ - active: make(map[*byte][]byte), - mmap: mmap, - munmap: munmap, +var mapper = &mremapMmapper{ + mmapper: mmapper{ + active: make(map[*byte][]byte), + mmap: mmap, + munmap: munmap, + }, + mremap: mremap, } func Mmap(fd int, offset int64, length int, prot int, flags int) (data []byte, err error) { @@ -2116,6 +2143,10 @@ func Munmap(b []byte) (err error) { return mapper.Munmap(b) } +func Mremap(oldData []byte, newLength int, flags int) (data []byte, err error) { + return mapper.Mremap(oldData, newLength, flags) +} + //sys Madvise(b []byte, advice int) (err error) //sys Mprotect(b []byte, prot int) (err error) //sys Mlock(b []byte) (err error) @@ -2154,6 +2185,14 @@ func isGroupMember(gid int) bool { return false } +func isCapDacOverrideSet() bool { + hdr := CapUserHeader{Version: LINUX_CAPABILITY_VERSION_3} + data := [2]CapUserData{} + err := Capget(&hdr, &data[0]) + + return err == nil && data[0].Effective&(1< 0 { + _p1 = unsafe.Pointer(&attrBuf[0]) + } else { + _p1 = unsafe.Pointer(&_zero) + } + _, _, e1 := syscall_syscall6(libc_setattrlist_trampoline_addr, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(attrlist)), uintptr(_p1), uintptr(len(attrBuf)), uintptr(options), 0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +var libc_setattrlist_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_setattrlist setattrlist "/usr/lib/libSystem.B.dylib" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Setegid(egid int) (err error) { _, _, e1 := syscall_syscall(libc_setegid_trampoline_addr, uintptr(egid), 0, 0) if e1 != 0 { @@ -2115,20 +2148,6 @@ var libc_setreuid_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := syscall_rawSyscall(libc_setrlimit_trampoline_addr, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -var libc_setrlimit_trampoline_addr uintptr - -//go:cgo_import_dynamic libc_setrlimit setrlimit "/usr/lib/libSystem.B.dylib" - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setsid() (pid int, err error) { r0, _, e1 := syscall_rawSyscall(libc_setsid_trampoline_addr, 0, 0, 0) pid = int(r0) @@ -2502,6 +2521,14 @@ func ptrace1(request int, pid int, addr uintptr, data uintptr) (err error) { return } +func ptrace1Ptr(request int, pid int, addr uintptr, data unsafe.Pointer) (err error) { + _, _, e1 := syscall_syscall6(libc_ptrace_trampoline_addr, uintptr(request), uintptr(pid), addr, uintptr(data), 0, 0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + var libc_ptrace_trampoline_addr uintptr //go:cgo_import_dynamic libc_ptrace ptrace "/usr/lib/libSystem.B.dylib" diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_darwin_amd64.s b/tools/vendor/golang.org/x/sys/unix/zsyscall_darwin_amd64.s index 95fe4c0eb9..4baaed0bc1 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_darwin_amd64.s +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_darwin_amd64.s @@ -705,6 +705,11 @@ TEXT libc_select_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_select_trampoline_addr(SB), RODATA, $8 DATA ·libc_select_trampoline_addr(SB)/8, $libc_select_trampoline<>(SB) +TEXT libc_setattrlist_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_setattrlist(SB) +GLOBL ·libc_setattrlist_trampoline_addr(SB), 
RODATA, $8 +DATA ·libc_setattrlist_trampoline_addr(SB)/8, $libc_setattrlist_trampoline<>(SB) + TEXT libc_setegid_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_setegid(SB) @@ -759,12 +764,6 @@ TEXT libc_setreuid_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_setreuid_trampoline_addr(SB), RODATA, $8 DATA ·libc_setreuid_trampoline_addr(SB)/8, $libc_setreuid_trampoline<>(SB) -TEXT libc_setrlimit_trampoline<>(SB),NOSPLIT,$0-0 - JMP libc_setrlimit(SB) - -GLOBL ·libc_setrlimit_trampoline_addr(SB), RODATA, $8 -DATA ·libc_setrlimit_trampoline_addr(SB)/8, $libc_setrlimit_trampoline<>(SB) - TEXT libc_setsid_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_setsid(SB) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_darwin_arm64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_darwin_arm64.go index 26a0fdc505..51d6f3fb25 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_darwin_arm64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_darwin_arm64.go @@ -725,6 +725,14 @@ func ioctl(fd int, req uint, arg uintptr) (err error) { return } +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := syscall_syscall(libc_ioctl_trampoline_addr, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + var libc_ioctl_trampoline_addr uintptr //go:cgo_import_dynamic libc_ioctl ioctl "/usr/lib/libSystem.B.dylib" @@ -1984,6 +1992,31 @@ var libc_select_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func Setattrlist(path string, attrlist *Attrlist, attrBuf []byte, options int) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + var _p1 unsafe.Pointer + if len(attrBuf) > 0 { + _p1 = unsafe.Pointer(&attrBuf[0]) + } else { + _p1 = unsafe.Pointer(&_zero) + } + _, _, e1 := syscall_syscall6(libc_setattrlist_trampoline_addr, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(attrlist)), uintptr(_p1), uintptr(len(attrBuf)), uintptr(options), 0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +var libc_setattrlist_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_setattrlist setattrlist "/usr/lib/libSystem.B.dylib" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Setegid(egid int) (err error) { _, _, e1 := syscall_syscall(libc_setegid_trampoline_addr, uintptr(egid), 0, 0) if e1 != 0 { @@ -2115,20 +2148,6 @@ var libc_setreuid_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := syscall_rawSyscall(libc_setrlimit_trampoline_addr, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -var libc_setrlimit_trampoline_addr uintptr - -//go:cgo_import_dynamic libc_setrlimit setrlimit "/usr/lib/libSystem.B.dylib" - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setsid() (pid int, err error) { r0, _, e1 := syscall_rawSyscall(libc_setsid_trampoline_addr, 0, 0, 0) pid = int(r0) @@ -2502,6 +2521,14 @@ func ptrace1(request int, pid int, addr uintptr, data uintptr) (err error) { return } +func ptrace1Ptr(request int, pid int, addr uintptr, data unsafe.Pointer) (err error) { + _, _, e1 := syscall_syscall6(libc_ptrace_trampoline_addr, uintptr(request), uintptr(pid), addr, uintptr(data), 0, 0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + var libc_ptrace_trampoline_addr uintptr //go:cgo_import_dynamic libc_ptrace ptrace "/usr/lib/libSystem.B.dylib" diff --git 
a/tools/vendor/golang.org/x/sys/unix/zsyscall_darwin_arm64.s b/tools/vendor/golang.org/x/sys/unix/zsyscall_darwin_arm64.s index efa5b4c987..c3b82c0379 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_darwin_arm64.s +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_darwin_arm64.s @@ -705,6 +705,11 @@ TEXT libc_select_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_select_trampoline_addr(SB), RODATA, $8 DATA ·libc_select_trampoline_addr(SB)/8, $libc_select_trampoline<>(SB) +TEXT libc_setattrlist_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_setattrlist(SB) +GLOBL ·libc_setattrlist_trampoline_addr(SB), RODATA, $8 +DATA ·libc_setattrlist_trampoline_addr(SB)/8, $libc_setattrlist_trampoline<>(SB) + TEXT libc_setegid_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_setegid(SB) @@ -759,12 +764,6 @@ TEXT libc_setreuid_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_setreuid_trampoline_addr(SB), RODATA, $8 DATA ·libc_setreuid_trampoline_addr(SB)/8, $libc_setreuid_trampoline<>(SB) -TEXT libc_setrlimit_trampoline<>(SB),NOSPLIT,$0-0 - JMP libc_setrlimit(SB) - -GLOBL ·libc_setrlimit_trampoline_addr(SB), RODATA, $8 -DATA ·libc_setrlimit_trampoline_addr(SB)/8, $libc_setrlimit_trampoline<>(SB) - TEXT libc_setsid_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_setsid(SB) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_dragonfly_amd64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_dragonfly_amd64.go index 54749f9c5e..0eabac7ade 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_dragonfly_amd64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_dragonfly_amd64.go @@ -436,6 +436,16 @@ func ioctl(fd int, req uint, arg uintptr) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := Syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) { var _p0 unsafe.Pointer if len(mib) > 0 { @@ -1400,16 +1410,6 @@ func Setresuid(ruid int, euid int, suid int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setsid() (pid int, err error) { r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0) pid = int(r0) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_386.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_386.go index 77479d4581..ee313eb007 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_386.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_386.go @@ -388,6 +388,16 @@ func ioctl(fd int, req uint, arg uintptr) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := Syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) { var _p0 unsafe.Pointer if len(mib) > 0 { @@ -414,6 +424,16 @@ func ptrace(request int, pid int, addr uintptr, data int) (err error) { // THIS 
FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ptracePtr(request int, pid int, addr unsafe.Pointer, data int) (err error) { + _, _, e1 := Syscall6(SYS_PTRACE, uintptr(request), uintptr(pid), uintptr(addr), uintptr(data), 0, 0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Access(path string, mode uint32) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) @@ -1625,16 +1645,6 @@ func Setresuid(ruid int, euid int, suid int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setsid() (pid int, err error) { r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0) pid = int(r0) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_amd64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_amd64.go index 2e966d4d7a..4c986e448e 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_amd64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_amd64.go @@ -388,6 +388,16 @@ func ioctl(fd int, req uint, arg uintptr) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := Syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) { var _p0 unsafe.Pointer if len(mib) > 0 { @@ -414,6 +424,16 @@ func ptrace(request int, pid int, addr uintptr, data int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ptracePtr(request int, pid int, addr unsafe.Pointer, data int) (err error) { + _, _, e1 := Syscall6(SYS_PTRACE, uintptr(request), uintptr(pid), uintptr(addr), uintptr(data), 0, 0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Access(path string, mode uint32) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) @@ -1625,16 +1645,6 @@ func Setresuid(ruid int, euid int, suid int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setsid() (pid int, err error) { r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0) pid = int(r0) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm.go index d65a7c0fa6..555216944a 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm.go @@ -388,6 +388,16 @@ func ioctl(fd int, req uint, arg uintptr) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := Syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS 
GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) { var _p0 unsafe.Pointer if len(mib) > 0 { @@ -414,6 +424,16 @@ func ptrace(request int, pid int, addr uintptr, data int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ptracePtr(request int, pid int, addr unsafe.Pointer, data int) (err error) { + _, _, e1 := Syscall6(SYS_PTRACE, uintptr(request), uintptr(pid), uintptr(addr), uintptr(data), 0, 0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Access(path string, mode uint32) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) @@ -1625,16 +1645,6 @@ func Setresuid(ruid int, euid int, suid int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setsid() (pid int, err error) { r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0) pid = int(r0) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm64.go index 6f0b97c6db..67a226fbf5 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_arm64.go @@ -388,6 +388,16 @@ func ioctl(fd int, req uint, arg uintptr) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := Syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) { var _p0 unsafe.Pointer if len(mib) > 0 { @@ -414,6 +424,16 @@ func ptrace(request int, pid int, addr uintptr, data int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ptracePtr(request int, pid int, addr unsafe.Pointer, data int) (err error) { + _, _, e1 := Syscall6(SYS_PTRACE, uintptr(request), uintptr(pid), uintptr(addr), uintptr(data), 0, 0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Access(path string, mode uint32) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) @@ -1625,16 +1645,6 @@ func Setresuid(ruid int, euid int, suid int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setsid() (pid int, err error) { r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0) pid = int(r0) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_riscv64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_riscv64.go index e1c23b5272..f0b9ddaaa2 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_riscv64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_freebsd_riscv64.go @@ -388,6 +388,16 @@ func ioctl(fd int, req uint, arg 
uintptr) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := Syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) { var _p0 unsafe.Pointer if len(mib) > 0 { @@ -414,6 +424,16 @@ func ptrace(request int, pid int, addr uintptr, data int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ptracePtr(request int, pid int, addr unsafe.Pointer, data int) (err error) { + _, _, e1 := Syscall6(SYS_PTRACE, uintptr(request), uintptr(pid), uintptr(addr), uintptr(data), 0, 0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Access(path string, mode uint32) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) @@ -1625,16 +1645,6 @@ func Setresuid(ruid int, euid int, suid int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setsid() (pid int, err error) { r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0) pid = int(r0) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux.go index 36ea3a55b7..7ceec233fb 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux.go @@ -379,6 +379,16 @@ func ptrace(request int, pid int, addr uintptr, data uintptr) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ptracePtr(request int, pid int, addr uintptr, data unsafe.Pointer) (err error) { + _, _, e1 := Syscall6(SYS_PTRACE, uintptr(request), uintptr(pid), uintptr(addr), uintptr(data), 0, 0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func reboot(magic1 uint, magic2 uint, cmd int, arg string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(arg) @@ -1336,16 +1346,6 @@ func PivotRoot(newroot string, putold string) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Prlimit(pid int, resource int, newlimit *Rlimit, old *Rlimit) (err error) { - _, _, e1 := RawSyscall6(SYS_PRLIMIT64, uintptr(pid), uintptr(resource), uintptr(unsafe.Pointer(newlimit)), uintptr(unsafe.Pointer(old)), 0, 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Prctl(option int, arg2 uintptr, arg3 uintptr, arg4 uintptr, arg5 uintptr) (err error) { _, _, e1 := Syscall6(SYS_PRCTL, uintptr(option), uintptr(arg2), uintptr(arg3), uintptr(arg4), uintptr(arg5), 0) if e1 != 0 { @@ -1868,6 +1868,17 @@ func munmap(addr uintptr, length uintptr) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func mremap(oldaddr uintptr, oldlength uintptr, newlength uintptr, flags int, newaddr uintptr) (xaddr uintptr, err error) { + r0, _, e1 := Syscall6(SYS_MREMAP, uintptr(oldaddr), uintptr(oldlength), uintptr(newlength), uintptr(flags), uintptr(newaddr), 0) + xaddr 
= uintptr(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func Madvise(b []byte, advice int) (err error) { var _p0 unsafe.Pointer if len(b) > 0 { @@ -2172,3 +2183,17 @@ func rtSigprocmask(how int, set *Sigset_t, oldset *Sigset_t, sigsetsize uintptr) } return } + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func getresuid(ruid *_C_int, euid *_C_int, suid *_C_int) { + RawSyscallNoError(SYS_GETRESUID, uintptr(unsafe.Pointer(ruid)), uintptr(unsafe.Pointer(euid)), uintptr(unsafe.Pointer(suid))) + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func getresgid(rgid *_C_int, egid *_C_int, sgid *_C_int) { + RawSyscallNoError(SYS_GETRESGID, uintptr(unsafe.Pointer(rgid)), uintptr(unsafe.Pointer(egid)), uintptr(unsafe.Pointer(sgid))) + return +} diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_386.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_386.go index c81b0ad477..07b549cc25 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_386.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_386.go @@ -411,16 +411,6 @@ func getrlimit(resource int, rlim *rlimit32) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func setrlimit(resource int, rlim *rlimit32) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func futimesat(dirfd int, path string, times *[2]Timeval) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_amd64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_amd64.go index 2206bce7f4..5f481bf83f 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_amd64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_amd64.go @@ -334,16 +334,6 @@ func setfsuid(uid int) (prev int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(resource int, rlim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Shutdown(fd int, how int) (err error) { _, _, e1 := Syscall(SYS_SHUTDOWN, uintptr(fd), uintptr(how), 0) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_arm.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_arm.go index edf6b39f16..824cd52c7f 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_arm.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_arm.go @@ -578,16 +578,6 @@ func getrlimit(resource int, rlim *rlimit32) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func setrlimit(resource int, rlim *rlimit32) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func armSyncFileRange(fd int, flags int, off int64, n int64) (err error) { _, _, e1 := Syscall6(SYS_ARM_SYNC_FILE_RANGE, uintptr(fd), uintptr(flags), uintptr(off), uintptr(off>>32), uintptr(n), uintptr(n>>32)) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_arm64.go 
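The zsyscall_linux.go hunk above adds the raw mremap wrapper behind the new exported unix.Mremap(oldData []byte, newLength int, flags int) helper introduced earlier in this diff. A short sketch of growing an anonymous mapping on Linux, assuming the vendored x/sys version; MREMAP_MAYMOVE lets the kernel relocate the region if it cannot grow in place:

package main

import (
	"log"
	"os"

	"golang.org/x/sys/unix"
)

func main() {
	page := os.Getpagesize()

	// Map one page of anonymous, writable memory.
	buf, err := unix.Mmap(-1, 0, page,
		unix.PROT_READ|unix.PROT_WRITE,
		unix.MAP_PRIVATE|unix.MAP_ANON)
	if err != nil {
		log.Fatalf("mmap: %v", err)
	}
	buf[0] = 0x42

	// Grow the mapping to four pages; the returned slice may point at
	// a relocated region, so the old slice must not be used afterwards.
	bigger, err := unix.Mremap(buf, 4*page, unix.MREMAP_MAYMOVE)
	if err != nil {
		log.Fatalf("mremap: %v", err)
	}
	defer unix.Munmap(bigger)

	log.Printf("len=%d first byte=%#x", len(bigger), bigger[0])
}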
b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_arm64.go index 190609f214..e77aecfe98 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_arm64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_arm64.go @@ -289,16 +289,6 @@ func setfsuid(uid int) (prev int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func setrlimit(resource int, rlim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Shutdown(fd int, how int) (err error) { _, _, e1 := Syscall(SYS_SHUTDOWN, uintptr(fd), uintptr(how), 0) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_mips.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_mips.go index 5f984cbb1c..961a3afb7b 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_mips.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_mips.go @@ -644,16 +644,6 @@ func getrlimit(resource int, rlim *rlimit32) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func setrlimit(resource int, rlim *rlimit32) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Alarm(seconds uint) (remaining uint, err error) { r0, _, e1 := Syscall(SYS_ALARM, uintptr(seconds), 0, 0) remaining = uint(r0) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64.go index 46fc380a40..ed05005e91 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64.go @@ -278,16 +278,6 @@ func setfsuid(uid int) (prev int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(resource int, rlim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Shutdown(fd int, how int) (err error) { _, _, e1 := Syscall(SYS_SHUTDOWN, uintptr(fd), uintptr(how), 0) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64le.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64le.go index cbd0d4dadb..d365b718f3 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64le.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_mips64le.go @@ -278,16 +278,6 @@ func setfsuid(uid int) (prev int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(resource int, rlim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Shutdown(fd int, how int) (err error) { _, _, e1 := Syscall(SYS_SHUTDOWN, uintptr(fd), uintptr(how), 0) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_mipsle.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_mipsle.go index 0c13d15f07..c3f1b8bbde 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_mipsle.go +++ 
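The getresuid and getresgid raw wrappers added to zsyscall_linux.go above (and to the OpenBSD files later in this diff) back new exported helpers for reading the real, effective and saved IDs in a single call. A sketch of the expected use, assuming the exported forms are Getresuid() (ruid, euid, suid int) and Getresgid() (rgid, egid, sgid int) as in the hand-written syscall files:

package main

import (
	"fmt"

	"golang.org/x/sys/unix"
)

func main() {
	// Both helpers wrap syscalls that cannot fail, so no error is returned.
	ruid, euid, suid := unix.Getresuid()
	rgid, egid, sgid := unix.Getresgid()

	fmt.Printf("uid: real=%d effective=%d saved=%d\n", ruid, euid, suid)
	fmt.Printf("gid: real=%d effective=%d saved=%d\n", rgid, egid, sgid)
}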
b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_mipsle.go @@ -644,16 +644,6 @@ func getrlimit(resource int, rlim *rlimit32) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func setrlimit(resource int, rlim *rlimit32) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Alarm(seconds uint) (remaining uint, err error) { r0, _, e1 := Syscall(SYS_ALARM, uintptr(seconds), 0, 0) remaining = uint(r0) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc.go index e01432aed5..a6574cf98b 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc.go @@ -624,16 +624,6 @@ func getrlimit(resource int, rlim *rlimit32) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func setrlimit(resource int, rlim *rlimit32) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func syncFileRange2(fd int, flags int, off int64, n int64) (err error) { _, _, e1 := Syscall6(SYS_SYNC_FILE_RANGE2, uintptr(fd), uintptr(flags), uintptr(off>>32), uintptr(off), uintptr(n>>32), uintptr(n)) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64.go index 13c7ee7baf..f40990264f 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64.go @@ -349,16 +349,6 @@ func setfsuid(uid int) (prev int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(resource int, rlim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Shutdown(fd int, how int) (err error) { _, _, e1 := Syscall(SYS_SHUTDOWN, uintptr(fd), uintptr(how), 0) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64le.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64le.go index 02d0c0fd61..9dfcc29974 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64le.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_ppc64le.go @@ -349,16 +349,6 @@ func setfsuid(uid int) (prev int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(resource int, rlim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Shutdown(fd int, how int) (err error) { _, _, e1 := Syscall(SYS_SHUTDOWN, uintptr(fd), uintptr(how), 0) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_riscv64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_riscv64.go index 9fee3b1d23..0b29239583 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_riscv64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_riscv64.go @@ -269,16 +269,6 @@ func setfsuid(uid 
int) (prev int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(resource int, rlim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Shutdown(fd int, how int) (err error) { _, _, e1 := Syscall(SYS_SHUTDOWN, uintptr(fd), uintptr(how), 0) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_s390x.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_s390x.go index 647bbfecd6..6cde32237d 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_s390x.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_s390x.go @@ -319,16 +319,6 @@ func setfsuid(uid int) (prev int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(resource int, rlim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Splice(rfd int, roff *int64, wfd int, woff *int64, len int, flags int) (n int64, err error) { r0, _, e1 := Syscall6(SYS_SPLICE, uintptr(rfd), uintptr(unsafe.Pointer(roff)), uintptr(wfd), uintptr(unsafe.Pointer(woff)), uintptr(len), uintptr(flags)) n = int64(r0) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_sparc64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_sparc64.go index ada057f891..5253d65bf1 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_sparc64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_linux_sparc64.go @@ -329,16 +329,6 @@ func setfsuid(uid int) (prev int, err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(resource int, rlim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Shutdown(fd int, how int) (err error) { _, _, e1 := Syscall(SYS_SHUTDOWN, uintptr(fd), uintptr(how), 0) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_netbsd_386.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_netbsd_386.go index 79f7389963..cdb2af5ae0 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_netbsd_386.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_netbsd_386.go @@ -405,6 +405,16 @@ func ioctl(fd int, req uint, arg uintptr) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := Syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) { var _p0 unsafe.Pointer if len(mib) > 0 { @@ -1597,16 +1607,6 @@ func Setreuid(ruid int, euid int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setsid() (pid 
int, err error) { r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0) pid = int(r0) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_netbsd_amd64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_netbsd_amd64.go index fb161f3a26..9d25f76b0b 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_netbsd_amd64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_netbsd_amd64.go @@ -405,6 +405,16 @@ func ioctl(fd int, req uint, arg uintptr) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := Syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) { var _p0 unsafe.Pointer if len(mib) > 0 { @@ -1597,16 +1607,6 @@ func Setreuid(ruid int, euid int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setsid() (pid int, err error) { r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0) pid = int(r0) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_netbsd_arm.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_netbsd_arm.go index 4c8ac993a8..d3f8035169 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_netbsd_arm.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_netbsd_arm.go @@ -405,6 +405,16 @@ func ioctl(fd int, req uint, arg uintptr) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := Syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) { var _p0 unsafe.Pointer if len(mib) > 0 { @@ -1597,16 +1607,6 @@ func Setreuid(ruid int, euid int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setsid() (pid int, err error) { r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0) pid = int(r0) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_netbsd_arm64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_netbsd_arm64.go index 76dd8ec4fd..887188a529 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_netbsd_arm64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_netbsd_arm64.go @@ -405,6 +405,16 @@ func ioctl(fd int, req uint, arg uintptr) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := Syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) { 
var _p0 unsafe.Pointer if len(mib) > 0 { @@ -1597,16 +1607,6 @@ func Setreuid(ruid int, euid int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setsid() (pid int, err error) { r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0) pid = int(r0) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.go index caeb807bd4..9ab9abf721 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.go @@ -519,6 +519,28 @@ var libc_getcwd_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func getresuid(ruid *_C_int, euid *_C_int, suid *_C_int) { + syscall_rawSyscall(libc_getresuid_trampoline_addr, uintptr(unsafe.Pointer(ruid)), uintptr(unsafe.Pointer(euid)), uintptr(unsafe.Pointer(suid))) + return +} + +var libc_getresuid_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_getresuid getresuid "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func getresgid(rgid *_C_int, egid *_C_int, sgid *_C_int) { + syscall_rawSyscall(libc_getresgid_trampoline_addr, uintptr(unsafe.Pointer(rgid)), uintptr(unsafe.Pointer(egid)), uintptr(unsafe.Pointer(sgid))) + return +} + +var libc_getresgid_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_getresgid getresgid "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func ioctl(fd int, req uint, arg uintptr) (err error) { _, _, e1 := syscall_syscall(libc_ioctl_trampoline_addr, uintptr(fd), uintptr(req), uintptr(arg)) if e1 != 0 { @@ -527,6 +549,14 @@ func ioctl(fd int, req uint, arg uintptr) (err error) { return } +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := syscall_syscall(libc_ioctl_trampoline_addr, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + var libc_ioctl_trampoline_addr uintptr //go:cgo_import_dynamic libc_ioctl ioctl "libc.so" @@ -1886,20 +1916,6 @@ var libc_setresuid_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := syscall_rawSyscall(libc_setrlimit_trampoline_addr, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -var libc_setrlimit_trampoline_addr uintptr - -//go:cgo_import_dynamic libc_setrlimit setrlimit "libc.so" - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setrtable(rtable int) (err error) { _, _, e1 := syscall_rawSyscall(libc_setrtable_trampoline_addr, uintptr(rtable), 0, 0) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.s b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.s index 087444250c..3dcacd30d7 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.s +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.s @@ -158,6 +158,16 @@ TEXT libc_getcwd_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_getcwd_trampoline_addr(SB), RODATA, $4 DATA ·libc_getcwd_trampoline_addr(SB)/4, $libc_getcwd_trampoline<>(SB) +TEXT libc_getresuid_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_getresuid(SB) +GLOBL 
·libc_getresuid_trampoline_addr(SB), RODATA, $4 +DATA ·libc_getresuid_trampoline_addr(SB)/4, $libc_getresuid_trampoline<>(SB) + +TEXT libc_getresgid_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_getresgid(SB) +GLOBL ·libc_getresgid_trampoline_addr(SB), RODATA, $4 +DATA ·libc_getresgid_trampoline_addr(SB)/4, $libc_getresgid_trampoline<>(SB) + TEXT libc_ioctl_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_ioctl(SB) GLOBL ·libc_ioctl_trampoline_addr(SB), RODATA, $4 @@ -573,11 +583,6 @@ TEXT libc_setresuid_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_setresuid_trampoline_addr(SB), RODATA, $4 DATA ·libc_setresuid_trampoline_addr(SB)/4, $libc_setresuid_trampoline<>(SB) -TEXT libc_setrlimit_trampoline<>(SB),NOSPLIT,$0-0 - JMP libc_setrlimit(SB) -GLOBL ·libc_setrlimit_trampoline_addr(SB), RODATA, $4 -DATA ·libc_setrlimit_trampoline_addr(SB)/4, $libc_setrlimit_trampoline<>(SB) - TEXT libc_setrtable_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_setrtable(SB) GLOBL ·libc_setrtable_trampoline_addr(SB), RODATA, $4 diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.go index a05e5f4fff..915761eab7 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.go @@ -519,6 +519,28 @@ var libc_getcwd_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func getresuid(ruid *_C_int, euid *_C_int, suid *_C_int) { + syscall_rawSyscall(libc_getresuid_trampoline_addr, uintptr(unsafe.Pointer(ruid)), uintptr(unsafe.Pointer(euid)), uintptr(unsafe.Pointer(suid))) + return +} + +var libc_getresuid_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_getresuid getresuid "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func getresgid(rgid *_C_int, egid *_C_int, sgid *_C_int) { + syscall_rawSyscall(libc_getresgid_trampoline_addr, uintptr(unsafe.Pointer(rgid)), uintptr(unsafe.Pointer(egid)), uintptr(unsafe.Pointer(sgid))) + return +} + +var libc_getresgid_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_getresgid getresgid "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func ioctl(fd int, req uint, arg uintptr) (err error) { _, _, e1 := syscall_syscall(libc_ioctl_trampoline_addr, uintptr(fd), uintptr(req), uintptr(arg)) if e1 != 0 { @@ -533,6 +555,16 @@ var libc_ioctl_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := syscall_syscall(libc_ioctl_trampoline_addr, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) { var _p0 unsafe.Pointer if len(mib) > 0 { @@ -1886,20 +1918,6 @@ var libc_setresuid_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := syscall_rawSyscall(libc_setrlimit_trampoline_addr, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -var libc_setrlimit_trampoline_addr uintptr - -//go:cgo_import_dynamic libc_setrlimit setrlimit "libc.so" - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setrtable(rtable int) (err error) { _, _, e1 := 
syscall_rawSyscall(libc_setrtable_trampoline_addr, uintptr(rtable), 0, 0) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.s b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.s index 5782cd1084..2763620b01 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.s +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.s @@ -158,6 +158,16 @@ TEXT libc_getcwd_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_getcwd_trampoline_addr(SB), RODATA, $8 DATA ·libc_getcwd_trampoline_addr(SB)/8, $libc_getcwd_trampoline<>(SB) +TEXT libc_getresuid_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_getresuid(SB) +GLOBL ·libc_getresuid_trampoline_addr(SB), RODATA, $8 +DATA ·libc_getresuid_trampoline_addr(SB)/8, $libc_getresuid_trampoline<>(SB) + +TEXT libc_getresgid_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_getresgid(SB) +GLOBL ·libc_getresgid_trampoline_addr(SB), RODATA, $8 +DATA ·libc_getresgid_trampoline_addr(SB)/8, $libc_getresgid_trampoline<>(SB) + TEXT libc_ioctl_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_ioctl(SB) GLOBL ·libc_ioctl_trampoline_addr(SB), RODATA, $8 @@ -573,11 +583,6 @@ TEXT libc_setresuid_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_setresuid_trampoline_addr(SB), RODATA, $8 DATA ·libc_setresuid_trampoline_addr(SB)/8, $libc_setresuid_trampoline<>(SB) -TEXT libc_setrlimit_trampoline<>(SB),NOSPLIT,$0-0 - JMP libc_setrlimit(SB) -GLOBL ·libc_setrlimit_trampoline_addr(SB), RODATA, $8 -DATA ·libc_setrlimit_trampoline_addr(SB)/8, $libc_setrlimit_trampoline<>(SB) - TEXT libc_setrtable_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_setrtable(SB) GLOBL ·libc_setrtable_trampoline_addr(SB), RODATA, $8 diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.go index b2da8e50cc..8e87fdf153 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.go @@ -519,6 +519,28 @@ var libc_getcwd_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func getresuid(ruid *_C_int, euid *_C_int, suid *_C_int) { + syscall_rawSyscall(libc_getresuid_trampoline_addr, uintptr(unsafe.Pointer(ruid)), uintptr(unsafe.Pointer(euid)), uintptr(unsafe.Pointer(suid))) + return +} + +var libc_getresuid_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_getresuid getresuid "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func getresgid(rgid *_C_int, egid *_C_int, sgid *_C_int) { + syscall_rawSyscall(libc_getresgid_trampoline_addr, uintptr(unsafe.Pointer(rgid)), uintptr(unsafe.Pointer(egid)), uintptr(unsafe.Pointer(sgid))) + return +} + +var libc_getresgid_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_getresgid getresgid "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func ioctl(fd int, req uint, arg uintptr) (err error) { _, _, e1 := syscall_syscall(libc_ioctl_trampoline_addr, uintptr(fd), uintptr(req), uintptr(arg)) if e1 != 0 { @@ -527,6 +549,14 @@ func ioctl(fd int, req uint, arg uintptr) (err error) { return } +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := syscall_syscall(libc_ioctl_trampoline_addr, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + var libc_ioctl_trampoline_addr uintptr //go:cgo_import_dynamic libc_ioctl ioctl "libc.so" @@ -1886,20 +1916,6 @@ var libc_setresuid_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; 
DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := syscall_rawSyscall(libc_setrlimit_trampoline_addr, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -var libc_setrlimit_trampoline_addr uintptr - -//go:cgo_import_dynamic libc_setrlimit setrlimit "libc.so" - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setrtable(rtable int) (err error) { _, _, e1 := syscall_rawSyscall(libc_setrtable_trampoline_addr, uintptr(rtable), 0, 0) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.s b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.s index cf310420c9..c922314048 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.s +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.s @@ -158,6 +158,16 @@ TEXT libc_getcwd_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_getcwd_trampoline_addr(SB), RODATA, $4 DATA ·libc_getcwd_trampoline_addr(SB)/4, $libc_getcwd_trampoline<>(SB) +TEXT libc_getresuid_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_getresuid(SB) +GLOBL ·libc_getresuid_trampoline_addr(SB), RODATA, $4 +DATA ·libc_getresuid_trampoline_addr(SB)/4, $libc_getresuid_trampoline<>(SB) + +TEXT libc_getresgid_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_getresgid(SB) +GLOBL ·libc_getresgid_trampoline_addr(SB), RODATA, $4 +DATA ·libc_getresgid_trampoline_addr(SB)/4, $libc_getresgid_trampoline<>(SB) + TEXT libc_ioctl_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_ioctl(SB) GLOBL ·libc_ioctl_trampoline_addr(SB), RODATA, $4 @@ -573,11 +583,6 @@ TEXT libc_setresuid_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_setresuid_trampoline_addr(SB), RODATA, $4 DATA ·libc_setresuid_trampoline_addr(SB)/4, $libc_setresuid_trampoline<>(SB) -TEXT libc_setrlimit_trampoline<>(SB),NOSPLIT,$0-0 - JMP libc_setrlimit(SB) -GLOBL ·libc_setrlimit_trampoline_addr(SB), RODATA, $4 -DATA ·libc_setrlimit_trampoline_addr(SB)/4, $libc_setrlimit_trampoline<>(SB) - TEXT libc_setrtable_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_setrtable(SB) GLOBL ·libc_setrtable_trampoline_addr(SB), RODATA, $4 diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.go index 048b2655e6..12a7a2160e 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.go @@ -519,6 +519,28 @@ var libc_getcwd_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func getresuid(ruid *_C_int, euid *_C_int, suid *_C_int) { + syscall_rawSyscall(libc_getresuid_trampoline_addr, uintptr(unsafe.Pointer(ruid)), uintptr(unsafe.Pointer(euid)), uintptr(unsafe.Pointer(suid))) + return +} + +var libc_getresuid_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_getresuid getresuid "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func getresgid(rgid *_C_int, egid *_C_int, sgid *_C_int) { + syscall_rawSyscall(libc_getresgid_trampoline_addr, uintptr(unsafe.Pointer(rgid)), uintptr(unsafe.Pointer(egid)), uintptr(unsafe.Pointer(sgid))) + return +} + +var libc_getresgid_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_getresgid getresgid "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func ioctl(fd int, req uint, arg uintptr) (err error) { _, _, e1 := syscall_syscall(libc_ioctl_trampoline_addr, uintptr(fd), uintptr(req), uintptr(arg)) if e1 != 0 { @@ -527,6 +549,14 @@ func ioctl(fd int, req uint, 
arg uintptr) (err error) { return } +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := syscall_syscall(libc_ioctl_trampoline_addr, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + var libc_ioctl_trampoline_addr uintptr //go:cgo_import_dynamic libc_ioctl ioctl "libc.so" @@ -1886,20 +1916,6 @@ var libc_setresuid_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := syscall_rawSyscall(libc_setrlimit_trampoline_addr, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -var libc_setrlimit_trampoline_addr uintptr - -//go:cgo_import_dynamic libc_setrlimit setrlimit "libc.so" - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setrtable(rtable int) (err error) { _, _, e1 := syscall_rawSyscall(libc_setrtable_trampoline_addr, uintptr(rtable), 0, 0) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.s b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.s index 484bb42e0a..a6bc32c922 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.s +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.s @@ -158,6 +158,16 @@ TEXT libc_getcwd_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_getcwd_trampoline_addr(SB), RODATA, $8 DATA ·libc_getcwd_trampoline_addr(SB)/8, $libc_getcwd_trampoline<>(SB) +TEXT libc_getresuid_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_getresuid(SB) +GLOBL ·libc_getresuid_trampoline_addr(SB), RODATA, $8 +DATA ·libc_getresuid_trampoline_addr(SB)/8, $libc_getresuid_trampoline<>(SB) + +TEXT libc_getresgid_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_getresgid(SB) +GLOBL ·libc_getresgid_trampoline_addr(SB), RODATA, $8 +DATA ·libc_getresgid_trampoline_addr(SB)/8, $libc_getresgid_trampoline<>(SB) + TEXT libc_ioctl_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_ioctl(SB) GLOBL ·libc_ioctl_trampoline_addr(SB), RODATA, $8 @@ -573,11 +583,6 @@ TEXT libc_setresuid_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_setresuid_trampoline_addr(SB), RODATA, $8 DATA ·libc_setresuid_trampoline_addr(SB)/8, $libc_setresuid_trampoline<>(SB) -TEXT libc_setrlimit_trampoline<>(SB),NOSPLIT,$0-0 - JMP libc_setrlimit(SB) -GLOBL ·libc_setrlimit_trampoline_addr(SB), RODATA, $8 -DATA ·libc_setrlimit_trampoline_addr(SB)/8, $libc_setrlimit_trampoline<>(SB) - TEXT libc_setrtable_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_setrtable(SB) GLOBL ·libc_setrtable_trampoline_addr(SB), RODATA, $8 diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.go index 6f33e37e72..b19e8aa031 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.go @@ -519,6 +519,28 @@ var libc_getcwd_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func getresuid(ruid *_C_int, euid *_C_int, suid *_C_int) { + syscall_rawSyscall(libc_getresuid_trampoline_addr, uintptr(unsafe.Pointer(ruid)), uintptr(unsafe.Pointer(euid)), uintptr(unsafe.Pointer(suid))) + return +} + +var libc_getresuid_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_getresuid getresuid "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func getresgid(rgid *_C_int, egid *_C_int, sgid *_C_int) { + syscall_rawSyscall(libc_getresgid_trampoline_addr, 
uintptr(unsafe.Pointer(rgid)), uintptr(unsafe.Pointer(egid)), uintptr(unsafe.Pointer(sgid))) + return +} + +var libc_getresgid_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_getresgid getresgid "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func ioctl(fd int, req uint, arg uintptr) (err error) { _, _, e1 := syscall_syscall(libc_ioctl_trampoline_addr, uintptr(fd), uintptr(req), uintptr(arg)) if e1 != 0 { @@ -527,6 +549,14 @@ func ioctl(fd int, req uint, arg uintptr) (err error) { return } +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := syscall_syscall(libc_ioctl_trampoline_addr, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + var libc_ioctl_trampoline_addr uintptr //go:cgo_import_dynamic libc_ioctl ioctl "libc.so" @@ -1886,20 +1916,6 @@ var libc_setresuid_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := syscall_rawSyscall(libc_setrlimit_trampoline_addr, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -var libc_setrlimit_trampoline_addr uintptr - -//go:cgo_import_dynamic libc_setrlimit setrlimit "libc.so" - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setrtable(rtable int) (err error) { _, _, e1 := syscall_rawSyscall(libc_setrtable_trampoline_addr, uintptr(rtable), 0, 0) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.s b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.s index 55af27263a..b4e7bceabf 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.s +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.s @@ -158,6 +158,16 @@ TEXT libc_getcwd_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_getcwd_trampoline_addr(SB), RODATA, $8 DATA ·libc_getcwd_trampoline_addr(SB)/8, $libc_getcwd_trampoline<>(SB) +TEXT libc_getresuid_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_getresuid(SB) +GLOBL ·libc_getresuid_trampoline_addr(SB), RODATA, $8 +DATA ·libc_getresuid_trampoline_addr(SB)/8, $libc_getresuid_trampoline<>(SB) + +TEXT libc_getresgid_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_getresgid(SB) +GLOBL ·libc_getresgid_trampoline_addr(SB), RODATA, $8 +DATA ·libc_getresgid_trampoline_addr(SB)/8, $libc_getresgid_trampoline<>(SB) + TEXT libc_ioctl_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_ioctl(SB) GLOBL ·libc_ioctl_trampoline_addr(SB), RODATA, $8 @@ -573,11 +583,6 @@ TEXT libc_setresuid_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_setresuid_trampoline_addr(SB), RODATA, $8 DATA ·libc_setresuid_trampoline_addr(SB)/8, $libc_setresuid_trampoline<>(SB) -TEXT libc_setrlimit_trampoline<>(SB),NOSPLIT,$0-0 - JMP libc_setrlimit(SB) -GLOBL ·libc_setrlimit_trampoline_addr(SB), RODATA, $8 -DATA ·libc_setrlimit_trampoline_addr(SB)/8, $libc_setrlimit_trampoline<>(SB) - TEXT libc_setrtable_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_setrtable(SB) GLOBL ·libc_setrtable_trampoline_addr(SB), RODATA, $8 diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.go index 330cf7f7ac..fb99594c93 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.go @@ -519,6 +519,28 @@ var libc_getcwd_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func getresuid(ruid *_C_int, 
euid *_C_int, suid *_C_int) { + syscall_rawSyscall(libc_getresuid_trampoline_addr, uintptr(unsafe.Pointer(ruid)), uintptr(unsafe.Pointer(euid)), uintptr(unsafe.Pointer(suid))) + return +} + +var libc_getresuid_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_getresuid getresuid "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func getresgid(rgid *_C_int, egid *_C_int, sgid *_C_int) { + syscall_rawSyscall(libc_getresgid_trampoline_addr, uintptr(unsafe.Pointer(rgid)), uintptr(unsafe.Pointer(egid)), uintptr(unsafe.Pointer(sgid))) + return +} + +var libc_getresgid_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_getresgid getresgid "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func ioctl(fd int, req uint, arg uintptr) (err error) { _, _, e1 := syscall_syscall(libc_ioctl_trampoline_addr, uintptr(fd), uintptr(req), uintptr(arg)) if e1 != 0 { @@ -527,6 +549,14 @@ func ioctl(fd int, req uint, arg uintptr) (err error) { return } +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := syscall_syscall(libc_ioctl_trampoline_addr, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + var libc_ioctl_trampoline_addr uintptr //go:cgo_import_dynamic libc_ioctl ioctl "libc.so" @@ -1886,20 +1916,6 @@ var libc_setresuid_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := syscall_rawSyscall(libc_setrlimit_trampoline_addr, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -var libc_setrlimit_trampoline_addr uintptr - -//go:cgo_import_dynamic libc_setrlimit setrlimit "libc.so" - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setrtable(rtable int) (err error) { _, _, e1 := syscall_rawSyscall(libc_setrtable_trampoline_addr, uintptr(rtable), 0, 0) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.s b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.s index 4028255b0d..ca3f766009 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.s +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.s @@ -189,6 +189,18 @@ TEXT libc_getcwd_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_getcwd_trampoline_addr(SB), RODATA, $8 DATA ·libc_getcwd_trampoline_addr(SB)/8, $libc_getcwd_trampoline<>(SB) +TEXT libc_getresuid_trampoline<>(SB),NOSPLIT,$0-0 + CALL libc_getresuid(SB) + RET +GLOBL ·libc_getresuid_trampoline_addr(SB), RODATA, $8 +DATA ·libc_getresuid_trampoline_addr(SB)/8, $libc_getresuid_trampoline<>(SB) + +TEXT libc_getresgid_trampoline<>(SB),NOSPLIT,$0-0 + CALL libc_getresgid(SB) + RET +GLOBL ·libc_getresgid_trampoline_addr(SB), RODATA, $8 +DATA ·libc_getresgid_trampoline_addr(SB)/8, $libc_getresgid_trampoline<>(SB) + TEXT libc_ioctl_trampoline<>(SB),NOSPLIT,$0-0 CALL libc_ioctl(SB) RET @@ -687,12 +699,6 @@ TEXT libc_setresuid_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_setresuid_trampoline_addr(SB), RODATA, $8 DATA ·libc_setresuid_trampoline_addr(SB)/8, $libc_setresuid_trampoline<>(SB) -TEXT libc_setrlimit_trampoline<>(SB),NOSPLIT,$0-0 - CALL libc_setrlimit(SB) - RET -GLOBL ·libc_setrlimit_trampoline_addr(SB), RODATA, $8 -DATA ·libc_setrlimit_trampoline_addr(SB)/8, $libc_setrlimit_trampoline<>(SB) - TEXT libc_setrtable_trampoline<>(SB),NOSPLIT,$0-0 CALL libc_setrtable(SB) RET diff --git 
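The unexported ioctlPtr (and ptracePtr) variants added throughout these files take an unsafe.Pointer and only convert it to uintptr inside the syscall invocation, which keeps the wrappers within Go's unsafe.Pointer rules instead of smuggling pointers through integer arguments. Callers are not expected to use them directly; the exported Ioctl* helpers are the intended surface. For example, assuming the process has a terminal on standard input:

package main

import (
	"fmt"
	"log"
	"os"

	"golang.org/x/sys/unix"
)

func main() {
	// IoctlGetWinsize is one of the exported helpers that route through
	// the pointer-based ioctl plumbing added in this update.
	ws, err := unix.IoctlGetWinsize(int(os.Stdin.Fd()), unix.TIOCGWINSZ)
	if err != nil {
		log.Fatalf("TIOCGWINSZ: %v", err)
	}
	fmt.Printf("terminal is %d columns x %d rows\n", ws.Col, ws.Row)
}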
a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.go index 5f24de0d9d..32cbbbc52b 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.go @@ -519,6 +519,28 @@ var libc_getcwd_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func getresuid(ruid *_C_int, euid *_C_int, suid *_C_int) { + syscall_rawSyscall(libc_getresuid_trampoline_addr, uintptr(unsafe.Pointer(ruid)), uintptr(unsafe.Pointer(euid)), uintptr(unsafe.Pointer(suid))) + return +} + +var libc_getresuid_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_getresuid getresuid "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func getresgid(rgid *_C_int, egid *_C_int, sgid *_C_int) { + syscall_rawSyscall(libc_getresgid_trampoline_addr, uintptr(unsafe.Pointer(rgid)), uintptr(unsafe.Pointer(egid)), uintptr(unsafe.Pointer(sgid))) + return +} + +var libc_getresgid_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_getresgid getresgid "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func ioctl(fd int, req uint, arg uintptr) (err error) { _, _, e1 := syscall_syscall(libc_ioctl_trampoline_addr, uintptr(fd), uintptr(req), uintptr(arg)) if e1 != 0 { @@ -527,6 +549,14 @@ func ioctl(fd int, req uint, arg uintptr) (err error) { return } +func ioctlPtr(fd int, req uint, arg unsafe.Pointer) (err error) { + _, _, e1 := syscall_syscall(libc_ioctl_trampoline_addr, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + var libc_ioctl_trampoline_addr uintptr //go:cgo_import_dynamic libc_ioctl ioctl "libc.so" @@ -1886,20 +1916,6 @@ var libc_setresuid_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := syscall_rawSyscall(libc_setrlimit_trampoline_addr, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) - if e1 != 0 { - err = errnoErr(e1) - } - return -} - -var libc_setrlimit_trampoline_addr uintptr - -//go:cgo_import_dynamic libc_setrlimit setrlimit "libc.so" - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setrtable(rtable int) (err error) { _, _, e1 := syscall_rawSyscall(libc_setrtable_trampoline_addr, uintptr(rtable), 0, 0) if e1 != 0 { diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.s b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.s index e1fbd4dfa8..477a7d5b21 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.s +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.s @@ -158,6 +158,16 @@ TEXT libc_getcwd_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_getcwd_trampoline_addr(SB), RODATA, $8 DATA ·libc_getcwd_trampoline_addr(SB)/8, $libc_getcwd_trampoline<>(SB) +TEXT libc_getresuid_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_getresuid(SB) +GLOBL ·libc_getresuid_trampoline_addr(SB), RODATA, $8 +DATA ·libc_getresuid_trampoline_addr(SB)/8, $libc_getresuid_trampoline<>(SB) + +TEXT libc_getresgid_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_getresgid(SB) +GLOBL ·libc_getresgid_trampoline_addr(SB), RODATA, $8 +DATA ·libc_getresgid_trampoline_addr(SB)/8, $libc_getresgid_trampoline<>(SB) + TEXT libc_ioctl_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_ioctl(SB) GLOBL ·libc_ioctl_trampoline_addr(SB), RODATA, $8 @@ -573,11 +583,6 @@ TEXT 
libc_setresuid_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_setresuid_trampoline_addr(SB), RODATA, $8 DATA ·libc_setresuid_trampoline_addr(SB)/8, $libc_setresuid_trampoline<>(SB) -TEXT libc_setrlimit_trampoline<>(SB),NOSPLIT,$0-0 - JMP libc_setrlimit(SB) -GLOBL ·libc_setrlimit_trampoline_addr(SB), RODATA, $8 -DATA ·libc_setrlimit_trampoline_addr(SB)/8, $libc_setrlimit_trampoline<>(SB) - TEXT libc_setrtable_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_setrtable(SB) GLOBL ·libc_setrtable_trampoline_addr(SB), RODATA, $8 diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_solaris_amd64.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_solaris_amd64.go index 78d4a4240e..609d1c598a 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_solaris_amd64.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_solaris_amd64.go @@ -110,7 +110,6 @@ import ( //go:cgo_import_dynamic libc_setpriority setpriority "libc.so" //go:cgo_import_dynamic libc_setregid setregid "libc.so" //go:cgo_import_dynamic libc_setreuid setreuid "libc.so" -//go:cgo_import_dynamic libc_setrlimit setrlimit "libc.so" //go:cgo_import_dynamic libc_setsid setsid "libc.so" //go:cgo_import_dynamic libc_setuid setuid "libc.so" //go:cgo_import_dynamic libc_shutdown shutdown "libsocket.so" @@ -250,7 +249,6 @@ import ( //go:linkname procSetpriority libc_setpriority //go:linkname procSetregid libc_setregid //go:linkname procSetreuid libc_setreuid -//go:linkname procSetrlimit libc_setrlimit //go:linkname procSetsid libc_setsid //go:linkname procSetuid libc_setuid //go:linkname procshutdown libc_shutdown @@ -391,7 +389,6 @@ var ( procSetpriority, procSetregid, procSetreuid, - procSetrlimit, procSetsid, procSetuid, procshutdown, @@ -646,7 +643,18 @@ func __minor(version int, dev uint64) (val uint) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func ioctlRet(fd int, req uint, arg uintptr) (ret int, err error) { +func ioctlRet(fd int, req int, arg uintptr) (ret int, err error) { + r0, _, e1 := sysvicall6(uintptr(unsafe.Pointer(&procioctl)), 3, uintptr(fd), uintptr(req), uintptr(arg), 0, 0, 0) + ret = int(r0) + if e1 != 0 { + err = e1 + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func ioctlPtrRet(fd int, req int, arg unsafe.Pointer) (ret int, err error) { r0, _, e1 := sysvicall6(uintptr(unsafe.Pointer(&procioctl)), 3, uintptr(fd), uintptr(req), uintptr(arg), 0, 0, 0) ret = int(r0) if e1 != 0 { @@ -1639,16 +1647,6 @@ func Setreuid(ruid int, euid int) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func Setrlimit(which int, lim *Rlimit) (err error) { - _, _, e1 := rawSysvicall6(uintptr(unsafe.Pointer(&procSetrlimit)), 2, uintptr(which), uintptr(unsafe.Pointer(lim)), 0, 0, 0, 0) - if e1 != 0 { - err = e1 - } - return -} - -// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT - func Setsid() (pid int, err error) { r0, _, e1 := rawSysvicall6(uintptr(unsafe.Pointer(&procSetsid)), 0, 0, 0, 0, 0, 0, 0) pid = int(r0) diff --git a/tools/vendor/golang.org/x/sys/unix/zsyscall_zos_s390x.go b/tools/vendor/golang.org/x/sys/unix/zsyscall_zos_s390x.go index f2079457c6..c31681743c 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsyscall_zos_s390x.go +++ b/tools/vendor/golang.org/x/sys/unix/zsyscall_zos_s390x.go @@ -257,7 +257,17 @@ func munmap(addr uintptr, length uintptr) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT -func ioctl(fd int, req uint, arg uintptr) (err error) { +func ioctl(fd int, req int, arg uintptr) (err error) { + _, 
_, e1 := syscall_syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg)) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func ioctlPtr(fd int, req int, arg unsafe.Pointer) (err error) { _, _, e1 := syscall_syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg)) if e1 != 0 { err = errnoErr(e1) diff --git a/tools/vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go b/tools/vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go index 7ea465204b..e6ed7d637d 100644 --- a/tools/vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go +++ b/tools/vendor/golang.org/x/sys/unix/zsysnum_linux_s390x.go @@ -372,6 +372,7 @@ const ( SYS_LANDLOCK_CREATE_RULESET = 444 SYS_LANDLOCK_ADD_RULE = 445 SYS_LANDLOCK_RESTRICT_SELF = 446 + SYS_MEMFD_SECRET = 447 SYS_PROCESS_MRELEASE = 448 SYS_FUTEX_WAITV = 449 SYS_SET_MEMPOLICY_HOME_NODE = 450 diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_darwin_amd64.go b/tools/vendor/golang.org/x/sys/unix/ztypes_darwin_amd64.go index e2a64f0991..690cefc3d0 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_darwin_amd64.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_darwin_amd64.go @@ -151,6 +151,16 @@ type Dirent struct { _ [3]byte } +type Attrlist struct { + Bitmapcount uint16 + Reserved uint16 + Commonattr uint32 + Volattr uint32 + Dirattr uint32 + Fileattr uint32 + Forkattr uint32 +} + const ( PathMax = 0x400 ) @@ -610,6 +620,7 @@ const ( AT_REMOVEDIR = 0x80 AT_SYMLINK_FOLLOW = 0x40 AT_SYMLINK_NOFOLLOW = 0x20 + AT_EACCESS = 0x10 ) type PollFd struct { diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_darwin_arm64.go b/tools/vendor/golang.org/x/sys/unix/ztypes_darwin_arm64.go index 34aa775219..5bffc10eac 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_darwin_arm64.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_darwin_arm64.go @@ -151,6 +151,16 @@ type Dirent struct { _ [3]byte } +type Attrlist struct { + Bitmapcount uint16 + Reserved uint16 + Commonattr uint32 + Volattr uint32 + Dirattr uint32 + Fileattr uint32 + Forkattr uint32 +} + const ( PathMax = 0x400 ) @@ -610,6 +620,7 @@ const ( AT_REMOVEDIR = 0x80 AT_SYMLINK_FOLLOW = 0x40 AT_SYMLINK_NOFOLLOW = 0x20 + AT_EACCESS = 0x10 ) type PollFd struct { diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_386.go b/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_386.go index d9c78cdcbc..29dc483378 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_386.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_386.go @@ -362,7 +362,7 @@ type FpExtendedPrecision struct{} type PtraceIoDesc struct { Op int32 Offs uintptr - Addr uintptr + Addr *byte Len uint32 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_amd64.go b/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_amd64.go index 26991b1655..0a89b28906 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_amd64.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_amd64.go @@ -367,7 +367,7 @@ type FpExtendedPrecision struct{} type PtraceIoDesc struct { Op int32 Offs uintptr - Addr uintptr + Addr *byte Len uint64 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_arm.go b/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_arm.go index f8324e7e7f..c8666bb152 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_arm.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_arm.go @@ -350,7 +350,7 @@ type FpExtendedPrecision struct { type PtraceIoDesc struct { Op int32 Offs uintptr - Addr uintptr + Addr *byte 
Len uint32 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_arm64.go b/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_arm64.go index 4220411f34..88fb48a887 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_arm64.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_arm64.go @@ -347,7 +347,7 @@ type FpExtendedPrecision struct{} type PtraceIoDesc struct { Op int32 Offs uintptr - Addr uintptr + Addr *byte Len uint64 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_riscv64.go b/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_riscv64.go index 0660fd45c7..698dc975e9 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_riscv64.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_freebsd_riscv64.go @@ -348,7 +348,7 @@ type FpExtendedPrecision struct{} type PtraceIoDesc struct { Op int32 Offs uintptr - Addr uintptr + Addr *byte Len uint64 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_linux.go b/tools/vendor/golang.org/x/sys/unix/ztypes_linux.go index 7d9fc8f1c9..02e2462c8f 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_linux.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_linux.go @@ -456,36 +456,60 @@ type Ucred struct { } type TCPInfo struct { - State uint8 - Ca_state uint8 - Retransmits uint8 - Probes uint8 - Backoff uint8 - Options uint8 - Rto uint32 - Ato uint32 - Snd_mss uint32 - Rcv_mss uint32 - Unacked uint32 - Sacked uint32 - Lost uint32 - Retrans uint32 - Fackets uint32 - Last_data_sent uint32 - Last_ack_sent uint32 - Last_data_recv uint32 - Last_ack_recv uint32 - Pmtu uint32 - Rcv_ssthresh uint32 - Rtt uint32 - Rttvar uint32 - Snd_ssthresh uint32 - Snd_cwnd uint32 - Advmss uint32 - Reordering uint32 - Rcv_rtt uint32 - Rcv_space uint32 - Total_retrans uint32 + State uint8 + Ca_state uint8 + Retransmits uint8 + Probes uint8 + Backoff uint8 + Options uint8 + Rto uint32 + Ato uint32 + Snd_mss uint32 + Rcv_mss uint32 + Unacked uint32 + Sacked uint32 + Lost uint32 + Retrans uint32 + Fackets uint32 + Last_data_sent uint32 + Last_ack_sent uint32 + Last_data_recv uint32 + Last_ack_recv uint32 + Pmtu uint32 + Rcv_ssthresh uint32 + Rtt uint32 + Rttvar uint32 + Snd_ssthresh uint32 + Snd_cwnd uint32 + Advmss uint32 + Reordering uint32 + Rcv_rtt uint32 + Rcv_space uint32 + Total_retrans uint32 + Pacing_rate uint64 + Max_pacing_rate uint64 + Bytes_acked uint64 + Bytes_received uint64 + Segs_out uint32 + Segs_in uint32 + Notsent_bytes uint32 + Min_rtt uint32 + Data_segs_in uint32 + Data_segs_out uint32 + Delivery_rate uint64 + Busy_time uint64 + Rwnd_limited uint64 + Sndbuf_limited uint64 + Delivered uint32 + Delivered_ce uint32 + Bytes_sent uint64 + Bytes_retrans uint64 + Dsack_dups uint32 + Reord_seen uint32 + Rcv_ooopack uint32 + Snd_wnd uint32 + Rcv_wnd uint32 + Rehash uint32 } type CanFilter struct { @@ -528,7 +552,7 @@ const ( SizeofIPv6MTUInfo = 0x20 SizeofICMPv6Filter = 0x20 SizeofUcred = 0xc - SizeofTCPInfo = 0x68 + SizeofTCPInfo = 0xf0 SizeofCanFilter = 0x8 SizeofTCPRepairOpt = 0x8 ) @@ -1043,6 +1067,7 @@ const ( PerfBitCommExec = CBitFieldMaskBit24 PerfBitUseClockID = CBitFieldMaskBit25 PerfBitContextSwitch = CBitFieldMaskBit26 + PerfBitWriteBackward = CBitFieldMaskBit27 ) const ( @@ -1239,7 +1264,7 @@ type TCPMD5Sig struct { Flags uint8 Prefixlen uint8 Keylen uint16 - _ uint32 + Ifindex int32 Key [80]uint8 } @@ -1513,6 +1538,10 @@ const ( IFLA_GRO_MAX_SIZE = 0x3a IFLA_TSO_MAX_SIZE = 0x3b IFLA_TSO_MAX_SEGS = 0x3c + IFLA_ALLMULTI = 0x3d + IFLA_DEVLINK_PORT = 0x3e + IFLA_GSO_IPV4_MAX_SIZE = 0x3f + 
IFLA_GRO_IPV4_MAX_SIZE = 0x40 IFLA_PROTO_DOWN_REASON_UNSPEC = 0x0 IFLA_PROTO_DOWN_REASON_MASK = 0x1 IFLA_PROTO_DOWN_REASON_VALUE = 0x2 @@ -1939,7 +1968,11 @@ const ( NFT_MSG_GETOBJ = 0x13 NFT_MSG_DELOBJ = 0x14 NFT_MSG_GETOBJ_RESET = 0x15 - NFT_MSG_MAX = 0x19 + NFT_MSG_NEWFLOWTABLE = 0x16 + NFT_MSG_GETFLOWTABLE = 0x17 + NFT_MSG_DELFLOWTABLE = 0x18 + NFT_MSG_GETRULE_RESET = 0x19 + NFT_MSG_MAX = 0x21 NFTA_LIST_UNSPEC = 0x0 NFTA_LIST_ELEM = 0x1 NFTA_HOOK_UNSPEC = 0x0 @@ -2443,9 +2476,11 @@ const ( SOF_TIMESTAMPING_OPT_STATS = 0x1000 SOF_TIMESTAMPING_OPT_PKTINFO = 0x2000 SOF_TIMESTAMPING_OPT_TX_SWHW = 0x4000 + SOF_TIMESTAMPING_BIND_PHC = 0x8000 + SOF_TIMESTAMPING_OPT_ID_TCP = 0x10000 - SOF_TIMESTAMPING_LAST = 0x8000 - SOF_TIMESTAMPING_MASK = 0xffff + SOF_TIMESTAMPING_LAST = 0x10000 + SOF_TIMESTAMPING_MASK = 0x1ffff SCM_TSTAMP_SND = 0x0 SCM_TSTAMP_SCHED = 0x1 @@ -2524,6 +2559,11 @@ const ( BPF_REG_8 = 0x8 BPF_REG_9 = 0x9 BPF_REG_10 = 0xa + BPF_CGROUP_ITER_ORDER_UNSPEC = 0x0 + BPF_CGROUP_ITER_SELF_ONLY = 0x1 + BPF_CGROUP_ITER_DESCENDANTS_PRE = 0x2 + BPF_CGROUP_ITER_DESCENDANTS_POST = 0x3 + BPF_CGROUP_ITER_ANCESTORS_UP = 0x4 BPF_MAP_CREATE = 0x0 BPF_MAP_LOOKUP_ELEM = 0x1 BPF_MAP_UPDATE_ELEM = 0x2 @@ -2535,6 +2575,7 @@ const ( BPF_PROG_ATTACH = 0x8 BPF_PROG_DETACH = 0x9 BPF_PROG_TEST_RUN = 0xa + BPF_PROG_RUN = 0xa BPF_PROG_GET_NEXT_ID = 0xb BPF_MAP_GET_NEXT_ID = 0xc BPF_PROG_GET_FD_BY_ID = 0xd @@ -2579,6 +2620,7 @@ const ( BPF_MAP_TYPE_CPUMAP = 0x10 BPF_MAP_TYPE_XSKMAP = 0x11 BPF_MAP_TYPE_SOCKHASH = 0x12 + BPF_MAP_TYPE_CGROUP_STORAGE_DEPRECATED = 0x13 BPF_MAP_TYPE_CGROUP_STORAGE = 0x13 BPF_MAP_TYPE_REUSEPORT_SOCKARRAY = 0x14 BPF_MAP_TYPE_PERCPU_CGROUP_STORAGE = 0x15 @@ -2589,6 +2631,10 @@ const ( BPF_MAP_TYPE_STRUCT_OPS = 0x1a BPF_MAP_TYPE_RINGBUF = 0x1b BPF_MAP_TYPE_INODE_STORAGE = 0x1c + BPF_MAP_TYPE_TASK_STORAGE = 0x1d + BPF_MAP_TYPE_BLOOM_FILTER = 0x1e + BPF_MAP_TYPE_USER_RINGBUF = 0x1f + BPF_MAP_TYPE_CGRP_STORAGE = 0x20 BPF_PROG_TYPE_UNSPEC = 0x0 BPF_PROG_TYPE_SOCKET_FILTER = 0x1 BPF_PROG_TYPE_KPROBE = 0x2 @@ -2620,6 +2666,7 @@ const ( BPF_PROG_TYPE_EXT = 0x1c BPF_PROG_TYPE_LSM = 0x1d BPF_PROG_TYPE_SK_LOOKUP = 0x1e + BPF_PROG_TYPE_SYSCALL = 0x1f BPF_CGROUP_INET_INGRESS = 0x0 BPF_CGROUP_INET_EGRESS = 0x1 BPF_CGROUP_INET_SOCK_CREATE = 0x2 @@ -2658,6 +2705,12 @@ const ( BPF_XDP_CPUMAP = 0x23 BPF_SK_LOOKUP = 0x24 BPF_XDP = 0x25 + BPF_SK_SKB_VERDICT = 0x26 + BPF_SK_REUSEPORT_SELECT = 0x27 + BPF_SK_REUSEPORT_SELECT_OR_MIGRATE = 0x28 + BPF_PERF_EVENT = 0x29 + BPF_TRACE_KPROBE_MULTI = 0x2a + BPF_LSM_CGROUP = 0x2b BPF_LINK_TYPE_UNSPEC = 0x0 BPF_LINK_TYPE_RAW_TRACEPOINT = 0x1 BPF_LINK_TYPE_TRACING = 0x2 @@ -2665,6 +2718,9 @@ const ( BPF_LINK_TYPE_ITER = 0x4 BPF_LINK_TYPE_NETNS = 0x5 BPF_LINK_TYPE_XDP = 0x6 + BPF_LINK_TYPE_PERF_EVENT = 0x7 + BPF_LINK_TYPE_KPROBE_MULTI = 0x8 + BPF_LINK_TYPE_STRUCT_OPS = 0x9 BPF_ANY = 0x0 BPF_NOEXIST = 0x1 BPF_EXIST = 0x2 @@ -2702,6 +2758,7 @@ const ( BPF_F_ZERO_CSUM_TX = 0x2 BPF_F_DONT_FRAGMENT = 0x4 BPF_F_SEQ_NUMBER = 0x8 + BPF_F_TUNINFO_FLAGS = 0x10 BPF_F_INDEX_MASK = 0xffffffff BPF_F_CURRENT_CPU = 0xffffffff BPF_F_CTXLEN_MASK = 0xfffff00000000 @@ -2716,6 +2773,7 @@ const ( BPF_F_ADJ_ROOM_ENCAP_L4_GRE = 0x8 BPF_F_ADJ_ROOM_ENCAP_L4_UDP = 0x10 BPF_F_ADJ_ROOM_NO_CSUM_RESET = 0x20 + BPF_F_ADJ_ROOM_ENCAP_L2_ETH = 0x40 BPF_ADJ_ROOM_ENCAP_L2_MASK = 0xff BPF_ADJ_ROOM_ENCAP_L2_SHIFT = 0x38 BPF_F_SYSCTL_BASE_NAME = 0x1 @@ -2740,10 +2798,16 @@ const ( BPF_LWT_ENCAP_SEG6 = 0x0 BPF_LWT_ENCAP_SEG6_INLINE = 0x1 BPF_LWT_ENCAP_IP = 0x2 + BPF_F_BPRM_SECUREEXEC = 0x1 + BPF_F_BROADCAST = 
0x8 + BPF_F_EXCLUDE_INGRESS = 0x10 + BPF_SKB_TSTAMP_UNSPEC = 0x0 + BPF_SKB_TSTAMP_DELIVERY_MONO = 0x1 BPF_OK = 0x0 BPF_DROP = 0x2 BPF_REDIRECT = 0x7 BPF_LWT_REROUTE = 0x80 + BPF_FLOW_DISSECTOR_CONTINUE = 0x81 BPF_SOCK_OPS_RTO_CB_FLAG = 0x1 BPF_SOCK_OPS_RETRANS_CB_FLAG = 0x2 BPF_SOCK_OPS_STATE_CB_FLAG = 0x4 @@ -2807,6 +2871,10 @@ const ( BPF_FIB_LKUP_RET_UNSUPP_LWT = 0x6 BPF_FIB_LKUP_RET_NO_NEIGH = 0x7 BPF_FIB_LKUP_RET_FRAG_NEEDED = 0x8 + BPF_MTU_CHK_SEGS = 0x1 + BPF_MTU_CHK_RET_SUCCESS = 0x0 + BPF_MTU_CHK_RET_FRAG_NEEDED = 0x1 + BPF_MTU_CHK_RET_SEGS_TOOBIG = 0x2 BPF_FD_TYPE_RAW_TRACEPOINT = 0x0 BPF_FD_TYPE_TRACEPOINT = 0x1 BPF_FD_TYPE_KPROBE = 0x2 @@ -2816,6 +2884,19 @@ const ( BPF_FLOW_DISSECTOR_F_PARSE_1ST_FRAG = 0x1 BPF_FLOW_DISSECTOR_F_STOP_AT_FLOW_LABEL = 0x2 BPF_FLOW_DISSECTOR_F_STOP_AT_ENCAP = 0x4 + BPF_CORE_FIELD_BYTE_OFFSET = 0x0 + BPF_CORE_FIELD_BYTE_SIZE = 0x1 + BPF_CORE_FIELD_EXISTS = 0x2 + BPF_CORE_FIELD_SIGNED = 0x3 + BPF_CORE_FIELD_LSHIFT_U64 = 0x4 + BPF_CORE_FIELD_RSHIFT_U64 = 0x5 + BPF_CORE_TYPE_ID_LOCAL = 0x6 + BPF_CORE_TYPE_ID_TARGET = 0x7 + BPF_CORE_TYPE_EXISTS = 0x8 + BPF_CORE_TYPE_SIZE = 0x9 + BPF_CORE_ENUMVAL_EXISTS = 0xa + BPF_CORE_ENUMVAL_VALUE = 0xb + BPF_CORE_TYPE_MATCHES = 0xc ) const ( @@ -3265,7 +3346,7 @@ const ( DEVLINK_ATTR_LINECARD_SUPPORTED_TYPES = 0xae DEVLINK_ATTR_NESTED_DEVLINK = 0xaf DEVLINK_ATTR_SELFTESTS = 0xb0 - DEVLINK_ATTR_MAX = 0xb0 + DEVLINK_ATTR_MAX = 0xb3 DEVLINK_DPIPE_FIELD_MAPPING_TYPE_NONE = 0x0 DEVLINK_DPIPE_FIELD_MAPPING_TYPE_IFINDEX = 0x1 DEVLINK_DPIPE_MATCH_TYPE_FIELD_EXACT = 0x0 @@ -3281,7 +3362,8 @@ const ( DEVLINK_PORT_FUNCTION_ATTR_HW_ADDR = 0x1 DEVLINK_PORT_FN_ATTR_STATE = 0x2 DEVLINK_PORT_FN_ATTR_OPSTATE = 0x3 - DEVLINK_PORT_FUNCTION_ATTR_MAX = 0x3 + DEVLINK_PORT_FN_ATTR_CAPS = 0x4 + DEVLINK_PORT_FUNCTION_ATTR_MAX = 0x4 ) type FsverityDigest struct { @@ -3572,7 +3654,8 @@ const ( ETHTOOL_MSG_MODULE_SET = 0x23 ETHTOOL_MSG_PSE_GET = 0x24 ETHTOOL_MSG_PSE_SET = 0x25 - ETHTOOL_MSG_USER_MAX = 0x25 + ETHTOOL_MSG_RSS_GET = 0x26 + ETHTOOL_MSG_USER_MAX = 0x2b ETHTOOL_MSG_KERNEL_NONE = 0x0 ETHTOOL_MSG_STRSET_GET_REPLY = 0x1 ETHTOOL_MSG_LINKINFO_GET_REPLY = 0x2 @@ -3611,7 +3694,8 @@ const ( ETHTOOL_MSG_MODULE_GET_REPLY = 0x23 ETHTOOL_MSG_MODULE_NTF = 0x24 ETHTOOL_MSG_PSE_GET_REPLY = 0x25 - ETHTOOL_MSG_KERNEL_MAX = 0x25 + ETHTOOL_MSG_RSS_GET_REPLY = 0x26 + ETHTOOL_MSG_KERNEL_MAX = 0x2b ETHTOOL_A_HEADER_UNSPEC = 0x0 ETHTOOL_A_HEADER_DEV_INDEX = 0x1 ETHTOOL_A_HEADER_DEV_NAME = 0x2 @@ -3679,7 +3763,8 @@ const ( ETHTOOL_A_LINKSTATE_SQI_MAX = 0x4 ETHTOOL_A_LINKSTATE_EXT_STATE = 0x5 ETHTOOL_A_LINKSTATE_EXT_SUBSTATE = 0x6 - ETHTOOL_A_LINKSTATE_MAX = 0x6 + ETHTOOL_A_LINKSTATE_EXT_DOWN_CNT = 0x7 + ETHTOOL_A_LINKSTATE_MAX = 0x7 ETHTOOL_A_DEBUG_UNSPEC = 0x0 ETHTOOL_A_DEBUG_HEADER = 0x1 ETHTOOL_A_DEBUG_MSGMASK = 0x2 @@ -3714,7 +3799,7 @@ const ( ETHTOOL_A_RINGS_TCP_DATA_SPLIT = 0xb ETHTOOL_A_RINGS_CQE_SIZE = 0xc ETHTOOL_A_RINGS_TX_PUSH = 0xd - ETHTOOL_A_RINGS_MAX = 0xd + ETHTOOL_A_RINGS_MAX = 0x10 ETHTOOL_A_CHANNELS_UNSPEC = 0x0 ETHTOOL_A_CHANNELS_HEADER = 0x1 ETHTOOL_A_CHANNELS_RX_MAX = 0x2 @@ -3752,14 +3837,14 @@ const ( ETHTOOL_A_COALESCE_RATE_SAMPLE_INTERVAL = 0x17 ETHTOOL_A_COALESCE_USE_CQE_MODE_TX = 0x18 ETHTOOL_A_COALESCE_USE_CQE_MODE_RX = 0x19 - ETHTOOL_A_COALESCE_MAX = 0x19 + ETHTOOL_A_COALESCE_MAX = 0x1c ETHTOOL_A_PAUSE_UNSPEC = 0x0 ETHTOOL_A_PAUSE_HEADER = 0x1 ETHTOOL_A_PAUSE_AUTONEG = 0x2 ETHTOOL_A_PAUSE_RX = 0x3 ETHTOOL_A_PAUSE_TX = 0x4 ETHTOOL_A_PAUSE_STATS = 0x5 - ETHTOOL_A_PAUSE_MAX = 0x5 + ETHTOOL_A_PAUSE_MAX = 0x6 
ETHTOOL_A_PAUSE_STAT_UNSPEC = 0x0 ETHTOOL_A_PAUSE_STAT_PAD = 0x1 ETHTOOL_A_PAUSE_STAT_TX_FRAMES = 0x2 @@ -4409,7 +4494,7 @@ const ( NL80211_ATTR_MAC_HINT = 0xc8 NL80211_ATTR_MAC_MASK = 0xd7 NL80211_ATTR_MAX_AP_ASSOC_STA = 0xca - NL80211_ATTR_MAX = 0x140 + NL80211_ATTR_MAX = 0x145 NL80211_ATTR_MAX_CRIT_PROT_DURATION = 0xb4 NL80211_ATTR_MAX_CSA_COUNTERS = 0xce NL80211_ATTR_MAX_MATCH_SETS = 0x85 @@ -4552,6 +4637,7 @@ const ( NL80211_ATTR_SUPPORT_MESH_AUTH = 0x73 NL80211_ATTR_SURVEY_INFO = 0x54 NL80211_ATTR_SURVEY_RADIO_STATS = 0xda + NL80211_ATTR_TD_BITMAP = 0x141 NL80211_ATTR_TDLS_ACTION = 0x88 NL80211_ATTR_TDLS_DIALOG_TOKEN = 0x89 NL80211_ATTR_TDLS_EXTERNAL_SETUP = 0x8c @@ -4637,7 +4723,7 @@ const ( NL80211_BAND_ATTR_HT_CAPA = 0x4 NL80211_BAND_ATTR_HT_MCS_SET = 0x3 NL80211_BAND_ATTR_IFTYPE_DATA = 0x9 - NL80211_BAND_ATTR_MAX = 0xb + NL80211_BAND_ATTR_MAX = 0xd NL80211_BAND_ATTR_RATES = 0x2 NL80211_BAND_ATTR_VHT_CAPA = 0x8 NL80211_BAND_ATTR_VHT_MCS_SET = 0x7 @@ -4778,7 +4864,7 @@ const ( NL80211_CMD_LEAVE_IBSS = 0x2c NL80211_CMD_LEAVE_MESH = 0x45 NL80211_CMD_LEAVE_OCB = 0x6d - NL80211_CMD_MAX = 0x98 + NL80211_CMD_MAX = 0x99 NL80211_CMD_MICHAEL_MIC_FAILURE = 0x29 NL80211_CMD_MODIFY_LINK_STA = 0x97 NL80211_CMD_NAN_MATCH = 0x78 @@ -5752,3 +5838,28 @@ const ( AUDIT_NLGRP_NONE = 0x0 AUDIT_NLGRP_READLOG = 0x1 ) + +const ( + TUN_F_CSUM = 0x1 + TUN_F_TSO4 = 0x2 + TUN_F_TSO6 = 0x4 + TUN_F_TSO_ECN = 0x8 + TUN_F_UFO = 0x10 + TUN_F_USO4 = 0x20 + TUN_F_USO6 = 0x40 +) + +const ( + VIRTIO_NET_HDR_F_NEEDS_CSUM = 0x1 + VIRTIO_NET_HDR_F_DATA_VALID = 0x2 + VIRTIO_NET_HDR_F_RSC_INFO = 0x4 +) + +const ( + VIRTIO_NET_HDR_GSO_NONE = 0x0 + VIRTIO_NET_HDR_GSO_TCPV4 = 0x1 + VIRTIO_NET_HDR_GSO_UDP = 0x3 + VIRTIO_NET_HDR_GSO_TCPV6 = 0x4 + VIRTIO_NET_HDR_GSO_UDP_L4 = 0x5 + VIRTIO_NET_HDR_GSO_ECN = 0x80 +) diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_386.go b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_386.go index 89c516a29a..6d8acbcc57 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_386.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_386.go @@ -337,6 +337,8 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Irq_count uint64 + Irq_delay_total uint64 } type cpuMask uint32 @@ -414,7 +416,7 @@ const ( type SockaddrStorage struct { Family uint16 - _ [122]int8 + Data [122]byte _ uint32 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go index 62b4fb2699..59293c6884 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_amd64.go @@ -350,6 +350,8 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Irq_count uint64 + Irq_delay_total uint64 } type cpuMask uint64 @@ -427,7 +429,7 @@ const ( type SockaddrStorage struct { Family uint16 - _ [118]int8 + Data [118]byte _ uint64 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go index e86b35893e..40cfa38c29 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_arm.go @@ -328,6 +328,8 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Irq_count uint64 + Irq_delay_total uint64 } type cpuMask uint32 @@ -405,7 +407,7 @@ const ( type SockaddrStorage struct { Family uint16 - _ [122]uint8 + Data [122]byte _ uint32 } diff --git 
a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go index 6c6be4c911..055bc4216d 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_arm64.go @@ -329,6 +329,8 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Irq_count uint64 + Irq_delay_total uint64 } type cpuMask uint64 @@ -406,7 +408,7 @@ const ( type SockaddrStorage struct { Family uint16 - _ [118]int8 + Data [118]byte _ uint64 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go index 4982ea355a..f28affbc60 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_loong64.go @@ -330,6 +330,8 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Irq_count uint64 + Irq_delay_total uint64 } type cpuMask uint64 @@ -407,7 +409,7 @@ const ( type SockaddrStorage struct { Family uint16 - _ [118]int8 + Data [118]byte _ uint64 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go index 173141a670..9d71e7ccd8 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_mips.go @@ -333,6 +333,8 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Irq_count uint64 + Irq_delay_total uint64 } type cpuMask uint32 @@ -410,7 +412,7 @@ const ( type SockaddrStorage struct { Family uint16 - _ [122]int8 + Data [122]byte _ uint32 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go index 93ae4c5167..fd5ccd332a 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_mips64.go @@ -332,6 +332,8 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Irq_count uint64 + Irq_delay_total uint64 } type cpuMask uint64 @@ -409,7 +411,7 @@ const ( type SockaddrStorage struct { Family uint16 - _ [118]int8 + Data [118]byte _ uint64 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go index 4e4e510ca5..7704de77a2 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_mips64le.go @@ -332,6 +332,8 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Irq_count uint64 + Irq_delay_total uint64 } type cpuMask uint64 @@ -409,7 +411,7 @@ const ( type SockaddrStorage struct { Family uint16 - _ [118]int8 + Data [118]byte _ uint64 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go index 3f5ba013d9..df00b87571 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_mipsle.go @@ -333,6 +333,8 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Irq_count uint64 + Irq_delay_total uint64 } type cpuMask uint32 @@ -410,7 +412,7 @@ const ( type SockaddrStorage struct { Family uint16 - _ [122]int8 + Data [122]byte _ uint32 } diff --git 
a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go index 71dfe7cdb4..0942840db6 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_ppc.go @@ -340,6 +340,8 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Irq_count uint64 + Irq_delay_total uint64 } type cpuMask uint32 @@ -417,7 +419,7 @@ const ( type SockaddrStorage struct { Family uint16 - _ [122]uint8 + Data [122]byte _ uint32 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go index 3a2b7f0a66..0348743950 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64.go @@ -339,6 +339,8 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Irq_count uint64 + Irq_delay_total uint64 } type cpuMask uint64 @@ -416,7 +418,7 @@ const ( type SockaddrStorage struct { Family uint16 - _ [118]uint8 + Data [118]byte _ uint64 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go index a52d627563..bad0670475 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_ppc64le.go @@ -339,6 +339,8 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Irq_count uint64 + Irq_delay_total uint64 } type cpuMask uint64 @@ -416,7 +418,7 @@ const ( type SockaddrStorage struct { Family uint16 - _ [118]uint8 + Data [118]byte _ uint64 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go index dfc007d8a6..9ea54b7b86 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_riscv64.go @@ -357,6 +357,8 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Irq_count uint64 + Irq_delay_total uint64 } type cpuMask uint64 @@ -434,7 +436,7 @@ const ( type SockaddrStorage struct { Family uint16 - _ [118]uint8 + Data [118]byte _ uint64 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go index b53cb9103d..aa268d025c 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_s390x.go @@ -352,6 +352,8 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Irq_count uint64 + Irq_delay_total uint64 } type cpuMask uint64 @@ -429,7 +431,7 @@ const ( type SockaddrStorage struct { Family uint16 - _ [118]int8 + Data [118]byte _ uint64 } diff --git a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go index fe0aa35472..444045b6c5 100644 --- a/tools/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go +++ b/tools/vendor/golang.org/x/sys/unix/ztypes_linux_sparc64.go @@ -334,6 +334,8 @@ type Taskstats struct { Ac_exe_inode uint64 Wpcopy_count uint64 Wpcopy_delay_total uint64 + Irq_count uint64 + Irq_delay_total uint64 } type cpuMask uint64 @@ -411,7 +413,7 @@ const ( type SockaddrStorage struct { Family uint16 - _ [118]int8 + Data [118]byte _ uint64 } diff --git 
a/tools/vendor/golang.org/x/sys/windows/env_windows.go b/tools/vendor/golang.org/x/sys/windows/env_windows.go index 92ac05ff4e..b8ad192506 100644 --- a/tools/vendor/golang.org/x/sys/windows/env_windows.go +++ b/tools/vendor/golang.org/x/sys/windows/env_windows.go @@ -37,14 +37,14 @@ func (token Token) Environ(inheritExisting bool) (env []string, err error) { return nil, err } defer DestroyEnvironmentBlock(block) - blockp := uintptr(unsafe.Pointer(block)) + blockp := unsafe.Pointer(block) for { - entry := UTF16PtrToString((*uint16)(unsafe.Pointer(blockp))) + entry := UTF16PtrToString((*uint16)(blockp)) if len(entry) == 0 { break } env = append(env, entry) - blockp += 2 * (uintptr(len(entry)) + 1) + blockp = unsafe.Add(blockp, 2*(len(entry)+1)) } return env, nil } diff --git a/tools/vendor/golang.org/x/sys/windows/exec_windows.go b/tools/vendor/golang.org/x/sys/windows/exec_windows.go index 75980fd44a..a52e0331d8 100644 --- a/tools/vendor/golang.org/x/sys/windows/exec_windows.go +++ b/tools/vendor/golang.org/x/sys/windows/exec_windows.go @@ -95,12 +95,17 @@ func ComposeCommandLine(args []string) string { // DecomposeCommandLine breaks apart its argument command line into unescaped parts using CommandLineToArgv, // as gathered from GetCommandLine, QUERY_SERVICE_CONFIG's BinaryPathName argument, or elsewhere that // command lines are passed around. +// DecomposeCommandLine returns error if commandLine contains NUL. func DecomposeCommandLine(commandLine string) ([]string, error) { if len(commandLine) == 0 { return []string{}, nil } + utf16CommandLine, err := UTF16FromString(commandLine) + if err != nil { + return nil, errorspkg.New("string with NUL passed to DecomposeCommandLine") + } var argc int32 - argv, err := CommandLineToArgv(StringToUTF16Ptr(commandLine), &argc) + argv, err := CommandLineToArgv(&utf16CommandLine[0], &argc) if err != nil { return nil, err } diff --git a/tools/vendor/golang.org/x/sys/windows/service.go b/tools/vendor/golang.org/x/sys/windows/service.go index f8deca8397..c44a1b9636 100644 --- a/tools/vendor/golang.org/x/sys/windows/service.go +++ b/tools/vendor/golang.org/x/sys/windows/service.go @@ -141,6 +141,12 @@ const ( SERVICE_DYNAMIC_INFORMATION_LEVEL_START_REASON = 1 ) +type ENUM_SERVICE_STATUS struct { + ServiceName *uint16 + DisplayName *uint16 + ServiceStatus SERVICE_STATUS +} + type SERVICE_STATUS struct { ServiceType uint32 CurrentState uint32 @@ -212,6 +218,10 @@ type SERVICE_FAILURE_ACTIONS struct { Actions *SC_ACTION } +type SERVICE_FAILURE_ACTIONS_FLAG struct { + FailureActionsOnNonCrashFailures int32 +} + type SC_ACTION struct { Type uint32 Delay uint32 @@ -245,3 +255,4 @@ type QUERY_SERVICE_LOCK_STATUS struct { //sys UnsubscribeServiceChangeNotifications(subscription uintptr) = sechost.UnsubscribeServiceChangeNotifications? //sys RegisterServiceCtrlHandlerEx(serviceName *uint16, handlerProc uintptr, context uintptr) (handle Handle, err error) = advapi32.RegisterServiceCtrlHandlerExW //sys QueryServiceDynamicInformation(service Handle, infoLevel uint32, dynamicInfo unsafe.Pointer) (err error) = advapi32.QueryServiceDynamicInformation? 
+//sys EnumDependentServices(service Handle, activityState uint32, services *ENUM_SERVICE_STATUS, buffSize uint32, bytesNeeded *uint32, servicesReturned *uint32) (err error) = advapi32.EnumDependentServicesW diff --git a/tools/vendor/golang.org/x/sys/windows/syscall_windows.go b/tools/vendor/golang.org/x/sys/windows/syscall_windows.go index 41cb3c01fd..9645900754 100644 --- a/tools/vendor/golang.org/x/sys/windows/syscall_windows.go +++ b/tools/vendor/golang.org/x/sys/windows/syscall_windows.go @@ -405,7 +405,7 @@ func NewCallbackCDecl(fn interface{}) uintptr { //sys VerQueryValue(block unsafe.Pointer, subBlock string, pointerToBufferPointer unsafe.Pointer, bufSize *uint32) (err error) = version.VerQueryValueW // Process Status API (PSAPI) -//sys EnumProcesses(processIds []uint32, bytesReturned *uint32) (err error) = psapi.EnumProcesses +//sys enumProcesses(processIds *uint32, nSize uint32, bytesReturned *uint32) (err error) = psapi.EnumProcesses //sys EnumProcessModules(process Handle, module *Handle, cb uint32, cbNeeded *uint32) (err error) = psapi.EnumProcessModules //sys EnumProcessModulesEx(process Handle, module *Handle, cb uint32, cbNeeded *uint32, filterFlag uint32) (err error) = psapi.EnumProcessModulesEx //sys GetModuleInformation(process Handle, module Handle, modinfo *ModuleInfo, cb uint32) (err error) = psapi.GetModuleInformation @@ -824,6 +824,9 @@ const socket_error = uintptr(^uint32(0)) //sys WSAStartup(verreq uint32, data *WSAData) (sockerr error) = ws2_32.WSAStartup //sys WSACleanup() (err error) [failretval==socket_error] = ws2_32.WSACleanup //sys WSAIoctl(s Handle, iocc uint32, inbuf *byte, cbif uint32, outbuf *byte, cbob uint32, cbbr *uint32, overlapped *Overlapped, completionRoutine uintptr) (err error) [failretval==socket_error] = ws2_32.WSAIoctl +//sys WSALookupServiceBegin(querySet *WSAQUERYSET, flags uint32, handle *Handle) (err error) [failretval==socket_error] = ws2_32.WSALookupServiceBeginW +//sys WSALookupServiceNext(handle Handle, flags uint32, size *int32, querySet *WSAQUERYSET) (err error) [failretval==socket_error] = ws2_32.WSALookupServiceNextW +//sys WSALookupServiceEnd(handle Handle) (err error) [failretval==socket_error] = ws2_32.WSALookupServiceEnd //sys socket(af int32, typ int32, protocol int32) (handle Handle, err error) [failretval==InvalidHandle] = ws2_32.socket //sys sendto(s Handle, buf []byte, flags int32, to unsafe.Pointer, tolen int32) (err error) [failretval==socket_error] = ws2_32.sendto //sys recvfrom(s Handle, buf []byte, flags int32, from *RawSockaddrAny, fromlen *int32) (n int32, err error) [failretval==-1] = ws2_32.recvfrom @@ -1019,8 +1022,7 @@ func (rsa *RawSockaddrAny) Sockaddr() (Sockaddr, error) { for n < len(pp.Path) && pp.Path[n] != 0 { n++ } - bytes := (*[len(pp.Path)]byte)(unsafe.Pointer(&pp.Path[0]))[0:n] - sa.Name = string(bytes) + sa.Name = string(unsafe.Slice((*byte)(unsafe.Pointer(&pp.Path[0])), n)) return sa, nil case AF_INET: @@ -1352,6 +1354,17 @@ func SetsockoptIPv6Mreq(fd Handle, level, opt int, mreq *IPv6Mreq) (err error) { return syscall.EWINDOWS } +func EnumProcesses(processIds []uint32, bytesReturned *uint32) error { + // EnumProcesses syscall expects the size parameter to be in bytes, but the code generated with mksyscall uses + // the length of the processIds slice instead. Hence, this wrapper function is added to fix the discrepancy. 
+ var p *uint32 + if len(processIds) > 0 { + p = &processIds[0] + } + size := uint32(len(processIds) * 4) + return enumProcesses(p, size, bytesReturned) +} + func Getpid() (pid int) { return int(GetCurrentProcessId()) } func FindFirstFile(name *uint16, data *Win32finddata) (handle Handle, err error) { diff --git a/tools/vendor/golang.org/x/sys/windows/types_windows.go b/tools/vendor/golang.org/x/sys/windows/types_windows.go index 0c4add9741..88e62a6385 100644 --- a/tools/vendor/golang.org/x/sys/windows/types_windows.go +++ b/tools/vendor/golang.org/x/sys/windows/types_windows.go @@ -1243,6 +1243,51 @@ const ( DnsSectionAdditional = 0x0003 ) +const ( + // flags of WSALookupService + LUP_DEEP = 0x0001 + LUP_CONTAINERS = 0x0002 + LUP_NOCONTAINERS = 0x0004 + LUP_NEAREST = 0x0008 + LUP_RETURN_NAME = 0x0010 + LUP_RETURN_TYPE = 0x0020 + LUP_RETURN_VERSION = 0x0040 + LUP_RETURN_COMMENT = 0x0080 + LUP_RETURN_ADDR = 0x0100 + LUP_RETURN_BLOB = 0x0200 + LUP_RETURN_ALIASES = 0x0400 + LUP_RETURN_QUERY_STRING = 0x0800 + LUP_RETURN_ALL = 0x0FF0 + LUP_RES_SERVICE = 0x8000 + + LUP_FLUSHCACHE = 0x1000 + LUP_FLUSHPREVIOUS = 0x2000 + + LUP_NON_AUTHORITATIVE = 0x4000 + LUP_SECURE = 0x8000 + LUP_RETURN_PREFERRED_NAMES = 0x10000 + LUP_DNS_ONLY = 0x20000 + + LUP_ADDRCONFIG = 0x100000 + LUP_DUAL_ADDR = 0x200000 + LUP_FILESERVER = 0x400000 + LUP_DISABLE_IDN_ENCODING = 0x00800000 + LUP_API_ANSI = 0x01000000 + + LUP_RESOLUTION_HANDLE = 0x80000000 +) + +const ( + // values of WSAQUERYSET's namespace + NS_ALL = 0 + NS_DNS = 12 + NS_NLA = 15 + NS_BTH = 16 + NS_EMAIL = 37 + NS_PNRPNAME = 38 + NS_PNRPCLOUD = 39 +) + type DNSSRVData struct { Target *uint16 Priority uint16 @@ -2175,19 +2220,23 @@ type JOBOBJECT_BASIC_UI_RESTRICTIONS struct { } const ( - // JobObjectInformationClass + // JobObjectInformationClass for QueryInformationJobObject and SetInformationJobObject JobObjectAssociateCompletionPortInformation = 7 + JobObjectBasicAccountingInformation = 1 + JobObjectBasicAndIoAccountingInformation = 8 JobObjectBasicLimitInformation = 2 + JobObjectBasicProcessIdList = 3 JobObjectBasicUIRestrictions = 4 JobObjectCpuRateControlInformation = 15 JobObjectEndOfJobTimeInformation = 6 JobObjectExtendedLimitInformation = 9 JobObjectGroupInformation = 11 JobObjectGroupInformationEx = 14 - JobObjectLimitViolationInformation2 = 35 + JobObjectLimitViolationInformation = 13 + JobObjectLimitViolationInformation2 = 34 JobObjectNetRateControlInformation = 32 JobObjectNotificationLimitInformation = 12 - JobObjectNotificationLimitInformation2 = 34 + JobObjectNotificationLimitInformation2 = 33 JobObjectSecurityLimitInformation = 5 ) @@ -3258,3 +3307,43 @@ const ( DWMWA_TEXT_COLOR = 36 DWMWA_VISIBLE_FRAME_BORDER_THICKNESS = 37 ) + +type WSAQUERYSET struct { + Size uint32 + ServiceInstanceName *uint16 + ServiceClassId *GUID + Version *WSAVersion + Comment *uint16 + NameSpace uint32 + NSProviderId *GUID + Context *uint16 + NumberOfProtocols uint32 + AfpProtocols *AFProtocols + QueryString *uint16 + NumberOfCsAddrs uint32 + SaBuffer *CSAddrInfo + OutputFlags uint32 + Blob *BLOB +} + +type WSAVersion struct { + Version uint32 + EnumerationOfComparison int32 +} + +type AFProtocols struct { + AddressFamily int32 + Protocol int32 +} + +type CSAddrInfo struct { + LocalAddr SocketAddress + RemoteAddr SocketAddress + SocketType int32 + Protocol int32 +} + +type BLOB struct { + Size uint32 + BlobData *byte +} diff --git a/tools/vendor/golang.org/x/sys/windows/zsyscall_windows.go b/tools/vendor/golang.org/x/sys/windows/zsyscall_windows.go index 
ac60052e44..566dd3e315 100644 --- a/tools/vendor/golang.org/x/sys/windows/zsyscall_windows.go +++ b/tools/vendor/golang.org/x/sys/windows/zsyscall_windows.go @@ -86,6 +86,7 @@ var ( procDeleteService = modadvapi32.NewProc("DeleteService") procDeregisterEventSource = modadvapi32.NewProc("DeregisterEventSource") procDuplicateTokenEx = modadvapi32.NewProc("DuplicateTokenEx") + procEnumDependentServicesW = modadvapi32.NewProc("EnumDependentServicesW") procEnumServicesStatusExW = modadvapi32.NewProc("EnumServicesStatusExW") procEqualSid = modadvapi32.NewProc("EqualSid") procFreeSid = modadvapi32.NewProc("FreeSid") @@ -474,6 +475,9 @@ var ( procWSAEnumProtocolsW = modws2_32.NewProc("WSAEnumProtocolsW") procWSAGetOverlappedResult = modws2_32.NewProc("WSAGetOverlappedResult") procWSAIoctl = modws2_32.NewProc("WSAIoctl") + procWSALookupServiceBeginW = modws2_32.NewProc("WSALookupServiceBeginW") + procWSALookupServiceEnd = modws2_32.NewProc("WSALookupServiceEnd") + procWSALookupServiceNextW = modws2_32.NewProc("WSALookupServiceNextW") procWSARecv = modws2_32.NewProc("WSARecv") procWSARecvFrom = modws2_32.NewProc("WSARecvFrom") procWSASend = modws2_32.NewProc("WSASend") @@ -731,6 +735,14 @@ func DuplicateTokenEx(existingToken Token, desiredAccess uint32, tokenAttributes return } +func EnumDependentServices(service Handle, activityState uint32, services *ENUM_SERVICE_STATUS, buffSize uint32, bytesNeeded *uint32, servicesReturned *uint32) (err error) { + r1, _, e1 := syscall.Syscall6(procEnumDependentServicesW.Addr(), 6, uintptr(service), uintptr(activityState), uintptr(unsafe.Pointer(services)), uintptr(buffSize), uintptr(unsafe.Pointer(bytesNeeded)), uintptr(unsafe.Pointer(servicesReturned))) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + func EnumServicesStatusEx(mgr Handle, infoLevel uint32, serviceType uint32, serviceState uint32, services *byte, bufSize uint32, bytesNeeded *uint32, servicesReturned *uint32, resumeHandle *uint32, groupName *uint16) (err error) { r1, _, e1 := syscall.Syscall12(procEnumServicesStatusExW.Addr(), 10, uintptr(mgr), uintptr(infoLevel), uintptr(serviceType), uintptr(serviceState), uintptr(unsafe.Pointer(services)), uintptr(bufSize), uintptr(unsafe.Pointer(bytesNeeded)), uintptr(unsafe.Pointer(servicesReturned)), uintptr(unsafe.Pointer(resumeHandle)), uintptr(unsafe.Pointer(groupName)), 0, 0) if r1 == 0 { @@ -3504,12 +3516,8 @@ func EnumProcessModulesEx(process Handle, module *Handle, cb uint32, cbNeeded *u return } -func EnumProcesses(processIds []uint32, bytesReturned *uint32) (err error) { - var _p0 *uint32 - if len(processIds) > 0 { - _p0 = &processIds[0] - } - r1, _, e1 := syscall.Syscall(procEnumProcesses.Addr(), 3, uintptr(unsafe.Pointer(_p0)), uintptr(len(processIds)), uintptr(unsafe.Pointer(bytesReturned))) +func enumProcesses(processIds *uint32, nSize uint32, bytesReturned *uint32) (err error) { + r1, _, e1 := syscall.Syscall(procEnumProcesses.Addr(), 3, uintptr(unsafe.Pointer(processIds)), uintptr(nSize), uintptr(unsafe.Pointer(bytesReturned))) if r1 == 0 { err = errnoErr(e1) } @@ -4067,6 +4075,30 @@ func WSAIoctl(s Handle, iocc uint32, inbuf *byte, cbif uint32, outbuf *byte, cbo return } +func WSALookupServiceBegin(querySet *WSAQUERYSET, flags uint32, handle *Handle) (err error) { + r1, _, e1 := syscall.Syscall(procWSALookupServiceBeginW.Addr(), 3, uintptr(unsafe.Pointer(querySet)), uintptr(flags), uintptr(unsafe.Pointer(handle))) + if r1 == socket_error { + err = errnoErr(e1) + } + return +} + +func WSALookupServiceEnd(handle Handle) (err error) { + 
r1, _, e1 := syscall.Syscall(procWSALookupServiceEnd.Addr(), 1, uintptr(handle), 0, 0) + if r1 == socket_error { + err = errnoErr(e1) + } + return +} + +func WSALookupServiceNext(handle Handle, flags uint32, size *int32, querySet *WSAQUERYSET) (err error) { + r1, _, e1 := syscall.Syscall6(procWSALookupServiceNextW.Addr(), 4, uintptr(handle), uintptr(flags), uintptr(unsafe.Pointer(size)), uintptr(unsafe.Pointer(querySet)), 0, 0) + if r1 == socket_error { + err = errnoErr(e1) + } + return +} + func WSARecv(s Handle, bufs *WSABuf, bufcnt uint32, recvd *uint32, flags *uint32, overlapped *Overlapped, croutine *byte) (err error) { r1, _, e1 := syscall.Syscall9(procWSARecv.Addr(), 7, uintptr(s), uintptr(unsafe.Pointer(bufs)), uintptr(bufcnt), uintptr(unsafe.Pointer(recvd)), uintptr(unsafe.Pointer(flags)), uintptr(unsafe.Pointer(overlapped)), uintptr(unsafe.Pointer(croutine)), 0, 0) if r1 == socket_error { diff --git a/tools/vendor/golang.org/x/text/runes/cond.go b/tools/vendor/golang.org/x/text/runes/cond.go new file mode 100644 index 0000000000..df7aa02db6 --- /dev/null +++ b/tools/vendor/golang.org/x/text/runes/cond.go @@ -0,0 +1,187 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package runes + +import ( + "unicode/utf8" + + "golang.org/x/text/transform" +) + +// Note: below we pass invalid UTF-8 to the tIn and tNotIn transformers as is. +// This is done for various reasons: +// - To retain the semantics of the Nop transformer: if input is passed to a Nop +// one would expect it to be unchanged. +// - It would be very expensive to pass a converted RuneError to a transformer: +// a transformer might need more source bytes after RuneError, meaning that +// the only way to pass it safely is to create a new buffer and manage the +// intermingling of RuneErrors and normal input. +// - Many transformers leave ill-formed UTF-8 as is, so this is not +// inconsistent. Generally ill-formed UTF-8 is only replaced if it is a +// logical consequence of the operation (as for Map) or if it otherwise would +// pose security concerns (as for Remove). +// - An alternative would be to return an error on ill-formed UTF-8, but this +// would be inconsistent with other operations. + +// If returns a transformer that applies tIn to consecutive runes for which +// s.Contains(r) and tNotIn to consecutive runes for which !s.Contains(r). Reset +// is called on tIn and tNotIn at the start of each run. A Nop transformer will +// substitute a nil value passed to tIn or tNotIn. Invalid UTF-8 is translated +// to RuneError to determine which transformer to apply, but is passed as is to +// the respective transformer. 
+func If(s Set, tIn, tNotIn transform.Transformer) Transformer { + if tIn == nil && tNotIn == nil { + return Transformer{transform.Nop} + } + if tIn == nil { + tIn = transform.Nop + } + if tNotIn == nil { + tNotIn = transform.Nop + } + sIn, ok := tIn.(transform.SpanningTransformer) + if !ok { + sIn = dummySpan{tIn} + } + sNotIn, ok := tNotIn.(transform.SpanningTransformer) + if !ok { + sNotIn = dummySpan{tNotIn} + } + + a := &cond{ + tIn: sIn, + tNotIn: sNotIn, + f: s.Contains, + } + a.Reset() + return Transformer{a} +} + +type dummySpan struct{ transform.Transformer } + +func (d dummySpan) Span(src []byte, atEOF bool) (n int, err error) { + return 0, transform.ErrEndOfSpan +} + +type cond struct { + tIn, tNotIn transform.SpanningTransformer + f func(rune) bool + check func(rune) bool // current check to perform + t transform.SpanningTransformer // current transformer to use +} + +// Reset implements transform.Transformer. +func (t *cond) Reset() { + t.check = t.is + t.t = t.tIn + t.t.Reset() // notIn will be reset on first usage. +} + +func (t *cond) is(r rune) bool { + if t.f(r) { + return true + } + t.check = t.isNot + t.t = t.tNotIn + t.tNotIn.Reset() + return false +} + +func (t *cond) isNot(r rune) bool { + if !t.f(r) { + return true + } + t.check = t.is + t.t = t.tIn + t.tIn.Reset() + return false +} + +// This implementation of Span doesn't help all too much, but it needs to be +// there to satisfy this package's Transformer interface. +// TODO: there are certainly room for improvements, though. For example, if +// t.t == transform.Nop (which will a common occurrence) it will save a bundle +// to special-case that loop. +func (t *cond) Span(src []byte, atEOF bool) (n int, err error) { + p := 0 + for n < len(src) && err == nil { + // Don't process too much at a time as the Spanner that will be + // called on this block may terminate early. + const maxChunk = 4096 + max := len(src) + if v := n + maxChunk; v < max { + max = v + } + atEnd := false + size := 0 + current := t.t + for ; p < max; p += size { + r := rune(src[p]) + if r < utf8.RuneSelf { + size = 1 + } else if r, size = utf8.DecodeRune(src[p:]); size == 1 { + if !atEOF && !utf8.FullRune(src[p:]) { + err = transform.ErrShortSrc + break + } + } + if !t.check(r) { + // The next rune will be the start of a new run. + atEnd = true + break + } + } + n2, err2 := current.Span(src[n:p], atEnd || (atEOF && p == len(src))) + n += n2 + if err2 != nil { + return n, err2 + } + // At this point either err != nil or t.check will pass for the rune at p. + p = n + size + } + return n, err +} + +func (t *cond) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + p := 0 + for nSrc < len(src) && err == nil { + // Don't process too much at a time, as the work might be wasted if the + // destination buffer isn't large enough to hold the result or a + // transform returns an error early. + const maxChunk = 4096 + max := len(src) + if n := nSrc + maxChunk; n < len(src) { + max = n + } + atEnd := false + size := 0 + current := t.t + for ; p < max; p += size { + r := rune(src[p]) + if r < utf8.RuneSelf { + size = 1 + } else if r, size = utf8.DecodeRune(src[p:]); size == 1 { + if !atEOF && !utf8.FullRune(src[p:]) { + err = transform.ErrShortSrc + break + } + } + if !t.check(r) { + // The next rune will be the start of a new run. 
+ atEnd = true + break + } + } + nDst2, nSrc2, err2 := current.Transform(dst[nDst:], src[nSrc:p], atEnd || (atEOF && p == len(src))) + nDst += nDst2 + nSrc += nSrc2 + if err2 != nil { + return nDst, nSrc, err2 + } + // At this point either err != nil or t.check will pass for the rune at p. + p = nSrc + size + } + return nDst, nSrc, err +} diff --git a/tools/vendor/golang.org/x/text/runes/runes.go b/tools/vendor/golang.org/x/text/runes/runes.go new file mode 100644 index 0000000000..930e87fedb --- /dev/null +++ b/tools/vendor/golang.org/x/text/runes/runes.go @@ -0,0 +1,355 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package runes provide transforms for UTF-8 encoded text. +package runes // import "golang.org/x/text/runes" + +import ( + "unicode" + "unicode/utf8" + + "golang.org/x/text/transform" +) + +// A Set is a collection of runes. +type Set interface { + // Contains returns true if r is contained in the set. + Contains(r rune) bool +} + +type setFunc func(rune) bool + +func (s setFunc) Contains(r rune) bool { + return s(r) +} + +// Note: using funcs here instead of wrapping types result in cleaner +// documentation and a smaller API. + +// In creates a Set with a Contains method that returns true for all runes in +// the given RangeTable. +func In(rt *unicode.RangeTable) Set { + return setFunc(func(r rune) bool { return unicode.Is(rt, r) }) +} + +// NotIn creates a Set with a Contains method that returns true for all runes not +// in the given RangeTable. +func NotIn(rt *unicode.RangeTable) Set { + return setFunc(func(r rune) bool { return !unicode.Is(rt, r) }) +} + +// Predicate creates a Set with a Contains method that returns f(r). +func Predicate(f func(rune) bool) Set { + return setFunc(f) +} + +// Transformer implements the transform.Transformer interface. +type Transformer struct { + t transform.SpanningTransformer +} + +func (t Transformer) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + return t.t.Transform(dst, src, atEOF) +} + +func (t Transformer) Span(b []byte, atEOF bool) (n int, err error) { + return t.t.Span(b, atEOF) +} + +func (t Transformer) Reset() { t.t.Reset() } + +// Bytes returns a new byte slice with the result of converting b using t. It +// calls Reset on t. It returns nil if any error was found. This can only happen +// if an error-producing Transformer is passed to If. +func (t Transformer) Bytes(b []byte) []byte { + b, _, err := transform.Bytes(t, b) + if err != nil { + return nil + } + return b +} + +// String returns a string with the result of converting s using t. It calls +// Reset on t. It returns the empty string if any error was found. This can only +// happen if an error-producing Transformer is passed to If. +func (t Transformer) String(s string) string { + s, _, err := transform.String(t, s) + if err != nil { + return "" + } + return s +} + +// TODO: +// - Copy: copying strings and bytes in whole-rune units. +// - Validation (maybe) +// - Well-formed-ness (maybe) + +const runeErrorString = string(utf8.RuneError) + +// Remove returns a Transformer that removes runes r for which s.Contains(r). +// Illegal input bytes are replaced by RuneError before being passed to f. +func Remove(s Set) Transformer { + if f, ok := s.(setFunc); ok { + // This little trick cuts the running time of BenchmarkRemove for sets + // created by Predicate roughly in half. + // TODO: special-case RangeTables as well. 
+ return Transformer{remove(f)} + } + return Transformer{remove(s.Contains)} +} + +// TODO: remove transform.RemoveFunc. + +type remove func(r rune) bool + +func (remove) Reset() {} + +// Span implements transform.Spanner. +func (t remove) Span(src []byte, atEOF bool) (n int, err error) { + for r, size := rune(0), 0; n < len(src); { + if r = rune(src[n]); r < utf8.RuneSelf { + size = 1 + } else if r, size = utf8.DecodeRune(src[n:]); size == 1 { + // Invalid rune. + if !atEOF && !utf8.FullRune(src[n:]) { + err = transform.ErrShortSrc + } else { + err = transform.ErrEndOfSpan + } + break + } + if t(r) { + err = transform.ErrEndOfSpan + break + } + n += size + } + return +} + +// Transform implements transform.Transformer. +func (t remove) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + for r, size := rune(0), 0; nSrc < len(src); { + if r = rune(src[nSrc]); r < utf8.RuneSelf { + size = 1 + } else if r, size = utf8.DecodeRune(src[nSrc:]); size == 1 { + // Invalid rune. + if !atEOF && !utf8.FullRune(src[nSrc:]) { + err = transform.ErrShortSrc + break + } + // We replace illegal bytes with RuneError. Not doing so might + // otherwise turn a sequence of invalid UTF-8 into valid UTF-8. + // The resulting byte sequence may subsequently contain runes + // for which t(r) is true that were passed unnoticed. + if !t(utf8.RuneError) { + if nDst+3 > len(dst) { + err = transform.ErrShortDst + break + } + dst[nDst+0] = runeErrorString[0] + dst[nDst+1] = runeErrorString[1] + dst[nDst+2] = runeErrorString[2] + nDst += 3 + } + nSrc++ + continue + } + if t(r) { + nSrc += size + continue + } + if nDst+size > len(dst) { + err = transform.ErrShortDst + break + } + for i := 0; i < size; i++ { + dst[nDst] = src[nSrc] + nDst++ + nSrc++ + } + } + return +} + +// Map returns a Transformer that maps the runes in the input using the given +// mapping. Illegal bytes in the input are converted to utf8.RuneError before +// being passed to the mapping func. +func Map(mapping func(rune) rune) Transformer { + return Transformer{mapper(mapping)} +} + +type mapper func(rune) rune + +func (mapper) Reset() {} + +// Span implements transform.Spanner. +func (t mapper) Span(src []byte, atEOF bool) (n int, err error) { + for r, size := rune(0), 0; n < len(src); n += size { + if r = rune(src[n]); r < utf8.RuneSelf { + size = 1 + } else if r, size = utf8.DecodeRune(src[n:]); size == 1 { + // Invalid rune. + if !atEOF && !utf8.FullRune(src[n:]) { + err = transform.ErrShortSrc + } else { + err = transform.ErrEndOfSpan + } + break + } + if t(r) != r { + err = transform.ErrEndOfSpan + break + } + } + return n, err +} + +// Transform implements transform.Transformer. +func (t mapper) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + var replacement rune + var b [utf8.UTFMax]byte + + for r, size := rune(0), 0; nSrc < len(src); { + if r = rune(src[nSrc]); r < utf8.RuneSelf { + if replacement = t(r); replacement < utf8.RuneSelf { + if nDst == len(dst) { + err = transform.ErrShortDst + break + } + dst[nDst] = byte(replacement) + nDst++ + nSrc++ + continue + } + size = 1 + } else if r, size = utf8.DecodeRune(src[nSrc:]); size == 1 { + // Invalid rune. 
+ if !atEOF && !utf8.FullRune(src[nSrc:]) { + err = transform.ErrShortSrc + break + } + + if replacement = t(utf8.RuneError); replacement == utf8.RuneError { + if nDst+3 > len(dst) { + err = transform.ErrShortDst + break + } + dst[nDst+0] = runeErrorString[0] + dst[nDst+1] = runeErrorString[1] + dst[nDst+2] = runeErrorString[2] + nDst += 3 + nSrc++ + continue + } + } else if replacement = t(r); replacement == r { + if nDst+size > len(dst) { + err = transform.ErrShortDst + break + } + for i := 0; i < size; i++ { + dst[nDst] = src[nSrc] + nDst++ + nSrc++ + } + continue + } + + n := utf8.EncodeRune(b[:], replacement) + + if nDst+n > len(dst) { + err = transform.ErrShortDst + break + } + for i := 0; i < n; i++ { + dst[nDst] = b[i] + nDst++ + } + nSrc += size + } + return +} + +// ReplaceIllFormed returns a transformer that replaces all input bytes that are +// not part of a well-formed UTF-8 code sequence with utf8.RuneError. +func ReplaceIllFormed() Transformer { + return Transformer{&replaceIllFormed{}} +} + +type replaceIllFormed struct{ transform.NopResetter } + +func (t replaceIllFormed) Span(src []byte, atEOF bool) (n int, err error) { + for n < len(src) { + // ASCII fast path. + if src[n] < utf8.RuneSelf { + n++ + continue + } + + r, size := utf8.DecodeRune(src[n:]) + + // Look for a valid non-ASCII rune. + if r != utf8.RuneError || size != 1 { + n += size + continue + } + + // Look for short source data. + if !atEOF && !utf8.FullRune(src[n:]) { + err = transform.ErrShortSrc + break + } + + // We have an invalid rune. + err = transform.ErrEndOfSpan + break + } + return n, err +} + +func (t replaceIllFormed) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + for nSrc < len(src) { + // ASCII fast path. + if r := src[nSrc]; r < utf8.RuneSelf { + if nDst == len(dst) { + err = transform.ErrShortDst + break + } + dst[nDst] = r + nDst++ + nSrc++ + continue + } + + // Look for a valid non-ASCII rune. + if _, size := utf8.DecodeRune(src[nSrc:]); size != 1 { + if size != copy(dst[nDst:], src[nSrc:nSrc+size]) { + err = transform.ErrShortDst + break + } + nDst += size + nSrc += size + continue + } + + // Look for short source data. + if !atEOF && !utf8.FullRune(src[nSrc:]) { + err = transform.ErrShortSrc + break + } + + // We have an invalid rune. + if nDst+3 > len(dst) { + err = transform.ErrShortDst + break + } + dst[nDst+0] = runeErrorString[0] + dst[nDst+1] = runeErrorString[1] + dst[nDst+2] = runeErrorString[2] + nDst += 3 + nSrc++ + } + return nDst, nSrc, err +} diff --git a/tools/vendor/golang.org/x/text/unicode/norm/forminfo.go b/tools/vendor/golang.org/x/text/unicode/norm/forminfo.go index d69ccb4f97..487335d14d 100644 --- a/tools/vendor/golang.org/x/text/unicode/norm/forminfo.go +++ b/tools/vendor/golang.org/x/text/unicode/norm/forminfo.go @@ -13,7 +13,7 @@ import "encoding/binary" // a rune to a uint16. The values take two forms. For v >= 0x8000: // bits // 15: 1 (inverse of NFD_QC bit of qcInfo) -// 13..7: qcInfo (see below). isYesD is always true (no decompostion). +// 13..7: qcInfo (see below). isYesD is always true (no decomposition). // 6..0: ccc (compressed CCC value). 
// For v < 0x8000, the respective rune has a decomposition and v is an index // into a byte array of UTF-8 decomposition sequences and additional info and diff --git a/tools/vendor/golang.org/x/text/unicode/norm/tables13.0.0.go b/tools/vendor/golang.org/x/text/unicode/norm/tables13.0.0.go index 9115ef257e..f65785e8ac 100644 --- a/tools/vendor/golang.org/x/text/unicode/norm/tables13.0.0.go +++ b/tools/vendor/golang.org/x/text/unicode/norm/tables13.0.0.go @@ -1,7 +1,7 @@ // Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. -//go:build go1.16 -// +build go1.16 +//go:build go1.16 && !go1.21 +// +build go1.16,!go1.21 package norm diff --git a/tools/vendor/golang.org/x/text/unicode/norm/tables15.0.0.go b/tools/vendor/golang.org/x/text/unicode/norm/tables15.0.0.go new file mode 100644 index 0000000000..e1858b879d --- /dev/null +++ b/tools/vendor/golang.org/x/text/unicode/norm/tables15.0.0.go @@ -0,0 +1,7908 @@ +// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. + +//go:build go1.21 +// +build go1.21 + +package norm + +import "sync" + +const ( + // Version is the Unicode edition from which the tables are derived. + Version = "15.0.0" + + // MaxTransformChunkSize indicates the maximum number of bytes that Transform + // may need to write atomically for any Form. Making a destination buffer at + // least this size ensures that Transform can always make progress and that + // the user does not need to grow the buffer on an ErrShortDst. + MaxTransformChunkSize = 35 + maxNonStarters*4 +) + +var ccc = [56]uint8{ + 0, 1, 6, 7, 8, 9, 10, 11, + 12, 13, 14, 15, 16, 17, 18, 19, + 20, 21, 22, 23, 24, 25, 26, 27, + 28, 29, 30, 31, 32, 33, 34, 35, + 36, 84, 91, 103, 107, 118, 122, 129, + 130, 132, 202, 214, 216, 218, 220, 222, + 224, 226, 228, 230, 232, 233, 234, 240, +} + +const ( + firstMulti = 0x199A + firstCCC = 0x2DD5 + endMulti = 0x30A1 + firstLeadingCCC = 0x4AEF + firstCCCZeroExcept = 0x4BB9 + firstStarterWithNLead = 0x4BE0 + lastDecomp = 0x4BE2 + maxDecomp = 0x8000 +) + +// decomps: 19426 bytes +var decomps = [...]byte{ + // Bytes 0 - 3f + 0x00, 0x41, 0x20, 0x41, 0x21, 0x41, 0x22, 0x41, + 0x23, 0x41, 0x24, 0x41, 0x25, 0x41, 0x26, 0x41, + 0x27, 0x41, 0x28, 0x41, 0x29, 0x41, 0x2A, 0x41, + 0x2B, 0x41, 0x2C, 0x41, 0x2D, 0x41, 0x2E, 0x41, + 0x2F, 0x41, 0x30, 0x41, 0x31, 0x41, 0x32, 0x41, + 0x33, 0x41, 0x34, 0x41, 0x35, 0x41, 0x36, 0x41, + 0x37, 0x41, 0x38, 0x41, 0x39, 0x41, 0x3A, 0x41, + 0x3B, 0x41, 0x3C, 0x41, 0x3D, 0x41, 0x3E, 0x41, + // Bytes 40 - 7f + 0x3F, 0x41, 0x40, 0x41, 0x41, 0x41, 0x42, 0x41, + 0x43, 0x41, 0x44, 0x41, 0x45, 0x41, 0x46, 0x41, + 0x47, 0x41, 0x48, 0x41, 0x49, 0x41, 0x4A, 0x41, + 0x4B, 0x41, 0x4C, 0x41, 0x4D, 0x41, 0x4E, 0x41, + 0x4F, 0x41, 0x50, 0x41, 0x51, 0x41, 0x52, 0x41, + 0x53, 0x41, 0x54, 0x41, 0x55, 0x41, 0x56, 0x41, + 0x57, 0x41, 0x58, 0x41, 0x59, 0x41, 0x5A, 0x41, + 0x5B, 0x41, 0x5C, 0x41, 0x5D, 0x41, 0x5E, 0x41, + // Bytes 80 - bf + 0x5F, 0x41, 0x60, 0x41, 0x61, 0x41, 0x62, 0x41, + 0x63, 0x41, 0x64, 0x41, 0x65, 0x41, 0x66, 0x41, + 0x67, 0x41, 0x68, 0x41, 0x69, 0x41, 0x6A, 0x41, + 0x6B, 0x41, 0x6C, 0x41, 0x6D, 0x41, 0x6E, 0x41, + 0x6F, 0x41, 0x70, 0x41, 0x71, 0x41, 0x72, 0x41, + 0x73, 0x41, 0x74, 0x41, 0x75, 0x41, 0x76, 0x41, + 0x77, 0x41, 0x78, 0x41, 0x79, 0x41, 0x7A, 0x41, + 0x7B, 0x41, 0x7C, 0x41, 0x7D, 0x41, 0x7E, 0x42, + // Bytes c0 - ff + 0xC2, 0xA2, 0x42, 0xC2, 0xA3, 0x42, 0xC2, 0xA5, + 0x42, 0xC2, 0xA6, 0x42, 0xC2, 0xAC, 0x42, 0xC2, + 0xB7, 0x42, 0xC3, 0x86, 0x42, 0xC3, 0xA6, 0x42, + 0xC3, 0xB0, 0x42, 0xC3, 0xB8, 
0x42, 0xC4, 0xA6, + 0x42, 0xC4, 0xA7, 0x42, 0xC4, 0xB1, 0x42, 0xC5, + 0x8B, 0x42, 0xC5, 0x93, 0x42, 0xC6, 0x8E, 0x42, + 0xC6, 0x90, 0x42, 0xC6, 0xAB, 0x42, 0xC7, 0x80, + 0x42, 0xC7, 0x81, 0x42, 0xC7, 0x82, 0x42, 0xC8, + // Bytes 100 - 13f + 0xA2, 0x42, 0xC8, 0xB7, 0x42, 0xC9, 0x90, 0x42, + 0xC9, 0x91, 0x42, 0xC9, 0x92, 0x42, 0xC9, 0x93, + 0x42, 0xC9, 0x94, 0x42, 0xC9, 0x95, 0x42, 0xC9, + 0x96, 0x42, 0xC9, 0x97, 0x42, 0xC9, 0x98, 0x42, + 0xC9, 0x99, 0x42, 0xC9, 0x9B, 0x42, 0xC9, 0x9C, + 0x42, 0xC9, 0x9E, 0x42, 0xC9, 0x9F, 0x42, 0xC9, + 0xA0, 0x42, 0xC9, 0xA1, 0x42, 0xC9, 0xA2, 0x42, + 0xC9, 0xA3, 0x42, 0xC9, 0xA4, 0x42, 0xC9, 0xA5, + // Bytes 140 - 17f + 0x42, 0xC9, 0xA6, 0x42, 0xC9, 0xA7, 0x42, 0xC9, + 0xA8, 0x42, 0xC9, 0xA9, 0x42, 0xC9, 0xAA, 0x42, + 0xC9, 0xAB, 0x42, 0xC9, 0xAC, 0x42, 0xC9, 0xAD, + 0x42, 0xC9, 0xAE, 0x42, 0xC9, 0xAF, 0x42, 0xC9, + 0xB0, 0x42, 0xC9, 0xB1, 0x42, 0xC9, 0xB2, 0x42, + 0xC9, 0xB3, 0x42, 0xC9, 0xB4, 0x42, 0xC9, 0xB5, + 0x42, 0xC9, 0xB6, 0x42, 0xC9, 0xB7, 0x42, 0xC9, + 0xB8, 0x42, 0xC9, 0xB9, 0x42, 0xC9, 0xBA, 0x42, + // Bytes 180 - 1bf + 0xC9, 0xBB, 0x42, 0xC9, 0xBD, 0x42, 0xC9, 0xBE, + 0x42, 0xCA, 0x80, 0x42, 0xCA, 0x81, 0x42, 0xCA, + 0x82, 0x42, 0xCA, 0x83, 0x42, 0xCA, 0x84, 0x42, + 0xCA, 0x88, 0x42, 0xCA, 0x89, 0x42, 0xCA, 0x8A, + 0x42, 0xCA, 0x8B, 0x42, 0xCA, 0x8C, 0x42, 0xCA, + 0x8D, 0x42, 0xCA, 0x8E, 0x42, 0xCA, 0x8F, 0x42, + 0xCA, 0x90, 0x42, 0xCA, 0x91, 0x42, 0xCA, 0x92, + 0x42, 0xCA, 0x95, 0x42, 0xCA, 0x98, 0x42, 0xCA, + // Bytes 1c0 - 1ff + 0x99, 0x42, 0xCA, 0x9B, 0x42, 0xCA, 0x9C, 0x42, + 0xCA, 0x9D, 0x42, 0xCA, 0x9F, 0x42, 0xCA, 0xA1, + 0x42, 0xCA, 0xA2, 0x42, 0xCA, 0xA3, 0x42, 0xCA, + 0xA4, 0x42, 0xCA, 0xA5, 0x42, 0xCA, 0xA6, 0x42, + 0xCA, 0xA7, 0x42, 0xCA, 0xA8, 0x42, 0xCA, 0xA9, + 0x42, 0xCA, 0xAA, 0x42, 0xCA, 0xAB, 0x42, 0xCA, + 0xB9, 0x42, 0xCB, 0x90, 0x42, 0xCB, 0x91, 0x42, + 0xCE, 0x91, 0x42, 0xCE, 0x92, 0x42, 0xCE, 0x93, + // Bytes 200 - 23f + 0x42, 0xCE, 0x94, 0x42, 0xCE, 0x95, 0x42, 0xCE, + 0x96, 0x42, 0xCE, 0x97, 0x42, 0xCE, 0x98, 0x42, + 0xCE, 0x99, 0x42, 0xCE, 0x9A, 0x42, 0xCE, 0x9B, + 0x42, 0xCE, 0x9C, 0x42, 0xCE, 0x9D, 0x42, 0xCE, + 0x9E, 0x42, 0xCE, 0x9F, 0x42, 0xCE, 0xA0, 0x42, + 0xCE, 0xA1, 0x42, 0xCE, 0xA3, 0x42, 0xCE, 0xA4, + 0x42, 0xCE, 0xA5, 0x42, 0xCE, 0xA6, 0x42, 0xCE, + 0xA7, 0x42, 0xCE, 0xA8, 0x42, 0xCE, 0xA9, 0x42, + // Bytes 240 - 27f + 0xCE, 0xB1, 0x42, 0xCE, 0xB2, 0x42, 0xCE, 0xB3, + 0x42, 0xCE, 0xB4, 0x42, 0xCE, 0xB5, 0x42, 0xCE, + 0xB6, 0x42, 0xCE, 0xB7, 0x42, 0xCE, 0xB8, 0x42, + 0xCE, 0xB9, 0x42, 0xCE, 0xBA, 0x42, 0xCE, 0xBB, + 0x42, 0xCE, 0xBC, 0x42, 0xCE, 0xBD, 0x42, 0xCE, + 0xBE, 0x42, 0xCE, 0xBF, 0x42, 0xCF, 0x80, 0x42, + 0xCF, 0x81, 0x42, 0xCF, 0x82, 0x42, 0xCF, 0x83, + 0x42, 0xCF, 0x84, 0x42, 0xCF, 0x85, 0x42, 0xCF, + // Bytes 280 - 2bf + 0x86, 0x42, 0xCF, 0x87, 0x42, 0xCF, 0x88, 0x42, + 0xCF, 0x89, 0x42, 0xCF, 0x9C, 0x42, 0xCF, 0x9D, + 0x42, 0xD0, 0xB0, 0x42, 0xD0, 0xB1, 0x42, 0xD0, + 0xB2, 0x42, 0xD0, 0xB3, 0x42, 0xD0, 0xB4, 0x42, + 0xD0, 0xB5, 0x42, 0xD0, 0xB6, 0x42, 0xD0, 0xB7, + 0x42, 0xD0, 0xB8, 0x42, 0xD0, 0xBA, 0x42, 0xD0, + 0xBB, 0x42, 0xD0, 0xBC, 0x42, 0xD0, 0xBD, 0x42, + 0xD0, 0xBE, 0x42, 0xD0, 0xBF, 0x42, 0xD1, 0x80, + // Bytes 2c0 - 2ff + 0x42, 0xD1, 0x81, 0x42, 0xD1, 0x82, 0x42, 0xD1, + 0x83, 0x42, 0xD1, 0x84, 0x42, 0xD1, 0x85, 0x42, + 0xD1, 0x86, 0x42, 0xD1, 0x87, 0x42, 0xD1, 0x88, + 0x42, 0xD1, 0x8A, 0x42, 0xD1, 0x8B, 0x42, 0xD1, + 0x8C, 0x42, 0xD1, 0x8D, 0x42, 0xD1, 0x8E, 0x42, + 0xD1, 0x95, 0x42, 0xD1, 0x96, 0x42, 0xD1, 0x98, + 0x42, 0xD1, 0x9F, 0x42, 0xD2, 0x91, 0x42, 0xD2, + 0xAB, 0x42, 
0xD2, 0xAF, 0x42, 0xD2, 0xB1, 0x42, + // Bytes 300 - 33f + 0xD3, 0x8F, 0x42, 0xD3, 0x99, 0x42, 0xD3, 0xA9, + 0x42, 0xD7, 0x90, 0x42, 0xD7, 0x91, 0x42, 0xD7, + 0x92, 0x42, 0xD7, 0x93, 0x42, 0xD7, 0x94, 0x42, + 0xD7, 0x9B, 0x42, 0xD7, 0x9C, 0x42, 0xD7, 0x9D, + 0x42, 0xD7, 0xA2, 0x42, 0xD7, 0xA8, 0x42, 0xD7, + 0xAA, 0x42, 0xD8, 0xA1, 0x42, 0xD8, 0xA7, 0x42, + 0xD8, 0xA8, 0x42, 0xD8, 0xA9, 0x42, 0xD8, 0xAA, + 0x42, 0xD8, 0xAB, 0x42, 0xD8, 0xAC, 0x42, 0xD8, + // Bytes 340 - 37f + 0xAD, 0x42, 0xD8, 0xAE, 0x42, 0xD8, 0xAF, 0x42, + 0xD8, 0xB0, 0x42, 0xD8, 0xB1, 0x42, 0xD8, 0xB2, + 0x42, 0xD8, 0xB3, 0x42, 0xD8, 0xB4, 0x42, 0xD8, + 0xB5, 0x42, 0xD8, 0xB6, 0x42, 0xD8, 0xB7, 0x42, + 0xD8, 0xB8, 0x42, 0xD8, 0xB9, 0x42, 0xD8, 0xBA, + 0x42, 0xD9, 0x81, 0x42, 0xD9, 0x82, 0x42, 0xD9, + 0x83, 0x42, 0xD9, 0x84, 0x42, 0xD9, 0x85, 0x42, + 0xD9, 0x86, 0x42, 0xD9, 0x87, 0x42, 0xD9, 0x88, + // Bytes 380 - 3bf + 0x42, 0xD9, 0x89, 0x42, 0xD9, 0x8A, 0x42, 0xD9, + 0xAE, 0x42, 0xD9, 0xAF, 0x42, 0xD9, 0xB1, 0x42, + 0xD9, 0xB9, 0x42, 0xD9, 0xBA, 0x42, 0xD9, 0xBB, + 0x42, 0xD9, 0xBE, 0x42, 0xD9, 0xBF, 0x42, 0xDA, + 0x80, 0x42, 0xDA, 0x83, 0x42, 0xDA, 0x84, 0x42, + 0xDA, 0x86, 0x42, 0xDA, 0x87, 0x42, 0xDA, 0x88, + 0x42, 0xDA, 0x8C, 0x42, 0xDA, 0x8D, 0x42, 0xDA, + 0x8E, 0x42, 0xDA, 0x91, 0x42, 0xDA, 0x98, 0x42, + // Bytes 3c0 - 3ff + 0xDA, 0xA1, 0x42, 0xDA, 0xA4, 0x42, 0xDA, 0xA6, + 0x42, 0xDA, 0xA9, 0x42, 0xDA, 0xAD, 0x42, 0xDA, + 0xAF, 0x42, 0xDA, 0xB1, 0x42, 0xDA, 0xB3, 0x42, + 0xDA, 0xBA, 0x42, 0xDA, 0xBB, 0x42, 0xDA, 0xBE, + 0x42, 0xDB, 0x81, 0x42, 0xDB, 0x85, 0x42, 0xDB, + 0x86, 0x42, 0xDB, 0x87, 0x42, 0xDB, 0x88, 0x42, + 0xDB, 0x89, 0x42, 0xDB, 0x8B, 0x42, 0xDB, 0x8C, + 0x42, 0xDB, 0x90, 0x42, 0xDB, 0x92, 0x43, 0xE0, + // Bytes 400 - 43f + 0xBC, 0x8B, 0x43, 0xE1, 0x83, 0x9C, 0x43, 0xE1, + 0x84, 0x80, 0x43, 0xE1, 0x84, 0x81, 0x43, 0xE1, + 0x84, 0x82, 0x43, 0xE1, 0x84, 0x83, 0x43, 0xE1, + 0x84, 0x84, 0x43, 0xE1, 0x84, 0x85, 0x43, 0xE1, + 0x84, 0x86, 0x43, 0xE1, 0x84, 0x87, 0x43, 0xE1, + 0x84, 0x88, 0x43, 0xE1, 0x84, 0x89, 0x43, 0xE1, + 0x84, 0x8A, 0x43, 0xE1, 0x84, 0x8B, 0x43, 0xE1, + 0x84, 0x8C, 0x43, 0xE1, 0x84, 0x8D, 0x43, 0xE1, + // Bytes 440 - 47f + 0x84, 0x8E, 0x43, 0xE1, 0x84, 0x8F, 0x43, 0xE1, + 0x84, 0x90, 0x43, 0xE1, 0x84, 0x91, 0x43, 0xE1, + 0x84, 0x92, 0x43, 0xE1, 0x84, 0x94, 0x43, 0xE1, + 0x84, 0x95, 0x43, 0xE1, 0x84, 0x9A, 0x43, 0xE1, + 0x84, 0x9C, 0x43, 0xE1, 0x84, 0x9D, 0x43, 0xE1, + 0x84, 0x9E, 0x43, 0xE1, 0x84, 0xA0, 0x43, 0xE1, + 0x84, 0xA1, 0x43, 0xE1, 0x84, 0xA2, 0x43, 0xE1, + 0x84, 0xA3, 0x43, 0xE1, 0x84, 0xA7, 0x43, 0xE1, + // Bytes 480 - 4bf + 0x84, 0xA9, 0x43, 0xE1, 0x84, 0xAB, 0x43, 0xE1, + 0x84, 0xAC, 0x43, 0xE1, 0x84, 0xAD, 0x43, 0xE1, + 0x84, 0xAE, 0x43, 0xE1, 0x84, 0xAF, 0x43, 0xE1, + 0x84, 0xB2, 0x43, 0xE1, 0x84, 0xB6, 0x43, 0xE1, + 0x85, 0x80, 0x43, 0xE1, 0x85, 0x87, 0x43, 0xE1, + 0x85, 0x8C, 0x43, 0xE1, 0x85, 0x97, 0x43, 0xE1, + 0x85, 0x98, 0x43, 0xE1, 0x85, 0x99, 0x43, 0xE1, + 0x85, 0xA0, 0x43, 0xE1, 0x86, 0x84, 0x43, 0xE1, + // Bytes 4c0 - 4ff + 0x86, 0x85, 0x43, 0xE1, 0x86, 0x88, 0x43, 0xE1, + 0x86, 0x91, 0x43, 0xE1, 0x86, 0x92, 0x43, 0xE1, + 0x86, 0x94, 0x43, 0xE1, 0x86, 0x9E, 0x43, 0xE1, + 0x86, 0xA1, 0x43, 0xE1, 0x87, 0x87, 0x43, 0xE1, + 0x87, 0x88, 0x43, 0xE1, 0x87, 0x8C, 0x43, 0xE1, + 0x87, 0x8E, 0x43, 0xE1, 0x87, 0x93, 0x43, 0xE1, + 0x87, 0x97, 0x43, 0xE1, 0x87, 0x99, 0x43, 0xE1, + 0x87, 0x9D, 0x43, 0xE1, 0x87, 0x9F, 0x43, 0xE1, + // Bytes 500 - 53f + 0x87, 0xB1, 0x43, 0xE1, 0x87, 0xB2, 0x43, 0xE1, + 0xB4, 0x82, 0x43, 0xE1, 0xB4, 0x96, 0x43, 0xE1, + 0xB4, 0x97, 0x43, 0xE1, 
0xB4, 0x9C, 0x43, 0xE1, + 0xB4, 0x9D, 0x43, 0xE1, 0xB4, 0xA5, 0x43, 0xE1, + 0xB5, 0xBB, 0x43, 0xE1, 0xB6, 0x85, 0x43, 0xE1, + 0xB6, 0x91, 0x43, 0xE2, 0x80, 0x82, 0x43, 0xE2, + 0x80, 0x83, 0x43, 0xE2, 0x80, 0x90, 0x43, 0xE2, + 0x80, 0x93, 0x43, 0xE2, 0x80, 0x94, 0x43, 0xE2, + // Bytes 540 - 57f + 0x82, 0xA9, 0x43, 0xE2, 0x86, 0x90, 0x43, 0xE2, + 0x86, 0x91, 0x43, 0xE2, 0x86, 0x92, 0x43, 0xE2, + 0x86, 0x93, 0x43, 0xE2, 0x88, 0x82, 0x43, 0xE2, + 0x88, 0x87, 0x43, 0xE2, 0x88, 0x91, 0x43, 0xE2, + 0x88, 0x92, 0x43, 0xE2, 0x94, 0x82, 0x43, 0xE2, + 0x96, 0xA0, 0x43, 0xE2, 0x97, 0x8B, 0x43, 0xE2, + 0xA6, 0x85, 0x43, 0xE2, 0xA6, 0x86, 0x43, 0xE2, + 0xB1, 0xB1, 0x43, 0xE2, 0xB5, 0xA1, 0x43, 0xE3, + // Bytes 580 - 5bf + 0x80, 0x81, 0x43, 0xE3, 0x80, 0x82, 0x43, 0xE3, + 0x80, 0x88, 0x43, 0xE3, 0x80, 0x89, 0x43, 0xE3, + 0x80, 0x8A, 0x43, 0xE3, 0x80, 0x8B, 0x43, 0xE3, + 0x80, 0x8C, 0x43, 0xE3, 0x80, 0x8D, 0x43, 0xE3, + 0x80, 0x8E, 0x43, 0xE3, 0x80, 0x8F, 0x43, 0xE3, + 0x80, 0x90, 0x43, 0xE3, 0x80, 0x91, 0x43, 0xE3, + 0x80, 0x92, 0x43, 0xE3, 0x80, 0x94, 0x43, 0xE3, + 0x80, 0x95, 0x43, 0xE3, 0x80, 0x96, 0x43, 0xE3, + // Bytes 5c0 - 5ff + 0x80, 0x97, 0x43, 0xE3, 0x82, 0xA1, 0x43, 0xE3, + 0x82, 0xA2, 0x43, 0xE3, 0x82, 0xA3, 0x43, 0xE3, + 0x82, 0xA4, 0x43, 0xE3, 0x82, 0xA5, 0x43, 0xE3, + 0x82, 0xA6, 0x43, 0xE3, 0x82, 0xA7, 0x43, 0xE3, + 0x82, 0xA8, 0x43, 0xE3, 0x82, 0xA9, 0x43, 0xE3, + 0x82, 0xAA, 0x43, 0xE3, 0x82, 0xAB, 0x43, 0xE3, + 0x82, 0xAD, 0x43, 0xE3, 0x82, 0xAF, 0x43, 0xE3, + 0x82, 0xB1, 0x43, 0xE3, 0x82, 0xB3, 0x43, 0xE3, + // Bytes 600 - 63f + 0x82, 0xB5, 0x43, 0xE3, 0x82, 0xB7, 0x43, 0xE3, + 0x82, 0xB9, 0x43, 0xE3, 0x82, 0xBB, 0x43, 0xE3, + 0x82, 0xBD, 0x43, 0xE3, 0x82, 0xBF, 0x43, 0xE3, + 0x83, 0x81, 0x43, 0xE3, 0x83, 0x83, 0x43, 0xE3, + 0x83, 0x84, 0x43, 0xE3, 0x83, 0x86, 0x43, 0xE3, + 0x83, 0x88, 0x43, 0xE3, 0x83, 0x8A, 0x43, 0xE3, + 0x83, 0x8B, 0x43, 0xE3, 0x83, 0x8C, 0x43, 0xE3, + 0x83, 0x8D, 0x43, 0xE3, 0x83, 0x8E, 0x43, 0xE3, + // Bytes 640 - 67f + 0x83, 0x8F, 0x43, 0xE3, 0x83, 0x92, 0x43, 0xE3, + 0x83, 0x95, 0x43, 0xE3, 0x83, 0x98, 0x43, 0xE3, + 0x83, 0x9B, 0x43, 0xE3, 0x83, 0x9E, 0x43, 0xE3, + 0x83, 0x9F, 0x43, 0xE3, 0x83, 0xA0, 0x43, 0xE3, + 0x83, 0xA1, 0x43, 0xE3, 0x83, 0xA2, 0x43, 0xE3, + 0x83, 0xA3, 0x43, 0xE3, 0x83, 0xA4, 0x43, 0xE3, + 0x83, 0xA5, 0x43, 0xE3, 0x83, 0xA6, 0x43, 0xE3, + 0x83, 0xA7, 0x43, 0xE3, 0x83, 0xA8, 0x43, 0xE3, + // Bytes 680 - 6bf + 0x83, 0xA9, 0x43, 0xE3, 0x83, 0xAA, 0x43, 0xE3, + 0x83, 0xAB, 0x43, 0xE3, 0x83, 0xAC, 0x43, 0xE3, + 0x83, 0xAD, 0x43, 0xE3, 0x83, 0xAF, 0x43, 0xE3, + 0x83, 0xB0, 0x43, 0xE3, 0x83, 0xB1, 0x43, 0xE3, + 0x83, 0xB2, 0x43, 0xE3, 0x83, 0xB3, 0x43, 0xE3, + 0x83, 0xBB, 0x43, 0xE3, 0x83, 0xBC, 0x43, 0xE3, + 0x92, 0x9E, 0x43, 0xE3, 0x92, 0xB9, 0x43, 0xE3, + 0x92, 0xBB, 0x43, 0xE3, 0x93, 0x9F, 0x43, 0xE3, + // Bytes 6c0 - 6ff + 0x94, 0x95, 0x43, 0xE3, 0x9B, 0xAE, 0x43, 0xE3, + 0x9B, 0xBC, 0x43, 0xE3, 0x9E, 0x81, 0x43, 0xE3, + 0xA0, 0xAF, 0x43, 0xE3, 0xA1, 0xA2, 0x43, 0xE3, + 0xA1, 0xBC, 0x43, 0xE3, 0xA3, 0x87, 0x43, 0xE3, + 0xA3, 0xA3, 0x43, 0xE3, 0xA4, 0x9C, 0x43, 0xE3, + 0xA4, 0xBA, 0x43, 0xE3, 0xA8, 0xAE, 0x43, 0xE3, + 0xA9, 0xAC, 0x43, 0xE3, 0xAB, 0xA4, 0x43, 0xE3, + 0xAC, 0x88, 0x43, 0xE3, 0xAC, 0x99, 0x43, 0xE3, + // Bytes 700 - 73f + 0xAD, 0x89, 0x43, 0xE3, 0xAE, 0x9D, 0x43, 0xE3, + 0xB0, 0x98, 0x43, 0xE3, 0xB1, 0x8E, 0x43, 0xE3, + 0xB4, 0xB3, 0x43, 0xE3, 0xB6, 0x96, 0x43, 0xE3, + 0xBA, 0xAC, 0x43, 0xE3, 0xBA, 0xB8, 0x43, 0xE3, + 0xBC, 0x9B, 0x43, 0xE3, 0xBF, 0xBC, 0x43, 0xE4, + 0x80, 0x88, 0x43, 0xE4, 0x80, 0x98, 0x43, 0xE4, + 0x80, 
0xB9, 0x43, 0xE4, 0x81, 0x86, 0x43, 0xE4, + 0x82, 0x96, 0x43, 0xE4, 0x83, 0xA3, 0x43, 0xE4, + // Bytes 740 - 77f + 0x84, 0xAF, 0x43, 0xE4, 0x88, 0x82, 0x43, 0xE4, + 0x88, 0xA7, 0x43, 0xE4, 0x8A, 0xA0, 0x43, 0xE4, + 0x8C, 0x81, 0x43, 0xE4, 0x8C, 0xB4, 0x43, 0xE4, + 0x8D, 0x99, 0x43, 0xE4, 0x8F, 0x95, 0x43, 0xE4, + 0x8F, 0x99, 0x43, 0xE4, 0x90, 0x8B, 0x43, 0xE4, + 0x91, 0xAB, 0x43, 0xE4, 0x94, 0xAB, 0x43, 0xE4, + 0x95, 0x9D, 0x43, 0xE4, 0x95, 0xA1, 0x43, 0xE4, + 0x95, 0xAB, 0x43, 0xE4, 0x97, 0x97, 0x43, 0xE4, + // Bytes 780 - 7bf + 0x97, 0xB9, 0x43, 0xE4, 0x98, 0xB5, 0x43, 0xE4, + 0x9A, 0xBE, 0x43, 0xE4, 0x9B, 0x87, 0x43, 0xE4, + 0xA6, 0x95, 0x43, 0xE4, 0xA7, 0xA6, 0x43, 0xE4, + 0xA9, 0xAE, 0x43, 0xE4, 0xA9, 0xB6, 0x43, 0xE4, + 0xAA, 0xB2, 0x43, 0xE4, 0xAC, 0xB3, 0x43, 0xE4, + 0xAF, 0x8E, 0x43, 0xE4, 0xB3, 0x8E, 0x43, 0xE4, + 0xB3, 0xAD, 0x43, 0xE4, 0xB3, 0xB8, 0x43, 0xE4, + 0xB5, 0x96, 0x43, 0xE4, 0xB8, 0x80, 0x43, 0xE4, + // Bytes 7c0 - 7ff + 0xB8, 0x81, 0x43, 0xE4, 0xB8, 0x83, 0x43, 0xE4, + 0xB8, 0x89, 0x43, 0xE4, 0xB8, 0x8A, 0x43, 0xE4, + 0xB8, 0x8B, 0x43, 0xE4, 0xB8, 0x8D, 0x43, 0xE4, + 0xB8, 0x99, 0x43, 0xE4, 0xB8, 0xA6, 0x43, 0xE4, + 0xB8, 0xA8, 0x43, 0xE4, 0xB8, 0xAD, 0x43, 0xE4, + 0xB8, 0xB2, 0x43, 0xE4, 0xB8, 0xB6, 0x43, 0xE4, + 0xB8, 0xB8, 0x43, 0xE4, 0xB8, 0xB9, 0x43, 0xE4, + 0xB8, 0xBD, 0x43, 0xE4, 0xB8, 0xBF, 0x43, 0xE4, + // Bytes 800 - 83f + 0xB9, 0x81, 0x43, 0xE4, 0xB9, 0x99, 0x43, 0xE4, + 0xB9, 0x9D, 0x43, 0xE4, 0xBA, 0x82, 0x43, 0xE4, + 0xBA, 0x85, 0x43, 0xE4, 0xBA, 0x86, 0x43, 0xE4, + 0xBA, 0x8C, 0x43, 0xE4, 0xBA, 0x94, 0x43, 0xE4, + 0xBA, 0xA0, 0x43, 0xE4, 0xBA, 0xA4, 0x43, 0xE4, + 0xBA, 0xAE, 0x43, 0xE4, 0xBA, 0xBA, 0x43, 0xE4, + 0xBB, 0x80, 0x43, 0xE4, 0xBB, 0x8C, 0x43, 0xE4, + 0xBB, 0xA4, 0x43, 0xE4, 0xBC, 0x81, 0x43, 0xE4, + // Bytes 840 - 87f + 0xBC, 0x91, 0x43, 0xE4, 0xBD, 0xA0, 0x43, 0xE4, + 0xBE, 0x80, 0x43, 0xE4, 0xBE, 0x86, 0x43, 0xE4, + 0xBE, 0x8B, 0x43, 0xE4, 0xBE, 0xAE, 0x43, 0xE4, + 0xBE, 0xBB, 0x43, 0xE4, 0xBE, 0xBF, 0x43, 0xE5, + 0x80, 0x82, 0x43, 0xE5, 0x80, 0xAB, 0x43, 0xE5, + 0x81, 0xBA, 0x43, 0xE5, 0x82, 0x99, 0x43, 0xE5, + 0x83, 0x8F, 0x43, 0xE5, 0x83, 0x9A, 0x43, 0xE5, + 0x83, 0xA7, 0x43, 0xE5, 0x84, 0xAA, 0x43, 0xE5, + // Bytes 880 - 8bf + 0x84, 0xBF, 0x43, 0xE5, 0x85, 0x80, 0x43, 0xE5, + 0x85, 0x85, 0x43, 0xE5, 0x85, 0x8D, 0x43, 0xE5, + 0x85, 0x94, 0x43, 0xE5, 0x85, 0xA4, 0x43, 0xE5, + 0x85, 0xA5, 0x43, 0xE5, 0x85, 0xA7, 0x43, 0xE5, + 0x85, 0xA8, 0x43, 0xE5, 0x85, 0xA9, 0x43, 0xE5, + 0x85, 0xAB, 0x43, 0xE5, 0x85, 0xAD, 0x43, 0xE5, + 0x85, 0xB7, 0x43, 0xE5, 0x86, 0x80, 0x43, 0xE5, + 0x86, 0x82, 0x43, 0xE5, 0x86, 0x8D, 0x43, 0xE5, + // Bytes 8c0 - 8ff + 0x86, 0x92, 0x43, 0xE5, 0x86, 0x95, 0x43, 0xE5, + 0x86, 0x96, 0x43, 0xE5, 0x86, 0x97, 0x43, 0xE5, + 0x86, 0x99, 0x43, 0xE5, 0x86, 0xA4, 0x43, 0xE5, + 0x86, 0xAB, 0x43, 0xE5, 0x86, 0xAC, 0x43, 0xE5, + 0x86, 0xB5, 0x43, 0xE5, 0x86, 0xB7, 0x43, 0xE5, + 0x87, 0x89, 0x43, 0xE5, 0x87, 0x8C, 0x43, 0xE5, + 0x87, 0x9C, 0x43, 0xE5, 0x87, 0x9E, 0x43, 0xE5, + 0x87, 0xA0, 0x43, 0xE5, 0x87, 0xB5, 0x43, 0xE5, + // Bytes 900 - 93f + 0x88, 0x80, 0x43, 0xE5, 0x88, 0x83, 0x43, 0xE5, + 0x88, 0x87, 0x43, 0xE5, 0x88, 0x97, 0x43, 0xE5, + 0x88, 0x9D, 0x43, 0xE5, 0x88, 0xA9, 0x43, 0xE5, + 0x88, 0xBA, 0x43, 0xE5, 0x88, 0xBB, 0x43, 0xE5, + 0x89, 0x86, 0x43, 0xE5, 0x89, 0x8D, 0x43, 0xE5, + 0x89, 0xB2, 0x43, 0xE5, 0x89, 0xB7, 0x43, 0xE5, + 0x8A, 0x89, 0x43, 0xE5, 0x8A, 0x9B, 0x43, 0xE5, + 0x8A, 0xA3, 0x43, 0xE5, 0x8A, 0xB3, 0x43, 0xE5, + // Bytes 940 - 97f + 0x8A, 0xB4, 0x43, 0xE5, 0x8B, 0x87, 0x43, 0xE5, + 0x8B, 0x89, 0x43, 
0xE5, 0x8B, 0x92, 0x43, 0xE5, + 0x8B, 0x9E, 0x43, 0xE5, 0x8B, 0xA4, 0x43, 0xE5, + 0x8B, 0xB5, 0x43, 0xE5, 0x8B, 0xB9, 0x43, 0xE5, + 0x8B, 0xBA, 0x43, 0xE5, 0x8C, 0x85, 0x43, 0xE5, + 0x8C, 0x86, 0x43, 0xE5, 0x8C, 0x95, 0x43, 0xE5, + 0x8C, 0x97, 0x43, 0xE5, 0x8C, 0x9A, 0x43, 0xE5, + 0x8C, 0xB8, 0x43, 0xE5, 0x8C, 0xBB, 0x43, 0xE5, + // Bytes 980 - 9bf + 0x8C, 0xBF, 0x43, 0xE5, 0x8D, 0x81, 0x43, 0xE5, + 0x8D, 0x84, 0x43, 0xE5, 0x8D, 0x85, 0x43, 0xE5, + 0x8D, 0x89, 0x43, 0xE5, 0x8D, 0x91, 0x43, 0xE5, + 0x8D, 0x94, 0x43, 0xE5, 0x8D, 0x9A, 0x43, 0xE5, + 0x8D, 0x9C, 0x43, 0xE5, 0x8D, 0xA9, 0x43, 0xE5, + 0x8D, 0xB0, 0x43, 0xE5, 0x8D, 0xB3, 0x43, 0xE5, + 0x8D, 0xB5, 0x43, 0xE5, 0x8D, 0xBD, 0x43, 0xE5, + 0x8D, 0xBF, 0x43, 0xE5, 0x8E, 0x82, 0x43, 0xE5, + // Bytes 9c0 - 9ff + 0x8E, 0xB6, 0x43, 0xE5, 0x8F, 0x83, 0x43, 0xE5, + 0x8F, 0x88, 0x43, 0xE5, 0x8F, 0x8A, 0x43, 0xE5, + 0x8F, 0x8C, 0x43, 0xE5, 0x8F, 0x9F, 0x43, 0xE5, + 0x8F, 0xA3, 0x43, 0xE5, 0x8F, 0xA5, 0x43, 0xE5, + 0x8F, 0xAB, 0x43, 0xE5, 0x8F, 0xAF, 0x43, 0xE5, + 0x8F, 0xB1, 0x43, 0xE5, 0x8F, 0xB3, 0x43, 0xE5, + 0x90, 0x86, 0x43, 0xE5, 0x90, 0x88, 0x43, 0xE5, + 0x90, 0x8D, 0x43, 0xE5, 0x90, 0x8F, 0x43, 0xE5, + // Bytes a00 - a3f + 0x90, 0x9D, 0x43, 0xE5, 0x90, 0xB8, 0x43, 0xE5, + 0x90, 0xB9, 0x43, 0xE5, 0x91, 0x82, 0x43, 0xE5, + 0x91, 0x88, 0x43, 0xE5, 0x91, 0xA8, 0x43, 0xE5, + 0x92, 0x9E, 0x43, 0xE5, 0x92, 0xA2, 0x43, 0xE5, + 0x92, 0xBD, 0x43, 0xE5, 0x93, 0xB6, 0x43, 0xE5, + 0x94, 0x90, 0x43, 0xE5, 0x95, 0x8F, 0x43, 0xE5, + 0x95, 0x93, 0x43, 0xE5, 0x95, 0x95, 0x43, 0xE5, + 0x95, 0xA3, 0x43, 0xE5, 0x96, 0x84, 0x43, 0xE5, + // Bytes a40 - a7f + 0x96, 0x87, 0x43, 0xE5, 0x96, 0x99, 0x43, 0xE5, + 0x96, 0x9D, 0x43, 0xE5, 0x96, 0xAB, 0x43, 0xE5, + 0x96, 0xB3, 0x43, 0xE5, 0x96, 0xB6, 0x43, 0xE5, + 0x97, 0x80, 0x43, 0xE5, 0x97, 0x82, 0x43, 0xE5, + 0x97, 0xA2, 0x43, 0xE5, 0x98, 0x86, 0x43, 0xE5, + 0x99, 0x91, 0x43, 0xE5, 0x99, 0xA8, 0x43, 0xE5, + 0x99, 0xB4, 0x43, 0xE5, 0x9B, 0x97, 0x43, 0xE5, + 0x9B, 0x9B, 0x43, 0xE5, 0x9B, 0xB9, 0x43, 0xE5, + // Bytes a80 - abf + 0x9C, 0x96, 0x43, 0xE5, 0x9C, 0x97, 0x43, 0xE5, + 0x9C, 0x9F, 0x43, 0xE5, 0x9C, 0xB0, 0x43, 0xE5, + 0x9E, 0x8B, 0x43, 0xE5, 0x9F, 0x8E, 0x43, 0xE5, + 0x9F, 0xB4, 0x43, 0xE5, 0xA0, 0x8D, 0x43, 0xE5, + 0xA0, 0xB1, 0x43, 0xE5, 0xA0, 0xB2, 0x43, 0xE5, + 0xA1, 0x80, 0x43, 0xE5, 0xA1, 0x9A, 0x43, 0xE5, + 0xA1, 0x9E, 0x43, 0xE5, 0xA2, 0xA8, 0x43, 0xE5, + 0xA2, 0xAC, 0x43, 0xE5, 0xA2, 0xB3, 0x43, 0xE5, + // Bytes ac0 - aff + 0xA3, 0x98, 0x43, 0xE5, 0xA3, 0x9F, 0x43, 0xE5, + 0xA3, 0xAB, 0x43, 0xE5, 0xA3, 0xAE, 0x43, 0xE5, + 0xA3, 0xB0, 0x43, 0xE5, 0xA3, 0xB2, 0x43, 0xE5, + 0xA3, 0xB7, 0x43, 0xE5, 0xA4, 0x82, 0x43, 0xE5, + 0xA4, 0x86, 0x43, 0xE5, 0xA4, 0x8A, 0x43, 0xE5, + 0xA4, 0x95, 0x43, 0xE5, 0xA4, 0x9A, 0x43, 0xE5, + 0xA4, 0x9C, 0x43, 0xE5, 0xA4, 0xA2, 0x43, 0xE5, + 0xA4, 0xA7, 0x43, 0xE5, 0xA4, 0xA9, 0x43, 0xE5, + // Bytes b00 - b3f + 0xA5, 0x84, 0x43, 0xE5, 0xA5, 0x88, 0x43, 0xE5, + 0xA5, 0x91, 0x43, 0xE5, 0xA5, 0x94, 0x43, 0xE5, + 0xA5, 0xA2, 0x43, 0xE5, 0xA5, 0xB3, 0x43, 0xE5, + 0xA7, 0x98, 0x43, 0xE5, 0xA7, 0xAC, 0x43, 0xE5, + 0xA8, 0x9B, 0x43, 0xE5, 0xA8, 0xA7, 0x43, 0xE5, + 0xA9, 0xA2, 0x43, 0xE5, 0xA9, 0xA6, 0x43, 0xE5, + 0xAA, 0xB5, 0x43, 0xE5, 0xAC, 0x88, 0x43, 0xE5, + 0xAC, 0xA8, 0x43, 0xE5, 0xAC, 0xBE, 0x43, 0xE5, + // Bytes b40 - b7f + 0xAD, 0x90, 0x43, 0xE5, 0xAD, 0x97, 0x43, 0xE5, + 0xAD, 0xA6, 0x43, 0xE5, 0xAE, 0x80, 0x43, 0xE5, + 0xAE, 0x85, 0x43, 0xE5, 0xAE, 0x97, 0x43, 0xE5, + 0xAF, 0x83, 0x43, 0xE5, 0xAF, 0x98, 0x43, 0xE5, + 0xAF, 0xA7, 0x43, 0xE5, 0xAF, 0xAE, 0x43, 0xE5, + 
0xAF, 0xB3, 0x43, 0xE5, 0xAF, 0xB8, 0x43, 0xE5, + 0xAF, 0xBF, 0x43, 0xE5, 0xB0, 0x86, 0x43, 0xE5, + 0xB0, 0x8F, 0x43, 0xE5, 0xB0, 0xA2, 0x43, 0xE5, + // Bytes b80 - bbf + 0xB0, 0xB8, 0x43, 0xE5, 0xB0, 0xBF, 0x43, 0xE5, + 0xB1, 0xA0, 0x43, 0xE5, 0xB1, 0xA2, 0x43, 0xE5, + 0xB1, 0xA4, 0x43, 0xE5, 0xB1, 0xA5, 0x43, 0xE5, + 0xB1, 0xAE, 0x43, 0xE5, 0xB1, 0xB1, 0x43, 0xE5, + 0xB2, 0x8D, 0x43, 0xE5, 0xB3, 0x80, 0x43, 0xE5, + 0xB4, 0x99, 0x43, 0xE5, 0xB5, 0x83, 0x43, 0xE5, + 0xB5, 0x90, 0x43, 0xE5, 0xB5, 0xAB, 0x43, 0xE5, + 0xB5, 0xAE, 0x43, 0xE5, 0xB5, 0xBC, 0x43, 0xE5, + // Bytes bc0 - bff + 0xB6, 0xB2, 0x43, 0xE5, 0xB6, 0xBA, 0x43, 0xE5, + 0xB7, 0x9B, 0x43, 0xE5, 0xB7, 0xA1, 0x43, 0xE5, + 0xB7, 0xA2, 0x43, 0xE5, 0xB7, 0xA5, 0x43, 0xE5, + 0xB7, 0xA6, 0x43, 0xE5, 0xB7, 0xB1, 0x43, 0xE5, + 0xB7, 0xBD, 0x43, 0xE5, 0xB7, 0xBE, 0x43, 0xE5, + 0xB8, 0xA8, 0x43, 0xE5, 0xB8, 0xBD, 0x43, 0xE5, + 0xB9, 0xA9, 0x43, 0xE5, 0xB9, 0xB2, 0x43, 0xE5, + 0xB9, 0xB4, 0x43, 0xE5, 0xB9, 0xBA, 0x43, 0xE5, + // Bytes c00 - c3f + 0xB9, 0xBC, 0x43, 0xE5, 0xB9, 0xBF, 0x43, 0xE5, + 0xBA, 0xA6, 0x43, 0xE5, 0xBA, 0xB0, 0x43, 0xE5, + 0xBA, 0xB3, 0x43, 0xE5, 0xBA, 0xB6, 0x43, 0xE5, + 0xBB, 0x89, 0x43, 0xE5, 0xBB, 0x8A, 0x43, 0xE5, + 0xBB, 0x92, 0x43, 0xE5, 0xBB, 0x93, 0x43, 0xE5, + 0xBB, 0x99, 0x43, 0xE5, 0xBB, 0xAC, 0x43, 0xE5, + 0xBB, 0xB4, 0x43, 0xE5, 0xBB, 0xBE, 0x43, 0xE5, + 0xBC, 0x84, 0x43, 0xE5, 0xBC, 0x8B, 0x43, 0xE5, + // Bytes c40 - c7f + 0xBC, 0x93, 0x43, 0xE5, 0xBC, 0xA2, 0x43, 0xE5, + 0xBD, 0x90, 0x43, 0xE5, 0xBD, 0x93, 0x43, 0xE5, + 0xBD, 0xA1, 0x43, 0xE5, 0xBD, 0xA2, 0x43, 0xE5, + 0xBD, 0xA9, 0x43, 0xE5, 0xBD, 0xAB, 0x43, 0xE5, + 0xBD, 0xB3, 0x43, 0xE5, 0xBE, 0x8B, 0x43, 0xE5, + 0xBE, 0x8C, 0x43, 0xE5, 0xBE, 0x97, 0x43, 0xE5, + 0xBE, 0x9A, 0x43, 0xE5, 0xBE, 0xA9, 0x43, 0xE5, + 0xBE, 0xAD, 0x43, 0xE5, 0xBF, 0x83, 0x43, 0xE5, + // Bytes c80 - cbf + 0xBF, 0x8D, 0x43, 0xE5, 0xBF, 0x97, 0x43, 0xE5, + 0xBF, 0xB5, 0x43, 0xE5, 0xBF, 0xB9, 0x43, 0xE6, + 0x80, 0x92, 0x43, 0xE6, 0x80, 0x9C, 0x43, 0xE6, + 0x81, 0xB5, 0x43, 0xE6, 0x82, 0x81, 0x43, 0xE6, + 0x82, 0x94, 0x43, 0xE6, 0x83, 0x87, 0x43, 0xE6, + 0x83, 0x98, 0x43, 0xE6, 0x83, 0xA1, 0x43, 0xE6, + 0x84, 0x88, 0x43, 0xE6, 0x85, 0x84, 0x43, 0xE6, + 0x85, 0x88, 0x43, 0xE6, 0x85, 0x8C, 0x43, 0xE6, + // Bytes cc0 - cff + 0x85, 0x8E, 0x43, 0xE6, 0x85, 0xA0, 0x43, 0xE6, + 0x85, 0xA8, 0x43, 0xE6, 0x85, 0xBA, 0x43, 0xE6, + 0x86, 0x8E, 0x43, 0xE6, 0x86, 0x90, 0x43, 0xE6, + 0x86, 0xA4, 0x43, 0xE6, 0x86, 0xAF, 0x43, 0xE6, + 0x86, 0xB2, 0x43, 0xE6, 0x87, 0x9E, 0x43, 0xE6, + 0x87, 0xB2, 0x43, 0xE6, 0x87, 0xB6, 0x43, 0xE6, + 0x88, 0x80, 0x43, 0xE6, 0x88, 0x88, 0x43, 0xE6, + 0x88, 0x90, 0x43, 0xE6, 0x88, 0x9B, 0x43, 0xE6, + // Bytes d00 - d3f + 0x88, 0xAE, 0x43, 0xE6, 0x88, 0xB4, 0x43, 0xE6, + 0x88, 0xB6, 0x43, 0xE6, 0x89, 0x8B, 0x43, 0xE6, + 0x89, 0x93, 0x43, 0xE6, 0x89, 0x9D, 0x43, 0xE6, + 0x8A, 0x95, 0x43, 0xE6, 0x8A, 0xB1, 0x43, 0xE6, + 0x8B, 0x89, 0x43, 0xE6, 0x8B, 0x8F, 0x43, 0xE6, + 0x8B, 0x93, 0x43, 0xE6, 0x8B, 0x94, 0x43, 0xE6, + 0x8B, 0xBC, 0x43, 0xE6, 0x8B, 0xBE, 0x43, 0xE6, + 0x8C, 0x87, 0x43, 0xE6, 0x8C, 0xBD, 0x43, 0xE6, + // Bytes d40 - d7f + 0x8D, 0x90, 0x43, 0xE6, 0x8D, 0x95, 0x43, 0xE6, + 0x8D, 0xA8, 0x43, 0xE6, 0x8D, 0xBB, 0x43, 0xE6, + 0x8E, 0x83, 0x43, 0xE6, 0x8E, 0xA0, 0x43, 0xE6, + 0x8E, 0xA9, 0x43, 0xE6, 0x8F, 0x84, 0x43, 0xE6, + 0x8F, 0x85, 0x43, 0xE6, 0x8F, 0xA4, 0x43, 0xE6, + 0x90, 0x9C, 0x43, 0xE6, 0x90, 0xA2, 0x43, 0xE6, + 0x91, 0x92, 0x43, 0xE6, 0x91, 0xA9, 0x43, 0xE6, + 0x91, 0xB7, 0x43, 0xE6, 0x91, 0xBE, 0x43, 0xE6, + // Bytes d80 - dbf + 0x92, 0x9A, 
0x43, 0xE6, 0x92, 0x9D, 0x43, 0xE6, + 0x93, 0x84, 0x43, 0xE6, 0x94, 0xAF, 0x43, 0xE6, + 0x94, 0xB4, 0x43, 0xE6, 0x95, 0x8F, 0x43, 0xE6, + 0x95, 0x96, 0x43, 0xE6, 0x95, 0xAC, 0x43, 0xE6, + 0x95, 0xB8, 0x43, 0xE6, 0x96, 0x87, 0x43, 0xE6, + 0x96, 0x97, 0x43, 0xE6, 0x96, 0x99, 0x43, 0xE6, + 0x96, 0xA4, 0x43, 0xE6, 0x96, 0xB0, 0x43, 0xE6, + 0x96, 0xB9, 0x43, 0xE6, 0x97, 0x85, 0x43, 0xE6, + // Bytes dc0 - dff + 0x97, 0xA0, 0x43, 0xE6, 0x97, 0xA2, 0x43, 0xE6, + 0x97, 0xA3, 0x43, 0xE6, 0x97, 0xA5, 0x43, 0xE6, + 0x98, 0x93, 0x43, 0xE6, 0x98, 0xA0, 0x43, 0xE6, + 0x99, 0x89, 0x43, 0xE6, 0x99, 0xB4, 0x43, 0xE6, + 0x9A, 0x88, 0x43, 0xE6, 0x9A, 0x91, 0x43, 0xE6, + 0x9A, 0x9C, 0x43, 0xE6, 0x9A, 0xB4, 0x43, 0xE6, + 0x9B, 0x86, 0x43, 0xE6, 0x9B, 0xB0, 0x43, 0xE6, + 0x9B, 0xB4, 0x43, 0xE6, 0x9B, 0xB8, 0x43, 0xE6, + // Bytes e00 - e3f + 0x9C, 0x80, 0x43, 0xE6, 0x9C, 0x88, 0x43, 0xE6, + 0x9C, 0x89, 0x43, 0xE6, 0x9C, 0x97, 0x43, 0xE6, + 0x9C, 0x9B, 0x43, 0xE6, 0x9C, 0xA1, 0x43, 0xE6, + 0x9C, 0xA8, 0x43, 0xE6, 0x9D, 0x8E, 0x43, 0xE6, + 0x9D, 0x93, 0x43, 0xE6, 0x9D, 0x96, 0x43, 0xE6, + 0x9D, 0x9E, 0x43, 0xE6, 0x9D, 0xBB, 0x43, 0xE6, + 0x9E, 0x85, 0x43, 0xE6, 0x9E, 0x97, 0x43, 0xE6, + 0x9F, 0xB3, 0x43, 0xE6, 0x9F, 0xBA, 0x43, 0xE6, + // Bytes e40 - e7f + 0xA0, 0x97, 0x43, 0xE6, 0xA0, 0x9F, 0x43, 0xE6, + 0xA0, 0xAA, 0x43, 0xE6, 0xA1, 0x92, 0x43, 0xE6, + 0xA2, 0x81, 0x43, 0xE6, 0xA2, 0x85, 0x43, 0xE6, + 0xA2, 0x8E, 0x43, 0xE6, 0xA2, 0xA8, 0x43, 0xE6, + 0xA4, 0x94, 0x43, 0xE6, 0xA5, 0x82, 0x43, 0xE6, + 0xA6, 0xA3, 0x43, 0xE6, 0xA7, 0xAA, 0x43, 0xE6, + 0xA8, 0x82, 0x43, 0xE6, 0xA8, 0x93, 0x43, 0xE6, + 0xAA, 0xA8, 0x43, 0xE6, 0xAB, 0x93, 0x43, 0xE6, + // Bytes e80 - ebf + 0xAB, 0x9B, 0x43, 0xE6, 0xAC, 0x84, 0x43, 0xE6, + 0xAC, 0xA0, 0x43, 0xE6, 0xAC, 0xA1, 0x43, 0xE6, + 0xAD, 0x94, 0x43, 0xE6, 0xAD, 0xA2, 0x43, 0xE6, + 0xAD, 0xA3, 0x43, 0xE6, 0xAD, 0xB2, 0x43, 0xE6, + 0xAD, 0xB7, 0x43, 0xE6, 0xAD, 0xB9, 0x43, 0xE6, + 0xAE, 0x9F, 0x43, 0xE6, 0xAE, 0xAE, 0x43, 0xE6, + 0xAE, 0xB3, 0x43, 0xE6, 0xAE, 0xBA, 0x43, 0xE6, + 0xAE, 0xBB, 0x43, 0xE6, 0xAF, 0x8B, 0x43, 0xE6, + // Bytes ec0 - eff + 0xAF, 0x8D, 0x43, 0xE6, 0xAF, 0x94, 0x43, 0xE6, + 0xAF, 0x9B, 0x43, 0xE6, 0xB0, 0x8F, 0x43, 0xE6, + 0xB0, 0x94, 0x43, 0xE6, 0xB0, 0xB4, 0x43, 0xE6, + 0xB1, 0x8E, 0x43, 0xE6, 0xB1, 0xA7, 0x43, 0xE6, + 0xB2, 0x88, 0x43, 0xE6, 0xB2, 0xBF, 0x43, 0xE6, + 0xB3, 0x8C, 0x43, 0xE6, 0xB3, 0x8D, 0x43, 0xE6, + 0xB3, 0xA5, 0x43, 0xE6, 0xB3, 0xA8, 0x43, 0xE6, + 0xB4, 0x96, 0x43, 0xE6, 0xB4, 0x9B, 0x43, 0xE6, + // Bytes f00 - f3f + 0xB4, 0x9E, 0x43, 0xE6, 0xB4, 0xB4, 0x43, 0xE6, + 0xB4, 0xBE, 0x43, 0xE6, 0xB5, 0x81, 0x43, 0xE6, + 0xB5, 0xA9, 0x43, 0xE6, 0xB5, 0xAA, 0x43, 0xE6, + 0xB5, 0xB7, 0x43, 0xE6, 0xB5, 0xB8, 0x43, 0xE6, + 0xB6, 0x85, 0x43, 0xE6, 0xB7, 0x8B, 0x43, 0xE6, + 0xB7, 0x9A, 0x43, 0xE6, 0xB7, 0xAA, 0x43, 0xE6, + 0xB7, 0xB9, 0x43, 0xE6, 0xB8, 0x9A, 0x43, 0xE6, + 0xB8, 0xAF, 0x43, 0xE6, 0xB9, 0xAE, 0x43, 0xE6, + // Bytes f40 - f7f + 0xBA, 0x80, 0x43, 0xE6, 0xBA, 0x9C, 0x43, 0xE6, + 0xBA, 0xBA, 0x43, 0xE6, 0xBB, 0x87, 0x43, 0xE6, + 0xBB, 0x8B, 0x43, 0xE6, 0xBB, 0x91, 0x43, 0xE6, + 0xBB, 0x9B, 0x43, 0xE6, 0xBC, 0x8F, 0x43, 0xE6, + 0xBC, 0x94, 0x43, 0xE6, 0xBC, 0xA2, 0x43, 0xE6, + 0xBC, 0xA3, 0x43, 0xE6, 0xBD, 0xAE, 0x43, 0xE6, + 0xBF, 0x86, 0x43, 0xE6, 0xBF, 0xAB, 0x43, 0xE6, + 0xBF, 0xBE, 0x43, 0xE7, 0x80, 0x9B, 0x43, 0xE7, + // Bytes f80 - fbf + 0x80, 0x9E, 0x43, 0xE7, 0x80, 0xB9, 0x43, 0xE7, + 0x81, 0x8A, 0x43, 0xE7, 0x81, 0xAB, 0x43, 0xE7, + 0x81, 0xB0, 0x43, 0xE7, 0x81, 0xB7, 0x43, 0xE7, + 0x81, 0xBD, 0x43, 0xE7, 0x82, 0x99, 0x43, 0xE7, 
+ 0x82, 0xAD, 0x43, 0xE7, 0x83, 0x88, 0x43, 0xE7, + 0x83, 0x99, 0x43, 0xE7, 0x84, 0xA1, 0x43, 0xE7, + 0x85, 0x85, 0x43, 0xE7, 0x85, 0x89, 0x43, 0xE7, + 0x85, 0xAE, 0x43, 0xE7, 0x86, 0x9C, 0x43, 0xE7, + // Bytes fc0 - fff + 0x87, 0x8E, 0x43, 0xE7, 0x87, 0x90, 0x43, 0xE7, + 0x88, 0x90, 0x43, 0xE7, 0x88, 0x9B, 0x43, 0xE7, + 0x88, 0xA8, 0x43, 0xE7, 0x88, 0xAA, 0x43, 0xE7, + 0x88, 0xAB, 0x43, 0xE7, 0x88, 0xB5, 0x43, 0xE7, + 0x88, 0xB6, 0x43, 0xE7, 0x88, 0xBB, 0x43, 0xE7, + 0x88, 0xBF, 0x43, 0xE7, 0x89, 0x87, 0x43, 0xE7, + 0x89, 0x90, 0x43, 0xE7, 0x89, 0x99, 0x43, 0xE7, + 0x89, 0x9B, 0x43, 0xE7, 0x89, 0xA2, 0x43, 0xE7, + // Bytes 1000 - 103f + 0x89, 0xB9, 0x43, 0xE7, 0x8A, 0x80, 0x43, 0xE7, + 0x8A, 0x95, 0x43, 0xE7, 0x8A, 0xAC, 0x43, 0xE7, + 0x8A, 0xAF, 0x43, 0xE7, 0x8B, 0x80, 0x43, 0xE7, + 0x8B, 0xBC, 0x43, 0xE7, 0x8C, 0xAA, 0x43, 0xE7, + 0x8D, 0xB5, 0x43, 0xE7, 0x8D, 0xBA, 0x43, 0xE7, + 0x8E, 0x84, 0x43, 0xE7, 0x8E, 0x87, 0x43, 0xE7, + 0x8E, 0x89, 0x43, 0xE7, 0x8E, 0x8B, 0x43, 0xE7, + 0x8E, 0xA5, 0x43, 0xE7, 0x8E, 0xB2, 0x43, 0xE7, + // Bytes 1040 - 107f + 0x8F, 0x9E, 0x43, 0xE7, 0x90, 0x86, 0x43, 0xE7, + 0x90, 0x89, 0x43, 0xE7, 0x90, 0xA2, 0x43, 0xE7, + 0x91, 0x87, 0x43, 0xE7, 0x91, 0x9C, 0x43, 0xE7, + 0x91, 0xA9, 0x43, 0xE7, 0x91, 0xB1, 0x43, 0xE7, + 0x92, 0x85, 0x43, 0xE7, 0x92, 0x89, 0x43, 0xE7, + 0x92, 0x98, 0x43, 0xE7, 0x93, 0x8A, 0x43, 0xE7, + 0x93, 0x9C, 0x43, 0xE7, 0x93, 0xA6, 0x43, 0xE7, + 0x94, 0x86, 0x43, 0xE7, 0x94, 0x98, 0x43, 0xE7, + // Bytes 1080 - 10bf + 0x94, 0x9F, 0x43, 0xE7, 0x94, 0xA4, 0x43, 0xE7, + 0x94, 0xA8, 0x43, 0xE7, 0x94, 0xB0, 0x43, 0xE7, + 0x94, 0xB2, 0x43, 0xE7, 0x94, 0xB3, 0x43, 0xE7, + 0x94, 0xB7, 0x43, 0xE7, 0x94, 0xBB, 0x43, 0xE7, + 0x94, 0xBE, 0x43, 0xE7, 0x95, 0x99, 0x43, 0xE7, + 0x95, 0xA5, 0x43, 0xE7, 0x95, 0xB0, 0x43, 0xE7, + 0x96, 0x8B, 0x43, 0xE7, 0x96, 0x92, 0x43, 0xE7, + 0x97, 0xA2, 0x43, 0xE7, 0x98, 0x90, 0x43, 0xE7, + // Bytes 10c0 - 10ff + 0x98, 0x9D, 0x43, 0xE7, 0x98, 0x9F, 0x43, 0xE7, + 0x99, 0x82, 0x43, 0xE7, 0x99, 0xA9, 0x43, 0xE7, + 0x99, 0xB6, 0x43, 0xE7, 0x99, 0xBD, 0x43, 0xE7, + 0x9A, 0xAE, 0x43, 0xE7, 0x9A, 0xBF, 0x43, 0xE7, + 0x9B, 0x8A, 0x43, 0xE7, 0x9B, 0x9B, 0x43, 0xE7, + 0x9B, 0xA3, 0x43, 0xE7, 0x9B, 0xA7, 0x43, 0xE7, + 0x9B, 0xAE, 0x43, 0xE7, 0x9B, 0xB4, 0x43, 0xE7, + 0x9C, 0x81, 0x43, 0xE7, 0x9C, 0x9E, 0x43, 0xE7, + // Bytes 1100 - 113f + 0x9C, 0x9F, 0x43, 0xE7, 0x9D, 0x80, 0x43, 0xE7, + 0x9D, 0x8A, 0x43, 0xE7, 0x9E, 0x8B, 0x43, 0xE7, + 0x9E, 0xA7, 0x43, 0xE7, 0x9F, 0x9B, 0x43, 0xE7, + 0x9F, 0xA2, 0x43, 0xE7, 0x9F, 0xB3, 0x43, 0xE7, + 0xA1, 0x8E, 0x43, 0xE7, 0xA1, 0xAB, 0x43, 0xE7, + 0xA2, 0x8C, 0x43, 0xE7, 0xA2, 0x91, 0x43, 0xE7, + 0xA3, 0x8A, 0x43, 0xE7, 0xA3, 0x8C, 0x43, 0xE7, + 0xA3, 0xBB, 0x43, 0xE7, 0xA4, 0xAA, 0x43, 0xE7, + // Bytes 1140 - 117f + 0xA4, 0xBA, 0x43, 0xE7, 0xA4, 0xBC, 0x43, 0xE7, + 0xA4, 0xBE, 0x43, 0xE7, 0xA5, 0x88, 0x43, 0xE7, + 0xA5, 0x89, 0x43, 0xE7, 0xA5, 0x90, 0x43, 0xE7, + 0xA5, 0x96, 0x43, 0xE7, 0xA5, 0x9D, 0x43, 0xE7, + 0xA5, 0x9E, 0x43, 0xE7, 0xA5, 0xA5, 0x43, 0xE7, + 0xA5, 0xBF, 0x43, 0xE7, 0xA6, 0x81, 0x43, 0xE7, + 0xA6, 0x8D, 0x43, 0xE7, 0xA6, 0x8E, 0x43, 0xE7, + 0xA6, 0x8F, 0x43, 0xE7, 0xA6, 0xAE, 0x43, 0xE7, + // Bytes 1180 - 11bf + 0xA6, 0xB8, 0x43, 0xE7, 0xA6, 0xBE, 0x43, 0xE7, + 0xA7, 0x8A, 0x43, 0xE7, 0xA7, 0x98, 0x43, 0xE7, + 0xA7, 0xAB, 0x43, 0xE7, 0xA8, 0x9C, 0x43, 0xE7, + 0xA9, 0x80, 0x43, 0xE7, 0xA9, 0x8A, 0x43, 0xE7, + 0xA9, 0x8F, 0x43, 0xE7, 0xA9, 0xB4, 0x43, 0xE7, + 0xA9, 0xBA, 0x43, 0xE7, 0xAA, 0x81, 0x43, 0xE7, + 0xAA, 0xB1, 0x43, 0xE7, 0xAB, 0x8B, 0x43, 0xE7, + 0xAB, 0xAE, 0x43, 
0xE7, 0xAB, 0xB9, 0x43, 0xE7, + // Bytes 11c0 - 11ff + 0xAC, 0xA0, 0x43, 0xE7, 0xAE, 0x8F, 0x43, 0xE7, + 0xAF, 0x80, 0x43, 0xE7, 0xAF, 0x86, 0x43, 0xE7, + 0xAF, 0x89, 0x43, 0xE7, 0xB0, 0xBE, 0x43, 0xE7, + 0xB1, 0xA0, 0x43, 0xE7, 0xB1, 0xB3, 0x43, 0xE7, + 0xB1, 0xBB, 0x43, 0xE7, 0xB2, 0x92, 0x43, 0xE7, + 0xB2, 0xBE, 0x43, 0xE7, 0xB3, 0x92, 0x43, 0xE7, + 0xB3, 0x96, 0x43, 0xE7, 0xB3, 0xA3, 0x43, 0xE7, + 0xB3, 0xA7, 0x43, 0xE7, 0xB3, 0xA8, 0x43, 0xE7, + // Bytes 1200 - 123f + 0xB3, 0xB8, 0x43, 0xE7, 0xB4, 0x80, 0x43, 0xE7, + 0xB4, 0x90, 0x43, 0xE7, 0xB4, 0xA2, 0x43, 0xE7, + 0xB4, 0xAF, 0x43, 0xE7, 0xB5, 0x82, 0x43, 0xE7, + 0xB5, 0x9B, 0x43, 0xE7, 0xB5, 0xA3, 0x43, 0xE7, + 0xB6, 0xA0, 0x43, 0xE7, 0xB6, 0xBE, 0x43, 0xE7, + 0xB7, 0x87, 0x43, 0xE7, 0xB7, 0xB4, 0x43, 0xE7, + 0xB8, 0x82, 0x43, 0xE7, 0xB8, 0x89, 0x43, 0xE7, + 0xB8, 0xB7, 0x43, 0xE7, 0xB9, 0x81, 0x43, 0xE7, + // Bytes 1240 - 127f + 0xB9, 0x85, 0x43, 0xE7, 0xBC, 0xB6, 0x43, 0xE7, + 0xBC, 0xBE, 0x43, 0xE7, 0xBD, 0x91, 0x43, 0xE7, + 0xBD, 0xB2, 0x43, 0xE7, 0xBD, 0xB9, 0x43, 0xE7, + 0xBD, 0xBA, 0x43, 0xE7, 0xBE, 0x85, 0x43, 0xE7, + 0xBE, 0x8A, 0x43, 0xE7, 0xBE, 0x95, 0x43, 0xE7, + 0xBE, 0x9A, 0x43, 0xE7, 0xBE, 0xBD, 0x43, 0xE7, + 0xBF, 0xBA, 0x43, 0xE8, 0x80, 0x81, 0x43, 0xE8, + 0x80, 0x85, 0x43, 0xE8, 0x80, 0x8C, 0x43, 0xE8, + // Bytes 1280 - 12bf + 0x80, 0x92, 0x43, 0xE8, 0x80, 0xB3, 0x43, 0xE8, + 0x81, 0x86, 0x43, 0xE8, 0x81, 0xA0, 0x43, 0xE8, + 0x81, 0xAF, 0x43, 0xE8, 0x81, 0xB0, 0x43, 0xE8, + 0x81, 0xBE, 0x43, 0xE8, 0x81, 0xBF, 0x43, 0xE8, + 0x82, 0x89, 0x43, 0xE8, 0x82, 0x8B, 0x43, 0xE8, + 0x82, 0xAD, 0x43, 0xE8, 0x82, 0xB2, 0x43, 0xE8, + 0x84, 0x83, 0x43, 0xE8, 0x84, 0xBE, 0x43, 0xE8, + 0x87, 0x98, 0x43, 0xE8, 0x87, 0xA3, 0x43, 0xE8, + // Bytes 12c0 - 12ff + 0x87, 0xA8, 0x43, 0xE8, 0x87, 0xAA, 0x43, 0xE8, + 0x87, 0xAD, 0x43, 0xE8, 0x87, 0xB3, 0x43, 0xE8, + 0x87, 0xBC, 0x43, 0xE8, 0x88, 0x81, 0x43, 0xE8, + 0x88, 0x84, 0x43, 0xE8, 0x88, 0x8C, 0x43, 0xE8, + 0x88, 0x98, 0x43, 0xE8, 0x88, 0x9B, 0x43, 0xE8, + 0x88, 0x9F, 0x43, 0xE8, 0x89, 0xAE, 0x43, 0xE8, + 0x89, 0xAF, 0x43, 0xE8, 0x89, 0xB2, 0x43, 0xE8, + 0x89, 0xB8, 0x43, 0xE8, 0x89, 0xB9, 0x43, 0xE8, + // Bytes 1300 - 133f + 0x8A, 0x8B, 0x43, 0xE8, 0x8A, 0x91, 0x43, 0xE8, + 0x8A, 0x9D, 0x43, 0xE8, 0x8A, 0xB1, 0x43, 0xE8, + 0x8A, 0xB3, 0x43, 0xE8, 0x8A, 0xBD, 0x43, 0xE8, + 0x8B, 0xA5, 0x43, 0xE8, 0x8B, 0xA6, 0x43, 0xE8, + 0x8C, 0x9D, 0x43, 0xE8, 0x8C, 0xA3, 0x43, 0xE8, + 0x8C, 0xB6, 0x43, 0xE8, 0x8D, 0x92, 0x43, 0xE8, + 0x8D, 0x93, 0x43, 0xE8, 0x8D, 0xA3, 0x43, 0xE8, + 0x8E, 0xAD, 0x43, 0xE8, 0x8E, 0xBD, 0x43, 0xE8, + // Bytes 1340 - 137f + 0x8F, 0x89, 0x43, 0xE8, 0x8F, 0x8A, 0x43, 0xE8, + 0x8F, 0x8C, 0x43, 0xE8, 0x8F, 0x9C, 0x43, 0xE8, + 0x8F, 0xA7, 0x43, 0xE8, 0x8F, 0xAF, 0x43, 0xE8, + 0x8F, 0xB1, 0x43, 0xE8, 0x90, 0xBD, 0x43, 0xE8, + 0x91, 0x89, 0x43, 0xE8, 0x91, 0x97, 0x43, 0xE8, + 0x93, 0xAE, 0x43, 0xE8, 0x93, 0xB1, 0x43, 0xE8, + 0x93, 0xB3, 0x43, 0xE8, 0x93, 0xBC, 0x43, 0xE8, + 0x94, 0x96, 0x43, 0xE8, 0x95, 0xA4, 0x43, 0xE8, + // Bytes 1380 - 13bf + 0x97, 0x8D, 0x43, 0xE8, 0x97, 0xBA, 0x43, 0xE8, + 0x98, 0x86, 0x43, 0xE8, 0x98, 0x92, 0x43, 0xE8, + 0x98, 0xAD, 0x43, 0xE8, 0x98, 0xBF, 0x43, 0xE8, + 0x99, 0x8D, 0x43, 0xE8, 0x99, 0x90, 0x43, 0xE8, + 0x99, 0x9C, 0x43, 0xE8, 0x99, 0xA7, 0x43, 0xE8, + 0x99, 0xA9, 0x43, 0xE8, 0x99, 0xAB, 0x43, 0xE8, + 0x9A, 0x88, 0x43, 0xE8, 0x9A, 0xA9, 0x43, 0xE8, + 0x9B, 0xA2, 0x43, 0xE8, 0x9C, 0x8E, 0x43, 0xE8, + // Bytes 13c0 - 13ff + 0x9C, 0xA8, 0x43, 0xE8, 0x9D, 0xAB, 0x43, 0xE8, + 0x9D, 0xB9, 0x43, 0xE8, 0x9E, 0x86, 0x43, 0xE8, + 0x9E, 0xBA, 
0x43, 0xE8, 0x9F, 0xA1, 0x43, 0xE8, + 0xA0, 0x81, 0x43, 0xE8, 0xA0, 0x9F, 0x43, 0xE8, + 0xA1, 0x80, 0x43, 0xE8, 0xA1, 0x8C, 0x43, 0xE8, + 0xA1, 0xA0, 0x43, 0xE8, 0xA1, 0xA3, 0x43, 0xE8, + 0xA3, 0x82, 0x43, 0xE8, 0xA3, 0x8F, 0x43, 0xE8, + 0xA3, 0x97, 0x43, 0xE8, 0xA3, 0x9E, 0x43, 0xE8, + // Bytes 1400 - 143f + 0xA3, 0xA1, 0x43, 0xE8, 0xA3, 0xB8, 0x43, 0xE8, + 0xA3, 0xBA, 0x43, 0xE8, 0xA4, 0x90, 0x43, 0xE8, + 0xA5, 0x81, 0x43, 0xE8, 0xA5, 0xA4, 0x43, 0xE8, + 0xA5, 0xBE, 0x43, 0xE8, 0xA6, 0x86, 0x43, 0xE8, + 0xA6, 0x8B, 0x43, 0xE8, 0xA6, 0x96, 0x43, 0xE8, + 0xA7, 0x92, 0x43, 0xE8, 0xA7, 0xA3, 0x43, 0xE8, + 0xA8, 0x80, 0x43, 0xE8, 0xAA, 0xA0, 0x43, 0xE8, + 0xAA, 0xAA, 0x43, 0xE8, 0xAA, 0xBF, 0x43, 0xE8, + // Bytes 1440 - 147f + 0xAB, 0x8B, 0x43, 0xE8, 0xAB, 0x92, 0x43, 0xE8, + 0xAB, 0x96, 0x43, 0xE8, 0xAB, 0xAD, 0x43, 0xE8, + 0xAB, 0xB8, 0x43, 0xE8, 0xAB, 0xBE, 0x43, 0xE8, + 0xAC, 0x81, 0x43, 0xE8, 0xAC, 0xB9, 0x43, 0xE8, + 0xAD, 0x98, 0x43, 0xE8, 0xAE, 0x80, 0x43, 0xE8, + 0xAE, 0x8A, 0x43, 0xE8, 0xB0, 0xB7, 0x43, 0xE8, + 0xB1, 0x86, 0x43, 0xE8, 0xB1, 0x88, 0x43, 0xE8, + 0xB1, 0x95, 0x43, 0xE8, 0xB1, 0xB8, 0x43, 0xE8, + // Bytes 1480 - 14bf + 0xB2, 0x9D, 0x43, 0xE8, 0xB2, 0xA1, 0x43, 0xE8, + 0xB2, 0xA9, 0x43, 0xE8, 0xB2, 0xAB, 0x43, 0xE8, + 0xB3, 0x81, 0x43, 0xE8, 0xB3, 0x82, 0x43, 0xE8, + 0xB3, 0x87, 0x43, 0xE8, 0xB3, 0x88, 0x43, 0xE8, + 0xB3, 0x93, 0x43, 0xE8, 0xB4, 0x88, 0x43, 0xE8, + 0xB4, 0x9B, 0x43, 0xE8, 0xB5, 0xA4, 0x43, 0xE8, + 0xB5, 0xB0, 0x43, 0xE8, 0xB5, 0xB7, 0x43, 0xE8, + 0xB6, 0xB3, 0x43, 0xE8, 0xB6, 0xBC, 0x43, 0xE8, + // Bytes 14c0 - 14ff + 0xB7, 0x8B, 0x43, 0xE8, 0xB7, 0xAF, 0x43, 0xE8, + 0xB7, 0xB0, 0x43, 0xE8, 0xBA, 0xAB, 0x43, 0xE8, + 0xBB, 0x8A, 0x43, 0xE8, 0xBB, 0x94, 0x43, 0xE8, + 0xBC, 0xA6, 0x43, 0xE8, 0xBC, 0xAA, 0x43, 0xE8, + 0xBC, 0xB8, 0x43, 0xE8, 0xBC, 0xBB, 0x43, 0xE8, + 0xBD, 0xA2, 0x43, 0xE8, 0xBE, 0x9B, 0x43, 0xE8, + 0xBE, 0x9E, 0x43, 0xE8, 0xBE, 0xB0, 0x43, 0xE8, + 0xBE, 0xB5, 0x43, 0xE8, 0xBE, 0xB6, 0x43, 0xE9, + // Bytes 1500 - 153f + 0x80, 0xA3, 0x43, 0xE9, 0x80, 0xB8, 0x43, 0xE9, + 0x81, 0x8A, 0x43, 0xE9, 0x81, 0xA9, 0x43, 0xE9, + 0x81, 0xB2, 0x43, 0xE9, 0x81, 0xBC, 0x43, 0xE9, + 0x82, 0x8F, 0x43, 0xE9, 0x82, 0x91, 0x43, 0xE9, + 0x82, 0x94, 0x43, 0xE9, 0x83, 0x8E, 0x43, 0xE9, + 0x83, 0x9E, 0x43, 0xE9, 0x83, 0xB1, 0x43, 0xE9, + 0x83, 0xBD, 0x43, 0xE9, 0x84, 0x91, 0x43, 0xE9, + 0x84, 0x9B, 0x43, 0xE9, 0x85, 0x89, 0x43, 0xE9, + // Bytes 1540 - 157f + 0x85, 0x8D, 0x43, 0xE9, 0x85, 0xAA, 0x43, 0xE9, + 0x86, 0x99, 0x43, 0xE9, 0x86, 0xB4, 0x43, 0xE9, + 0x87, 0x86, 0x43, 0xE9, 0x87, 0x8C, 0x43, 0xE9, + 0x87, 0x8F, 0x43, 0xE9, 0x87, 0x91, 0x43, 0xE9, + 0x88, 0xB4, 0x43, 0xE9, 0x88, 0xB8, 0x43, 0xE9, + 0x89, 0xB6, 0x43, 0xE9, 0x89, 0xBC, 0x43, 0xE9, + 0x8B, 0x97, 0x43, 0xE9, 0x8B, 0x98, 0x43, 0xE9, + 0x8C, 0x84, 0x43, 0xE9, 0x8D, 0x8A, 0x43, 0xE9, + // Bytes 1580 - 15bf + 0x8F, 0xB9, 0x43, 0xE9, 0x90, 0x95, 0x43, 0xE9, + 0x95, 0xB7, 0x43, 0xE9, 0x96, 0x80, 0x43, 0xE9, + 0x96, 0x8B, 0x43, 0xE9, 0x96, 0xAD, 0x43, 0xE9, + 0x96, 0xB7, 0x43, 0xE9, 0x98, 0x9C, 0x43, 0xE9, + 0x98, 0xAE, 0x43, 0xE9, 0x99, 0x8B, 0x43, 0xE9, + 0x99, 0x8D, 0x43, 0xE9, 0x99, 0xB5, 0x43, 0xE9, + 0x99, 0xB8, 0x43, 0xE9, 0x99, 0xBC, 0x43, 0xE9, + 0x9A, 0x86, 0x43, 0xE9, 0x9A, 0xA3, 0x43, 0xE9, + // Bytes 15c0 - 15ff + 0x9A, 0xB6, 0x43, 0xE9, 0x9A, 0xB7, 0x43, 0xE9, + 0x9A, 0xB8, 0x43, 0xE9, 0x9A, 0xB9, 0x43, 0xE9, + 0x9B, 0x83, 0x43, 0xE9, 0x9B, 0xA2, 0x43, 0xE9, + 0x9B, 0xA3, 0x43, 0xE9, 0x9B, 0xA8, 0x43, 0xE9, + 0x9B, 0xB6, 0x43, 0xE9, 0x9B, 0xB7, 0x43, 0xE9, + 0x9C, 0xA3, 0x43, 0xE9, 0x9C, 
0xB2, 0x43, 0xE9, + 0x9D, 0x88, 0x43, 0xE9, 0x9D, 0x91, 0x43, 0xE9, + 0x9D, 0x96, 0x43, 0xE9, 0x9D, 0x9E, 0x43, 0xE9, + // Bytes 1600 - 163f + 0x9D, 0xA2, 0x43, 0xE9, 0x9D, 0xA9, 0x43, 0xE9, + 0x9F, 0x8B, 0x43, 0xE9, 0x9F, 0x9B, 0x43, 0xE9, + 0x9F, 0xA0, 0x43, 0xE9, 0x9F, 0xAD, 0x43, 0xE9, + 0x9F, 0xB3, 0x43, 0xE9, 0x9F, 0xBF, 0x43, 0xE9, + 0xA0, 0x81, 0x43, 0xE9, 0xA0, 0x85, 0x43, 0xE9, + 0xA0, 0x8B, 0x43, 0xE9, 0xA0, 0x98, 0x43, 0xE9, + 0xA0, 0xA9, 0x43, 0xE9, 0xA0, 0xBB, 0x43, 0xE9, + 0xA1, 0x9E, 0x43, 0xE9, 0xA2, 0xA8, 0x43, 0xE9, + // Bytes 1640 - 167f + 0xA3, 0x9B, 0x43, 0xE9, 0xA3, 0x9F, 0x43, 0xE9, + 0xA3, 0xA2, 0x43, 0xE9, 0xA3, 0xAF, 0x43, 0xE9, + 0xA3, 0xBC, 0x43, 0xE9, 0xA4, 0xA8, 0x43, 0xE9, + 0xA4, 0xA9, 0x43, 0xE9, 0xA6, 0x96, 0x43, 0xE9, + 0xA6, 0x99, 0x43, 0xE9, 0xA6, 0xA7, 0x43, 0xE9, + 0xA6, 0xAC, 0x43, 0xE9, 0xA7, 0x82, 0x43, 0xE9, + 0xA7, 0xB1, 0x43, 0xE9, 0xA7, 0xBE, 0x43, 0xE9, + 0xA9, 0xAA, 0x43, 0xE9, 0xAA, 0xA8, 0x43, 0xE9, + // Bytes 1680 - 16bf + 0xAB, 0x98, 0x43, 0xE9, 0xAB, 0x9F, 0x43, 0xE9, + 0xAC, 0x92, 0x43, 0xE9, 0xAC, 0xA5, 0x43, 0xE9, + 0xAC, 0xAF, 0x43, 0xE9, 0xAC, 0xB2, 0x43, 0xE9, + 0xAC, 0xBC, 0x43, 0xE9, 0xAD, 0x9A, 0x43, 0xE9, + 0xAD, 0xAF, 0x43, 0xE9, 0xB1, 0x80, 0x43, 0xE9, + 0xB1, 0x97, 0x43, 0xE9, 0xB3, 0xA5, 0x43, 0xE9, + 0xB3, 0xBD, 0x43, 0xE9, 0xB5, 0xA7, 0x43, 0xE9, + 0xB6, 0xB4, 0x43, 0xE9, 0xB7, 0xBA, 0x43, 0xE9, + // Bytes 16c0 - 16ff + 0xB8, 0x9E, 0x43, 0xE9, 0xB9, 0xB5, 0x43, 0xE9, + 0xB9, 0xBF, 0x43, 0xE9, 0xBA, 0x97, 0x43, 0xE9, + 0xBA, 0x9F, 0x43, 0xE9, 0xBA, 0xA5, 0x43, 0xE9, + 0xBA, 0xBB, 0x43, 0xE9, 0xBB, 0x83, 0x43, 0xE9, + 0xBB, 0x8D, 0x43, 0xE9, 0xBB, 0x8E, 0x43, 0xE9, + 0xBB, 0x91, 0x43, 0xE9, 0xBB, 0xB9, 0x43, 0xE9, + 0xBB, 0xBD, 0x43, 0xE9, 0xBB, 0xBE, 0x43, 0xE9, + 0xBC, 0x85, 0x43, 0xE9, 0xBC, 0x8E, 0x43, 0xE9, + // Bytes 1700 - 173f + 0xBC, 0x8F, 0x43, 0xE9, 0xBC, 0x93, 0x43, 0xE9, + 0xBC, 0x96, 0x43, 0xE9, 0xBC, 0xA0, 0x43, 0xE9, + 0xBC, 0xBB, 0x43, 0xE9, 0xBD, 0x83, 0x43, 0xE9, + 0xBD, 0x8A, 0x43, 0xE9, 0xBD, 0x92, 0x43, 0xE9, + 0xBE, 0x8D, 0x43, 0xE9, 0xBE, 0x8E, 0x43, 0xE9, + 0xBE, 0x9C, 0x43, 0xE9, 0xBE, 0x9F, 0x43, 0xE9, + 0xBE, 0xA0, 0x43, 0xEA, 0x99, 0x91, 0x43, 0xEA, + 0x9A, 0x89, 0x43, 0xEA, 0x9C, 0xA7, 0x43, 0xEA, + // Bytes 1740 - 177f + 0x9D, 0xAF, 0x43, 0xEA, 0x9E, 0x8E, 0x43, 0xEA, + 0xAC, 0xB7, 0x43, 0xEA, 0xAD, 0x92, 0x43, 0xEA, + 0xAD, 0xA6, 0x43, 0xEA, 0xAD, 0xA7, 0x44, 0xF0, + 0x9D, 0xBC, 0x84, 0x44, 0xF0, 0x9D, 0xBC, 0x85, + 0x44, 0xF0, 0x9D, 0xBC, 0x86, 0x44, 0xF0, 0x9D, + 0xBC, 0x88, 0x44, 0xF0, 0x9D, 0xBC, 0x8A, 0x44, + 0xF0, 0x9D, 0xBC, 0x9E, 0x44, 0xF0, 0xA0, 0x84, + 0xA2, 0x44, 0xF0, 0xA0, 0x94, 0x9C, 0x44, 0xF0, + // Bytes 1780 - 17bf + 0xA0, 0x94, 0xA5, 0x44, 0xF0, 0xA0, 0x95, 0x8B, + 0x44, 0xF0, 0xA0, 0x98, 0xBA, 0x44, 0xF0, 0xA0, + 0xA0, 0x84, 0x44, 0xF0, 0xA0, 0xA3, 0x9E, 0x44, + 0xF0, 0xA0, 0xA8, 0xAC, 0x44, 0xF0, 0xA0, 0xAD, + 0xA3, 0x44, 0xF0, 0xA1, 0x93, 0xA4, 0x44, 0xF0, + 0xA1, 0x9A, 0xA8, 0x44, 0xF0, 0xA1, 0x9B, 0xAA, + 0x44, 0xF0, 0xA1, 0xA7, 0x88, 0x44, 0xF0, 0xA1, + 0xAC, 0x98, 0x44, 0xF0, 0xA1, 0xB4, 0x8B, 0x44, + // Bytes 17c0 - 17ff + 0xF0, 0xA1, 0xB7, 0xA4, 0x44, 0xF0, 0xA1, 0xB7, + 0xA6, 0x44, 0xF0, 0xA2, 0x86, 0x83, 0x44, 0xF0, + 0xA2, 0x86, 0x9F, 0x44, 0xF0, 0xA2, 0x8C, 0xB1, + 0x44, 0xF0, 0xA2, 0x9B, 0x94, 0x44, 0xF0, 0xA2, + 0xA1, 0x84, 0x44, 0xF0, 0xA2, 0xA1, 0x8A, 0x44, + 0xF0, 0xA2, 0xAC, 0x8C, 0x44, 0xF0, 0xA2, 0xAF, + 0xB1, 0x44, 0xF0, 0xA3, 0x80, 0x8A, 0x44, 0xF0, + 0xA3, 0x8A, 0xB8, 0x44, 0xF0, 0xA3, 0x8D, 0x9F, + // Bytes 1800 - 183f + 0x44, 0xF0, 0xA3, 0x8E, 
0x93, 0x44, 0xF0, 0xA3, + 0x8E, 0x9C, 0x44, 0xF0, 0xA3, 0x8F, 0x83, 0x44, + 0xF0, 0xA3, 0x8F, 0x95, 0x44, 0xF0, 0xA3, 0x91, + 0xAD, 0x44, 0xF0, 0xA3, 0x9A, 0xA3, 0x44, 0xF0, + 0xA3, 0xA2, 0xA7, 0x44, 0xF0, 0xA3, 0xAA, 0x8D, + 0x44, 0xF0, 0xA3, 0xAB, 0xBA, 0x44, 0xF0, 0xA3, + 0xB2, 0xBC, 0x44, 0xF0, 0xA3, 0xB4, 0x9E, 0x44, + 0xF0, 0xA3, 0xBB, 0x91, 0x44, 0xF0, 0xA3, 0xBD, + // Bytes 1840 - 187f + 0x9E, 0x44, 0xF0, 0xA3, 0xBE, 0x8E, 0x44, 0xF0, + 0xA4, 0x89, 0xA3, 0x44, 0xF0, 0xA4, 0x8B, 0xAE, + 0x44, 0xF0, 0xA4, 0x8E, 0xAB, 0x44, 0xF0, 0xA4, + 0x98, 0x88, 0x44, 0xF0, 0xA4, 0x9C, 0xB5, 0x44, + 0xF0, 0xA4, 0xA0, 0x94, 0x44, 0xF0, 0xA4, 0xB0, + 0xB6, 0x44, 0xF0, 0xA4, 0xB2, 0x92, 0x44, 0xF0, + 0xA4, 0xBE, 0xA1, 0x44, 0xF0, 0xA4, 0xBE, 0xB8, + 0x44, 0xF0, 0xA5, 0x81, 0x84, 0x44, 0xF0, 0xA5, + // Bytes 1880 - 18bf + 0x83, 0xB2, 0x44, 0xF0, 0xA5, 0x83, 0xB3, 0x44, + 0xF0, 0xA5, 0x84, 0x99, 0x44, 0xF0, 0xA5, 0x84, + 0xB3, 0x44, 0xF0, 0xA5, 0x89, 0x89, 0x44, 0xF0, + 0xA5, 0x90, 0x9D, 0x44, 0xF0, 0xA5, 0x98, 0xA6, + 0x44, 0xF0, 0xA5, 0x9A, 0x9A, 0x44, 0xF0, 0xA5, + 0x9B, 0x85, 0x44, 0xF0, 0xA5, 0xA5, 0xBC, 0x44, + 0xF0, 0xA5, 0xAA, 0xA7, 0x44, 0xF0, 0xA5, 0xAE, + 0xAB, 0x44, 0xF0, 0xA5, 0xB2, 0x80, 0x44, 0xF0, + // Bytes 18c0 - 18ff + 0xA5, 0xB3, 0x90, 0x44, 0xF0, 0xA5, 0xBE, 0x86, + 0x44, 0xF0, 0xA6, 0x87, 0x9A, 0x44, 0xF0, 0xA6, + 0x88, 0xA8, 0x44, 0xF0, 0xA6, 0x89, 0x87, 0x44, + 0xF0, 0xA6, 0x8B, 0x99, 0x44, 0xF0, 0xA6, 0x8C, + 0xBE, 0x44, 0xF0, 0xA6, 0x93, 0x9A, 0x44, 0xF0, + 0xA6, 0x94, 0xA3, 0x44, 0xF0, 0xA6, 0x96, 0xA8, + 0x44, 0xF0, 0xA6, 0x9E, 0xA7, 0x44, 0xF0, 0xA6, + 0x9E, 0xB5, 0x44, 0xF0, 0xA6, 0xAC, 0xBC, 0x44, + // Bytes 1900 - 193f + 0xF0, 0xA6, 0xB0, 0xB6, 0x44, 0xF0, 0xA6, 0xB3, + 0x95, 0x44, 0xF0, 0xA6, 0xB5, 0xAB, 0x44, 0xF0, + 0xA6, 0xBC, 0xAC, 0x44, 0xF0, 0xA6, 0xBE, 0xB1, + 0x44, 0xF0, 0xA7, 0x83, 0x92, 0x44, 0xF0, 0xA7, + 0x8F, 0x8A, 0x44, 0xF0, 0xA7, 0x99, 0xA7, 0x44, + 0xF0, 0xA7, 0xA2, 0xAE, 0x44, 0xF0, 0xA7, 0xA5, + 0xA6, 0x44, 0xF0, 0xA7, 0xB2, 0xA8, 0x44, 0xF0, + 0xA7, 0xBB, 0x93, 0x44, 0xF0, 0xA7, 0xBC, 0xAF, + // Bytes 1940 - 197f + 0x44, 0xF0, 0xA8, 0x97, 0x92, 0x44, 0xF0, 0xA8, + 0x97, 0xAD, 0x44, 0xF0, 0xA8, 0x9C, 0xAE, 0x44, + 0xF0, 0xA8, 0xAF, 0xBA, 0x44, 0xF0, 0xA8, 0xB5, + 0xB7, 0x44, 0xF0, 0xA9, 0x85, 0x85, 0x44, 0xF0, + 0xA9, 0x87, 0x9F, 0x44, 0xF0, 0xA9, 0x88, 0x9A, + 0x44, 0xF0, 0xA9, 0x90, 0x8A, 0x44, 0xF0, 0xA9, + 0x92, 0x96, 0x44, 0xF0, 0xA9, 0x96, 0xB6, 0x44, + 0xF0, 0xA9, 0xAC, 0xB0, 0x44, 0xF0, 0xAA, 0x83, + // Bytes 1980 - 19bf + 0x8E, 0x44, 0xF0, 0xAA, 0x84, 0x85, 0x44, 0xF0, + 0xAA, 0x88, 0x8E, 0x44, 0xF0, 0xAA, 0x8A, 0x91, + 0x44, 0xF0, 0xAA, 0x8E, 0x92, 0x44, 0xF0, 0xAA, + 0x98, 0x80, 0x42, 0x21, 0x21, 0x42, 0x21, 0x3F, + 0x42, 0x2E, 0x2E, 0x42, 0x30, 0x2C, 0x42, 0x30, + 0x2E, 0x42, 0x31, 0x2C, 0x42, 0x31, 0x2E, 0x42, + 0x31, 0x30, 0x42, 0x31, 0x31, 0x42, 0x31, 0x32, + 0x42, 0x31, 0x33, 0x42, 0x31, 0x34, 0x42, 0x31, + // Bytes 19c0 - 19ff + 0x35, 0x42, 0x31, 0x36, 0x42, 0x31, 0x37, 0x42, + 0x31, 0x38, 0x42, 0x31, 0x39, 0x42, 0x32, 0x2C, + 0x42, 0x32, 0x2E, 0x42, 0x32, 0x30, 0x42, 0x32, + 0x31, 0x42, 0x32, 0x32, 0x42, 0x32, 0x33, 0x42, + 0x32, 0x34, 0x42, 0x32, 0x35, 0x42, 0x32, 0x36, + 0x42, 0x32, 0x37, 0x42, 0x32, 0x38, 0x42, 0x32, + 0x39, 0x42, 0x33, 0x2C, 0x42, 0x33, 0x2E, 0x42, + 0x33, 0x30, 0x42, 0x33, 0x31, 0x42, 0x33, 0x32, + // Bytes 1a00 - 1a3f + 0x42, 0x33, 0x33, 0x42, 0x33, 0x34, 0x42, 0x33, + 0x35, 0x42, 0x33, 0x36, 0x42, 0x33, 0x37, 0x42, + 0x33, 0x38, 0x42, 0x33, 0x39, 0x42, 0x34, 0x2C, + 0x42, 0x34, 0x2E, 0x42, 0x34, 0x30, 0x42, 
0x34, + 0x31, 0x42, 0x34, 0x32, 0x42, 0x34, 0x33, 0x42, + 0x34, 0x34, 0x42, 0x34, 0x35, 0x42, 0x34, 0x36, + 0x42, 0x34, 0x37, 0x42, 0x34, 0x38, 0x42, 0x34, + 0x39, 0x42, 0x35, 0x2C, 0x42, 0x35, 0x2E, 0x42, + // Bytes 1a40 - 1a7f + 0x35, 0x30, 0x42, 0x36, 0x2C, 0x42, 0x36, 0x2E, + 0x42, 0x37, 0x2C, 0x42, 0x37, 0x2E, 0x42, 0x38, + 0x2C, 0x42, 0x38, 0x2E, 0x42, 0x39, 0x2C, 0x42, + 0x39, 0x2E, 0x42, 0x3D, 0x3D, 0x42, 0x3F, 0x21, + 0x42, 0x3F, 0x3F, 0x42, 0x41, 0x55, 0x42, 0x42, + 0x71, 0x42, 0x43, 0x44, 0x42, 0x44, 0x4A, 0x42, + 0x44, 0x5A, 0x42, 0x44, 0x7A, 0x42, 0x47, 0x42, + 0x42, 0x47, 0x79, 0x42, 0x48, 0x50, 0x42, 0x48, + // Bytes 1a80 - 1abf + 0x56, 0x42, 0x48, 0x67, 0x42, 0x48, 0x7A, 0x42, + 0x49, 0x49, 0x42, 0x49, 0x4A, 0x42, 0x49, 0x55, + 0x42, 0x49, 0x56, 0x42, 0x49, 0x58, 0x42, 0x4B, + 0x42, 0x42, 0x4B, 0x4B, 0x42, 0x4B, 0x4D, 0x42, + 0x4C, 0x4A, 0x42, 0x4C, 0x6A, 0x42, 0x4D, 0x42, + 0x42, 0x4D, 0x43, 0x42, 0x4D, 0x44, 0x42, 0x4D, + 0x52, 0x42, 0x4D, 0x56, 0x42, 0x4D, 0x57, 0x42, + 0x4E, 0x4A, 0x42, 0x4E, 0x6A, 0x42, 0x4E, 0x6F, + // Bytes 1ac0 - 1aff + 0x42, 0x50, 0x48, 0x42, 0x50, 0x52, 0x42, 0x50, + 0x61, 0x42, 0x52, 0x73, 0x42, 0x53, 0x44, 0x42, + 0x53, 0x4D, 0x42, 0x53, 0x53, 0x42, 0x53, 0x76, + 0x42, 0x54, 0x4D, 0x42, 0x56, 0x49, 0x42, 0x57, + 0x43, 0x42, 0x57, 0x5A, 0x42, 0x57, 0x62, 0x42, + 0x58, 0x49, 0x42, 0x63, 0x63, 0x42, 0x63, 0x64, + 0x42, 0x63, 0x6D, 0x42, 0x64, 0x42, 0x42, 0x64, + 0x61, 0x42, 0x64, 0x6C, 0x42, 0x64, 0x6D, 0x42, + // Bytes 1b00 - 1b3f + 0x64, 0x7A, 0x42, 0x65, 0x56, 0x42, 0x66, 0x66, + 0x42, 0x66, 0x69, 0x42, 0x66, 0x6C, 0x42, 0x66, + 0x6D, 0x42, 0x68, 0x61, 0x42, 0x69, 0x69, 0x42, + 0x69, 0x6A, 0x42, 0x69, 0x6E, 0x42, 0x69, 0x76, + 0x42, 0x69, 0x78, 0x42, 0x6B, 0x41, 0x42, 0x6B, + 0x56, 0x42, 0x6B, 0x57, 0x42, 0x6B, 0x67, 0x42, + 0x6B, 0x6C, 0x42, 0x6B, 0x6D, 0x42, 0x6B, 0x74, + 0x42, 0x6C, 0x6A, 0x42, 0x6C, 0x6D, 0x42, 0x6C, + // Bytes 1b40 - 1b7f + 0x6E, 0x42, 0x6C, 0x78, 0x42, 0x6D, 0x32, 0x42, + 0x6D, 0x33, 0x42, 0x6D, 0x41, 0x42, 0x6D, 0x56, + 0x42, 0x6D, 0x57, 0x42, 0x6D, 0x62, 0x42, 0x6D, + 0x67, 0x42, 0x6D, 0x6C, 0x42, 0x6D, 0x6D, 0x42, + 0x6D, 0x73, 0x42, 0x6E, 0x41, 0x42, 0x6E, 0x46, + 0x42, 0x6E, 0x56, 0x42, 0x6E, 0x57, 0x42, 0x6E, + 0x6A, 0x42, 0x6E, 0x6D, 0x42, 0x6E, 0x73, 0x42, + 0x6F, 0x56, 0x42, 0x70, 0x41, 0x42, 0x70, 0x46, + // Bytes 1b80 - 1bbf + 0x42, 0x70, 0x56, 0x42, 0x70, 0x57, 0x42, 0x70, + 0x63, 0x42, 0x70, 0x73, 0x42, 0x73, 0x72, 0x42, + 0x73, 0x74, 0x42, 0x76, 0x69, 0x42, 0x78, 0x69, + 0x43, 0x28, 0x31, 0x29, 0x43, 0x28, 0x32, 0x29, + 0x43, 0x28, 0x33, 0x29, 0x43, 0x28, 0x34, 0x29, + 0x43, 0x28, 0x35, 0x29, 0x43, 0x28, 0x36, 0x29, + 0x43, 0x28, 0x37, 0x29, 0x43, 0x28, 0x38, 0x29, + 0x43, 0x28, 0x39, 0x29, 0x43, 0x28, 0x41, 0x29, + // Bytes 1bc0 - 1bff + 0x43, 0x28, 0x42, 0x29, 0x43, 0x28, 0x43, 0x29, + 0x43, 0x28, 0x44, 0x29, 0x43, 0x28, 0x45, 0x29, + 0x43, 0x28, 0x46, 0x29, 0x43, 0x28, 0x47, 0x29, + 0x43, 0x28, 0x48, 0x29, 0x43, 0x28, 0x49, 0x29, + 0x43, 0x28, 0x4A, 0x29, 0x43, 0x28, 0x4B, 0x29, + 0x43, 0x28, 0x4C, 0x29, 0x43, 0x28, 0x4D, 0x29, + 0x43, 0x28, 0x4E, 0x29, 0x43, 0x28, 0x4F, 0x29, + 0x43, 0x28, 0x50, 0x29, 0x43, 0x28, 0x51, 0x29, + // Bytes 1c00 - 1c3f + 0x43, 0x28, 0x52, 0x29, 0x43, 0x28, 0x53, 0x29, + 0x43, 0x28, 0x54, 0x29, 0x43, 0x28, 0x55, 0x29, + 0x43, 0x28, 0x56, 0x29, 0x43, 0x28, 0x57, 0x29, + 0x43, 0x28, 0x58, 0x29, 0x43, 0x28, 0x59, 0x29, + 0x43, 0x28, 0x5A, 0x29, 0x43, 0x28, 0x61, 0x29, + 0x43, 0x28, 0x62, 0x29, 0x43, 0x28, 0x63, 0x29, + 0x43, 0x28, 0x64, 0x29, 0x43, 0x28, 0x65, 0x29, + 0x43, 0x28, 
0x66, 0x29, 0x43, 0x28, 0x67, 0x29, + // Bytes 1c40 - 1c7f + 0x43, 0x28, 0x68, 0x29, 0x43, 0x28, 0x69, 0x29, + 0x43, 0x28, 0x6A, 0x29, 0x43, 0x28, 0x6B, 0x29, + 0x43, 0x28, 0x6C, 0x29, 0x43, 0x28, 0x6D, 0x29, + 0x43, 0x28, 0x6E, 0x29, 0x43, 0x28, 0x6F, 0x29, + 0x43, 0x28, 0x70, 0x29, 0x43, 0x28, 0x71, 0x29, + 0x43, 0x28, 0x72, 0x29, 0x43, 0x28, 0x73, 0x29, + 0x43, 0x28, 0x74, 0x29, 0x43, 0x28, 0x75, 0x29, + 0x43, 0x28, 0x76, 0x29, 0x43, 0x28, 0x77, 0x29, + // Bytes 1c80 - 1cbf + 0x43, 0x28, 0x78, 0x29, 0x43, 0x28, 0x79, 0x29, + 0x43, 0x28, 0x7A, 0x29, 0x43, 0x2E, 0x2E, 0x2E, + 0x43, 0x31, 0x30, 0x2E, 0x43, 0x31, 0x31, 0x2E, + 0x43, 0x31, 0x32, 0x2E, 0x43, 0x31, 0x33, 0x2E, + 0x43, 0x31, 0x34, 0x2E, 0x43, 0x31, 0x35, 0x2E, + 0x43, 0x31, 0x36, 0x2E, 0x43, 0x31, 0x37, 0x2E, + 0x43, 0x31, 0x38, 0x2E, 0x43, 0x31, 0x39, 0x2E, + 0x43, 0x32, 0x30, 0x2E, 0x43, 0x3A, 0x3A, 0x3D, + // Bytes 1cc0 - 1cff + 0x43, 0x3D, 0x3D, 0x3D, 0x43, 0x43, 0x6F, 0x2E, + 0x43, 0x46, 0x41, 0x58, 0x43, 0x47, 0x48, 0x7A, + 0x43, 0x47, 0x50, 0x61, 0x43, 0x49, 0x49, 0x49, + 0x43, 0x4C, 0x54, 0x44, 0x43, 0x4C, 0xC2, 0xB7, + 0x43, 0x4D, 0x48, 0x7A, 0x43, 0x4D, 0x50, 0x61, + 0x43, 0x4D, 0xCE, 0xA9, 0x43, 0x50, 0x50, 0x4D, + 0x43, 0x50, 0x50, 0x56, 0x43, 0x50, 0x54, 0x45, + 0x43, 0x54, 0x45, 0x4C, 0x43, 0x54, 0x48, 0x7A, + // Bytes 1d00 - 1d3f + 0x43, 0x56, 0x49, 0x49, 0x43, 0x58, 0x49, 0x49, + 0x43, 0x61, 0x2F, 0x63, 0x43, 0x61, 0x2F, 0x73, + 0x43, 0x61, 0xCA, 0xBE, 0x43, 0x62, 0x61, 0x72, + 0x43, 0x63, 0x2F, 0x6F, 0x43, 0x63, 0x2F, 0x75, + 0x43, 0x63, 0x61, 0x6C, 0x43, 0x63, 0x6D, 0x32, + 0x43, 0x63, 0x6D, 0x33, 0x43, 0x64, 0x6D, 0x32, + 0x43, 0x64, 0x6D, 0x33, 0x43, 0x65, 0x72, 0x67, + 0x43, 0x66, 0x66, 0x69, 0x43, 0x66, 0x66, 0x6C, + // Bytes 1d40 - 1d7f + 0x43, 0x67, 0x61, 0x6C, 0x43, 0x68, 0x50, 0x61, + 0x43, 0x69, 0x69, 0x69, 0x43, 0x6B, 0x48, 0x7A, + 0x43, 0x6B, 0x50, 0x61, 0x43, 0x6B, 0x6D, 0x32, + 0x43, 0x6B, 0x6D, 0x33, 0x43, 0x6B, 0xCE, 0xA9, + 0x43, 0x6C, 0x6F, 0x67, 0x43, 0x6C, 0xC2, 0xB7, + 0x43, 0x6D, 0x69, 0x6C, 0x43, 0x6D, 0x6D, 0x32, + 0x43, 0x6D, 0x6D, 0x33, 0x43, 0x6D, 0x6F, 0x6C, + 0x43, 0x72, 0x61, 0x64, 0x43, 0x76, 0x69, 0x69, + // Bytes 1d80 - 1dbf + 0x43, 0x78, 0x69, 0x69, 0x43, 0xC2, 0xB0, 0x43, + 0x43, 0xC2, 0xB0, 0x46, 0x43, 0xCA, 0xBC, 0x6E, + 0x43, 0xCE, 0xBC, 0x41, 0x43, 0xCE, 0xBC, 0x46, + 0x43, 0xCE, 0xBC, 0x56, 0x43, 0xCE, 0xBC, 0x57, + 0x43, 0xCE, 0xBC, 0x67, 0x43, 0xCE, 0xBC, 0x6C, + 0x43, 0xCE, 0xBC, 0x6D, 0x43, 0xCE, 0xBC, 0x73, + 0x44, 0x28, 0x31, 0x30, 0x29, 0x44, 0x28, 0x31, + 0x31, 0x29, 0x44, 0x28, 0x31, 0x32, 0x29, 0x44, + // Bytes 1dc0 - 1dff + 0x28, 0x31, 0x33, 0x29, 0x44, 0x28, 0x31, 0x34, + 0x29, 0x44, 0x28, 0x31, 0x35, 0x29, 0x44, 0x28, + 0x31, 0x36, 0x29, 0x44, 0x28, 0x31, 0x37, 0x29, + 0x44, 0x28, 0x31, 0x38, 0x29, 0x44, 0x28, 0x31, + 0x39, 0x29, 0x44, 0x28, 0x32, 0x30, 0x29, 0x44, + 0x30, 0xE7, 0x82, 0xB9, 0x44, 0x31, 0xE2, 0x81, + 0x84, 0x44, 0x31, 0xE6, 0x97, 0xA5, 0x44, 0x31, + 0xE6, 0x9C, 0x88, 0x44, 0x31, 0xE7, 0x82, 0xB9, + // Bytes 1e00 - 1e3f + 0x44, 0x32, 0xE6, 0x97, 0xA5, 0x44, 0x32, 0xE6, + 0x9C, 0x88, 0x44, 0x32, 0xE7, 0x82, 0xB9, 0x44, + 0x33, 0xE6, 0x97, 0xA5, 0x44, 0x33, 0xE6, 0x9C, + 0x88, 0x44, 0x33, 0xE7, 0x82, 0xB9, 0x44, 0x34, + 0xE6, 0x97, 0xA5, 0x44, 0x34, 0xE6, 0x9C, 0x88, + 0x44, 0x34, 0xE7, 0x82, 0xB9, 0x44, 0x35, 0xE6, + 0x97, 0xA5, 0x44, 0x35, 0xE6, 0x9C, 0x88, 0x44, + 0x35, 0xE7, 0x82, 0xB9, 0x44, 0x36, 0xE6, 0x97, + // Bytes 1e40 - 1e7f + 0xA5, 0x44, 0x36, 0xE6, 0x9C, 0x88, 0x44, 0x36, + 0xE7, 0x82, 0xB9, 0x44, 0x37, 0xE6, 0x97, 0xA5, + 0x44, 
0x37, 0xE6, 0x9C, 0x88, 0x44, 0x37, 0xE7, + 0x82, 0xB9, 0x44, 0x38, 0xE6, 0x97, 0xA5, 0x44, + 0x38, 0xE6, 0x9C, 0x88, 0x44, 0x38, 0xE7, 0x82, + 0xB9, 0x44, 0x39, 0xE6, 0x97, 0xA5, 0x44, 0x39, + 0xE6, 0x9C, 0x88, 0x44, 0x39, 0xE7, 0x82, 0xB9, + 0x44, 0x56, 0x49, 0x49, 0x49, 0x44, 0x61, 0x2E, + // Bytes 1e80 - 1ebf + 0x6D, 0x2E, 0x44, 0x6B, 0x63, 0x61, 0x6C, 0x44, + 0x70, 0x2E, 0x6D, 0x2E, 0x44, 0x76, 0x69, 0x69, + 0x69, 0x44, 0xD5, 0xA5, 0xD6, 0x82, 0x44, 0xD5, + 0xB4, 0xD5, 0xA5, 0x44, 0xD5, 0xB4, 0xD5, 0xAB, + 0x44, 0xD5, 0xB4, 0xD5, 0xAD, 0x44, 0xD5, 0xB4, + 0xD5, 0xB6, 0x44, 0xD5, 0xBE, 0xD5, 0xB6, 0x44, + 0xD7, 0x90, 0xD7, 0x9C, 0x44, 0xD8, 0xA7, 0xD9, + 0xB4, 0x44, 0xD8, 0xA8, 0xD8, 0xAC, 0x44, 0xD8, + // Bytes 1ec0 - 1eff + 0xA8, 0xD8, 0xAD, 0x44, 0xD8, 0xA8, 0xD8, 0xAE, + 0x44, 0xD8, 0xA8, 0xD8, 0xB1, 0x44, 0xD8, 0xA8, + 0xD8, 0xB2, 0x44, 0xD8, 0xA8, 0xD9, 0x85, 0x44, + 0xD8, 0xA8, 0xD9, 0x86, 0x44, 0xD8, 0xA8, 0xD9, + 0x87, 0x44, 0xD8, 0xA8, 0xD9, 0x89, 0x44, 0xD8, + 0xA8, 0xD9, 0x8A, 0x44, 0xD8, 0xAA, 0xD8, 0xAC, + 0x44, 0xD8, 0xAA, 0xD8, 0xAD, 0x44, 0xD8, 0xAA, + 0xD8, 0xAE, 0x44, 0xD8, 0xAA, 0xD8, 0xB1, 0x44, + // Bytes 1f00 - 1f3f + 0xD8, 0xAA, 0xD8, 0xB2, 0x44, 0xD8, 0xAA, 0xD9, + 0x85, 0x44, 0xD8, 0xAA, 0xD9, 0x86, 0x44, 0xD8, + 0xAA, 0xD9, 0x87, 0x44, 0xD8, 0xAA, 0xD9, 0x89, + 0x44, 0xD8, 0xAA, 0xD9, 0x8A, 0x44, 0xD8, 0xAB, + 0xD8, 0xAC, 0x44, 0xD8, 0xAB, 0xD8, 0xB1, 0x44, + 0xD8, 0xAB, 0xD8, 0xB2, 0x44, 0xD8, 0xAB, 0xD9, + 0x85, 0x44, 0xD8, 0xAB, 0xD9, 0x86, 0x44, 0xD8, + 0xAB, 0xD9, 0x87, 0x44, 0xD8, 0xAB, 0xD9, 0x89, + // Bytes 1f40 - 1f7f + 0x44, 0xD8, 0xAB, 0xD9, 0x8A, 0x44, 0xD8, 0xAC, + 0xD8, 0xAD, 0x44, 0xD8, 0xAC, 0xD9, 0x85, 0x44, + 0xD8, 0xAC, 0xD9, 0x89, 0x44, 0xD8, 0xAC, 0xD9, + 0x8A, 0x44, 0xD8, 0xAD, 0xD8, 0xAC, 0x44, 0xD8, + 0xAD, 0xD9, 0x85, 0x44, 0xD8, 0xAD, 0xD9, 0x89, + 0x44, 0xD8, 0xAD, 0xD9, 0x8A, 0x44, 0xD8, 0xAE, + 0xD8, 0xAC, 0x44, 0xD8, 0xAE, 0xD8, 0xAD, 0x44, + 0xD8, 0xAE, 0xD9, 0x85, 0x44, 0xD8, 0xAE, 0xD9, + // Bytes 1f80 - 1fbf + 0x89, 0x44, 0xD8, 0xAE, 0xD9, 0x8A, 0x44, 0xD8, + 0xB3, 0xD8, 0xAC, 0x44, 0xD8, 0xB3, 0xD8, 0xAD, + 0x44, 0xD8, 0xB3, 0xD8, 0xAE, 0x44, 0xD8, 0xB3, + 0xD8, 0xB1, 0x44, 0xD8, 0xB3, 0xD9, 0x85, 0x44, + 0xD8, 0xB3, 0xD9, 0x87, 0x44, 0xD8, 0xB3, 0xD9, + 0x89, 0x44, 0xD8, 0xB3, 0xD9, 0x8A, 0x44, 0xD8, + 0xB4, 0xD8, 0xAC, 0x44, 0xD8, 0xB4, 0xD8, 0xAD, + 0x44, 0xD8, 0xB4, 0xD8, 0xAE, 0x44, 0xD8, 0xB4, + // Bytes 1fc0 - 1fff + 0xD8, 0xB1, 0x44, 0xD8, 0xB4, 0xD9, 0x85, 0x44, + 0xD8, 0xB4, 0xD9, 0x87, 0x44, 0xD8, 0xB4, 0xD9, + 0x89, 0x44, 0xD8, 0xB4, 0xD9, 0x8A, 0x44, 0xD8, + 0xB5, 0xD8, 0xAD, 0x44, 0xD8, 0xB5, 0xD8, 0xAE, + 0x44, 0xD8, 0xB5, 0xD8, 0xB1, 0x44, 0xD8, 0xB5, + 0xD9, 0x85, 0x44, 0xD8, 0xB5, 0xD9, 0x89, 0x44, + 0xD8, 0xB5, 0xD9, 0x8A, 0x44, 0xD8, 0xB6, 0xD8, + 0xAC, 0x44, 0xD8, 0xB6, 0xD8, 0xAD, 0x44, 0xD8, + // Bytes 2000 - 203f + 0xB6, 0xD8, 0xAE, 0x44, 0xD8, 0xB6, 0xD8, 0xB1, + 0x44, 0xD8, 0xB6, 0xD9, 0x85, 0x44, 0xD8, 0xB6, + 0xD9, 0x89, 0x44, 0xD8, 0xB6, 0xD9, 0x8A, 0x44, + 0xD8, 0xB7, 0xD8, 0xAD, 0x44, 0xD8, 0xB7, 0xD9, + 0x85, 0x44, 0xD8, 0xB7, 0xD9, 0x89, 0x44, 0xD8, + 0xB7, 0xD9, 0x8A, 0x44, 0xD8, 0xB8, 0xD9, 0x85, + 0x44, 0xD8, 0xB9, 0xD8, 0xAC, 0x44, 0xD8, 0xB9, + 0xD9, 0x85, 0x44, 0xD8, 0xB9, 0xD9, 0x89, 0x44, + // Bytes 2040 - 207f + 0xD8, 0xB9, 0xD9, 0x8A, 0x44, 0xD8, 0xBA, 0xD8, + 0xAC, 0x44, 0xD8, 0xBA, 0xD9, 0x85, 0x44, 0xD8, + 0xBA, 0xD9, 0x89, 0x44, 0xD8, 0xBA, 0xD9, 0x8A, + 0x44, 0xD9, 0x81, 0xD8, 0xAC, 0x44, 0xD9, 0x81, + 0xD8, 0xAD, 0x44, 0xD9, 0x81, 0xD8, 0xAE, 0x44, + 0xD9, 0x81, 0xD9, 0x85, 
0x44, 0xD9, 0x81, 0xD9, + 0x89, 0x44, 0xD9, 0x81, 0xD9, 0x8A, 0x44, 0xD9, + 0x82, 0xD8, 0xAD, 0x44, 0xD9, 0x82, 0xD9, 0x85, + // Bytes 2080 - 20bf + 0x44, 0xD9, 0x82, 0xD9, 0x89, 0x44, 0xD9, 0x82, + 0xD9, 0x8A, 0x44, 0xD9, 0x83, 0xD8, 0xA7, 0x44, + 0xD9, 0x83, 0xD8, 0xAC, 0x44, 0xD9, 0x83, 0xD8, + 0xAD, 0x44, 0xD9, 0x83, 0xD8, 0xAE, 0x44, 0xD9, + 0x83, 0xD9, 0x84, 0x44, 0xD9, 0x83, 0xD9, 0x85, + 0x44, 0xD9, 0x83, 0xD9, 0x89, 0x44, 0xD9, 0x83, + 0xD9, 0x8A, 0x44, 0xD9, 0x84, 0xD8, 0xA7, 0x44, + 0xD9, 0x84, 0xD8, 0xAC, 0x44, 0xD9, 0x84, 0xD8, + // Bytes 20c0 - 20ff + 0xAD, 0x44, 0xD9, 0x84, 0xD8, 0xAE, 0x44, 0xD9, + 0x84, 0xD9, 0x85, 0x44, 0xD9, 0x84, 0xD9, 0x87, + 0x44, 0xD9, 0x84, 0xD9, 0x89, 0x44, 0xD9, 0x84, + 0xD9, 0x8A, 0x44, 0xD9, 0x85, 0xD8, 0xA7, 0x44, + 0xD9, 0x85, 0xD8, 0xAC, 0x44, 0xD9, 0x85, 0xD8, + 0xAD, 0x44, 0xD9, 0x85, 0xD8, 0xAE, 0x44, 0xD9, + 0x85, 0xD9, 0x85, 0x44, 0xD9, 0x85, 0xD9, 0x89, + 0x44, 0xD9, 0x85, 0xD9, 0x8A, 0x44, 0xD9, 0x86, + // Bytes 2100 - 213f + 0xD8, 0xAC, 0x44, 0xD9, 0x86, 0xD8, 0xAD, 0x44, + 0xD9, 0x86, 0xD8, 0xAE, 0x44, 0xD9, 0x86, 0xD8, + 0xB1, 0x44, 0xD9, 0x86, 0xD8, 0xB2, 0x44, 0xD9, + 0x86, 0xD9, 0x85, 0x44, 0xD9, 0x86, 0xD9, 0x86, + 0x44, 0xD9, 0x86, 0xD9, 0x87, 0x44, 0xD9, 0x86, + 0xD9, 0x89, 0x44, 0xD9, 0x86, 0xD9, 0x8A, 0x44, + 0xD9, 0x87, 0xD8, 0xAC, 0x44, 0xD9, 0x87, 0xD9, + 0x85, 0x44, 0xD9, 0x87, 0xD9, 0x89, 0x44, 0xD9, + // Bytes 2140 - 217f + 0x87, 0xD9, 0x8A, 0x44, 0xD9, 0x88, 0xD9, 0xB4, + 0x44, 0xD9, 0x8A, 0xD8, 0xAC, 0x44, 0xD9, 0x8A, + 0xD8, 0xAD, 0x44, 0xD9, 0x8A, 0xD8, 0xAE, 0x44, + 0xD9, 0x8A, 0xD8, 0xB1, 0x44, 0xD9, 0x8A, 0xD8, + 0xB2, 0x44, 0xD9, 0x8A, 0xD9, 0x85, 0x44, 0xD9, + 0x8A, 0xD9, 0x86, 0x44, 0xD9, 0x8A, 0xD9, 0x87, + 0x44, 0xD9, 0x8A, 0xD9, 0x89, 0x44, 0xD9, 0x8A, + 0xD9, 0x8A, 0x44, 0xD9, 0x8A, 0xD9, 0xB4, 0x44, + // Bytes 2180 - 21bf + 0xDB, 0x87, 0xD9, 0xB4, 0x45, 0x28, 0xE1, 0x84, + 0x80, 0x29, 0x45, 0x28, 0xE1, 0x84, 0x82, 0x29, + 0x45, 0x28, 0xE1, 0x84, 0x83, 0x29, 0x45, 0x28, + 0xE1, 0x84, 0x85, 0x29, 0x45, 0x28, 0xE1, 0x84, + 0x86, 0x29, 0x45, 0x28, 0xE1, 0x84, 0x87, 0x29, + 0x45, 0x28, 0xE1, 0x84, 0x89, 0x29, 0x45, 0x28, + 0xE1, 0x84, 0x8B, 0x29, 0x45, 0x28, 0xE1, 0x84, + 0x8C, 0x29, 0x45, 0x28, 0xE1, 0x84, 0x8E, 0x29, + // Bytes 21c0 - 21ff + 0x45, 0x28, 0xE1, 0x84, 0x8F, 0x29, 0x45, 0x28, + 0xE1, 0x84, 0x90, 0x29, 0x45, 0x28, 0xE1, 0x84, + 0x91, 0x29, 0x45, 0x28, 0xE1, 0x84, 0x92, 0x29, + 0x45, 0x28, 0xE4, 0xB8, 0x80, 0x29, 0x45, 0x28, + 0xE4, 0xB8, 0x83, 0x29, 0x45, 0x28, 0xE4, 0xB8, + 0x89, 0x29, 0x45, 0x28, 0xE4, 0xB9, 0x9D, 0x29, + 0x45, 0x28, 0xE4, 0xBA, 0x8C, 0x29, 0x45, 0x28, + 0xE4, 0xBA, 0x94, 0x29, 0x45, 0x28, 0xE4, 0xBB, + // Bytes 2200 - 223f + 0xA3, 0x29, 0x45, 0x28, 0xE4, 0xBC, 0x81, 0x29, + 0x45, 0x28, 0xE4, 0xBC, 0x91, 0x29, 0x45, 0x28, + 0xE5, 0x85, 0xAB, 0x29, 0x45, 0x28, 0xE5, 0x85, + 0xAD, 0x29, 0x45, 0x28, 0xE5, 0x8A, 0xB4, 0x29, + 0x45, 0x28, 0xE5, 0x8D, 0x81, 0x29, 0x45, 0x28, + 0xE5, 0x8D, 0x94, 0x29, 0x45, 0x28, 0xE5, 0x90, + 0x8D, 0x29, 0x45, 0x28, 0xE5, 0x91, 0xBC, 0x29, + 0x45, 0x28, 0xE5, 0x9B, 0x9B, 0x29, 0x45, 0x28, + // Bytes 2240 - 227f + 0xE5, 0x9C, 0x9F, 0x29, 0x45, 0x28, 0xE5, 0xAD, + 0xA6, 0x29, 0x45, 0x28, 0xE6, 0x97, 0xA5, 0x29, + 0x45, 0x28, 0xE6, 0x9C, 0x88, 0x29, 0x45, 0x28, + 0xE6, 0x9C, 0x89, 0x29, 0x45, 0x28, 0xE6, 0x9C, + 0xA8, 0x29, 0x45, 0x28, 0xE6, 0xA0, 0xAA, 0x29, + 0x45, 0x28, 0xE6, 0xB0, 0xB4, 0x29, 0x45, 0x28, + 0xE7, 0x81, 0xAB, 0x29, 0x45, 0x28, 0xE7, 0x89, + 0xB9, 0x29, 0x45, 0x28, 0xE7, 0x9B, 0xA3, 0x29, + // Bytes 2280 - 22bf + 0x45, 0x28, 0xE7, 
0xA4, 0xBE, 0x29, 0x45, 0x28, + 0xE7, 0xA5, 0x9D, 0x29, 0x45, 0x28, 0xE7, 0xA5, + 0xAD, 0x29, 0x45, 0x28, 0xE8, 0x87, 0xAA, 0x29, + 0x45, 0x28, 0xE8, 0x87, 0xB3, 0x29, 0x45, 0x28, + 0xE8, 0xB2, 0xA1, 0x29, 0x45, 0x28, 0xE8, 0xB3, + 0x87, 0x29, 0x45, 0x28, 0xE9, 0x87, 0x91, 0x29, + 0x45, 0x30, 0xE2, 0x81, 0x84, 0x33, 0x45, 0x31, + 0x30, 0xE6, 0x97, 0xA5, 0x45, 0x31, 0x30, 0xE6, + // Bytes 22c0 - 22ff + 0x9C, 0x88, 0x45, 0x31, 0x30, 0xE7, 0x82, 0xB9, + 0x45, 0x31, 0x31, 0xE6, 0x97, 0xA5, 0x45, 0x31, + 0x31, 0xE6, 0x9C, 0x88, 0x45, 0x31, 0x31, 0xE7, + 0x82, 0xB9, 0x45, 0x31, 0x32, 0xE6, 0x97, 0xA5, + 0x45, 0x31, 0x32, 0xE6, 0x9C, 0x88, 0x45, 0x31, + 0x32, 0xE7, 0x82, 0xB9, 0x45, 0x31, 0x33, 0xE6, + 0x97, 0xA5, 0x45, 0x31, 0x33, 0xE7, 0x82, 0xB9, + 0x45, 0x31, 0x34, 0xE6, 0x97, 0xA5, 0x45, 0x31, + // Bytes 2300 - 233f + 0x34, 0xE7, 0x82, 0xB9, 0x45, 0x31, 0x35, 0xE6, + 0x97, 0xA5, 0x45, 0x31, 0x35, 0xE7, 0x82, 0xB9, + 0x45, 0x31, 0x36, 0xE6, 0x97, 0xA5, 0x45, 0x31, + 0x36, 0xE7, 0x82, 0xB9, 0x45, 0x31, 0x37, 0xE6, + 0x97, 0xA5, 0x45, 0x31, 0x37, 0xE7, 0x82, 0xB9, + 0x45, 0x31, 0x38, 0xE6, 0x97, 0xA5, 0x45, 0x31, + 0x38, 0xE7, 0x82, 0xB9, 0x45, 0x31, 0x39, 0xE6, + 0x97, 0xA5, 0x45, 0x31, 0x39, 0xE7, 0x82, 0xB9, + // Bytes 2340 - 237f + 0x45, 0x31, 0xE2, 0x81, 0x84, 0x32, 0x45, 0x31, + 0xE2, 0x81, 0x84, 0x33, 0x45, 0x31, 0xE2, 0x81, + 0x84, 0x34, 0x45, 0x31, 0xE2, 0x81, 0x84, 0x35, + 0x45, 0x31, 0xE2, 0x81, 0x84, 0x36, 0x45, 0x31, + 0xE2, 0x81, 0x84, 0x37, 0x45, 0x31, 0xE2, 0x81, + 0x84, 0x38, 0x45, 0x31, 0xE2, 0x81, 0x84, 0x39, + 0x45, 0x32, 0x30, 0xE6, 0x97, 0xA5, 0x45, 0x32, + 0x30, 0xE7, 0x82, 0xB9, 0x45, 0x32, 0x31, 0xE6, + // Bytes 2380 - 23bf + 0x97, 0xA5, 0x45, 0x32, 0x31, 0xE7, 0x82, 0xB9, + 0x45, 0x32, 0x32, 0xE6, 0x97, 0xA5, 0x45, 0x32, + 0x32, 0xE7, 0x82, 0xB9, 0x45, 0x32, 0x33, 0xE6, + 0x97, 0xA5, 0x45, 0x32, 0x33, 0xE7, 0x82, 0xB9, + 0x45, 0x32, 0x34, 0xE6, 0x97, 0xA5, 0x45, 0x32, + 0x34, 0xE7, 0x82, 0xB9, 0x45, 0x32, 0x35, 0xE6, + 0x97, 0xA5, 0x45, 0x32, 0x36, 0xE6, 0x97, 0xA5, + 0x45, 0x32, 0x37, 0xE6, 0x97, 0xA5, 0x45, 0x32, + // Bytes 23c0 - 23ff + 0x38, 0xE6, 0x97, 0xA5, 0x45, 0x32, 0x39, 0xE6, + 0x97, 0xA5, 0x45, 0x32, 0xE2, 0x81, 0x84, 0x33, + 0x45, 0x32, 0xE2, 0x81, 0x84, 0x35, 0x45, 0x33, + 0x30, 0xE6, 0x97, 0xA5, 0x45, 0x33, 0x31, 0xE6, + 0x97, 0xA5, 0x45, 0x33, 0xE2, 0x81, 0x84, 0x34, + 0x45, 0x33, 0xE2, 0x81, 0x84, 0x35, 0x45, 0x33, + 0xE2, 0x81, 0x84, 0x38, 0x45, 0x34, 0xE2, 0x81, + 0x84, 0x35, 0x45, 0x35, 0xE2, 0x81, 0x84, 0x36, + // Bytes 2400 - 243f + 0x45, 0x35, 0xE2, 0x81, 0x84, 0x38, 0x45, 0x37, + 0xE2, 0x81, 0x84, 0x38, 0x45, 0x41, 0xE2, 0x88, + 0x95, 0x6D, 0x45, 0x56, 0xE2, 0x88, 0x95, 0x6D, + 0x45, 0x6D, 0xE2, 0x88, 0x95, 0x73, 0x46, 0x31, + 0xE2, 0x81, 0x84, 0x31, 0x30, 0x46, 0x43, 0xE2, + 0x88, 0x95, 0x6B, 0x67, 0x46, 0x6D, 0xE2, 0x88, + 0x95, 0x73, 0x32, 0x46, 0xD8, 0xA8, 0xD8, 0xAD, + 0xD9, 0x8A, 0x46, 0xD8, 0xA8, 0xD8, 0xAE, 0xD9, + // Bytes 2440 - 247f + 0x8A, 0x46, 0xD8, 0xAA, 0xD8, 0xAC, 0xD9, 0x85, + 0x46, 0xD8, 0xAA, 0xD8, 0xAC, 0xD9, 0x89, 0x46, + 0xD8, 0xAA, 0xD8, 0xAC, 0xD9, 0x8A, 0x46, 0xD8, + 0xAA, 0xD8, 0xAD, 0xD8, 0xAC, 0x46, 0xD8, 0xAA, + 0xD8, 0xAD, 0xD9, 0x85, 0x46, 0xD8, 0xAA, 0xD8, + 0xAE, 0xD9, 0x85, 0x46, 0xD8, 0xAA, 0xD8, 0xAE, + 0xD9, 0x89, 0x46, 0xD8, 0xAA, 0xD8, 0xAE, 0xD9, + 0x8A, 0x46, 0xD8, 0xAA, 0xD9, 0x85, 0xD8, 0xAC, + // Bytes 2480 - 24bf + 0x46, 0xD8, 0xAA, 0xD9, 0x85, 0xD8, 0xAD, 0x46, + 0xD8, 0xAA, 0xD9, 0x85, 0xD8, 0xAE, 0x46, 0xD8, + 0xAA, 0xD9, 0x85, 0xD9, 0x89, 0x46, 0xD8, 0xAA, + 0xD9, 0x85, 0xD9, 0x8A, 0x46, 0xD8, 
0xAC, 0xD8, + 0xAD, 0xD9, 0x89, 0x46, 0xD8, 0xAC, 0xD8, 0xAD, + 0xD9, 0x8A, 0x46, 0xD8, 0xAC, 0xD9, 0x85, 0xD8, + 0xAD, 0x46, 0xD8, 0xAC, 0xD9, 0x85, 0xD9, 0x89, + 0x46, 0xD8, 0xAC, 0xD9, 0x85, 0xD9, 0x8A, 0x46, + // Bytes 24c0 - 24ff + 0xD8, 0xAD, 0xD8, 0xAC, 0xD9, 0x8A, 0x46, 0xD8, + 0xAD, 0xD9, 0x85, 0xD9, 0x89, 0x46, 0xD8, 0xAD, + 0xD9, 0x85, 0xD9, 0x8A, 0x46, 0xD8, 0xB3, 0xD8, + 0xAC, 0xD8, 0xAD, 0x46, 0xD8, 0xB3, 0xD8, 0xAC, + 0xD9, 0x89, 0x46, 0xD8, 0xB3, 0xD8, 0xAD, 0xD8, + 0xAC, 0x46, 0xD8, 0xB3, 0xD8, 0xAE, 0xD9, 0x89, + 0x46, 0xD8, 0xB3, 0xD8, 0xAE, 0xD9, 0x8A, 0x46, + 0xD8, 0xB3, 0xD9, 0x85, 0xD8, 0xAC, 0x46, 0xD8, + // Bytes 2500 - 253f + 0xB3, 0xD9, 0x85, 0xD8, 0xAD, 0x46, 0xD8, 0xB3, + 0xD9, 0x85, 0xD9, 0x85, 0x46, 0xD8, 0xB4, 0xD8, + 0xAC, 0xD9, 0x8A, 0x46, 0xD8, 0xB4, 0xD8, 0xAD, + 0xD9, 0x85, 0x46, 0xD8, 0xB4, 0xD8, 0xAD, 0xD9, + 0x8A, 0x46, 0xD8, 0xB4, 0xD9, 0x85, 0xD8, 0xAE, + 0x46, 0xD8, 0xB4, 0xD9, 0x85, 0xD9, 0x85, 0x46, + 0xD8, 0xB5, 0xD8, 0xAD, 0xD8, 0xAD, 0x46, 0xD8, + 0xB5, 0xD8, 0xAD, 0xD9, 0x8A, 0x46, 0xD8, 0xB5, + // Bytes 2540 - 257f + 0xD9, 0x84, 0xD9, 0x89, 0x46, 0xD8, 0xB5, 0xD9, + 0x84, 0xDB, 0x92, 0x46, 0xD8, 0xB5, 0xD9, 0x85, + 0xD9, 0x85, 0x46, 0xD8, 0xB6, 0xD8, 0xAD, 0xD9, + 0x89, 0x46, 0xD8, 0xB6, 0xD8, 0xAD, 0xD9, 0x8A, + 0x46, 0xD8, 0xB6, 0xD8, 0xAE, 0xD9, 0x85, 0x46, + 0xD8, 0xB7, 0xD9, 0x85, 0xD8, 0xAD, 0x46, 0xD8, + 0xB7, 0xD9, 0x85, 0xD9, 0x85, 0x46, 0xD8, 0xB7, + 0xD9, 0x85, 0xD9, 0x8A, 0x46, 0xD8, 0xB9, 0xD8, + // Bytes 2580 - 25bf + 0xAC, 0xD9, 0x85, 0x46, 0xD8, 0xB9, 0xD9, 0x85, + 0xD9, 0x85, 0x46, 0xD8, 0xB9, 0xD9, 0x85, 0xD9, + 0x89, 0x46, 0xD8, 0xB9, 0xD9, 0x85, 0xD9, 0x8A, + 0x46, 0xD8, 0xBA, 0xD9, 0x85, 0xD9, 0x85, 0x46, + 0xD8, 0xBA, 0xD9, 0x85, 0xD9, 0x89, 0x46, 0xD8, + 0xBA, 0xD9, 0x85, 0xD9, 0x8A, 0x46, 0xD9, 0x81, + 0xD8, 0xAE, 0xD9, 0x85, 0x46, 0xD9, 0x81, 0xD9, + 0x85, 0xD9, 0x8A, 0x46, 0xD9, 0x82, 0xD9, 0x84, + // Bytes 25c0 - 25ff + 0xDB, 0x92, 0x46, 0xD9, 0x82, 0xD9, 0x85, 0xD8, + 0xAD, 0x46, 0xD9, 0x82, 0xD9, 0x85, 0xD9, 0x85, + 0x46, 0xD9, 0x82, 0xD9, 0x85, 0xD9, 0x8A, 0x46, + 0xD9, 0x83, 0xD9, 0x85, 0xD9, 0x85, 0x46, 0xD9, + 0x83, 0xD9, 0x85, 0xD9, 0x8A, 0x46, 0xD9, 0x84, + 0xD8, 0xAC, 0xD8, 0xAC, 0x46, 0xD9, 0x84, 0xD8, + 0xAC, 0xD9, 0x85, 0x46, 0xD9, 0x84, 0xD8, 0xAC, + 0xD9, 0x8A, 0x46, 0xD9, 0x84, 0xD8, 0xAD, 0xD9, + // Bytes 2600 - 263f + 0x85, 0x46, 0xD9, 0x84, 0xD8, 0xAD, 0xD9, 0x89, + 0x46, 0xD9, 0x84, 0xD8, 0xAD, 0xD9, 0x8A, 0x46, + 0xD9, 0x84, 0xD8, 0xAE, 0xD9, 0x85, 0x46, 0xD9, + 0x84, 0xD9, 0x85, 0xD8, 0xAD, 0x46, 0xD9, 0x84, + 0xD9, 0x85, 0xD9, 0x8A, 0x46, 0xD9, 0x85, 0xD8, + 0xAC, 0xD8, 0xAD, 0x46, 0xD9, 0x85, 0xD8, 0xAC, + 0xD8, 0xAE, 0x46, 0xD9, 0x85, 0xD8, 0xAC, 0xD9, + 0x85, 0x46, 0xD9, 0x85, 0xD8, 0xAC, 0xD9, 0x8A, + // Bytes 2640 - 267f + 0x46, 0xD9, 0x85, 0xD8, 0xAD, 0xD8, 0xAC, 0x46, + 0xD9, 0x85, 0xD8, 0xAD, 0xD9, 0x85, 0x46, 0xD9, + 0x85, 0xD8, 0xAD, 0xD9, 0x8A, 0x46, 0xD9, 0x85, + 0xD8, 0xAE, 0xD8, 0xAC, 0x46, 0xD9, 0x85, 0xD8, + 0xAE, 0xD9, 0x85, 0x46, 0xD9, 0x85, 0xD8, 0xAE, + 0xD9, 0x8A, 0x46, 0xD9, 0x85, 0xD9, 0x85, 0xD9, + 0x8A, 0x46, 0xD9, 0x86, 0xD8, 0xAC, 0xD8, 0xAD, + 0x46, 0xD9, 0x86, 0xD8, 0xAC, 0xD9, 0x85, 0x46, + // Bytes 2680 - 26bf + 0xD9, 0x86, 0xD8, 0xAC, 0xD9, 0x89, 0x46, 0xD9, + 0x86, 0xD8, 0xAC, 0xD9, 0x8A, 0x46, 0xD9, 0x86, + 0xD8, 0xAD, 0xD9, 0x85, 0x46, 0xD9, 0x86, 0xD8, + 0xAD, 0xD9, 0x89, 0x46, 0xD9, 0x86, 0xD8, 0xAD, + 0xD9, 0x8A, 0x46, 0xD9, 0x86, 0xD9, 0x85, 0xD9, + 0x89, 0x46, 0xD9, 0x86, 0xD9, 0x85, 0xD9, 0x8A, + 0x46, 0xD9, 0x87, 0xD9, 0x85, 0xD8, 0xAC, 0x46, + 0xD9, 
0x87, 0xD9, 0x85, 0xD9, 0x85, 0x46, 0xD9, + // Bytes 26c0 - 26ff + 0x8A, 0xD8, 0xAC, 0xD9, 0x8A, 0x46, 0xD9, 0x8A, + 0xD8, 0xAD, 0xD9, 0x8A, 0x46, 0xD9, 0x8A, 0xD9, + 0x85, 0xD9, 0x85, 0x46, 0xD9, 0x8A, 0xD9, 0x85, + 0xD9, 0x8A, 0x46, 0xD9, 0x8A, 0xD9, 0x94, 0xD8, + 0xA7, 0x46, 0xD9, 0x8A, 0xD9, 0x94, 0xD8, 0xAC, + 0x46, 0xD9, 0x8A, 0xD9, 0x94, 0xD8, 0xAD, 0x46, + 0xD9, 0x8A, 0xD9, 0x94, 0xD8, 0xAE, 0x46, 0xD9, + 0x8A, 0xD9, 0x94, 0xD8, 0xB1, 0x46, 0xD9, 0x8A, + // Bytes 2700 - 273f + 0xD9, 0x94, 0xD8, 0xB2, 0x46, 0xD9, 0x8A, 0xD9, + 0x94, 0xD9, 0x85, 0x46, 0xD9, 0x8A, 0xD9, 0x94, + 0xD9, 0x86, 0x46, 0xD9, 0x8A, 0xD9, 0x94, 0xD9, + 0x87, 0x46, 0xD9, 0x8A, 0xD9, 0x94, 0xD9, 0x88, + 0x46, 0xD9, 0x8A, 0xD9, 0x94, 0xD9, 0x89, 0x46, + 0xD9, 0x8A, 0xD9, 0x94, 0xD9, 0x8A, 0x46, 0xD9, + 0x8A, 0xD9, 0x94, 0xDB, 0x86, 0x46, 0xD9, 0x8A, + 0xD9, 0x94, 0xDB, 0x87, 0x46, 0xD9, 0x8A, 0xD9, + // Bytes 2740 - 277f + 0x94, 0xDB, 0x88, 0x46, 0xD9, 0x8A, 0xD9, 0x94, + 0xDB, 0x90, 0x46, 0xD9, 0x8A, 0xD9, 0x94, 0xDB, + 0x95, 0x46, 0xE0, 0xB9, 0x8D, 0xE0, 0xB8, 0xB2, + 0x46, 0xE0, 0xBA, 0xAB, 0xE0, 0xBA, 0x99, 0x46, + 0xE0, 0xBA, 0xAB, 0xE0, 0xBA, 0xA1, 0x46, 0xE0, + 0xBB, 0x8D, 0xE0, 0xBA, 0xB2, 0x46, 0xE0, 0xBD, + 0x80, 0xE0, 0xBE, 0xB5, 0x46, 0xE0, 0xBD, 0x82, + 0xE0, 0xBE, 0xB7, 0x46, 0xE0, 0xBD, 0x8C, 0xE0, + // Bytes 2780 - 27bf + 0xBE, 0xB7, 0x46, 0xE0, 0xBD, 0x91, 0xE0, 0xBE, + 0xB7, 0x46, 0xE0, 0xBD, 0x96, 0xE0, 0xBE, 0xB7, + 0x46, 0xE0, 0xBD, 0x9B, 0xE0, 0xBE, 0xB7, 0x46, + 0xE0, 0xBE, 0x90, 0xE0, 0xBE, 0xB5, 0x46, 0xE0, + 0xBE, 0x92, 0xE0, 0xBE, 0xB7, 0x46, 0xE0, 0xBE, + 0x9C, 0xE0, 0xBE, 0xB7, 0x46, 0xE0, 0xBE, 0xA1, + 0xE0, 0xBE, 0xB7, 0x46, 0xE0, 0xBE, 0xA6, 0xE0, + 0xBE, 0xB7, 0x46, 0xE0, 0xBE, 0xAB, 0xE0, 0xBE, + // Bytes 27c0 - 27ff + 0xB7, 0x46, 0xE2, 0x80, 0xB2, 0xE2, 0x80, 0xB2, + 0x46, 0xE2, 0x80, 0xB5, 0xE2, 0x80, 0xB5, 0x46, + 0xE2, 0x88, 0xAB, 0xE2, 0x88, 0xAB, 0x46, 0xE2, + 0x88, 0xAE, 0xE2, 0x88, 0xAE, 0x46, 0xE3, 0x81, + 0xBB, 0xE3, 0x81, 0x8B, 0x46, 0xE3, 0x82, 0x88, + 0xE3, 0x82, 0x8A, 0x46, 0xE3, 0x82, 0xAD, 0xE3, + 0x83, 0xAD, 0x46, 0xE3, 0x82, 0xB3, 0xE3, 0x82, + 0xB3, 0x46, 0xE3, 0x82, 0xB3, 0xE3, 0x83, 0x88, + // Bytes 2800 - 283f + 0x46, 0xE3, 0x83, 0x88, 0xE3, 0x83, 0xB3, 0x46, + 0xE3, 0x83, 0x8A, 0xE3, 0x83, 0x8E, 0x46, 0xE3, + 0x83, 0x9B, 0xE3, 0x83, 0xB3, 0x46, 0xE3, 0x83, + 0x9F, 0xE3, 0x83, 0xAA, 0x46, 0xE3, 0x83, 0xAA, + 0xE3, 0x83, 0xA9, 0x46, 0xE3, 0x83, 0xAC, 0xE3, + 0x83, 0xA0, 0x46, 0xE4, 0xBB, 0xA4, 0xE5, 0x92, + 0x8C, 0x46, 0xE5, 0xA4, 0xA7, 0xE6, 0xAD, 0xA3, + 0x46, 0xE5, 0xB9, 0xB3, 0xE6, 0x88, 0x90, 0x46, + // Bytes 2840 - 287f + 0xE6, 0x98, 0x8E, 0xE6, 0xB2, 0xBB, 0x46, 0xE6, + 0x98, 0xAD, 0xE5, 0x92, 0x8C, 0x47, 0x72, 0x61, + 0x64, 0xE2, 0x88, 0x95, 0x73, 0x47, 0xE3, 0x80, + 0x94, 0x53, 0xE3, 0x80, 0x95, 0x48, 0x28, 0xE1, + 0x84, 0x80, 0xE1, 0x85, 0xA1, 0x29, 0x48, 0x28, + 0xE1, 0x84, 0x82, 0xE1, 0x85, 0xA1, 0x29, 0x48, + 0x28, 0xE1, 0x84, 0x83, 0xE1, 0x85, 0xA1, 0x29, + 0x48, 0x28, 0xE1, 0x84, 0x85, 0xE1, 0x85, 0xA1, + // Bytes 2880 - 28bf + 0x29, 0x48, 0x28, 0xE1, 0x84, 0x86, 0xE1, 0x85, + 0xA1, 0x29, 0x48, 0x28, 0xE1, 0x84, 0x87, 0xE1, + 0x85, 0xA1, 0x29, 0x48, 0x28, 0xE1, 0x84, 0x89, + 0xE1, 0x85, 0xA1, 0x29, 0x48, 0x28, 0xE1, 0x84, + 0x8B, 0xE1, 0x85, 0xA1, 0x29, 0x48, 0x28, 0xE1, + 0x84, 0x8C, 0xE1, 0x85, 0xA1, 0x29, 0x48, 0x28, + 0xE1, 0x84, 0x8C, 0xE1, 0x85, 0xAE, 0x29, 0x48, + 0x28, 0xE1, 0x84, 0x8E, 0xE1, 0x85, 0xA1, 0x29, + // Bytes 28c0 - 28ff + 0x48, 0x28, 0xE1, 0x84, 0x8F, 0xE1, 0x85, 0xA1, + 0x29, 0x48, 0x28, 0xE1, 0x84, 0x90, 0xE1, 0x85, + 
0xA1, 0x29, 0x48, 0x28, 0xE1, 0x84, 0x91, 0xE1, + 0x85, 0xA1, 0x29, 0x48, 0x28, 0xE1, 0x84, 0x92, + 0xE1, 0x85, 0xA1, 0x29, 0x48, 0x72, 0x61, 0x64, + 0xE2, 0x88, 0x95, 0x73, 0x32, 0x48, 0xD8, 0xA7, + 0xD9, 0x83, 0xD8, 0xA8, 0xD8, 0xB1, 0x48, 0xD8, + 0xA7, 0xD9, 0x84, 0xD9, 0x84, 0xD9, 0x87, 0x48, + // Bytes 2900 - 293f + 0xD8, 0xB1, 0xD8, 0xB3, 0xD9, 0x88, 0xD9, 0x84, + 0x48, 0xD8, 0xB1, 0xDB, 0x8C, 0xD8, 0xA7, 0xD9, + 0x84, 0x48, 0xD8, 0xB5, 0xD9, 0x84, 0xD8, 0xB9, + 0xD9, 0x85, 0x48, 0xD8, 0xB9, 0xD9, 0x84, 0xD9, + 0x8A, 0xD9, 0x87, 0x48, 0xD9, 0x85, 0xD8, 0xAD, + 0xD9, 0x85, 0xD8, 0xAF, 0x48, 0xD9, 0x88, 0xD8, + 0xB3, 0xD9, 0x84, 0xD9, 0x85, 0x49, 0xE2, 0x80, + 0xB2, 0xE2, 0x80, 0xB2, 0xE2, 0x80, 0xB2, 0x49, + // Bytes 2940 - 297f + 0xE2, 0x80, 0xB5, 0xE2, 0x80, 0xB5, 0xE2, 0x80, + 0xB5, 0x49, 0xE2, 0x88, 0xAB, 0xE2, 0x88, 0xAB, + 0xE2, 0x88, 0xAB, 0x49, 0xE2, 0x88, 0xAE, 0xE2, + 0x88, 0xAE, 0xE2, 0x88, 0xAE, 0x49, 0xE3, 0x80, + 0x94, 0xE4, 0xB8, 0x89, 0xE3, 0x80, 0x95, 0x49, + 0xE3, 0x80, 0x94, 0xE4, 0xBA, 0x8C, 0xE3, 0x80, + 0x95, 0x49, 0xE3, 0x80, 0x94, 0xE5, 0x8B, 0x9D, + 0xE3, 0x80, 0x95, 0x49, 0xE3, 0x80, 0x94, 0xE5, + // Bytes 2980 - 29bf + 0xAE, 0x89, 0xE3, 0x80, 0x95, 0x49, 0xE3, 0x80, + 0x94, 0xE6, 0x89, 0x93, 0xE3, 0x80, 0x95, 0x49, + 0xE3, 0x80, 0x94, 0xE6, 0x95, 0x97, 0xE3, 0x80, + 0x95, 0x49, 0xE3, 0x80, 0x94, 0xE6, 0x9C, 0xAC, + 0xE3, 0x80, 0x95, 0x49, 0xE3, 0x80, 0x94, 0xE7, + 0x82, 0xB9, 0xE3, 0x80, 0x95, 0x49, 0xE3, 0x80, + 0x94, 0xE7, 0x9B, 0x97, 0xE3, 0x80, 0x95, 0x49, + 0xE3, 0x82, 0xA2, 0xE3, 0x83, 0xBC, 0xE3, 0x83, + // Bytes 29c0 - 29ff + 0xAB, 0x49, 0xE3, 0x82, 0xA4, 0xE3, 0x83, 0xB3, + 0xE3, 0x83, 0x81, 0x49, 0xE3, 0x82, 0xA6, 0xE3, + 0x82, 0xA9, 0xE3, 0x83, 0xB3, 0x49, 0xE3, 0x82, + 0xAA, 0xE3, 0x83, 0xB3, 0xE3, 0x82, 0xB9, 0x49, + 0xE3, 0x82, 0xAA, 0xE3, 0x83, 0xBC, 0xE3, 0x83, + 0xA0, 0x49, 0xE3, 0x82, 0xAB, 0xE3, 0x82, 0xA4, + 0xE3, 0x83, 0xAA, 0x49, 0xE3, 0x82, 0xB1, 0xE3, + 0x83, 0xBC, 0xE3, 0x82, 0xB9, 0x49, 0xE3, 0x82, + // Bytes 2a00 - 2a3f + 0xB3, 0xE3, 0x83, 0xAB, 0xE3, 0x83, 0x8A, 0x49, + 0xE3, 0x82, 0xBB, 0xE3, 0x83, 0xB3, 0xE3, 0x83, + 0x81, 0x49, 0xE3, 0x82, 0xBB, 0xE3, 0x83, 0xB3, + 0xE3, 0x83, 0x88, 0x49, 0xE3, 0x83, 0x86, 0xE3, + 0x82, 0x99, 0xE3, 0x82, 0xB7, 0x49, 0xE3, 0x83, + 0x88, 0xE3, 0x82, 0x99, 0xE3, 0x83, 0xAB, 0x49, + 0xE3, 0x83, 0x8E, 0xE3, 0x83, 0x83, 0xE3, 0x83, + 0x88, 0x49, 0xE3, 0x83, 0x8F, 0xE3, 0x82, 0xA4, + // Bytes 2a40 - 2a7f + 0xE3, 0x83, 0x84, 0x49, 0xE3, 0x83, 0x92, 0xE3, + 0x82, 0x99, 0xE3, 0x83, 0xAB, 0x49, 0xE3, 0x83, + 0x92, 0xE3, 0x82, 0x9A, 0xE3, 0x82, 0xB3, 0x49, + 0xE3, 0x83, 0x95, 0xE3, 0x83, 0xA9, 0xE3, 0x83, + 0xB3, 0x49, 0xE3, 0x83, 0x98, 0xE3, 0x82, 0x9A, + 0xE3, 0x82, 0xBD, 0x49, 0xE3, 0x83, 0x98, 0xE3, + 0x83, 0xAB, 0xE3, 0x83, 0x84, 0x49, 0xE3, 0x83, + 0x9B, 0xE3, 0x83, 0xBC, 0xE3, 0x83, 0xAB, 0x49, + // Bytes 2a80 - 2abf + 0xE3, 0x83, 0x9B, 0xE3, 0x83, 0xBC, 0xE3, 0x83, + 0xB3, 0x49, 0xE3, 0x83, 0x9E, 0xE3, 0x82, 0xA4, + 0xE3, 0x83, 0xAB, 0x49, 0xE3, 0x83, 0x9E, 0xE3, + 0x83, 0x83, 0xE3, 0x83, 0x8F, 0x49, 0xE3, 0x83, + 0x9E, 0xE3, 0x83, 0xAB, 0xE3, 0x82, 0xAF, 0x49, + 0xE3, 0x83, 0xA4, 0xE3, 0x83, 0xBC, 0xE3, 0x83, + 0xAB, 0x49, 0xE3, 0x83, 0xA6, 0xE3, 0x82, 0xA2, + 0xE3, 0x83, 0xB3, 0x49, 0xE3, 0x83, 0xAF, 0xE3, + // Bytes 2ac0 - 2aff + 0x83, 0x83, 0xE3, 0x83, 0x88, 0x4C, 0xE2, 0x80, + 0xB2, 0xE2, 0x80, 0xB2, 0xE2, 0x80, 0xB2, 0xE2, + 0x80, 0xB2, 0x4C, 0xE2, 0x88, 0xAB, 0xE2, 0x88, + 0xAB, 0xE2, 0x88, 0xAB, 0xE2, 0x88, 0xAB, 0x4C, + 0xE3, 0x82, 0xA2, 0xE3, 0x83, 0xAB, 0xE3, 0x83, + 0x95, 0xE3, 0x82, 
0xA1, 0x4C, 0xE3, 0x82, 0xA8, + 0xE3, 0x83, 0xBC, 0xE3, 0x82, 0xAB, 0xE3, 0x83, + 0xBC, 0x4C, 0xE3, 0x82, 0xAB, 0xE3, 0x82, 0x99, + // Bytes 2b00 - 2b3f + 0xE3, 0x83, 0xAD, 0xE3, 0x83, 0xB3, 0x4C, 0xE3, + 0x82, 0xAB, 0xE3, 0x82, 0x99, 0xE3, 0x83, 0xB3, + 0xE3, 0x83, 0x9E, 0x4C, 0xE3, 0x82, 0xAB, 0xE3, + 0x83, 0xA9, 0xE3, 0x83, 0x83, 0xE3, 0x83, 0x88, + 0x4C, 0xE3, 0x82, 0xAB, 0xE3, 0x83, 0xAD, 0xE3, + 0x83, 0xAA, 0xE3, 0x83, 0xBC, 0x4C, 0xE3, 0x82, + 0xAD, 0xE3, 0x82, 0x99, 0xE3, 0x83, 0x8B, 0xE3, + 0x83, 0xBC, 0x4C, 0xE3, 0x82, 0xAD, 0xE3, 0x83, + // Bytes 2b40 - 2b7f + 0xA5, 0xE3, 0x83, 0xAA, 0xE3, 0x83, 0xBC, 0x4C, + 0xE3, 0x82, 0xAF, 0xE3, 0x82, 0x99, 0xE3, 0x83, + 0xA9, 0xE3, 0x83, 0xA0, 0x4C, 0xE3, 0x82, 0xAF, + 0xE3, 0x83, 0xAD, 0xE3, 0x83, 0xBC, 0xE3, 0x83, + 0x8D, 0x4C, 0xE3, 0x82, 0xB5, 0xE3, 0x82, 0xA4, + 0xE3, 0x82, 0xAF, 0xE3, 0x83, 0xAB, 0x4C, 0xE3, + 0x82, 0xBF, 0xE3, 0x82, 0x99, 0xE3, 0x83, 0xBC, + 0xE3, 0x82, 0xB9, 0x4C, 0xE3, 0x83, 0x8F, 0xE3, + // Bytes 2b80 - 2bbf + 0x82, 0x9A, 0xE3, 0x83, 0xBC, 0xE3, 0x83, 0x84, + 0x4C, 0xE3, 0x83, 0x92, 0xE3, 0x82, 0x9A, 0xE3, + 0x82, 0xAF, 0xE3, 0x83, 0xAB, 0x4C, 0xE3, 0x83, + 0x95, 0xE3, 0x82, 0xA3, 0xE3, 0x83, 0xBC, 0xE3, + 0x83, 0x88, 0x4C, 0xE3, 0x83, 0x98, 0xE3, 0x82, + 0x99, 0xE3, 0x83, 0xBC, 0xE3, 0x82, 0xBF, 0x4C, + 0xE3, 0x83, 0x98, 0xE3, 0x82, 0x9A, 0xE3, 0x83, + 0x8B, 0xE3, 0x83, 0x92, 0x4C, 0xE3, 0x83, 0x98, + // Bytes 2bc0 - 2bff + 0xE3, 0x82, 0x9A, 0xE3, 0x83, 0xB3, 0xE3, 0x82, + 0xB9, 0x4C, 0xE3, 0x83, 0x9B, 0xE3, 0x82, 0x99, + 0xE3, 0x83, 0xAB, 0xE3, 0x83, 0x88, 0x4C, 0xE3, + 0x83, 0x9E, 0xE3, 0x82, 0xA4, 0xE3, 0x82, 0xAF, + 0xE3, 0x83, 0xAD, 0x4C, 0xE3, 0x83, 0x9F, 0xE3, + 0x82, 0xAF, 0xE3, 0x83, 0xAD, 0xE3, 0x83, 0xB3, + 0x4C, 0xE3, 0x83, 0xA1, 0xE3, 0x83, 0xBC, 0xE3, + 0x83, 0x88, 0xE3, 0x83, 0xAB, 0x4C, 0xE3, 0x83, + // Bytes 2c00 - 2c3f + 0xAA, 0xE3, 0x83, 0x83, 0xE3, 0x83, 0x88, 0xE3, + 0x83, 0xAB, 0x4C, 0xE3, 0x83, 0xAB, 0xE3, 0x83, + 0x92, 0xE3, 0x82, 0x9A, 0xE3, 0x83, 0xBC, 0x4C, + 0xE6, 0xA0, 0xAA, 0xE5, 0xBC, 0x8F, 0xE4, 0xBC, + 0x9A, 0xE7, 0xA4, 0xBE, 0x4E, 0x28, 0xE1, 0x84, + 0x8B, 0xE1, 0x85, 0xA9, 0xE1, 0x84, 0x92, 0xE1, + 0x85, 0xAE, 0x29, 0x4F, 0xD8, 0xAC, 0xD9, 0x84, + 0x20, 0xD8, 0xAC, 0xD9, 0x84, 0xD8, 0xA7, 0xD9, + // Bytes 2c40 - 2c7f + 0x84, 0xD9, 0x87, 0x4F, 0xE3, 0x82, 0xA2, 0xE3, + 0x83, 0x8F, 0xE3, 0x82, 0x9A, 0xE3, 0x83, 0xBC, + 0xE3, 0x83, 0x88, 0x4F, 0xE3, 0x82, 0xA2, 0xE3, + 0x83, 0xB3, 0xE3, 0x83, 0x98, 0xE3, 0x82, 0x9A, + 0xE3, 0x82, 0xA2, 0x4F, 0xE3, 0x82, 0xAD, 0xE3, + 0x83, 0xAD, 0xE3, 0x83, 0xAF, 0xE3, 0x83, 0x83, + 0xE3, 0x83, 0x88, 0x4F, 0xE3, 0x82, 0xB5, 0xE3, + 0x83, 0xB3, 0xE3, 0x83, 0x81, 0xE3, 0x83, 0xBC, + // Bytes 2c80 - 2cbf + 0xE3, 0x83, 0xA0, 0x4F, 0xE3, 0x83, 0x8F, 0xE3, + 0x82, 0x99, 0xE3, 0x83, 0xBC, 0xE3, 0x83, 0xAC, + 0xE3, 0x83, 0xAB, 0x4F, 0xE3, 0x83, 0x98, 0xE3, + 0x82, 0xAF, 0xE3, 0x82, 0xBF, 0xE3, 0x83, 0xBC, + 0xE3, 0x83, 0xAB, 0x4F, 0xE3, 0x83, 0x9B, 0xE3, + 0x82, 0x9A, 0xE3, 0x82, 0xA4, 0xE3, 0x83, 0xB3, + 0xE3, 0x83, 0x88, 0x4F, 0xE3, 0x83, 0x9E, 0xE3, + 0x83, 0xB3, 0xE3, 0x82, 0xB7, 0xE3, 0x83, 0xA7, + // Bytes 2cc0 - 2cff + 0xE3, 0x83, 0xB3, 0x4F, 0xE3, 0x83, 0xA1, 0xE3, + 0x82, 0xAB, 0xE3, 0x82, 0x99, 0xE3, 0x83, 0x88, + 0xE3, 0x83, 0xB3, 0x4F, 0xE3, 0x83, 0xAB, 0xE3, + 0x83, 0xBC, 0xE3, 0x83, 0x95, 0xE3, 0x82, 0x99, + 0xE3, 0x83, 0xAB, 0x51, 0x28, 0xE1, 0x84, 0x8B, + 0xE1, 0x85, 0xA9, 0xE1, 0x84, 0x8C, 0xE1, 0x85, + 0xA5, 0xE1, 0x86, 0xAB, 0x29, 0x52, 0xE3, 0x82, + 0xAD, 0xE3, 0x82, 0x99, 0xE3, 0x83, 0xAB, 0xE3, + // Bytes 2d00 - 2d3f + 0x82, 0xBF, 
0xE3, 0x82, 0x99, 0xE3, 0x83, 0xBC, + 0x52, 0xE3, 0x82, 0xAD, 0xE3, 0x83, 0xAD, 0xE3, + 0x82, 0xAF, 0xE3, 0x82, 0x99, 0xE3, 0x83, 0xA9, + 0xE3, 0x83, 0xA0, 0x52, 0xE3, 0x82, 0xAD, 0xE3, + 0x83, 0xAD, 0xE3, 0x83, 0xA1, 0xE3, 0x83, 0xBC, + 0xE3, 0x83, 0x88, 0xE3, 0x83, 0xAB, 0x52, 0xE3, + 0x82, 0xAF, 0xE3, 0x82, 0x99, 0xE3, 0x83, 0xA9, + 0xE3, 0x83, 0xA0, 0xE3, 0x83, 0x88, 0xE3, 0x83, + // Bytes 2d40 - 2d7f + 0xB3, 0x52, 0xE3, 0x82, 0xAF, 0xE3, 0x83, 0xAB, + 0xE3, 0x82, 0xBB, 0xE3, 0x82, 0x99, 0xE3, 0x82, + 0xA4, 0xE3, 0x83, 0xAD, 0x52, 0xE3, 0x83, 0x8F, + 0xE3, 0x82, 0x9A, 0xE3, 0x83, 0xBC, 0xE3, 0x82, + 0xBB, 0xE3, 0x83, 0xB3, 0xE3, 0x83, 0x88, 0x52, + 0xE3, 0x83, 0x92, 0xE3, 0x82, 0x9A, 0xE3, 0x82, + 0xA2, 0xE3, 0x82, 0xB9, 0xE3, 0x83, 0x88, 0xE3, + 0x83, 0xAB, 0x52, 0xE3, 0x83, 0x95, 0xE3, 0x82, + // Bytes 2d80 - 2dbf + 0x99, 0xE3, 0x83, 0x83, 0xE3, 0x82, 0xB7, 0xE3, + 0x82, 0xA7, 0xE3, 0x83, 0xAB, 0x52, 0xE3, 0x83, + 0x9F, 0xE3, 0x83, 0xAA, 0xE3, 0x83, 0x8F, 0xE3, + 0x82, 0x99, 0xE3, 0x83, 0xBC, 0xE3, 0x83, 0xAB, + 0x52, 0xE3, 0x83, 0xAC, 0xE3, 0x83, 0xB3, 0xE3, + 0x83, 0x88, 0xE3, 0x82, 0xB1, 0xE3, 0x82, 0x99, + 0xE3, 0x83, 0xB3, 0x61, 0xD8, 0xB5, 0xD9, 0x84, + 0xD9, 0x89, 0x20, 0xD8, 0xA7, 0xD9, 0x84, 0xD9, + // Bytes 2dc0 - 2dff + 0x84, 0xD9, 0x87, 0x20, 0xD8, 0xB9, 0xD9, 0x84, + 0xD9, 0x8A, 0xD9, 0x87, 0x20, 0xD9, 0x88, 0xD8, + 0xB3, 0xD9, 0x84, 0xD9, 0x85, 0x06, 0xE0, 0xA7, + 0x87, 0xE0, 0xA6, 0xBE, 0x01, 0x06, 0xE0, 0xA7, + 0x87, 0xE0, 0xA7, 0x97, 0x01, 0x06, 0xE0, 0xAD, + 0x87, 0xE0, 0xAC, 0xBE, 0x01, 0x06, 0xE0, 0xAD, + 0x87, 0xE0, 0xAD, 0x96, 0x01, 0x06, 0xE0, 0xAD, + 0x87, 0xE0, 0xAD, 0x97, 0x01, 0x06, 0xE0, 0xAE, + // Bytes 2e00 - 2e3f + 0x92, 0xE0, 0xAF, 0x97, 0x01, 0x06, 0xE0, 0xAF, + 0x86, 0xE0, 0xAE, 0xBE, 0x01, 0x06, 0xE0, 0xAF, + 0x86, 0xE0, 0xAF, 0x97, 0x01, 0x06, 0xE0, 0xAF, + 0x87, 0xE0, 0xAE, 0xBE, 0x01, 0x06, 0xE0, 0xB2, + 0xBF, 0xE0, 0xB3, 0x95, 0x01, 0x06, 0xE0, 0xB3, + 0x86, 0xE0, 0xB3, 0x95, 0x01, 0x06, 0xE0, 0xB3, + 0x86, 0xE0, 0xB3, 0x96, 0x01, 0x06, 0xE0, 0xB5, + 0x86, 0xE0, 0xB4, 0xBE, 0x01, 0x06, 0xE0, 0xB5, + // Bytes 2e40 - 2e7f + 0x86, 0xE0, 0xB5, 0x97, 0x01, 0x06, 0xE0, 0xB5, + 0x87, 0xE0, 0xB4, 0xBE, 0x01, 0x06, 0xE0, 0xB7, + 0x99, 0xE0, 0xB7, 0x9F, 0x01, 0x06, 0xE1, 0x80, + 0xA5, 0xE1, 0x80, 0xAE, 0x01, 0x06, 0xE1, 0xAC, + 0x85, 0xE1, 0xAC, 0xB5, 0x01, 0x06, 0xE1, 0xAC, + 0x87, 0xE1, 0xAC, 0xB5, 0x01, 0x06, 0xE1, 0xAC, + 0x89, 0xE1, 0xAC, 0xB5, 0x01, 0x06, 0xE1, 0xAC, + 0x8B, 0xE1, 0xAC, 0xB5, 0x01, 0x06, 0xE1, 0xAC, + // Bytes 2e80 - 2ebf + 0x8D, 0xE1, 0xAC, 0xB5, 0x01, 0x06, 0xE1, 0xAC, + 0x91, 0xE1, 0xAC, 0xB5, 0x01, 0x06, 0xE1, 0xAC, + 0xBA, 0xE1, 0xAC, 0xB5, 0x01, 0x06, 0xE1, 0xAC, + 0xBC, 0xE1, 0xAC, 0xB5, 0x01, 0x06, 0xE1, 0xAC, + 0xBE, 0xE1, 0xAC, 0xB5, 0x01, 0x06, 0xE1, 0xAC, + 0xBF, 0xE1, 0xAC, 0xB5, 0x01, 0x06, 0xE1, 0xAD, + 0x82, 0xE1, 0xAC, 0xB5, 0x01, 0x08, 0xF0, 0x91, + 0x84, 0xB1, 0xF0, 0x91, 0x84, 0xA7, 0x01, 0x08, + // Bytes 2ec0 - 2eff + 0xF0, 0x91, 0x84, 0xB2, 0xF0, 0x91, 0x84, 0xA7, + 0x01, 0x08, 0xF0, 0x91, 0x8D, 0x87, 0xF0, 0x91, + 0x8C, 0xBE, 0x01, 0x08, 0xF0, 0x91, 0x8D, 0x87, + 0xF0, 0x91, 0x8D, 0x97, 0x01, 0x08, 0xF0, 0x91, + 0x92, 0xB9, 0xF0, 0x91, 0x92, 0xB0, 0x01, 0x08, + 0xF0, 0x91, 0x92, 0xB9, 0xF0, 0x91, 0x92, 0xBA, + 0x01, 0x08, 0xF0, 0x91, 0x92, 0xB9, 0xF0, 0x91, + 0x92, 0xBD, 0x01, 0x08, 0xF0, 0x91, 0x96, 0xB8, + // Bytes 2f00 - 2f3f + 0xF0, 0x91, 0x96, 0xAF, 0x01, 0x08, 0xF0, 0x91, + 0x96, 0xB9, 0xF0, 0x91, 0x96, 0xAF, 0x01, 0x08, + 0xF0, 0x91, 0xA4, 0xB5, 0xF0, 0x91, 0xA4, 0xB0, + 0x01, 0x09, 0xE0, 0xB3, 0x86, 
0xE0, 0xB3, 0x82, + 0xE0, 0xB3, 0x95, 0x02, 0x09, 0xE0, 0xB7, 0x99, + 0xE0, 0xB7, 0x8F, 0xE0, 0xB7, 0x8A, 0x16, 0x44, + 0x44, 0x5A, 0xCC, 0x8C, 0xCD, 0x44, 0x44, 0x7A, + 0xCC, 0x8C, 0xCD, 0x44, 0x64, 0x7A, 0xCC, 0x8C, + // Bytes 2f40 - 2f7f + 0xCD, 0x46, 0xD9, 0x84, 0xD8, 0xA7, 0xD9, 0x93, + 0xCD, 0x46, 0xD9, 0x84, 0xD8, 0xA7, 0xD9, 0x94, + 0xCD, 0x46, 0xD9, 0x84, 0xD8, 0xA7, 0xD9, 0x95, + 0xB9, 0x46, 0xE1, 0x84, 0x80, 0xE1, 0x85, 0xA1, + 0x01, 0x46, 0xE1, 0x84, 0x82, 0xE1, 0x85, 0xA1, + 0x01, 0x46, 0xE1, 0x84, 0x83, 0xE1, 0x85, 0xA1, + 0x01, 0x46, 0xE1, 0x84, 0x85, 0xE1, 0x85, 0xA1, + 0x01, 0x46, 0xE1, 0x84, 0x86, 0xE1, 0x85, 0xA1, + // Bytes 2f80 - 2fbf + 0x01, 0x46, 0xE1, 0x84, 0x87, 0xE1, 0x85, 0xA1, + 0x01, 0x46, 0xE1, 0x84, 0x89, 0xE1, 0x85, 0xA1, + 0x01, 0x46, 0xE1, 0x84, 0x8B, 0xE1, 0x85, 0xA1, + 0x01, 0x46, 0xE1, 0x84, 0x8B, 0xE1, 0x85, 0xAE, + 0x01, 0x46, 0xE1, 0x84, 0x8C, 0xE1, 0x85, 0xA1, + 0x01, 0x46, 0xE1, 0x84, 0x8E, 0xE1, 0x85, 0xA1, + 0x01, 0x46, 0xE1, 0x84, 0x8F, 0xE1, 0x85, 0xA1, + 0x01, 0x46, 0xE1, 0x84, 0x90, 0xE1, 0x85, 0xA1, + // Bytes 2fc0 - 2fff + 0x01, 0x46, 0xE1, 0x84, 0x91, 0xE1, 0x85, 0xA1, + 0x01, 0x46, 0xE1, 0x84, 0x92, 0xE1, 0x85, 0xA1, + 0x01, 0x49, 0xE3, 0x83, 0xA1, 0xE3, 0x82, 0xAB, + 0xE3, 0x82, 0x99, 0x11, 0x4C, 0xE1, 0x84, 0x8C, + 0xE1, 0x85, 0xAE, 0xE1, 0x84, 0x8B, 0xE1, 0x85, + 0xB4, 0x01, 0x4C, 0xE3, 0x82, 0xAD, 0xE3, 0x82, + 0x99, 0xE3, 0x82, 0xAB, 0xE3, 0x82, 0x99, 0x11, + 0x4C, 0xE3, 0x82, 0xB3, 0xE3, 0x83, 0xBC, 0xE3, + // Bytes 3000 - 303f + 0x83, 0x9B, 0xE3, 0x82, 0x9A, 0x11, 0x4C, 0xE3, + 0x83, 0xA4, 0xE3, 0x83, 0xBC, 0xE3, 0x83, 0x88, + 0xE3, 0x82, 0x99, 0x11, 0x4F, 0xE1, 0x84, 0x8E, + 0xE1, 0x85, 0xA1, 0xE1, 0x86, 0xB7, 0xE1, 0x84, + 0x80, 0xE1, 0x85, 0xA9, 0x01, 0x4F, 0xE3, 0x82, + 0xA4, 0xE3, 0x83, 0x8B, 0xE3, 0x83, 0xB3, 0xE3, + 0x82, 0xAF, 0xE3, 0x82, 0x99, 0x11, 0x4F, 0xE3, + 0x82, 0xB7, 0xE3, 0x83, 0xAA, 0xE3, 0x83, 0xB3, + // Bytes 3040 - 307f + 0xE3, 0x82, 0xAF, 0xE3, 0x82, 0x99, 0x11, 0x4F, + 0xE3, 0x83, 0x98, 0xE3, 0x82, 0x9A, 0xE3, 0x83, + 0xBC, 0xE3, 0x82, 0xB7, 0xE3, 0x82, 0x99, 0x11, + 0x4F, 0xE3, 0x83, 0x9B, 0xE3, 0x82, 0x9A, 0xE3, + 0x83, 0xB3, 0xE3, 0x83, 0x88, 0xE3, 0x82, 0x99, + 0x11, 0x52, 0xE3, 0x82, 0xA8, 0xE3, 0x82, 0xB9, + 0xE3, 0x82, 0xAF, 0xE3, 0x83, 0xBC, 0xE3, 0x83, + 0x88, 0xE3, 0x82, 0x99, 0x11, 0x52, 0xE3, 0x83, + // Bytes 3080 - 30bf + 0x95, 0xE3, 0x82, 0xA1, 0xE3, 0x83, 0xA9, 0xE3, + 0x83, 0x83, 0xE3, 0x83, 0x88, 0xE3, 0x82, 0x99, + 0x11, 0x86, 0xE0, 0xB3, 0x86, 0xE0, 0xB3, 0x82, + 0x01, 0x86, 0xE0, 0xB7, 0x99, 0xE0, 0xB7, 0x8F, + 0x01, 0x03, 0x3C, 0xCC, 0xB8, 0x05, 0x03, 0x3D, + 0xCC, 0xB8, 0x05, 0x03, 0x3E, 0xCC, 0xB8, 0x05, + 0x03, 0x41, 0xCC, 0x80, 0xCD, 0x03, 0x41, 0xCC, + 0x81, 0xCD, 0x03, 0x41, 0xCC, 0x83, 0xCD, 0x03, + // Bytes 30c0 - 30ff + 0x41, 0xCC, 0x84, 0xCD, 0x03, 0x41, 0xCC, 0x89, + 0xCD, 0x03, 0x41, 0xCC, 0x8C, 0xCD, 0x03, 0x41, + 0xCC, 0x8F, 0xCD, 0x03, 0x41, 0xCC, 0x91, 0xCD, + 0x03, 0x41, 0xCC, 0xA5, 0xB9, 0x03, 0x41, 0xCC, + 0xA8, 0xA9, 0x03, 0x42, 0xCC, 0x87, 0xCD, 0x03, + 0x42, 0xCC, 0xA3, 0xB9, 0x03, 0x42, 0xCC, 0xB1, + 0xB9, 0x03, 0x43, 0xCC, 0x81, 0xCD, 0x03, 0x43, + 0xCC, 0x82, 0xCD, 0x03, 0x43, 0xCC, 0x87, 0xCD, + // Bytes 3100 - 313f + 0x03, 0x43, 0xCC, 0x8C, 0xCD, 0x03, 0x44, 0xCC, + 0x87, 0xCD, 0x03, 0x44, 0xCC, 0x8C, 0xCD, 0x03, + 0x44, 0xCC, 0xA3, 0xB9, 0x03, 0x44, 0xCC, 0xA7, + 0xA9, 0x03, 0x44, 0xCC, 0xAD, 0xB9, 0x03, 0x44, + 0xCC, 0xB1, 0xB9, 0x03, 0x45, 0xCC, 0x80, 0xCD, + 0x03, 0x45, 0xCC, 0x81, 0xCD, 0x03, 0x45, 0xCC, + 0x83, 0xCD, 0x03, 0x45, 0xCC, 0x86, 0xCD, 0x03, + 
0x45, 0xCC, 0x87, 0xCD, 0x03, 0x45, 0xCC, 0x88, + // Bytes 3140 - 317f + 0xCD, 0x03, 0x45, 0xCC, 0x89, 0xCD, 0x03, 0x45, + 0xCC, 0x8C, 0xCD, 0x03, 0x45, 0xCC, 0x8F, 0xCD, + 0x03, 0x45, 0xCC, 0x91, 0xCD, 0x03, 0x45, 0xCC, + 0xA8, 0xA9, 0x03, 0x45, 0xCC, 0xAD, 0xB9, 0x03, + 0x45, 0xCC, 0xB0, 0xB9, 0x03, 0x46, 0xCC, 0x87, + 0xCD, 0x03, 0x47, 0xCC, 0x81, 0xCD, 0x03, 0x47, + 0xCC, 0x82, 0xCD, 0x03, 0x47, 0xCC, 0x84, 0xCD, + 0x03, 0x47, 0xCC, 0x86, 0xCD, 0x03, 0x47, 0xCC, + // Bytes 3180 - 31bf + 0x87, 0xCD, 0x03, 0x47, 0xCC, 0x8C, 0xCD, 0x03, + 0x47, 0xCC, 0xA7, 0xA9, 0x03, 0x48, 0xCC, 0x82, + 0xCD, 0x03, 0x48, 0xCC, 0x87, 0xCD, 0x03, 0x48, + 0xCC, 0x88, 0xCD, 0x03, 0x48, 0xCC, 0x8C, 0xCD, + 0x03, 0x48, 0xCC, 0xA3, 0xB9, 0x03, 0x48, 0xCC, + 0xA7, 0xA9, 0x03, 0x48, 0xCC, 0xAE, 0xB9, 0x03, + 0x49, 0xCC, 0x80, 0xCD, 0x03, 0x49, 0xCC, 0x81, + 0xCD, 0x03, 0x49, 0xCC, 0x82, 0xCD, 0x03, 0x49, + // Bytes 31c0 - 31ff + 0xCC, 0x83, 0xCD, 0x03, 0x49, 0xCC, 0x84, 0xCD, + 0x03, 0x49, 0xCC, 0x86, 0xCD, 0x03, 0x49, 0xCC, + 0x87, 0xCD, 0x03, 0x49, 0xCC, 0x89, 0xCD, 0x03, + 0x49, 0xCC, 0x8C, 0xCD, 0x03, 0x49, 0xCC, 0x8F, + 0xCD, 0x03, 0x49, 0xCC, 0x91, 0xCD, 0x03, 0x49, + 0xCC, 0xA3, 0xB9, 0x03, 0x49, 0xCC, 0xA8, 0xA9, + 0x03, 0x49, 0xCC, 0xB0, 0xB9, 0x03, 0x4A, 0xCC, + 0x82, 0xCD, 0x03, 0x4B, 0xCC, 0x81, 0xCD, 0x03, + // Bytes 3200 - 323f + 0x4B, 0xCC, 0x8C, 0xCD, 0x03, 0x4B, 0xCC, 0xA3, + 0xB9, 0x03, 0x4B, 0xCC, 0xA7, 0xA9, 0x03, 0x4B, + 0xCC, 0xB1, 0xB9, 0x03, 0x4C, 0xCC, 0x81, 0xCD, + 0x03, 0x4C, 0xCC, 0x8C, 0xCD, 0x03, 0x4C, 0xCC, + 0xA7, 0xA9, 0x03, 0x4C, 0xCC, 0xAD, 0xB9, 0x03, + 0x4C, 0xCC, 0xB1, 0xB9, 0x03, 0x4D, 0xCC, 0x81, + 0xCD, 0x03, 0x4D, 0xCC, 0x87, 0xCD, 0x03, 0x4D, + 0xCC, 0xA3, 0xB9, 0x03, 0x4E, 0xCC, 0x80, 0xCD, + // Bytes 3240 - 327f + 0x03, 0x4E, 0xCC, 0x81, 0xCD, 0x03, 0x4E, 0xCC, + 0x83, 0xCD, 0x03, 0x4E, 0xCC, 0x87, 0xCD, 0x03, + 0x4E, 0xCC, 0x8C, 0xCD, 0x03, 0x4E, 0xCC, 0xA3, + 0xB9, 0x03, 0x4E, 0xCC, 0xA7, 0xA9, 0x03, 0x4E, + 0xCC, 0xAD, 0xB9, 0x03, 0x4E, 0xCC, 0xB1, 0xB9, + 0x03, 0x4F, 0xCC, 0x80, 0xCD, 0x03, 0x4F, 0xCC, + 0x81, 0xCD, 0x03, 0x4F, 0xCC, 0x86, 0xCD, 0x03, + 0x4F, 0xCC, 0x89, 0xCD, 0x03, 0x4F, 0xCC, 0x8B, + // Bytes 3280 - 32bf + 0xCD, 0x03, 0x4F, 0xCC, 0x8C, 0xCD, 0x03, 0x4F, + 0xCC, 0x8F, 0xCD, 0x03, 0x4F, 0xCC, 0x91, 0xCD, + 0x03, 0x50, 0xCC, 0x81, 0xCD, 0x03, 0x50, 0xCC, + 0x87, 0xCD, 0x03, 0x52, 0xCC, 0x81, 0xCD, 0x03, + 0x52, 0xCC, 0x87, 0xCD, 0x03, 0x52, 0xCC, 0x8C, + 0xCD, 0x03, 0x52, 0xCC, 0x8F, 0xCD, 0x03, 0x52, + 0xCC, 0x91, 0xCD, 0x03, 0x52, 0xCC, 0xA7, 0xA9, + 0x03, 0x52, 0xCC, 0xB1, 0xB9, 0x03, 0x53, 0xCC, + // Bytes 32c0 - 32ff + 0x82, 0xCD, 0x03, 0x53, 0xCC, 0x87, 0xCD, 0x03, + 0x53, 0xCC, 0xA6, 0xB9, 0x03, 0x53, 0xCC, 0xA7, + 0xA9, 0x03, 0x54, 0xCC, 0x87, 0xCD, 0x03, 0x54, + 0xCC, 0x8C, 0xCD, 0x03, 0x54, 0xCC, 0xA3, 0xB9, + 0x03, 0x54, 0xCC, 0xA6, 0xB9, 0x03, 0x54, 0xCC, + 0xA7, 0xA9, 0x03, 0x54, 0xCC, 0xAD, 0xB9, 0x03, + 0x54, 0xCC, 0xB1, 0xB9, 0x03, 0x55, 0xCC, 0x80, + 0xCD, 0x03, 0x55, 0xCC, 0x81, 0xCD, 0x03, 0x55, + // Bytes 3300 - 333f + 0xCC, 0x82, 0xCD, 0x03, 0x55, 0xCC, 0x86, 0xCD, + 0x03, 0x55, 0xCC, 0x89, 0xCD, 0x03, 0x55, 0xCC, + 0x8A, 0xCD, 0x03, 0x55, 0xCC, 0x8B, 0xCD, 0x03, + 0x55, 0xCC, 0x8C, 0xCD, 0x03, 0x55, 0xCC, 0x8F, + 0xCD, 0x03, 0x55, 0xCC, 0x91, 0xCD, 0x03, 0x55, + 0xCC, 0xA3, 0xB9, 0x03, 0x55, 0xCC, 0xA4, 0xB9, + 0x03, 0x55, 0xCC, 0xA8, 0xA9, 0x03, 0x55, 0xCC, + 0xAD, 0xB9, 0x03, 0x55, 0xCC, 0xB0, 0xB9, 0x03, + // Bytes 3340 - 337f + 0x56, 0xCC, 0x83, 0xCD, 0x03, 0x56, 0xCC, 0xA3, + 0xB9, 0x03, 0x57, 0xCC, 0x80, 0xCD, 0x03, 0x57, 
+ 0xCC, 0x81, 0xCD, 0x03, 0x57, 0xCC, 0x82, 0xCD, + 0x03, 0x57, 0xCC, 0x87, 0xCD, 0x03, 0x57, 0xCC, + 0x88, 0xCD, 0x03, 0x57, 0xCC, 0xA3, 0xB9, 0x03, + 0x58, 0xCC, 0x87, 0xCD, 0x03, 0x58, 0xCC, 0x88, + 0xCD, 0x03, 0x59, 0xCC, 0x80, 0xCD, 0x03, 0x59, + 0xCC, 0x81, 0xCD, 0x03, 0x59, 0xCC, 0x82, 0xCD, + // Bytes 3380 - 33bf + 0x03, 0x59, 0xCC, 0x83, 0xCD, 0x03, 0x59, 0xCC, + 0x84, 0xCD, 0x03, 0x59, 0xCC, 0x87, 0xCD, 0x03, + 0x59, 0xCC, 0x88, 0xCD, 0x03, 0x59, 0xCC, 0x89, + 0xCD, 0x03, 0x59, 0xCC, 0xA3, 0xB9, 0x03, 0x5A, + 0xCC, 0x81, 0xCD, 0x03, 0x5A, 0xCC, 0x82, 0xCD, + 0x03, 0x5A, 0xCC, 0x87, 0xCD, 0x03, 0x5A, 0xCC, + 0x8C, 0xCD, 0x03, 0x5A, 0xCC, 0xA3, 0xB9, 0x03, + 0x5A, 0xCC, 0xB1, 0xB9, 0x03, 0x61, 0xCC, 0x80, + // Bytes 33c0 - 33ff + 0xCD, 0x03, 0x61, 0xCC, 0x81, 0xCD, 0x03, 0x61, + 0xCC, 0x83, 0xCD, 0x03, 0x61, 0xCC, 0x84, 0xCD, + 0x03, 0x61, 0xCC, 0x89, 0xCD, 0x03, 0x61, 0xCC, + 0x8C, 0xCD, 0x03, 0x61, 0xCC, 0x8F, 0xCD, 0x03, + 0x61, 0xCC, 0x91, 0xCD, 0x03, 0x61, 0xCC, 0xA5, + 0xB9, 0x03, 0x61, 0xCC, 0xA8, 0xA9, 0x03, 0x62, + 0xCC, 0x87, 0xCD, 0x03, 0x62, 0xCC, 0xA3, 0xB9, + 0x03, 0x62, 0xCC, 0xB1, 0xB9, 0x03, 0x63, 0xCC, + // Bytes 3400 - 343f + 0x81, 0xCD, 0x03, 0x63, 0xCC, 0x82, 0xCD, 0x03, + 0x63, 0xCC, 0x87, 0xCD, 0x03, 0x63, 0xCC, 0x8C, + 0xCD, 0x03, 0x64, 0xCC, 0x87, 0xCD, 0x03, 0x64, + 0xCC, 0x8C, 0xCD, 0x03, 0x64, 0xCC, 0xA3, 0xB9, + 0x03, 0x64, 0xCC, 0xA7, 0xA9, 0x03, 0x64, 0xCC, + 0xAD, 0xB9, 0x03, 0x64, 0xCC, 0xB1, 0xB9, 0x03, + 0x65, 0xCC, 0x80, 0xCD, 0x03, 0x65, 0xCC, 0x81, + 0xCD, 0x03, 0x65, 0xCC, 0x83, 0xCD, 0x03, 0x65, + // Bytes 3440 - 347f + 0xCC, 0x86, 0xCD, 0x03, 0x65, 0xCC, 0x87, 0xCD, + 0x03, 0x65, 0xCC, 0x88, 0xCD, 0x03, 0x65, 0xCC, + 0x89, 0xCD, 0x03, 0x65, 0xCC, 0x8C, 0xCD, 0x03, + 0x65, 0xCC, 0x8F, 0xCD, 0x03, 0x65, 0xCC, 0x91, + 0xCD, 0x03, 0x65, 0xCC, 0xA8, 0xA9, 0x03, 0x65, + 0xCC, 0xAD, 0xB9, 0x03, 0x65, 0xCC, 0xB0, 0xB9, + 0x03, 0x66, 0xCC, 0x87, 0xCD, 0x03, 0x67, 0xCC, + 0x81, 0xCD, 0x03, 0x67, 0xCC, 0x82, 0xCD, 0x03, + // Bytes 3480 - 34bf + 0x67, 0xCC, 0x84, 0xCD, 0x03, 0x67, 0xCC, 0x86, + 0xCD, 0x03, 0x67, 0xCC, 0x87, 0xCD, 0x03, 0x67, + 0xCC, 0x8C, 0xCD, 0x03, 0x67, 0xCC, 0xA7, 0xA9, + 0x03, 0x68, 0xCC, 0x82, 0xCD, 0x03, 0x68, 0xCC, + 0x87, 0xCD, 0x03, 0x68, 0xCC, 0x88, 0xCD, 0x03, + 0x68, 0xCC, 0x8C, 0xCD, 0x03, 0x68, 0xCC, 0xA3, + 0xB9, 0x03, 0x68, 0xCC, 0xA7, 0xA9, 0x03, 0x68, + 0xCC, 0xAE, 0xB9, 0x03, 0x68, 0xCC, 0xB1, 0xB9, + // Bytes 34c0 - 34ff + 0x03, 0x69, 0xCC, 0x80, 0xCD, 0x03, 0x69, 0xCC, + 0x81, 0xCD, 0x03, 0x69, 0xCC, 0x82, 0xCD, 0x03, + 0x69, 0xCC, 0x83, 0xCD, 0x03, 0x69, 0xCC, 0x84, + 0xCD, 0x03, 0x69, 0xCC, 0x86, 0xCD, 0x03, 0x69, + 0xCC, 0x89, 0xCD, 0x03, 0x69, 0xCC, 0x8C, 0xCD, + 0x03, 0x69, 0xCC, 0x8F, 0xCD, 0x03, 0x69, 0xCC, + 0x91, 0xCD, 0x03, 0x69, 0xCC, 0xA3, 0xB9, 0x03, + 0x69, 0xCC, 0xA8, 0xA9, 0x03, 0x69, 0xCC, 0xB0, + // Bytes 3500 - 353f + 0xB9, 0x03, 0x6A, 0xCC, 0x82, 0xCD, 0x03, 0x6A, + 0xCC, 0x8C, 0xCD, 0x03, 0x6B, 0xCC, 0x81, 0xCD, + 0x03, 0x6B, 0xCC, 0x8C, 0xCD, 0x03, 0x6B, 0xCC, + 0xA3, 0xB9, 0x03, 0x6B, 0xCC, 0xA7, 0xA9, 0x03, + 0x6B, 0xCC, 0xB1, 0xB9, 0x03, 0x6C, 0xCC, 0x81, + 0xCD, 0x03, 0x6C, 0xCC, 0x8C, 0xCD, 0x03, 0x6C, + 0xCC, 0xA7, 0xA9, 0x03, 0x6C, 0xCC, 0xAD, 0xB9, + 0x03, 0x6C, 0xCC, 0xB1, 0xB9, 0x03, 0x6D, 0xCC, + // Bytes 3540 - 357f + 0x81, 0xCD, 0x03, 0x6D, 0xCC, 0x87, 0xCD, 0x03, + 0x6D, 0xCC, 0xA3, 0xB9, 0x03, 0x6E, 0xCC, 0x80, + 0xCD, 0x03, 0x6E, 0xCC, 0x81, 0xCD, 0x03, 0x6E, + 0xCC, 0x83, 0xCD, 0x03, 0x6E, 0xCC, 0x87, 0xCD, + 0x03, 0x6E, 0xCC, 0x8C, 0xCD, 0x03, 0x6E, 0xCC, + 0xA3, 0xB9, 0x03, 
0x6E, 0xCC, 0xA7, 0xA9, 0x03, + 0x6E, 0xCC, 0xAD, 0xB9, 0x03, 0x6E, 0xCC, 0xB1, + 0xB9, 0x03, 0x6F, 0xCC, 0x80, 0xCD, 0x03, 0x6F, + // Bytes 3580 - 35bf + 0xCC, 0x81, 0xCD, 0x03, 0x6F, 0xCC, 0x86, 0xCD, + 0x03, 0x6F, 0xCC, 0x89, 0xCD, 0x03, 0x6F, 0xCC, + 0x8B, 0xCD, 0x03, 0x6F, 0xCC, 0x8C, 0xCD, 0x03, + 0x6F, 0xCC, 0x8F, 0xCD, 0x03, 0x6F, 0xCC, 0x91, + 0xCD, 0x03, 0x70, 0xCC, 0x81, 0xCD, 0x03, 0x70, + 0xCC, 0x87, 0xCD, 0x03, 0x72, 0xCC, 0x81, 0xCD, + 0x03, 0x72, 0xCC, 0x87, 0xCD, 0x03, 0x72, 0xCC, + 0x8C, 0xCD, 0x03, 0x72, 0xCC, 0x8F, 0xCD, 0x03, + // Bytes 35c0 - 35ff + 0x72, 0xCC, 0x91, 0xCD, 0x03, 0x72, 0xCC, 0xA7, + 0xA9, 0x03, 0x72, 0xCC, 0xB1, 0xB9, 0x03, 0x73, + 0xCC, 0x82, 0xCD, 0x03, 0x73, 0xCC, 0x87, 0xCD, + 0x03, 0x73, 0xCC, 0xA6, 0xB9, 0x03, 0x73, 0xCC, + 0xA7, 0xA9, 0x03, 0x74, 0xCC, 0x87, 0xCD, 0x03, + 0x74, 0xCC, 0x88, 0xCD, 0x03, 0x74, 0xCC, 0x8C, + 0xCD, 0x03, 0x74, 0xCC, 0xA3, 0xB9, 0x03, 0x74, + 0xCC, 0xA6, 0xB9, 0x03, 0x74, 0xCC, 0xA7, 0xA9, + // Bytes 3600 - 363f + 0x03, 0x74, 0xCC, 0xAD, 0xB9, 0x03, 0x74, 0xCC, + 0xB1, 0xB9, 0x03, 0x75, 0xCC, 0x80, 0xCD, 0x03, + 0x75, 0xCC, 0x81, 0xCD, 0x03, 0x75, 0xCC, 0x82, + 0xCD, 0x03, 0x75, 0xCC, 0x86, 0xCD, 0x03, 0x75, + 0xCC, 0x89, 0xCD, 0x03, 0x75, 0xCC, 0x8A, 0xCD, + 0x03, 0x75, 0xCC, 0x8B, 0xCD, 0x03, 0x75, 0xCC, + 0x8C, 0xCD, 0x03, 0x75, 0xCC, 0x8F, 0xCD, 0x03, + 0x75, 0xCC, 0x91, 0xCD, 0x03, 0x75, 0xCC, 0xA3, + // Bytes 3640 - 367f + 0xB9, 0x03, 0x75, 0xCC, 0xA4, 0xB9, 0x03, 0x75, + 0xCC, 0xA8, 0xA9, 0x03, 0x75, 0xCC, 0xAD, 0xB9, + 0x03, 0x75, 0xCC, 0xB0, 0xB9, 0x03, 0x76, 0xCC, + 0x83, 0xCD, 0x03, 0x76, 0xCC, 0xA3, 0xB9, 0x03, + 0x77, 0xCC, 0x80, 0xCD, 0x03, 0x77, 0xCC, 0x81, + 0xCD, 0x03, 0x77, 0xCC, 0x82, 0xCD, 0x03, 0x77, + 0xCC, 0x87, 0xCD, 0x03, 0x77, 0xCC, 0x88, 0xCD, + 0x03, 0x77, 0xCC, 0x8A, 0xCD, 0x03, 0x77, 0xCC, + // Bytes 3680 - 36bf + 0xA3, 0xB9, 0x03, 0x78, 0xCC, 0x87, 0xCD, 0x03, + 0x78, 0xCC, 0x88, 0xCD, 0x03, 0x79, 0xCC, 0x80, + 0xCD, 0x03, 0x79, 0xCC, 0x81, 0xCD, 0x03, 0x79, + 0xCC, 0x82, 0xCD, 0x03, 0x79, 0xCC, 0x83, 0xCD, + 0x03, 0x79, 0xCC, 0x84, 0xCD, 0x03, 0x79, 0xCC, + 0x87, 0xCD, 0x03, 0x79, 0xCC, 0x88, 0xCD, 0x03, + 0x79, 0xCC, 0x89, 0xCD, 0x03, 0x79, 0xCC, 0x8A, + 0xCD, 0x03, 0x79, 0xCC, 0xA3, 0xB9, 0x03, 0x7A, + // Bytes 36c0 - 36ff + 0xCC, 0x81, 0xCD, 0x03, 0x7A, 0xCC, 0x82, 0xCD, + 0x03, 0x7A, 0xCC, 0x87, 0xCD, 0x03, 0x7A, 0xCC, + 0x8C, 0xCD, 0x03, 0x7A, 0xCC, 0xA3, 0xB9, 0x03, + 0x7A, 0xCC, 0xB1, 0xB9, 0x04, 0xC2, 0xA8, 0xCC, + 0x80, 0xCE, 0x04, 0xC2, 0xA8, 0xCC, 0x81, 0xCE, + 0x04, 0xC2, 0xA8, 0xCD, 0x82, 0xCE, 0x04, 0xC3, + 0x86, 0xCC, 0x81, 0xCD, 0x04, 0xC3, 0x86, 0xCC, + 0x84, 0xCD, 0x04, 0xC3, 0x98, 0xCC, 0x81, 0xCD, + // Bytes 3700 - 373f + 0x04, 0xC3, 0xA6, 0xCC, 0x81, 0xCD, 0x04, 0xC3, + 0xA6, 0xCC, 0x84, 0xCD, 0x04, 0xC3, 0xB8, 0xCC, + 0x81, 0xCD, 0x04, 0xC5, 0xBF, 0xCC, 0x87, 0xCD, + 0x04, 0xC6, 0xB7, 0xCC, 0x8C, 0xCD, 0x04, 0xCA, + 0x92, 0xCC, 0x8C, 0xCD, 0x04, 0xCE, 0x91, 0xCC, + 0x80, 0xCD, 0x04, 0xCE, 0x91, 0xCC, 0x81, 0xCD, + 0x04, 0xCE, 0x91, 0xCC, 0x84, 0xCD, 0x04, 0xCE, + 0x91, 0xCC, 0x86, 0xCD, 0x04, 0xCE, 0x91, 0xCD, + // Bytes 3740 - 377f + 0x85, 0xDD, 0x04, 0xCE, 0x95, 0xCC, 0x80, 0xCD, + 0x04, 0xCE, 0x95, 0xCC, 0x81, 0xCD, 0x04, 0xCE, + 0x97, 0xCC, 0x80, 0xCD, 0x04, 0xCE, 0x97, 0xCC, + 0x81, 0xCD, 0x04, 0xCE, 0x97, 0xCD, 0x85, 0xDD, + 0x04, 0xCE, 0x99, 0xCC, 0x80, 0xCD, 0x04, 0xCE, + 0x99, 0xCC, 0x81, 0xCD, 0x04, 0xCE, 0x99, 0xCC, + 0x84, 0xCD, 0x04, 0xCE, 0x99, 0xCC, 0x86, 0xCD, + 0x04, 0xCE, 0x99, 0xCC, 0x88, 0xCD, 0x04, 0xCE, + // Bytes 3780 - 37bf + 0x9F, 0xCC, 
0x80, 0xCD, 0x04, 0xCE, 0x9F, 0xCC, + 0x81, 0xCD, 0x04, 0xCE, 0xA1, 0xCC, 0x94, 0xCD, + 0x04, 0xCE, 0xA5, 0xCC, 0x80, 0xCD, 0x04, 0xCE, + 0xA5, 0xCC, 0x81, 0xCD, 0x04, 0xCE, 0xA5, 0xCC, + 0x84, 0xCD, 0x04, 0xCE, 0xA5, 0xCC, 0x86, 0xCD, + 0x04, 0xCE, 0xA5, 0xCC, 0x88, 0xCD, 0x04, 0xCE, + 0xA9, 0xCC, 0x80, 0xCD, 0x04, 0xCE, 0xA9, 0xCC, + 0x81, 0xCD, 0x04, 0xCE, 0xA9, 0xCD, 0x85, 0xDD, + // Bytes 37c0 - 37ff + 0x04, 0xCE, 0xB1, 0xCC, 0x84, 0xCD, 0x04, 0xCE, + 0xB1, 0xCC, 0x86, 0xCD, 0x04, 0xCE, 0xB1, 0xCD, + 0x85, 0xDD, 0x04, 0xCE, 0xB5, 0xCC, 0x80, 0xCD, + 0x04, 0xCE, 0xB5, 0xCC, 0x81, 0xCD, 0x04, 0xCE, + 0xB7, 0xCD, 0x85, 0xDD, 0x04, 0xCE, 0xB9, 0xCC, + 0x80, 0xCD, 0x04, 0xCE, 0xB9, 0xCC, 0x81, 0xCD, + 0x04, 0xCE, 0xB9, 0xCC, 0x84, 0xCD, 0x04, 0xCE, + 0xB9, 0xCC, 0x86, 0xCD, 0x04, 0xCE, 0xB9, 0xCD, + // Bytes 3800 - 383f + 0x82, 0xCD, 0x04, 0xCE, 0xBF, 0xCC, 0x80, 0xCD, + 0x04, 0xCE, 0xBF, 0xCC, 0x81, 0xCD, 0x04, 0xCF, + 0x81, 0xCC, 0x93, 0xCD, 0x04, 0xCF, 0x81, 0xCC, + 0x94, 0xCD, 0x04, 0xCF, 0x85, 0xCC, 0x80, 0xCD, + 0x04, 0xCF, 0x85, 0xCC, 0x81, 0xCD, 0x04, 0xCF, + 0x85, 0xCC, 0x84, 0xCD, 0x04, 0xCF, 0x85, 0xCC, + 0x86, 0xCD, 0x04, 0xCF, 0x85, 0xCD, 0x82, 0xCD, + 0x04, 0xCF, 0x89, 0xCD, 0x85, 0xDD, 0x04, 0xCF, + // Bytes 3840 - 387f + 0x92, 0xCC, 0x81, 0xCD, 0x04, 0xCF, 0x92, 0xCC, + 0x88, 0xCD, 0x04, 0xD0, 0x86, 0xCC, 0x88, 0xCD, + 0x04, 0xD0, 0x90, 0xCC, 0x86, 0xCD, 0x04, 0xD0, + 0x90, 0xCC, 0x88, 0xCD, 0x04, 0xD0, 0x93, 0xCC, + 0x81, 0xCD, 0x04, 0xD0, 0x95, 0xCC, 0x80, 0xCD, + 0x04, 0xD0, 0x95, 0xCC, 0x86, 0xCD, 0x04, 0xD0, + 0x95, 0xCC, 0x88, 0xCD, 0x04, 0xD0, 0x96, 0xCC, + 0x86, 0xCD, 0x04, 0xD0, 0x96, 0xCC, 0x88, 0xCD, + // Bytes 3880 - 38bf + 0x04, 0xD0, 0x97, 0xCC, 0x88, 0xCD, 0x04, 0xD0, + 0x98, 0xCC, 0x80, 0xCD, 0x04, 0xD0, 0x98, 0xCC, + 0x84, 0xCD, 0x04, 0xD0, 0x98, 0xCC, 0x86, 0xCD, + 0x04, 0xD0, 0x98, 0xCC, 0x88, 0xCD, 0x04, 0xD0, + 0x9A, 0xCC, 0x81, 0xCD, 0x04, 0xD0, 0x9E, 0xCC, + 0x88, 0xCD, 0x04, 0xD0, 0xA3, 0xCC, 0x84, 0xCD, + 0x04, 0xD0, 0xA3, 0xCC, 0x86, 0xCD, 0x04, 0xD0, + 0xA3, 0xCC, 0x88, 0xCD, 0x04, 0xD0, 0xA3, 0xCC, + // Bytes 38c0 - 38ff + 0x8B, 0xCD, 0x04, 0xD0, 0xA7, 0xCC, 0x88, 0xCD, + 0x04, 0xD0, 0xAB, 0xCC, 0x88, 0xCD, 0x04, 0xD0, + 0xAD, 0xCC, 0x88, 0xCD, 0x04, 0xD0, 0xB0, 0xCC, + 0x86, 0xCD, 0x04, 0xD0, 0xB0, 0xCC, 0x88, 0xCD, + 0x04, 0xD0, 0xB3, 0xCC, 0x81, 0xCD, 0x04, 0xD0, + 0xB5, 0xCC, 0x80, 0xCD, 0x04, 0xD0, 0xB5, 0xCC, + 0x86, 0xCD, 0x04, 0xD0, 0xB5, 0xCC, 0x88, 0xCD, + 0x04, 0xD0, 0xB6, 0xCC, 0x86, 0xCD, 0x04, 0xD0, + // Bytes 3900 - 393f + 0xB6, 0xCC, 0x88, 0xCD, 0x04, 0xD0, 0xB7, 0xCC, + 0x88, 0xCD, 0x04, 0xD0, 0xB8, 0xCC, 0x80, 0xCD, + 0x04, 0xD0, 0xB8, 0xCC, 0x84, 0xCD, 0x04, 0xD0, + 0xB8, 0xCC, 0x86, 0xCD, 0x04, 0xD0, 0xB8, 0xCC, + 0x88, 0xCD, 0x04, 0xD0, 0xBA, 0xCC, 0x81, 0xCD, + 0x04, 0xD0, 0xBE, 0xCC, 0x88, 0xCD, 0x04, 0xD1, + 0x83, 0xCC, 0x84, 0xCD, 0x04, 0xD1, 0x83, 0xCC, + 0x86, 0xCD, 0x04, 0xD1, 0x83, 0xCC, 0x88, 0xCD, + // Bytes 3940 - 397f + 0x04, 0xD1, 0x83, 0xCC, 0x8B, 0xCD, 0x04, 0xD1, + 0x87, 0xCC, 0x88, 0xCD, 0x04, 0xD1, 0x8B, 0xCC, + 0x88, 0xCD, 0x04, 0xD1, 0x8D, 0xCC, 0x88, 0xCD, + 0x04, 0xD1, 0x96, 0xCC, 0x88, 0xCD, 0x04, 0xD1, + 0xB4, 0xCC, 0x8F, 0xCD, 0x04, 0xD1, 0xB5, 0xCC, + 0x8F, 0xCD, 0x04, 0xD3, 0x98, 0xCC, 0x88, 0xCD, + 0x04, 0xD3, 0x99, 0xCC, 0x88, 0xCD, 0x04, 0xD3, + 0xA8, 0xCC, 0x88, 0xCD, 0x04, 0xD3, 0xA9, 0xCC, + // Bytes 3980 - 39bf + 0x88, 0xCD, 0x04, 0xD8, 0xA7, 0xD9, 0x93, 0xCD, + 0x04, 0xD8, 0xA7, 0xD9, 0x94, 0xCD, 0x04, 0xD8, + 0xA7, 0xD9, 0x95, 0xB9, 0x04, 0xD9, 0x88, 0xD9, + 0x94, 0xCD, 0x04, 0xD9, 0x8A, 
0xD9, 0x94, 0xCD, + 0x04, 0xDB, 0x81, 0xD9, 0x94, 0xCD, 0x04, 0xDB, + 0x92, 0xD9, 0x94, 0xCD, 0x04, 0xDB, 0x95, 0xD9, + 0x94, 0xCD, 0x05, 0x41, 0xCC, 0x82, 0xCC, 0x80, + 0xCE, 0x05, 0x41, 0xCC, 0x82, 0xCC, 0x81, 0xCE, + // Bytes 39c0 - 39ff + 0x05, 0x41, 0xCC, 0x82, 0xCC, 0x83, 0xCE, 0x05, + 0x41, 0xCC, 0x82, 0xCC, 0x89, 0xCE, 0x05, 0x41, + 0xCC, 0x86, 0xCC, 0x80, 0xCE, 0x05, 0x41, 0xCC, + 0x86, 0xCC, 0x81, 0xCE, 0x05, 0x41, 0xCC, 0x86, + 0xCC, 0x83, 0xCE, 0x05, 0x41, 0xCC, 0x86, 0xCC, + 0x89, 0xCE, 0x05, 0x41, 0xCC, 0x87, 0xCC, 0x84, + 0xCE, 0x05, 0x41, 0xCC, 0x88, 0xCC, 0x84, 0xCE, + 0x05, 0x41, 0xCC, 0x8A, 0xCC, 0x81, 0xCE, 0x05, + // Bytes 3a00 - 3a3f + 0x41, 0xCC, 0xA3, 0xCC, 0x82, 0xCE, 0x05, 0x41, + 0xCC, 0xA3, 0xCC, 0x86, 0xCE, 0x05, 0x43, 0xCC, + 0xA7, 0xCC, 0x81, 0xCE, 0x05, 0x45, 0xCC, 0x82, + 0xCC, 0x80, 0xCE, 0x05, 0x45, 0xCC, 0x82, 0xCC, + 0x81, 0xCE, 0x05, 0x45, 0xCC, 0x82, 0xCC, 0x83, + 0xCE, 0x05, 0x45, 0xCC, 0x82, 0xCC, 0x89, 0xCE, + 0x05, 0x45, 0xCC, 0x84, 0xCC, 0x80, 0xCE, 0x05, + 0x45, 0xCC, 0x84, 0xCC, 0x81, 0xCE, 0x05, 0x45, + // Bytes 3a40 - 3a7f + 0xCC, 0xA3, 0xCC, 0x82, 0xCE, 0x05, 0x45, 0xCC, + 0xA7, 0xCC, 0x86, 0xCE, 0x05, 0x49, 0xCC, 0x88, + 0xCC, 0x81, 0xCE, 0x05, 0x4C, 0xCC, 0xA3, 0xCC, + 0x84, 0xCE, 0x05, 0x4F, 0xCC, 0x82, 0xCC, 0x80, + 0xCE, 0x05, 0x4F, 0xCC, 0x82, 0xCC, 0x81, 0xCE, + 0x05, 0x4F, 0xCC, 0x82, 0xCC, 0x83, 0xCE, 0x05, + 0x4F, 0xCC, 0x82, 0xCC, 0x89, 0xCE, 0x05, 0x4F, + 0xCC, 0x83, 0xCC, 0x81, 0xCE, 0x05, 0x4F, 0xCC, + // Bytes 3a80 - 3abf + 0x83, 0xCC, 0x84, 0xCE, 0x05, 0x4F, 0xCC, 0x83, + 0xCC, 0x88, 0xCE, 0x05, 0x4F, 0xCC, 0x84, 0xCC, + 0x80, 0xCE, 0x05, 0x4F, 0xCC, 0x84, 0xCC, 0x81, + 0xCE, 0x05, 0x4F, 0xCC, 0x87, 0xCC, 0x84, 0xCE, + 0x05, 0x4F, 0xCC, 0x88, 0xCC, 0x84, 0xCE, 0x05, + 0x4F, 0xCC, 0x9B, 0xCC, 0x80, 0xCE, 0x05, 0x4F, + 0xCC, 0x9B, 0xCC, 0x81, 0xCE, 0x05, 0x4F, 0xCC, + 0x9B, 0xCC, 0x83, 0xCE, 0x05, 0x4F, 0xCC, 0x9B, + // Bytes 3ac0 - 3aff + 0xCC, 0x89, 0xCE, 0x05, 0x4F, 0xCC, 0x9B, 0xCC, + 0xA3, 0xBA, 0x05, 0x4F, 0xCC, 0xA3, 0xCC, 0x82, + 0xCE, 0x05, 0x4F, 0xCC, 0xA8, 0xCC, 0x84, 0xCE, + 0x05, 0x52, 0xCC, 0xA3, 0xCC, 0x84, 0xCE, 0x05, + 0x53, 0xCC, 0x81, 0xCC, 0x87, 0xCE, 0x05, 0x53, + 0xCC, 0x8C, 0xCC, 0x87, 0xCE, 0x05, 0x53, 0xCC, + 0xA3, 0xCC, 0x87, 0xCE, 0x05, 0x55, 0xCC, 0x83, + 0xCC, 0x81, 0xCE, 0x05, 0x55, 0xCC, 0x84, 0xCC, + // Bytes 3b00 - 3b3f + 0x88, 0xCE, 0x05, 0x55, 0xCC, 0x88, 0xCC, 0x80, + 0xCE, 0x05, 0x55, 0xCC, 0x88, 0xCC, 0x81, 0xCE, + 0x05, 0x55, 0xCC, 0x88, 0xCC, 0x84, 0xCE, 0x05, + 0x55, 0xCC, 0x88, 0xCC, 0x8C, 0xCE, 0x05, 0x55, + 0xCC, 0x9B, 0xCC, 0x80, 0xCE, 0x05, 0x55, 0xCC, + 0x9B, 0xCC, 0x81, 0xCE, 0x05, 0x55, 0xCC, 0x9B, + 0xCC, 0x83, 0xCE, 0x05, 0x55, 0xCC, 0x9B, 0xCC, + 0x89, 0xCE, 0x05, 0x55, 0xCC, 0x9B, 0xCC, 0xA3, + // Bytes 3b40 - 3b7f + 0xBA, 0x05, 0x61, 0xCC, 0x82, 0xCC, 0x80, 0xCE, + 0x05, 0x61, 0xCC, 0x82, 0xCC, 0x81, 0xCE, 0x05, + 0x61, 0xCC, 0x82, 0xCC, 0x83, 0xCE, 0x05, 0x61, + 0xCC, 0x82, 0xCC, 0x89, 0xCE, 0x05, 0x61, 0xCC, + 0x86, 0xCC, 0x80, 0xCE, 0x05, 0x61, 0xCC, 0x86, + 0xCC, 0x81, 0xCE, 0x05, 0x61, 0xCC, 0x86, 0xCC, + 0x83, 0xCE, 0x05, 0x61, 0xCC, 0x86, 0xCC, 0x89, + 0xCE, 0x05, 0x61, 0xCC, 0x87, 0xCC, 0x84, 0xCE, + // Bytes 3b80 - 3bbf + 0x05, 0x61, 0xCC, 0x88, 0xCC, 0x84, 0xCE, 0x05, + 0x61, 0xCC, 0x8A, 0xCC, 0x81, 0xCE, 0x05, 0x61, + 0xCC, 0xA3, 0xCC, 0x82, 0xCE, 0x05, 0x61, 0xCC, + 0xA3, 0xCC, 0x86, 0xCE, 0x05, 0x63, 0xCC, 0xA7, + 0xCC, 0x81, 0xCE, 0x05, 0x65, 0xCC, 0x82, 0xCC, + 0x80, 0xCE, 0x05, 0x65, 0xCC, 0x82, 0xCC, 0x81, + 0xCE, 0x05, 0x65, 0xCC, 0x82, 0xCC, 0x83, 0xCE, + 
0x05, 0x65, 0xCC, 0x82, 0xCC, 0x89, 0xCE, 0x05, + // Bytes 3bc0 - 3bff + 0x65, 0xCC, 0x84, 0xCC, 0x80, 0xCE, 0x05, 0x65, + 0xCC, 0x84, 0xCC, 0x81, 0xCE, 0x05, 0x65, 0xCC, + 0xA3, 0xCC, 0x82, 0xCE, 0x05, 0x65, 0xCC, 0xA7, + 0xCC, 0x86, 0xCE, 0x05, 0x69, 0xCC, 0x88, 0xCC, + 0x81, 0xCE, 0x05, 0x6C, 0xCC, 0xA3, 0xCC, 0x84, + 0xCE, 0x05, 0x6F, 0xCC, 0x82, 0xCC, 0x80, 0xCE, + 0x05, 0x6F, 0xCC, 0x82, 0xCC, 0x81, 0xCE, 0x05, + 0x6F, 0xCC, 0x82, 0xCC, 0x83, 0xCE, 0x05, 0x6F, + // Bytes 3c00 - 3c3f + 0xCC, 0x82, 0xCC, 0x89, 0xCE, 0x05, 0x6F, 0xCC, + 0x83, 0xCC, 0x81, 0xCE, 0x05, 0x6F, 0xCC, 0x83, + 0xCC, 0x84, 0xCE, 0x05, 0x6F, 0xCC, 0x83, 0xCC, + 0x88, 0xCE, 0x05, 0x6F, 0xCC, 0x84, 0xCC, 0x80, + 0xCE, 0x05, 0x6F, 0xCC, 0x84, 0xCC, 0x81, 0xCE, + 0x05, 0x6F, 0xCC, 0x87, 0xCC, 0x84, 0xCE, 0x05, + 0x6F, 0xCC, 0x88, 0xCC, 0x84, 0xCE, 0x05, 0x6F, + 0xCC, 0x9B, 0xCC, 0x80, 0xCE, 0x05, 0x6F, 0xCC, + // Bytes 3c40 - 3c7f + 0x9B, 0xCC, 0x81, 0xCE, 0x05, 0x6F, 0xCC, 0x9B, + 0xCC, 0x83, 0xCE, 0x05, 0x6F, 0xCC, 0x9B, 0xCC, + 0x89, 0xCE, 0x05, 0x6F, 0xCC, 0x9B, 0xCC, 0xA3, + 0xBA, 0x05, 0x6F, 0xCC, 0xA3, 0xCC, 0x82, 0xCE, + 0x05, 0x6F, 0xCC, 0xA8, 0xCC, 0x84, 0xCE, 0x05, + 0x72, 0xCC, 0xA3, 0xCC, 0x84, 0xCE, 0x05, 0x73, + 0xCC, 0x81, 0xCC, 0x87, 0xCE, 0x05, 0x73, 0xCC, + 0x8C, 0xCC, 0x87, 0xCE, 0x05, 0x73, 0xCC, 0xA3, + // Bytes 3c80 - 3cbf + 0xCC, 0x87, 0xCE, 0x05, 0x75, 0xCC, 0x83, 0xCC, + 0x81, 0xCE, 0x05, 0x75, 0xCC, 0x84, 0xCC, 0x88, + 0xCE, 0x05, 0x75, 0xCC, 0x88, 0xCC, 0x80, 0xCE, + 0x05, 0x75, 0xCC, 0x88, 0xCC, 0x81, 0xCE, 0x05, + 0x75, 0xCC, 0x88, 0xCC, 0x84, 0xCE, 0x05, 0x75, + 0xCC, 0x88, 0xCC, 0x8C, 0xCE, 0x05, 0x75, 0xCC, + 0x9B, 0xCC, 0x80, 0xCE, 0x05, 0x75, 0xCC, 0x9B, + 0xCC, 0x81, 0xCE, 0x05, 0x75, 0xCC, 0x9B, 0xCC, + // Bytes 3cc0 - 3cff + 0x83, 0xCE, 0x05, 0x75, 0xCC, 0x9B, 0xCC, 0x89, + 0xCE, 0x05, 0x75, 0xCC, 0x9B, 0xCC, 0xA3, 0xBA, + 0x05, 0xE1, 0xBE, 0xBF, 0xCC, 0x80, 0xCE, 0x05, + 0xE1, 0xBE, 0xBF, 0xCC, 0x81, 0xCE, 0x05, 0xE1, + 0xBE, 0xBF, 0xCD, 0x82, 0xCE, 0x05, 0xE1, 0xBF, + 0xBE, 0xCC, 0x80, 0xCE, 0x05, 0xE1, 0xBF, 0xBE, + 0xCC, 0x81, 0xCE, 0x05, 0xE1, 0xBF, 0xBE, 0xCD, + 0x82, 0xCE, 0x05, 0xE2, 0x86, 0x90, 0xCC, 0xB8, + // Bytes 3d00 - 3d3f + 0x05, 0x05, 0xE2, 0x86, 0x92, 0xCC, 0xB8, 0x05, + 0x05, 0xE2, 0x86, 0x94, 0xCC, 0xB8, 0x05, 0x05, + 0xE2, 0x87, 0x90, 0xCC, 0xB8, 0x05, 0x05, 0xE2, + 0x87, 0x92, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x87, + 0x94, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x88, 0x83, + 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x88, 0x88, 0xCC, + 0xB8, 0x05, 0x05, 0xE2, 0x88, 0x8B, 0xCC, 0xB8, + 0x05, 0x05, 0xE2, 0x88, 0xA3, 0xCC, 0xB8, 0x05, + // Bytes 3d40 - 3d7f + 0x05, 0xE2, 0x88, 0xA5, 0xCC, 0xB8, 0x05, 0x05, + 0xE2, 0x88, 0xBC, 0xCC, 0xB8, 0x05, 0x05, 0xE2, + 0x89, 0x83, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x89, + 0x85, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x89, 0x88, + 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x89, 0x8D, 0xCC, + 0xB8, 0x05, 0x05, 0xE2, 0x89, 0xA1, 0xCC, 0xB8, + 0x05, 0x05, 0xE2, 0x89, 0xA4, 0xCC, 0xB8, 0x05, + 0x05, 0xE2, 0x89, 0xA5, 0xCC, 0xB8, 0x05, 0x05, + // Bytes 3d80 - 3dbf + 0xE2, 0x89, 0xB2, 0xCC, 0xB8, 0x05, 0x05, 0xE2, + 0x89, 0xB3, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x89, + 0xB6, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x89, 0xB7, + 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x89, 0xBA, 0xCC, + 0xB8, 0x05, 0x05, 0xE2, 0x89, 0xBB, 0xCC, 0xB8, + 0x05, 0x05, 0xE2, 0x89, 0xBC, 0xCC, 0xB8, 0x05, + 0x05, 0xE2, 0x89, 0xBD, 0xCC, 0xB8, 0x05, 0x05, + 0xE2, 0x8A, 0x82, 0xCC, 0xB8, 0x05, 0x05, 0xE2, + // Bytes 3dc0 - 3dff + 0x8A, 0x83, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x8A, + 0x86, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x8A, 0x87, 
+ 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x8A, 0x91, 0xCC, + 0xB8, 0x05, 0x05, 0xE2, 0x8A, 0x92, 0xCC, 0xB8, + 0x05, 0x05, 0xE2, 0x8A, 0xA2, 0xCC, 0xB8, 0x05, + 0x05, 0xE2, 0x8A, 0xA8, 0xCC, 0xB8, 0x05, 0x05, + 0xE2, 0x8A, 0xA9, 0xCC, 0xB8, 0x05, 0x05, 0xE2, + 0x8A, 0xAB, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x8A, + // Bytes 3e00 - 3e3f + 0xB2, 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x8A, 0xB3, + 0xCC, 0xB8, 0x05, 0x05, 0xE2, 0x8A, 0xB4, 0xCC, + 0xB8, 0x05, 0x05, 0xE2, 0x8A, 0xB5, 0xCC, 0xB8, + 0x05, 0x06, 0xCE, 0x91, 0xCC, 0x93, 0xCD, 0x85, + 0xDE, 0x06, 0xCE, 0x91, 0xCC, 0x94, 0xCD, 0x85, + 0xDE, 0x06, 0xCE, 0x95, 0xCC, 0x93, 0xCC, 0x80, + 0xCE, 0x06, 0xCE, 0x95, 0xCC, 0x93, 0xCC, 0x81, + 0xCE, 0x06, 0xCE, 0x95, 0xCC, 0x94, 0xCC, 0x80, + // Bytes 3e40 - 3e7f + 0xCE, 0x06, 0xCE, 0x95, 0xCC, 0x94, 0xCC, 0x81, + 0xCE, 0x06, 0xCE, 0x97, 0xCC, 0x93, 0xCD, 0x85, + 0xDE, 0x06, 0xCE, 0x97, 0xCC, 0x94, 0xCD, 0x85, + 0xDE, 0x06, 0xCE, 0x99, 0xCC, 0x93, 0xCC, 0x80, + 0xCE, 0x06, 0xCE, 0x99, 0xCC, 0x93, 0xCC, 0x81, + 0xCE, 0x06, 0xCE, 0x99, 0xCC, 0x93, 0xCD, 0x82, + 0xCE, 0x06, 0xCE, 0x99, 0xCC, 0x94, 0xCC, 0x80, + 0xCE, 0x06, 0xCE, 0x99, 0xCC, 0x94, 0xCC, 0x81, + // Bytes 3e80 - 3ebf + 0xCE, 0x06, 0xCE, 0x99, 0xCC, 0x94, 0xCD, 0x82, + 0xCE, 0x06, 0xCE, 0x9F, 0xCC, 0x93, 0xCC, 0x80, + 0xCE, 0x06, 0xCE, 0x9F, 0xCC, 0x93, 0xCC, 0x81, + 0xCE, 0x06, 0xCE, 0x9F, 0xCC, 0x94, 0xCC, 0x80, + 0xCE, 0x06, 0xCE, 0x9F, 0xCC, 0x94, 0xCC, 0x81, + 0xCE, 0x06, 0xCE, 0xA5, 0xCC, 0x94, 0xCC, 0x80, + 0xCE, 0x06, 0xCE, 0xA5, 0xCC, 0x94, 0xCC, 0x81, + 0xCE, 0x06, 0xCE, 0xA5, 0xCC, 0x94, 0xCD, 0x82, + // Bytes 3ec0 - 3eff + 0xCE, 0x06, 0xCE, 0xA9, 0xCC, 0x93, 0xCD, 0x85, + 0xDE, 0x06, 0xCE, 0xA9, 0xCC, 0x94, 0xCD, 0x85, + 0xDE, 0x06, 0xCE, 0xB1, 0xCC, 0x80, 0xCD, 0x85, + 0xDE, 0x06, 0xCE, 0xB1, 0xCC, 0x81, 0xCD, 0x85, + 0xDE, 0x06, 0xCE, 0xB1, 0xCC, 0x93, 0xCD, 0x85, + 0xDE, 0x06, 0xCE, 0xB1, 0xCC, 0x94, 0xCD, 0x85, + 0xDE, 0x06, 0xCE, 0xB1, 0xCD, 0x82, 0xCD, 0x85, + 0xDE, 0x06, 0xCE, 0xB5, 0xCC, 0x93, 0xCC, 0x80, + // Bytes 3f00 - 3f3f + 0xCE, 0x06, 0xCE, 0xB5, 0xCC, 0x93, 0xCC, 0x81, + 0xCE, 0x06, 0xCE, 0xB5, 0xCC, 0x94, 0xCC, 0x80, + 0xCE, 0x06, 0xCE, 0xB5, 0xCC, 0x94, 0xCC, 0x81, + 0xCE, 0x06, 0xCE, 0xB7, 0xCC, 0x80, 0xCD, 0x85, + 0xDE, 0x06, 0xCE, 0xB7, 0xCC, 0x81, 0xCD, 0x85, + 0xDE, 0x06, 0xCE, 0xB7, 0xCC, 0x93, 0xCD, 0x85, + 0xDE, 0x06, 0xCE, 0xB7, 0xCC, 0x94, 0xCD, 0x85, + 0xDE, 0x06, 0xCE, 0xB7, 0xCD, 0x82, 0xCD, 0x85, + // Bytes 3f40 - 3f7f + 0xDE, 0x06, 0xCE, 0xB9, 0xCC, 0x88, 0xCC, 0x80, + 0xCE, 0x06, 0xCE, 0xB9, 0xCC, 0x88, 0xCC, 0x81, + 0xCE, 0x06, 0xCE, 0xB9, 0xCC, 0x88, 0xCD, 0x82, + 0xCE, 0x06, 0xCE, 0xB9, 0xCC, 0x93, 0xCC, 0x80, + 0xCE, 0x06, 0xCE, 0xB9, 0xCC, 0x93, 0xCC, 0x81, + 0xCE, 0x06, 0xCE, 0xB9, 0xCC, 0x93, 0xCD, 0x82, + 0xCE, 0x06, 0xCE, 0xB9, 0xCC, 0x94, 0xCC, 0x80, + 0xCE, 0x06, 0xCE, 0xB9, 0xCC, 0x94, 0xCC, 0x81, + // Bytes 3f80 - 3fbf + 0xCE, 0x06, 0xCE, 0xB9, 0xCC, 0x94, 0xCD, 0x82, + 0xCE, 0x06, 0xCE, 0xBF, 0xCC, 0x93, 0xCC, 0x80, + 0xCE, 0x06, 0xCE, 0xBF, 0xCC, 0x93, 0xCC, 0x81, + 0xCE, 0x06, 0xCE, 0xBF, 0xCC, 0x94, 0xCC, 0x80, + 0xCE, 0x06, 0xCE, 0xBF, 0xCC, 0x94, 0xCC, 0x81, + 0xCE, 0x06, 0xCF, 0x85, 0xCC, 0x88, 0xCC, 0x80, + 0xCE, 0x06, 0xCF, 0x85, 0xCC, 0x88, 0xCC, 0x81, + 0xCE, 0x06, 0xCF, 0x85, 0xCC, 0x88, 0xCD, 0x82, + // Bytes 3fc0 - 3fff + 0xCE, 0x06, 0xCF, 0x85, 0xCC, 0x93, 0xCC, 0x80, + 0xCE, 0x06, 0xCF, 0x85, 0xCC, 0x93, 0xCC, 0x81, + 0xCE, 0x06, 0xCF, 0x85, 0xCC, 0x93, 0xCD, 0x82, + 0xCE, 0x06, 0xCF, 0x85, 0xCC, 0x94, 0xCC, 0x80, + 0xCE, 0x06, 0xCF, 0x85, 0xCC, 0x94, 0xCC, 0x81, + 0xCE, 0x06, 0xCF, 
0x85, 0xCC, 0x94, 0xCD, 0x82, + 0xCE, 0x06, 0xCF, 0x89, 0xCC, 0x80, 0xCD, 0x85, + 0xDE, 0x06, 0xCF, 0x89, 0xCC, 0x81, 0xCD, 0x85, + // Bytes 4000 - 403f + 0xDE, 0x06, 0xCF, 0x89, 0xCC, 0x93, 0xCD, 0x85, + 0xDE, 0x06, 0xCF, 0x89, 0xCC, 0x94, 0xCD, 0x85, + 0xDE, 0x06, 0xCF, 0x89, 0xCD, 0x82, 0xCD, 0x85, + 0xDE, 0x06, 0xE0, 0xA4, 0xA8, 0xE0, 0xA4, 0xBC, + 0x0D, 0x06, 0xE0, 0xA4, 0xB0, 0xE0, 0xA4, 0xBC, + 0x0D, 0x06, 0xE0, 0xA4, 0xB3, 0xE0, 0xA4, 0xBC, + 0x0D, 0x06, 0xE0, 0xB1, 0x86, 0xE0, 0xB1, 0x96, + 0x89, 0x06, 0xE0, 0xB7, 0x99, 0xE0, 0xB7, 0x8A, + // Bytes 4040 - 407f + 0x15, 0x06, 0xE3, 0x81, 0x86, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0x8B, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0x8D, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0x8F, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0x91, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0x93, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0x95, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0x97, 0xE3, 0x82, 0x99, + // Bytes 4080 - 40bf + 0x11, 0x06, 0xE3, 0x81, 0x99, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0x9B, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0x9D, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0x9F, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0xA1, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0xA4, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0xA6, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0xA8, 0xE3, 0x82, 0x99, + // Bytes 40c0 - 40ff + 0x11, 0x06, 0xE3, 0x81, 0xAF, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0xAF, 0xE3, 0x82, 0x9A, + 0x11, 0x06, 0xE3, 0x81, 0xB2, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0xB2, 0xE3, 0x82, 0x9A, + 0x11, 0x06, 0xE3, 0x81, 0xB5, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0xB5, 0xE3, 0x82, 0x9A, + 0x11, 0x06, 0xE3, 0x81, 0xB8, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0xB8, 0xE3, 0x82, 0x9A, + // Bytes 4100 - 413f + 0x11, 0x06, 0xE3, 0x81, 0xBB, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x81, 0xBB, 0xE3, 0x82, 0x9A, + 0x11, 0x06, 0xE3, 0x82, 0x9D, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x82, 0xA6, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x82, 0xAB, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x82, 0xAD, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x82, 0xAF, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x82, 0xB1, 0xE3, 0x82, 0x99, + // Bytes 4140 - 417f + 0x11, 0x06, 0xE3, 0x82, 0xB3, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x82, 0xB5, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x82, 0xB7, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x82, 0xB9, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x82, 0xBB, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x82, 0xBD, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x82, 0xBF, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x83, 0x81, 0xE3, 0x82, 0x99, + // Bytes 4180 - 41bf + 0x11, 0x06, 0xE3, 0x83, 0x84, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x83, 0x86, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x83, 0x88, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x83, 0x8F, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x83, 0x8F, 0xE3, 0x82, 0x9A, + 0x11, 0x06, 0xE3, 0x83, 0x92, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x83, 0x92, 0xE3, 0x82, 0x9A, + 0x11, 0x06, 0xE3, 0x83, 0x95, 0xE3, 0x82, 0x99, + // Bytes 41c0 - 41ff + 0x11, 0x06, 0xE3, 0x83, 0x95, 0xE3, 0x82, 0x9A, + 0x11, 0x06, 0xE3, 0x83, 0x98, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x83, 0x98, 0xE3, 0x82, 0x9A, + 0x11, 0x06, 0xE3, 0x83, 0x9B, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x83, 0x9B, 0xE3, 0x82, 0x9A, + 0x11, 0x06, 0xE3, 0x83, 0xAF, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x83, 0xB0, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x83, 0xB1, 0xE3, 0x82, 0x99, + // Bytes 4200 - 423f + 0x11, 0x06, 
0xE3, 0x83, 0xB2, 0xE3, 0x82, 0x99, + 0x11, 0x06, 0xE3, 0x83, 0xBD, 0xE3, 0x82, 0x99, + 0x11, 0x08, 0xCE, 0x91, 0xCC, 0x93, 0xCC, 0x80, + 0xCD, 0x85, 0xDF, 0x08, 0xCE, 0x91, 0xCC, 0x93, + 0xCC, 0x81, 0xCD, 0x85, 0xDF, 0x08, 0xCE, 0x91, + 0xCC, 0x93, 0xCD, 0x82, 0xCD, 0x85, 0xDF, 0x08, + 0xCE, 0x91, 0xCC, 0x94, 0xCC, 0x80, 0xCD, 0x85, + 0xDF, 0x08, 0xCE, 0x91, 0xCC, 0x94, 0xCC, 0x81, + // Bytes 4240 - 427f + 0xCD, 0x85, 0xDF, 0x08, 0xCE, 0x91, 0xCC, 0x94, + 0xCD, 0x82, 0xCD, 0x85, 0xDF, 0x08, 0xCE, 0x97, + 0xCC, 0x93, 0xCC, 0x80, 0xCD, 0x85, 0xDF, 0x08, + 0xCE, 0x97, 0xCC, 0x93, 0xCC, 0x81, 0xCD, 0x85, + 0xDF, 0x08, 0xCE, 0x97, 0xCC, 0x93, 0xCD, 0x82, + 0xCD, 0x85, 0xDF, 0x08, 0xCE, 0x97, 0xCC, 0x94, + 0xCC, 0x80, 0xCD, 0x85, 0xDF, 0x08, 0xCE, 0x97, + 0xCC, 0x94, 0xCC, 0x81, 0xCD, 0x85, 0xDF, 0x08, + // Bytes 4280 - 42bf + 0xCE, 0x97, 0xCC, 0x94, 0xCD, 0x82, 0xCD, 0x85, + 0xDF, 0x08, 0xCE, 0xA9, 0xCC, 0x93, 0xCC, 0x80, + 0xCD, 0x85, 0xDF, 0x08, 0xCE, 0xA9, 0xCC, 0x93, + 0xCC, 0x81, 0xCD, 0x85, 0xDF, 0x08, 0xCE, 0xA9, + 0xCC, 0x93, 0xCD, 0x82, 0xCD, 0x85, 0xDF, 0x08, + 0xCE, 0xA9, 0xCC, 0x94, 0xCC, 0x80, 0xCD, 0x85, + 0xDF, 0x08, 0xCE, 0xA9, 0xCC, 0x94, 0xCC, 0x81, + 0xCD, 0x85, 0xDF, 0x08, 0xCE, 0xA9, 0xCC, 0x94, + // Bytes 42c0 - 42ff + 0xCD, 0x82, 0xCD, 0x85, 0xDF, 0x08, 0xCE, 0xB1, + 0xCC, 0x93, 0xCC, 0x80, 0xCD, 0x85, 0xDF, 0x08, + 0xCE, 0xB1, 0xCC, 0x93, 0xCC, 0x81, 0xCD, 0x85, + 0xDF, 0x08, 0xCE, 0xB1, 0xCC, 0x93, 0xCD, 0x82, + 0xCD, 0x85, 0xDF, 0x08, 0xCE, 0xB1, 0xCC, 0x94, + 0xCC, 0x80, 0xCD, 0x85, 0xDF, 0x08, 0xCE, 0xB1, + 0xCC, 0x94, 0xCC, 0x81, 0xCD, 0x85, 0xDF, 0x08, + 0xCE, 0xB1, 0xCC, 0x94, 0xCD, 0x82, 0xCD, 0x85, + // Bytes 4300 - 433f + 0xDF, 0x08, 0xCE, 0xB7, 0xCC, 0x93, 0xCC, 0x80, + 0xCD, 0x85, 0xDF, 0x08, 0xCE, 0xB7, 0xCC, 0x93, + 0xCC, 0x81, 0xCD, 0x85, 0xDF, 0x08, 0xCE, 0xB7, + 0xCC, 0x93, 0xCD, 0x82, 0xCD, 0x85, 0xDF, 0x08, + 0xCE, 0xB7, 0xCC, 0x94, 0xCC, 0x80, 0xCD, 0x85, + 0xDF, 0x08, 0xCE, 0xB7, 0xCC, 0x94, 0xCC, 0x81, + 0xCD, 0x85, 0xDF, 0x08, 0xCE, 0xB7, 0xCC, 0x94, + 0xCD, 0x82, 0xCD, 0x85, 0xDF, 0x08, 0xCF, 0x89, + // Bytes 4340 - 437f + 0xCC, 0x93, 0xCC, 0x80, 0xCD, 0x85, 0xDF, 0x08, + 0xCF, 0x89, 0xCC, 0x93, 0xCC, 0x81, 0xCD, 0x85, + 0xDF, 0x08, 0xCF, 0x89, 0xCC, 0x93, 0xCD, 0x82, + 0xCD, 0x85, 0xDF, 0x08, 0xCF, 0x89, 0xCC, 0x94, + 0xCC, 0x80, 0xCD, 0x85, 0xDF, 0x08, 0xCF, 0x89, + 0xCC, 0x94, 0xCC, 0x81, 0xCD, 0x85, 0xDF, 0x08, + 0xCF, 0x89, 0xCC, 0x94, 0xCD, 0x82, 0xCD, 0x85, + 0xDF, 0x08, 0xF0, 0x91, 0x82, 0x99, 0xF0, 0x91, + // Bytes 4380 - 43bf + 0x82, 0xBA, 0x0D, 0x08, 0xF0, 0x91, 0x82, 0x9B, + 0xF0, 0x91, 0x82, 0xBA, 0x0D, 0x08, 0xF0, 0x91, + 0x82, 0xA5, 0xF0, 0x91, 0x82, 0xBA, 0x0D, 0x42, + 0xC2, 0xB4, 0x01, 0x43, 0x20, 0xCC, 0x81, 0xCD, + 0x43, 0x20, 0xCC, 0x83, 0xCD, 0x43, 0x20, 0xCC, + 0x84, 0xCD, 0x43, 0x20, 0xCC, 0x85, 0xCD, 0x43, + 0x20, 0xCC, 0x86, 0xCD, 0x43, 0x20, 0xCC, 0x87, + 0xCD, 0x43, 0x20, 0xCC, 0x88, 0xCD, 0x43, 0x20, + // Bytes 43c0 - 43ff + 0xCC, 0x8A, 0xCD, 0x43, 0x20, 0xCC, 0x8B, 0xCD, + 0x43, 0x20, 0xCC, 0x93, 0xCD, 0x43, 0x20, 0xCC, + 0x94, 0xCD, 0x43, 0x20, 0xCC, 0xA7, 0xA9, 0x43, + 0x20, 0xCC, 0xA8, 0xA9, 0x43, 0x20, 0xCC, 0xB3, + 0xB9, 0x43, 0x20, 0xCD, 0x82, 0xCD, 0x43, 0x20, + 0xCD, 0x85, 0xDD, 0x43, 0x20, 0xD9, 0x8B, 0x5D, + 0x43, 0x20, 0xD9, 0x8C, 0x61, 0x43, 0x20, 0xD9, + 0x8D, 0x65, 0x43, 0x20, 0xD9, 0x8E, 0x69, 0x43, + // Bytes 4400 - 443f + 0x20, 0xD9, 0x8F, 0x6D, 0x43, 0x20, 0xD9, 0x90, + 0x71, 0x43, 0x20, 0xD9, 0x91, 0x75, 0x43, 0x20, + 0xD9, 0x92, 0x79, 0x43, 0x41, 0xCC, 0x8A, 0xCD, + 0x43, 0x73, 0xCC, 0x87, 0xCD, 
0x44, 0x20, 0xE3, + 0x82, 0x99, 0x11, 0x44, 0x20, 0xE3, 0x82, 0x9A, + 0x11, 0x44, 0xC2, 0xA8, 0xCC, 0x81, 0xCE, 0x44, + 0xCE, 0x91, 0xCC, 0x81, 0xCD, 0x44, 0xCE, 0x95, + 0xCC, 0x81, 0xCD, 0x44, 0xCE, 0x97, 0xCC, 0x81, + // Bytes 4440 - 447f + 0xCD, 0x44, 0xCE, 0x99, 0xCC, 0x81, 0xCD, 0x44, + 0xCE, 0x9F, 0xCC, 0x81, 0xCD, 0x44, 0xCE, 0xA5, + 0xCC, 0x81, 0xCD, 0x44, 0xCE, 0xA5, 0xCC, 0x88, + 0xCD, 0x44, 0xCE, 0xA9, 0xCC, 0x81, 0xCD, 0x44, + 0xCE, 0xB1, 0xCC, 0x81, 0xCD, 0x44, 0xCE, 0xB5, + 0xCC, 0x81, 0xCD, 0x44, 0xCE, 0xB7, 0xCC, 0x81, + 0xCD, 0x44, 0xCE, 0xB9, 0xCC, 0x81, 0xCD, 0x44, + 0xCE, 0xBF, 0xCC, 0x81, 0xCD, 0x44, 0xCF, 0x85, + // Bytes 4480 - 44bf + 0xCC, 0x81, 0xCD, 0x44, 0xCF, 0x89, 0xCC, 0x81, + 0xCD, 0x44, 0xD7, 0x90, 0xD6, 0xB7, 0x35, 0x44, + 0xD7, 0x90, 0xD6, 0xB8, 0x39, 0x44, 0xD7, 0x90, + 0xD6, 0xBC, 0x45, 0x44, 0xD7, 0x91, 0xD6, 0xBC, + 0x45, 0x44, 0xD7, 0x91, 0xD6, 0xBF, 0x4D, 0x44, + 0xD7, 0x92, 0xD6, 0xBC, 0x45, 0x44, 0xD7, 0x93, + 0xD6, 0xBC, 0x45, 0x44, 0xD7, 0x94, 0xD6, 0xBC, + 0x45, 0x44, 0xD7, 0x95, 0xD6, 0xB9, 0x3D, 0x44, + // Bytes 44c0 - 44ff + 0xD7, 0x95, 0xD6, 0xBC, 0x45, 0x44, 0xD7, 0x96, + 0xD6, 0xBC, 0x45, 0x44, 0xD7, 0x98, 0xD6, 0xBC, + 0x45, 0x44, 0xD7, 0x99, 0xD6, 0xB4, 0x29, 0x44, + 0xD7, 0x99, 0xD6, 0xBC, 0x45, 0x44, 0xD7, 0x9A, + 0xD6, 0xBC, 0x45, 0x44, 0xD7, 0x9B, 0xD6, 0xBC, + 0x45, 0x44, 0xD7, 0x9B, 0xD6, 0xBF, 0x4D, 0x44, + 0xD7, 0x9C, 0xD6, 0xBC, 0x45, 0x44, 0xD7, 0x9E, + 0xD6, 0xBC, 0x45, 0x44, 0xD7, 0xA0, 0xD6, 0xBC, + // Bytes 4500 - 453f + 0x45, 0x44, 0xD7, 0xA1, 0xD6, 0xBC, 0x45, 0x44, + 0xD7, 0xA3, 0xD6, 0xBC, 0x45, 0x44, 0xD7, 0xA4, + 0xD6, 0xBC, 0x45, 0x44, 0xD7, 0xA4, 0xD6, 0xBF, + 0x4D, 0x44, 0xD7, 0xA6, 0xD6, 0xBC, 0x45, 0x44, + 0xD7, 0xA7, 0xD6, 0xBC, 0x45, 0x44, 0xD7, 0xA8, + 0xD6, 0xBC, 0x45, 0x44, 0xD7, 0xA9, 0xD6, 0xBC, + 0x45, 0x44, 0xD7, 0xA9, 0xD7, 0x81, 0x51, 0x44, + 0xD7, 0xA9, 0xD7, 0x82, 0x55, 0x44, 0xD7, 0xAA, + // Bytes 4540 - 457f + 0xD6, 0xBC, 0x45, 0x44, 0xD7, 0xB2, 0xD6, 0xB7, + 0x35, 0x44, 0xD8, 0xA7, 0xD9, 0x8B, 0x5D, 0x44, + 0xD8, 0xA7, 0xD9, 0x93, 0xCD, 0x44, 0xD8, 0xA7, + 0xD9, 0x94, 0xCD, 0x44, 0xD8, 0xA7, 0xD9, 0x95, + 0xB9, 0x44, 0xD8, 0xB0, 0xD9, 0xB0, 0x7D, 0x44, + 0xD8, 0xB1, 0xD9, 0xB0, 0x7D, 0x44, 0xD9, 0x80, + 0xD9, 0x8B, 0x5D, 0x44, 0xD9, 0x80, 0xD9, 0x8E, + 0x69, 0x44, 0xD9, 0x80, 0xD9, 0x8F, 0x6D, 0x44, + // Bytes 4580 - 45bf + 0xD9, 0x80, 0xD9, 0x90, 0x71, 0x44, 0xD9, 0x80, + 0xD9, 0x91, 0x75, 0x44, 0xD9, 0x80, 0xD9, 0x92, + 0x79, 0x44, 0xD9, 0x87, 0xD9, 0xB0, 0x7D, 0x44, + 0xD9, 0x88, 0xD9, 0x94, 0xCD, 0x44, 0xD9, 0x89, + 0xD9, 0xB0, 0x7D, 0x44, 0xD9, 0x8A, 0xD9, 0x94, + 0xCD, 0x44, 0xDB, 0x92, 0xD9, 0x94, 0xCD, 0x44, + 0xDB, 0x95, 0xD9, 0x94, 0xCD, 0x45, 0x20, 0xCC, + 0x88, 0xCC, 0x80, 0xCE, 0x45, 0x20, 0xCC, 0x88, + // Bytes 45c0 - 45ff + 0xCC, 0x81, 0xCE, 0x45, 0x20, 0xCC, 0x88, 0xCD, + 0x82, 0xCE, 0x45, 0x20, 0xCC, 0x93, 0xCC, 0x80, + 0xCE, 0x45, 0x20, 0xCC, 0x93, 0xCC, 0x81, 0xCE, + 0x45, 0x20, 0xCC, 0x93, 0xCD, 0x82, 0xCE, 0x45, + 0x20, 0xCC, 0x94, 0xCC, 0x80, 0xCE, 0x45, 0x20, + 0xCC, 0x94, 0xCC, 0x81, 0xCE, 0x45, 0x20, 0xCC, + 0x94, 0xCD, 0x82, 0xCE, 0x45, 0x20, 0xD9, 0x8C, + 0xD9, 0x91, 0x76, 0x45, 0x20, 0xD9, 0x8D, 0xD9, + // Bytes 4600 - 463f + 0x91, 0x76, 0x45, 0x20, 0xD9, 0x8E, 0xD9, 0x91, + 0x76, 0x45, 0x20, 0xD9, 0x8F, 0xD9, 0x91, 0x76, + 0x45, 0x20, 0xD9, 0x90, 0xD9, 0x91, 0x76, 0x45, + 0x20, 0xD9, 0x91, 0xD9, 0xB0, 0x7E, 0x45, 0xE2, + 0xAB, 0x9D, 0xCC, 0xB8, 0x05, 0x46, 0xCE, 0xB9, + 0xCC, 0x88, 0xCC, 0x81, 0xCE, 0x46, 0xCF, 0x85, + 0xCC, 0x88, 0xCC, 0x81, 0xCE, 0x46, 0xD7, 0xA9, + 
0xD6, 0xBC, 0xD7, 0x81, 0x52, 0x46, 0xD7, 0xA9, + // Bytes 4640 - 467f + 0xD6, 0xBC, 0xD7, 0x82, 0x56, 0x46, 0xD9, 0x80, + 0xD9, 0x8E, 0xD9, 0x91, 0x76, 0x46, 0xD9, 0x80, + 0xD9, 0x8F, 0xD9, 0x91, 0x76, 0x46, 0xD9, 0x80, + 0xD9, 0x90, 0xD9, 0x91, 0x76, 0x46, 0xE0, 0xA4, + 0x95, 0xE0, 0xA4, 0xBC, 0x0D, 0x46, 0xE0, 0xA4, + 0x96, 0xE0, 0xA4, 0xBC, 0x0D, 0x46, 0xE0, 0xA4, + 0x97, 0xE0, 0xA4, 0xBC, 0x0D, 0x46, 0xE0, 0xA4, + 0x9C, 0xE0, 0xA4, 0xBC, 0x0D, 0x46, 0xE0, 0xA4, + // Bytes 4680 - 46bf + 0xA1, 0xE0, 0xA4, 0xBC, 0x0D, 0x46, 0xE0, 0xA4, + 0xA2, 0xE0, 0xA4, 0xBC, 0x0D, 0x46, 0xE0, 0xA4, + 0xAB, 0xE0, 0xA4, 0xBC, 0x0D, 0x46, 0xE0, 0xA4, + 0xAF, 0xE0, 0xA4, 0xBC, 0x0D, 0x46, 0xE0, 0xA6, + 0xA1, 0xE0, 0xA6, 0xBC, 0x0D, 0x46, 0xE0, 0xA6, + 0xA2, 0xE0, 0xA6, 0xBC, 0x0D, 0x46, 0xE0, 0xA6, + 0xAF, 0xE0, 0xA6, 0xBC, 0x0D, 0x46, 0xE0, 0xA8, + 0x96, 0xE0, 0xA8, 0xBC, 0x0D, 0x46, 0xE0, 0xA8, + // Bytes 46c0 - 46ff + 0x97, 0xE0, 0xA8, 0xBC, 0x0D, 0x46, 0xE0, 0xA8, + 0x9C, 0xE0, 0xA8, 0xBC, 0x0D, 0x46, 0xE0, 0xA8, + 0xAB, 0xE0, 0xA8, 0xBC, 0x0D, 0x46, 0xE0, 0xA8, + 0xB2, 0xE0, 0xA8, 0xBC, 0x0D, 0x46, 0xE0, 0xA8, + 0xB8, 0xE0, 0xA8, 0xBC, 0x0D, 0x46, 0xE0, 0xAC, + 0xA1, 0xE0, 0xAC, 0xBC, 0x0D, 0x46, 0xE0, 0xAC, + 0xA2, 0xE0, 0xAC, 0xBC, 0x0D, 0x46, 0xE0, 0xBE, + 0xB2, 0xE0, 0xBE, 0x80, 0xA1, 0x46, 0xE0, 0xBE, + // Bytes 4700 - 473f + 0xB3, 0xE0, 0xBE, 0x80, 0xA1, 0x46, 0xE3, 0x83, + 0x86, 0xE3, 0x82, 0x99, 0x11, 0x48, 0xF0, 0x9D, + 0x85, 0x97, 0xF0, 0x9D, 0x85, 0xA5, 0xB1, 0x48, + 0xF0, 0x9D, 0x85, 0x98, 0xF0, 0x9D, 0x85, 0xA5, + 0xB1, 0x48, 0xF0, 0x9D, 0x86, 0xB9, 0xF0, 0x9D, + 0x85, 0xA5, 0xB1, 0x48, 0xF0, 0x9D, 0x86, 0xBA, + 0xF0, 0x9D, 0x85, 0xA5, 0xB1, 0x49, 0xE0, 0xBE, + 0xB2, 0xE0, 0xBD, 0xB1, 0xE0, 0xBE, 0x80, 0xA2, + // Bytes 4740 - 477f + 0x49, 0xE0, 0xBE, 0xB3, 0xE0, 0xBD, 0xB1, 0xE0, + 0xBE, 0x80, 0xA2, 0x4C, 0xF0, 0x9D, 0x85, 0x98, + 0xF0, 0x9D, 0x85, 0xA5, 0xF0, 0x9D, 0x85, 0xAE, + 0xB2, 0x4C, 0xF0, 0x9D, 0x85, 0x98, 0xF0, 0x9D, + 0x85, 0xA5, 0xF0, 0x9D, 0x85, 0xAF, 0xB2, 0x4C, + 0xF0, 0x9D, 0x85, 0x98, 0xF0, 0x9D, 0x85, 0xA5, + 0xF0, 0x9D, 0x85, 0xB0, 0xB2, 0x4C, 0xF0, 0x9D, + 0x85, 0x98, 0xF0, 0x9D, 0x85, 0xA5, 0xF0, 0x9D, + // Bytes 4780 - 47bf + 0x85, 0xB1, 0xB2, 0x4C, 0xF0, 0x9D, 0x85, 0x98, + 0xF0, 0x9D, 0x85, 0xA5, 0xF0, 0x9D, 0x85, 0xB2, + 0xB2, 0x4C, 0xF0, 0x9D, 0x86, 0xB9, 0xF0, 0x9D, + 0x85, 0xA5, 0xF0, 0x9D, 0x85, 0xAE, 0xB2, 0x4C, + 0xF0, 0x9D, 0x86, 0xB9, 0xF0, 0x9D, 0x85, 0xA5, + 0xF0, 0x9D, 0x85, 0xAF, 0xB2, 0x4C, 0xF0, 0x9D, + 0x86, 0xBA, 0xF0, 0x9D, 0x85, 0xA5, 0xF0, 0x9D, + 0x85, 0xAE, 0xB2, 0x4C, 0xF0, 0x9D, 0x86, 0xBA, + // Bytes 47c0 - 47ff + 0xF0, 0x9D, 0x85, 0xA5, 0xF0, 0x9D, 0x85, 0xAF, + 0xB2, 0x83, 0x41, 0xCC, 0x82, 0xCD, 0x83, 0x41, + 0xCC, 0x86, 0xCD, 0x83, 0x41, 0xCC, 0x87, 0xCD, + 0x83, 0x41, 0xCC, 0x88, 0xCD, 0x83, 0x41, 0xCC, + 0x8A, 0xCD, 0x83, 0x41, 0xCC, 0xA3, 0xB9, 0x83, + 0x43, 0xCC, 0xA7, 0xA9, 0x83, 0x45, 0xCC, 0x82, + 0xCD, 0x83, 0x45, 0xCC, 0x84, 0xCD, 0x83, 0x45, + 0xCC, 0xA3, 0xB9, 0x83, 0x45, 0xCC, 0xA7, 0xA9, + // Bytes 4800 - 483f + 0x83, 0x49, 0xCC, 0x88, 0xCD, 0x83, 0x4C, 0xCC, + 0xA3, 0xB9, 0x83, 0x4F, 0xCC, 0x82, 0xCD, 0x83, + 0x4F, 0xCC, 0x83, 0xCD, 0x83, 0x4F, 0xCC, 0x84, + 0xCD, 0x83, 0x4F, 0xCC, 0x87, 0xCD, 0x83, 0x4F, + 0xCC, 0x88, 0xCD, 0x83, 0x4F, 0xCC, 0x9B, 0xB1, + 0x83, 0x4F, 0xCC, 0xA3, 0xB9, 0x83, 0x4F, 0xCC, + 0xA8, 0xA9, 0x83, 0x52, 0xCC, 0xA3, 0xB9, 0x83, + 0x53, 0xCC, 0x81, 0xCD, 0x83, 0x53, 0xCC, 0x8C, + // Bytes 4840 - 487f + 0xCD, 0x83, 0x53, 0xCC, 0xA3, 0xB9, 0x83, 0x55, + 0xCC, 0x83, 0xCD, 0x83, 0x55, 0xCC, 0x84, 0xCD, 
+ 0x83, 0x55, 0xCC, 0x88, 0xCD, 0x83, 0x55, 0xCC, + 0x9B, 0xB1, 0x83, 0x61, 0xCC, 0x82, 0xCD, 0x83, + 0x61, 0xCC, 0x86, 0xCD, 0x83, 0x61, 0xCC, 0x87, + 0xCD, 0x83, 0x61, 0xCC, 0x88, 0xCD, 0x83, 0x61, + 0xCC, 0x8A, 0xCD, 0x83, 0x61, 0xCC, 0xA3, 0xB9, + 0x83, 0x63, 0xCC, 0xA7, 0xA9, 0x83, 0x65, 0xCC, + // Bytes 4880 - 48bf + 0x82, 0xCD, 0x83, 0x65, 0xCC, 0x84, 0xCD, 0x83, + 0x65, 0xCC, 0xA3, 0xB9, 0x83, 0x65, 0xCC, 0xA7, + 0xA9, 0x83, 0x69, 0xCC, 0x88, 0xCD, 0x83, 0x6C, + 0xCC, 0xA3, 0xB9, 0x83, 0x6F, 0xCC, 0x82, 0xCD, + 0x83, 0x6F, 0xCC, 0x83, 0xCD, 0x83, 0x6F, 0xCC, + 0x84, 0xCD, 0x83, 0x6F, 0xCC, 0x87, 0xCD, 0x83, + 0x6F, 0xCC, 0x88, 0xCD, 0x83, 0x6F, 0xCC, 0x9B, + 0xB1, 0x83, 0x6F, 0xCC, 0xA3, 0xB9, 0x83, 0x6F, + // Bytes 48c0 - 48ff + 0xCC, 0xA8, 0xA9, 0x83, 0x72, 0xCC, 0xA3, 0xB9, + 0x83, 0x73, 0xCC, 0x81, 0xCD, 0x83, 0x73, 0xCC, + 0x8C, 0xCD, 0x83, 0x73, 0xCC, 0xA3, 0xB9, 0x83, + 0x75, 0xCC, 0x83, 0xCD, 0x83, 0x75, 0xCC, 0x84, + 0xCD, 0x83, 0x75, 0xCC, 0x88, 0xCD, 0x83, 0x75, + 0xCC, 0x9B, 0xB1, 0x84, 0xCE, 0x91, 0xCC, 0x93, + 0xCD, 0x84, 0xCE, 0x91, 0xCC, 0x94, 0xCD, 0x84, + 0xCE, 0x95, 0xCC, 0x93, 0xCD, 0x84, 0xCE, 0x95, + // Bytes 4900 - 493f + 0xCC, 0x94, 0xCD, 0x84, 0xCE, 0x97, 0xCC, 0x93, + 0xCD, 0x84, 0xCE, 0x97, 0xCC, 0x94, 0xCD, 0x84, + 0xCE, 0x99, 0xCC, 0x93, 0xCD, 0x84, 0xCE, 0x99, + 0xCC, 0x94, 0xCD, 0x84, 0xCE, 0x9F, 0xCC, 0x93, + 0xCD, 0x84, 0xCE, 0x9F, 0xCC, 0x94, 0xCD, 0x84, + 0xCE, 0xA5, 0xCC, 0x94, 0xCD, 0x84, 0xCE, 0xA9, + 0xCC, 0x93, 0xCD, 0x84, 0xCE, 0xA9, 0xCC, 0x94, + 0xCD, 0x84, 0xCE, 0xB1, 0xCC, 0x80, 0xCD, 0x84, + // Bytes 4940 - 497f + 0xCE, 0xB1, 0xCC, 0x81, 0xCD, 0x84, 0xCE, 0xB1, + 0xCC, 0x93, 0xCD, 0x84, 0xCE, 0xB1, 0xCC, 0x94, + 0xCD, 0x84, 0xCE, 0xB1, 0xCD, 0x82, 0xCD, 0x84, + 0xCE, 0xB5, 0xCC, 0x93, 0xCD, 0x84, 0xCE, 0xB5, + 0xCC, 0x94, 0xCD, 0x84, 0xCE, 0xB7, 0xCC, 0x80, + 0xCD, 0x84, 0xCE, 0xB7, 0xCC, 0x81, 0xCD, 0x84, + 0xCE, 0xB7, 0xCC, 0x93, 0xCD, 0x84, 0xCE, 0xB7, + 0xCC, 0x94, 0xCD, 0x84, 0xCE, 0xB7, 0xCD, 0x82, + // Bytes 4980 - 49bf + 0xCD, 0x84, 0xCE, 0xB9, 0xCC, 0x88, 0xCD, 0x84, + 0xCE, 0xB9, 0xCC, 0x93, 0xCD, 0x84, 0xCE, 0xB9, + 0xCC, 0x94, 0xCD, 0x84, 0xCE, 0xBF, 0xCC, 0x93, + 0xCD, 0x84, 0xCE, 0xBF, 0xCC, 0x94, 0xCD, 0x84, + 0xCF, 0x85, 0xCC, 0x88, 0xCD, 0x84, 0xCF, 0x85, + 0xCC, 0x93, 0xCD, 0x84, 0xCF, 0x85, 0xCC, 0x94, + 0xCD, 0x84, 0xCF, 0x89, 0xCC, 0x80, 0xCD, 0x84, + 0xCF, 0x89, 0xCC, 0x81, 0xCD, 0x84, 0xCF, 0x89, + // Bytes 49c0 - 49ff + 0xCC, 0x93, 0xCD, 0x84, 0xCF, 0x89, 0xCC, 0x94, + 0xCD, 0x84, 0xCF, 0x89, 0xCD, 0x82, 0xCD, 0x86, + 0xCE, 0x91, 0xCC, 0x93, 0xCC, 0x80, 0xCE, 0x86, + 0xCE, 0x91, 0xCC, 0x93, 0xCC, 0x81, 0xCE, 0x86, + 0xCE, 0x91, 0xCC, 0x93, 0xCD, 0x82, 0xCE, 0x86, + 0xCE, 0x91, 0xCC, 0x94, 0xCC, 0x80, 0xCE, 0x86, + 0xCE, 0x91, 0xCC, 0x94, 0xCC, 0x81, 0xCE, 0x86, + 0xCE, 0x91, 0xCC, 0x94, 0xCD, 0x82, 0xCE, 0x86, + // Bytes 4a00 - 4a3f + 0xCE, 0x97, 0xCC, 0x93, 0xCC, 0x80, 0xCE, 0x86, + 0xCE, 0x97, 0xCC, 0x93, 0xCC, 0x81, 0xCE, 0x86, + 0xCE, 0x97, 0xCC, 0x93, 0xCD, 0x82, 0xCE, 0x86, + 0xCE, 0x97, 0xCC, 0x94, 0xCC, 0x80, 0xCE, 0x86, + 0xCE, 0x97, 0xCC, 0x94, 0xCC, 0x81, 0xCE, 0x86, + 0xCE, 0x97, 0xCC, 0x94, 0xCD, 0x82, 0xCE, 0x86, + 0xCE, 0xA9, 0xCC, 0x93, 0xCC, 0x80, 0xCE, 0x86, + 0xCE, 0xA9, 0xCC, 0x93, 0xCC, 0x81, 0xCE, 0x86, + // Bytes 4a40 - 4a7f + 0xCE, 0xA9, 0xCC, 0x93, 0xCD, 0x82, 0xCE, 0x86, + 0xCE, 0xA9, 0xCC, 0x94, 0xCC, 0x80, 0xCE, 0x86, + 0xCE, 0xA9, 0xCC, 0x94, 0xCC, 0x81, 0xCE, 0x86, + 0xCE, 0xA9, 0xCC, 0x94, 0xCD, 0x82, 0xCE, 0x86, + 0xCE, 0xB1, 0xCC, 0x93, 0xCC, 0x80, 0xCE, 0x86, + 0xCE, 0xB1, 0xCC, 
0x93, 0xCC, 0x81, 0xCE, 0x86, + 0xCE, 0xB1, 0xCC, 0x93, 0xCD, 0x82, 0xCE, 0x86, + 0xCE, 0xB1, 0xCC, 0x94, 0xCC, 0x80, 0xCE, 0x86, + // Bytes 4a80 - 4abf + 0xCE, 0xB1, 0xCC, 0x94, 0xCC, 0x81, 0xCE, 0x86, + 0xCE, 0xB1, 0xCC, 0x94, 0xCD, 0x82, 0xCE, 0x86, + 0xCE, 0xB7, 0xCC, 0x93, 0xCC, 0x80, 0xCE, 0x86, + 0xCE, 0xB7, 0xCC, 0x93, 0xCC, 0x81, 0xCE, 0x86, + 0xCE, 0xB7, 0xCC, 0x93, 0xCD, 0x82, 0xCE, 0x86, + 0xCE, 0xB7, 0xCC, 0x94, 0xCC, 0x80, 0xCE, 0x86, + 0xCE, 0xB7, 0xCC, 0x94, 0xCC, 0x81, 0xCE, 0x86, + 0xCE, 0xB7, 0xCC, 0x94, 0xCD, 0x82, 0xCE, 0x86, + // Bytes 4ac0 - 4aff + 0xCF, 0x89, 0xCC, 0x93, 0xCC, 0x80, 0xCE, 0x86, + 0xCF, 0x89, 0xCC, 0x93, 0xCC, 0x81, 0xCE, 0x86, + 0xCF, 0x89, 0xCC, 0x93, 0xCD, 0x82, 0xCE, 0x86, + 0xCF, 0x89, 0xCC, 0x94, 0xCC, 0x80, 0xCE, 0x86, + 0xCF, 0x89, 0xCC, 0x94, 0xCC, 0x81, 0xCE, 0x86, + 0xCF, 0x89, 0xCC, 0x94, 0xCD, 0x82, 0xCE, 0x42, + 0xCC, 0x80, 0xCD, 0x33, 0x42, 0xCC, 0x81, 0xCD, + 0x33, 0x42, 0xCC, 0x93, 0xCD, 0x33, 0x43, 0xE1, + // Bytes 4b00 - 4b3f + 0x85, 0xA1, 0x01, 0x00, 0x43, 0xE1, 0x85, 0xA2, + 0x01, 0x00, 0x43, 0xE1, 0x85, 0xA3, 0x01, 0x00, + 0x43, 0xE1, 0x85, 0xA4, 0x01, 0x00, 0x43, 0xE1, + 0x85, 0xA5, 0x01, 0x00, 0x43, 0xE1, 0x85, 0xA6, + 0x01, 0x00, 0x43, 0xE1, 0x85, 0xA7, 0x01, 0x00, + 0x43, 0xE1, 0x85, 0xA8, 0x01, 0x00, 0x43, 0xE1, + 0x85, 0xA9, 0x01, 0x00, 0x43, 0xE1, 0x85, 0xAA, + 0x01, 0x00, 0x43, 0xE1, 0x85, 0xAB, 0x01, 0x00, + // Bytes 4b40 - 4b7f + 0x43, 0xE1, 0x85, 0xAC, 0x01, 0x00, 0x43, 0xE1, + 0x85, 0xAD, 0x01, 0x00, 0x43, 0xE1, 0x85, 0xAE, + 0x01, 0x00, 0x43, 0xE1, 0x85, 0xAF, 0x01, 0x00, + 0x43, 0xE1, 0x85, 0xB0, 0x01, 0x00, 0x43, 0xE1, + 0x85, 0xB1, 0x01, 0x00, 0x43, 0xE1, 0x85, 0xB2, + 0x01, 0x00, 0x43, 0xE1, 0x85, 0xB3, 0x01, 0x00, + 0x43, 0xE1, 0x85, 0xB4, 0x01, 0x00, 0x43, 0xE1, + 0x85, 0xB5, 0x01, 0x00, 0x43, 0xE1, 0x86, 0xAA, + // Bytes 4b80 - 4bbf + 0x01, 0x00, 0x43, 0xE1, 0x86, 0xAC, 0x01, 0x00, + 0x43, 0xE1, 0x86, 0xAD, 0x01, 0x00, 0x43, 0xE1, + 0x86, 0xB0, 0x01, 0x00, 0x43, 0xE1, 0x86, 0xB1, + 0x01, 0x00, 0x43, 0xE1, 0x86, 0xB2, 0x01, 0x00, + 0x43, 0xE1, 0x86, 0xB3, 0x01, 0x00, 0x43, 0xE1, + 0x86, 0xB4, 0x01, 0x00, 0x43, 0xE1, 0x86, 0xB5, + 0x01, 0x00, 0x44, 0xCC, 0x88, 0xCC, 0x81, 0xCE, + 0x33, 0x43, 0xE3, 0x82, 0x99, 0x11, 0x04, 0x43, + // Bytes 4bc0 - 4bff + 0xE3, 0x82, 0x9A, 0x11, 0x04, 0x46, 0xE0, 0xBD, + 0xB1, 0xE0, 0xBD, 0xB2, 0xA2, 0x27, 0x46, 0xE0, + 0xBD, 0xB1, 0xE0, 0xBD, 0xB4, 0xA6, 0x27, 0x46, + 0xE0, 0xBD, 0xB1, 0xE0, 0xBE, 0x80, 0xA2, 0x27, + 0x00, 0x01, +} + +// lookup returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *nfcTrie) lookup(s []byte) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return nfcValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := nfcIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := nfcIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = nfcIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. 
+ } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := nfcIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = nfcIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = nfcIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *nfcTrie) lookupUnsafe(s []byte) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return nfcValues[c0] + } + i := nfcIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = nfcIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = nfcIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// lookupString returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *nfcTrie) lookupString(s string) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return nfcValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := nfcIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := nfcIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = nfcIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := nfcIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = nfcIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = nfcIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *nfcTrie) lookupStringUnsafe(s string) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return nfcValues[c0] + } + i := nfcIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = nfcIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = nfcIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// nfcTrie. 
Total size: 10798 bytes (10.54 KiB). Checksum: b5981cc85e3bd14. +type nfcTrie struct{} + +func newNfcTrie(i int) *nfcTrie { + return &nfcTrie{} +} + +// lookupValue determines the type of block n and looks up the value for b. +func (t *nfcTrie) lookupValue(n uint32, b byte) uint16 { + switch { + case n < 46: + return uint16(nfcValues[n<<6+uint32(b)]) + default: + n -= 46 + return uint16(nfcSparse.lookup(n, b)) + } +} + +// nfcValues: 48 blocks, 3072 entries, 6144 bytes +// The third block is the zero block. +var nfcValues = [3072]uint16{ + // Block 0x0, offset 0x0 + 0x3c: 0xa000, 0x3d: 0xa000, 0x3e: 0xa000, + // Block 0x1, offset 0x40 + 0x41: 0xa000, 0x42: 0xa000, 0x43: 0xa000, 0x44: 0xa000, 0x45: 0xa000, + 0x46: 0xa000, 0x47: 0xa000, 0x48: 0xa000, 0x49: 0xa000, 0x4a: 0xa000, 0x4b: 0xa000, + 0x4c: 0xa000, 0x4d: 0xa000, 0x4e: 0xa000, 0x4f: 0xa000, 0x50: 0xa000, + 0x52: 0xa000, 0x53: 0xa000, 0x54: 0xa000, 0x55: 0xa000, 0x56: 0xa000, 0x57: 0xa000, + 0x58: 0xa000, 0x59: 0xa000, 0x5a: 0xa000, + 0x61: 0xa000, 0x62: 0xa000, 0x63: 0xa000, + 0x64: 0xa000, 0x65: 0xa000, 0x66: 0xa000, 0x67: 0xa000, 0x68: 0xa000, 0x69: 0xa000, + 0x6a: 0xa000, 0x6b: 0xa000, 0x6c: 0xa000, 0x6d: 0xa000, 0x6e: 0xa000, 0x6f: 0xa000, + 0x70: 0xa000, 0x72: 0xa000, 0x73: 0xa000, 0x74: 0xa000, 0x75: 0xa000, + 0x76: 0xa000, 0x77: 0xa000, 0x78: 0xa000, 0x79: 0xa000, 0x7a: 0xa000, + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc0: 0x30b0, 0xc1: 0x30b5, 0xc2: 0x47c9, 0xc3: 0x30ba, 0xc4: 0x47d8, 0xc5: 0x47dd, + 0xc6: 0xa000, 0xc7: 0x47e7, 0xc8: 0x3123, 0xc9: 0x3128, 0xca: 0x47ec, 0xcb: 0x313c, + 0xcc: 0x31af, 0xcd: 0x31b4, 0xce: 0x31b9, 0xcf: 0x4800, 0xd1: 0x3245, + 0xd2: 0x3268, 0xd3: 0x326d, 0xd4: 0x480a, 0xd5: 0x480f, 0xd6: 0x481e, + 0xd8: 0xa000, 0xd9: 0x32f4, 0xda: 0x32f9, 0xdb: 0x32fe, 0xdc: 0x4850, 0xdd: 0x3376, + 0xe0: 0x33bc, 0xe1: 0x33c1, 0xe2: 0x485a, 0xe3: 0x33c6, + 0xe4: 0x4869, 0xe5: 0x486e, 0xe6: 0xa000, 0xe7: 0x4878, 0xe8: 0x342f, 0xe9: 0x3434, + 0xea: 0x487d, 0xeb: 0x3448, 0xec: 0x34c0, 0xed: 0x34c5, 0xee: 0x34ca, 0xef: 0x4891, + 0xf1: 0x3556, 0xf2: 0x3579, 0xf3: 0x357e, 0xf4: 0x489b, 0xf5: 0x48a0, + 0xf6: 0x48af, 0xf8: 0xa000, 0xf9: 0x360a, 0xfa: 0x360f, 0xfb: 0x3614, + 0xfc: 0x48e1, 0xfd: 0x3691, 0xff: 0x36aa, + // Block 0x4, offset 0x100 + 0x100: 0x30bf, 0x101: 0x33cb, 0x102: 0x47ce, 0x103: 0x485f, 0x104: 0x30dd, 0x105: 0x33e9, + 0x106: 0x30f1, 0x107: 0x33fd, 0x108: 0x30f6, 0x109: 0x3402, 0x10a: 0x30fb, 0x10b: 0x3407, + 0x10c: 0x3100, 0x10d: 0x340c, 0x10e: 0x310a, 0x10f: 0x3416, + 0x112: 0x47f1, 0x113: 0x4882, 0x114: 0x3132, 0x115: 0x343e, 0x116: 0x3137, 0x117: 0x3443, + 0x118: 0x3155, 0x119: 0x3461, 0x11a: 0x3146, 0x11b: 0x3452, 0x11c: 0x316e, 0x11d: 0x347a, + 0x11e: 0x3178, 0x11f: 0x3484, 0x120: 0x317d, 0x121: 0x3489, 0x122: 0x3187, 0x123: 0x3493, + 0x124: 0x318c, 0x125: 0x3498, 0x128: 0x31be, 0x129: 0x34cf, + 0x12a: 0x31c3, 0x12b: 0x34d4, 0x12c: 0x31c8, 0x12d: 0x34d9, 0x12e: 0x31eb, 0x12f: 0x34f7, + 0x130: 0x31cd, 0x134: 0x31f5, 0x135: 0x3501, + 0x136: 0x3209, 0x137: 0x351a, 0x139: 0x3213, 0x13a: 0x3524, 0x13b: 0x321d, + 0x13c: 0x352e, 0x13d: 0x3218, 0x13e: 0x3529, + // Block 0x5, offset 0x140 + 0x143: 0x3240, 0x144: 0x3551, 0x145: 0x3259, + 0x146: 0x356a, 0x147: 0x324f, 0x148: 0x3560, + 0x14c: 0x4814, 0x14d: 0x48a5, 0x14e: 0x3272, 0x14f: 0x3583, 0x150: 0x327c, 0x151: 0x358d, + 0x154: 0x329a, 0x155: 0x35ab, 0x156: 0x32b3, 0x157: 0x35c4, + 0x158: 0x32a4, 0x159: 0x35b5, 0x15a: 0x4837, 0x15b: 0x48c8, 0x15c: 0x32bd, 0x15d: 0x35ce, + 0x15e: 0x32cc, 0x15f: 0x35dd, 0x160: 0x483c, 0x161: 0x48cd, 0x162: 
0x32e5, 0x163: 0x35fb, + 0x164: 0x32d6, 0x165: 0x35ec, 0x168: 0x4846, 0x169: 0x48d7, + 0x16a: 0x484b, 0x16b: 0x48dc, 0x16c: 0x3303, 0x16d: 0x3619, 0x16e: 0x330d, 0x16f: 0x3623, + 0x170: 0x3312, 0x171: 0x3628, 0x172: 0x3330, 0x173: 0x3646, 0x174: 0x3353, 0x175: 0x3669, + 0x176: 0x337b, 0x177: 0x3696, 0x178: 0x338f, 0x179: 0x339e, 0x17a: 0x36be, 0x17b: 0x33a8, + 0x17c: 0x36c8, 0x17d: 0x33ad, 0x17e: 0x36cd, 0x17f: 0xa000, + // Block 0x6, offset 0x180 + 0x184: 0x8100, 0x185: 0x8100, + 0x186: 0x8100, + 0x18d: 0x30c9, 0x18e: 0x33d5, 0x18f: 0x31d7, 0x190: 0x34e3, 0x191: 0x3281, + 0x192: 0x3592, 0x193: 0x3317, 0x194: 0x362d, 0x195: 0x3b10, 0x196: 0x3c9f, 0x197: 0x3b09, + 0x198: 0x3c98, 0x199: 0x3b17, 0x19a: 0x3ca6, 0x19b: 0x3b02, 0x19c: 0x3c91, + 0x19e: 0x39f1, 0x19f: 0x3b80, 0x1a0: 0x39ea, 0x1a1: 0x3b79, 0x1a2: 0x36f4, 0x1a3: 0x3706, + 0x1a6: 0x3182, 0x1a7: 0x348e, 0x1a8: 0x31ff, 0x1a9: 0x3510, + 0x1aa: 0x482d, 0x1ab: 0x48be, 0x1ac: 0x3ad1, 0x1ad: 0x3c60, 0x1ae: 0x3718, 0x1af: 0x371e, + 0x1b0: 0x3506, 0x1b4: 0x3169, 0x1b5: 0x3475, + 0x1b8: 0x323b, 0x1b9: 0x354c, 0x1ba: 0x39f8, 0x1bb: 0x3b87, + 0x1bc: 0x36ee, 0x1bd: 0x3700, 0x1be: 0x36fa, 0x1bf: 0x370c, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x30ce, 0x1c1: 0x33da, 0x1c2: 0x30d3, 0x1c3: 0x33df, 0x1c4: 0x314b, 0x1c5: 0x3457, + 0x1c6: 0x3150, 0x1c7: 0x345c, 0x1c8: 0x31dc, 0x1c9: 0x34e8, 0x1ca: 0x31e1, 0x1cb: 0x34ed, + 0x1cc: 0x3286, 0x1cd: 0x3597, 0x1ce: 0x328b, 0x1cf: 0x359c, 0x1d0: 0x32a9, 0x1d1: 0x35ba, + 0x1d2: 0x32ae, 0x1d3: 0x35bf, 0x1d4: 0x331c, 0x1d5: 0x3632, 0x1d6: 0x3321, 0x1d7: 0x3637, + 0x1d8: 0x32c7, 0x1d9: 0x35d8, 0x1da: 0x32e0, 0x1db: 0x35f6, + 0x1de: 0x319b, 0x1df: 0x34a7, + 0x1e6: 0x47d3, 0x1e7: 0x4864, 0x1e8: 0x47fb, 0x1e9: 0x488c, + 0x1ea: 0x3aa0, 0x1eb: 0x3c2f, 0x1ec: 0x3a7d, 0x1ed: 0x3c0c, 0x1ee: 0x4819, 0x1ef: 0x48aa, + 0x1f0: 0x3a99, 0x1f1: 0x3c28, 0x1f2: 0x3385, 0x1f3: 0x36a0, + // Block 0x8, offset 0x200 + 0x200: 0x9933, 0x201: 0x9933, 0x202: 0x9933, 0x203: 0x9933, 0x204: 0x9933, 0x205: 0x8133, + 0x206: 0x9933, 0x207: 0x9933, 0x208: 0x9933, 0x209: 0x9933, 0x20a: 0x9933, 0x20b: 0x9933, + 0x20c: 0x9933, 0x20d: 0x8133, 0x20e: 0x8133, 0x20f: 0x9933, 0x210: 0x8133, 0x211: 0x9933, + 0x212: 0x8133, 0x213: 0x9933, 0x214: 0x9933, 0x215: 0x8134, 0x216: 0x812e, 0x217: 0x812e, + 0x218: 0x812e, 0x219: 0x812e, 0x21a: 0x8134, 0x21b: 0x992c, 0x21c: 0x812e, 0x21d: 0x812e, + 0x21e: 0x812e, 0x21f: 0x812e, 0x220: 0x812e, 0x221: 0x812a, 0x222: 0x812a, 0x223: 0x992e, + 0x224: 0x992e, 0x225: 0x992e, 0x226: 0x992e, 0x227: 0x992a, 0x228: 0x992a, 0x229: 0x812e, + 0x22a: 0x812e, 0x22b: 0x812e, 0x22c: 0x812e, 0x22d: 0x992e, 0x22e: 0x992e, 0x22f: 0x812e, + 0x230: 0x992e, 0x231: 0x992e, 0x232: 0x812e, 0x233: 0x812e, 0x234: 0x8101, 0x235: 0x8101, + 0x236: 0x8101, 0x237: 0x8101, 0x238: 0x9901, 0x239: 0x812e, 0x23a: 0x812e, 0x23b: 0x812e, + 0x23c: 0x812e, 0x23d: 0x8133, 0x23e: 0x8133, 0x23f: 0x8133, + // Block 0x9, offset 0x240 + 0x240: 0x4aef, 0x241: 0x4af4, 0x242: 0x9933, 0x243: 0x4af9, 0x244: 0x4bb2, 0x245: 0x9937, + 0x246: 0x8133, 0x247: 0x812e, 0x248: 0x812e, 0x249: 0x812e, 0x24a: 0x8133, 0x24b: 0x8133, + 0x24c: 0x8133, 0x24d: 0x812e, 0x24e: 0x812e, 0x250: 0x8133, 0x251: 0x8133, + 0x252: 0x8133, 0x253: 0x812e, 0x254: 0x812e, 0x255: 0x812e, 0x256: 0x812e, 0x257: 0x8133, + 0x258: 0x8134, 0x259: 0x812e, 0x25a: 0x812e, 0x25b: 0x8133, 0x25c: 0x8135, 0x25d: 0x8136, + 0x25e: 0x8136, 0x25f: 0x8135, 0x260: 0x8136, 0x261: 0x8136, 0x262: 0x8135, 0x263: 0x8133, + 0x264: 0x8133, 0x265: 0x8133, 0x266: 0x8133, 0x267: 0x8133, 0x268: 0x8133, 0x269: 0x8133, + 0x26a: 
0x8133, 0x26b: 0x8133, 0x26c: 0x8133, 0x26d: 0x8133, 0x26e: 0x8133, 0x26f: 0x8133, + 0x274: 0x01ee, + 0x27a: 0x8100, + 0x27e: 0x0037, + // Block 0xa, offset 0x280 + 0x284: 0x8100, 0x285: 0x36e2, + 0x286: 0x372a, 0x287: 0x00ce, 0x288: 0x3748, 0x289: 0x3754, 0x28a: 0x3766, + 0x28c: 0x3784, 0x28e: 0x3796, 0x28f: 0x37b4, 0x290: 0x3f49, 0x291: 0xa000, + 0x295: 0xa000, 0x297: 0xa000, + 0x299: 0xa000, + 0x29f: 0xa000, 0x2a1: 0xa000, + 0x2a5: 0xa000, 0x2a9: 0xa000, + 0x2aa: 0x3778, 0x2ab: 0x37a8, 0x2ac: 0x493f, 0x2ad: 0x37d8, 0x2ae: 0x4969, 0x2af: 0x37ea, + 0x2b0: 0x3fb1, 0x2b1: 0xa000, 0x2b5: 0xa000, + 0x2b7: 0xa000, 0x2b9: 0xa000, + 0x2bf: 0xa000, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x3862, 0x2c1: 0x386e, 0x2c3: 0x385c, + 0x2c6: 0xa000, 0x2c7: 0x384a, + 0x2cc: 0x389e, 0x2cd: 0x3886, 0x2ce: 0x38b0, 0x2d0: 0xa000, + 0x2d3: 0xa000, 0x2d5: 0xa000, 0x2d6: 0xa000, 0x2d7: 0xa000, + 0x2d8: 0xa000, 0x2d9: 0x3892, 0x2da: 0xa000, + 0x2de: 0xa000, 0x2e3: 0xa000, + 0x2e7: 0xa000, + 0x2eb: 0xa000, 0x2ed: 0xa000, + 0x2f0: 0xa000, 0x2f3: 0xa000, 0x2f5: 0xa000, + 0x2f6: 0xa000, 0x2f7: 0xa000, 0x2f8: 0xa000, 0x2f9: 0x3916, 0x2fa: 0xa000, + 0x2fe: 0xa000, + // Block 0xc, offset 0x300 + 0x301: 0x3874, 0x302: 0x38f8, + 0x310: 0x3850, 0x311: 0x38d4, + 0x312: 0x3856, 0x313: 0x38da, 0x316: 0x3868, 0x317: 0x38ec, + 0x318: 0xa000, 0x319: 0xa000, 0x31a: 0x396a, 0x31b: 0x3970, 0x31c: 0x387a, 0x31d: 0x38fe, + 0x31e: 0x3880, 0x31f: 0x3904, 0x322: 0x388c, 0x323: 0x3910, + 0x324: 0x3898, 0x325: 0x391c, 0x326: 0x38a4, 0x327: 0x3928, 0x328: 0xa000, 0x329: 0xa000, + 0x32a: 0x3976, 0x32b: 0x397c, 0x32c: 0x38ce, 0x32d: 0x3952, 0x32e: 0x38aa, 0x32f: 0x392e, + 0x330: 0x38b6, 0x331: 0x393a, 0x332: 0x38bc, 0x333: 0x3940, 0x334: 0x38c2, 0x335: 0x3946, + 0x338: 0x38c8, 0x339: 0x394c, + // Block 0xd, offset 0x340 + 0x351: 0x812e, + 0x352: 0x8133, 0x353: 0x8133, 0x354: 0x8133, 0x355: 0x8133, 0x356: 0x812e, 0x357: 0x8133, + 0x358: 0x8133, 0x359: 0x8133, 0x35a: 0x812f, 0x35b: 0x812e, 0x35c: 0x8133, 0x35d: 0x8133, + 0x35e: 0x8133, 0x35f: 0x8133, 0x360: 0x8133, 0x361: 0x8133, 0x362: 0x812e, 0x363: 0x812e, + 0x364: 0x812e, 0x365: 0x812e, 0x366: 0x812e, 0x367: 0x812e, 0x368: 0x8133, 0x369: 0x8133, + 0x36a: 0x812e, 0x36b: 0x8133, 0x36c: 0x8133, 0x36d: 0x812f, 0x36e: 0x8132, 0x36f: 0x8133, + 0x370: 0x8106, 0x371: 0x8107, 0x372: 0x8108, 0x373: 0x8109, 0x374: 0x810a, 0x375: 0x810b, + 0x376: 0x810c, 0x377: 0x810d, 0x378: 0x810e, 0x379: 0x810f, 0x37a: 0x810f, 0x37b: 0x8110, + 0x37c: 0x8111, 0x37d: 0x8112, 0x37f: 0x8113, + // Block 0xe, offset 0x380 + 0x388: 0xa000, 0x38a: 0xa000, 0x38b: 0x8117, + 0x38c: 0x8118, 0x38d: 0x8119, 0x38e: 0x811a, 0x38f: 0x811b, 0x390: 0x811c, 0x391: 0x811d, + 0x392: 0x811e, 0x393: 0x9933, 0x394: 0x9933, 0x395: 0x992e, 0x396: 0x812e, 0x397: 0x8133, + 0x398: 0x8133, 0x399: 0x8133, 0x39a: 0x8133, 0x39b: 0x8133, 0x39c: 0x812e, 0x39d: 0x8133, + 0x39e: 0x8133, 0x39f: 0x812e, + 0x3b0: 0x811f, + // Block 0xf, offset 0x3c0 + 0x3ca: 0x8133, 0x3cb: 0x8133, + 0x3cc: 0x8133, 0x3cd: 0x8133, 0x3ce: 0x8133, 0x3cf: 0x812e, 0x3d0: 0x812e, 0x3d1: 0x812e, + 0x3d2: 0x812e, 0x3d3: 0x812e, 0x3d4: 0x8133, 0x3d5: 0x8133, 0x3d6: 0x8133, 0x3d7: 0x8133, + 0x3d8: 0x8133, 0x3d9: 0x8133, 0x3da: 0x8133, 0x3db: 0x8133, 0x3dc: 0x8133, 0x3dd: 0x8133, + 0x3de: 0x8133, 0x3df: 0x8133, 0x3e0: 0x8133, 0x3e1: 0x8133, 0x3e3: 0x812e, + 0x3e4: 0x8133, 0x3e5: 0x8133, 0x3e6: 0x812e, 0x3e7: 0x8133, 0x3e8: 0x8133, 0x3e9: 0x812e, + 0x3ea: 0x8133, 0x3eb: 0x8133, 0x3ec: 0x8133, 0x3ed: 0x812e, 0x3ee: 0x812e, 0x3ef: 0x812e, + 0x3f0: 0x8117, 0x3f1: 0x8118, 0x3f2: 0x8119, 
0x3f3: 0x8133, 0x3f4: 0x8133, 0x3f5: 0x8133, + 0x3f6: 0x812e, 0x3f7: 0x8133, 0x3f8: 0x8133, 0x3f9: 0x812e, 0x3fa: 0x812e, 0x3fb: 0x8133, + 0x3fc: 0x8133, 0x3fd: 0x8133, 0x3fe: 0x8133, 0x3ff: 0x8133, + // Block 0x10, offset 0x400 + 0x405: 0xa000, + 0x406: 0x2e5d, 0x407: 0xa000, 0x408: 0x2e65, 0x409: 0xa000, 0x40a: 0x2e6d, 0x40b: 0xa000, + 0x40c: 0x2e75, 0x40d: 0xa000, 0x40e: 0x2e7d, 0x411: 0xa000, + 0x412: 0x2e85, + 0x434: 0x8103, 0x435: 0x9900, + 0x43a: 0xa000, 0x43b: 0x2e8d, + 0x43c: 0xa000, 0x43d: 0x2e95, 0x43e: 0xa000, 0x43f: 0xa000, + // Block 0x11, offset 0x440 + 0x440: 0x8133, 0x441: 0x8133, 0x442: 0x812e, 0x443: 0x8133, 0x444: 0x8133, 0x445: 0x8133, + 0x446: 0x8133, 0x447: 0x8133, 0x448: 0x8133, 0x449: 0x8133, 0x44a: 0x812e, 0x44b: 0x8133, + 0x44c: 0x8133, 0x44d: 0x8136, 0x44e: 0x812b, 0x44f: 0x812e, 0x450: 0x812a, 0x451: 0x8133, + 0x452: 0x8133, 0x453: 0x8133, 0x454: 0x8133, 0x455: 0x8133, 0x456: 0x8133, 0x457: 0x8133, + 0x458: 0x8133, 0x459: 0x8133, 0x45a: 0x8133, 0x45b: 0x8133, 0x45c: 0x8133, 0x45d: 0x8133, + 0x45e: 0x8133, 0x45f: 0x8133, 0x460: 0x8133, 0x461: 0x8133, 0x462: 0x8133, 0x463: 0x8133, + 0x464: 0x8133, 0x465: 0x8133, 0x466: 0x8133, 0x467: 0x8133, 0x468: 0x8133, 0x469: 0x8133, + 0x46a: 0x8133, 0x46b: 0x8133, 0x46c: 0x8133, 0x46d: 0x8133, 0x46e: 0x8133, 0x46f: 0x8133, + 0x470: 0x8133, 0x471: 0x8133, 0x472: 0x8133, 0x473: 0x8133, 0x474: 0x8133, 0x475: 0x8133, + 0x476: 0x8134, 0x477: 0x8132, 0x478: 0x8132, 0x479: 0x812e, 0x47a: 0x812d, 0x47b: 0x8133, + 0x47c: 0x8135, 0x47d: 0x812e, 0x47e: 0x8133, 0x47f: 0x812e, + // Block 0x12, offset 0x480 + 0x480: 0x30d8, 0x481: 0x33e4, 0x482: 0x30e2, 0x483: 0x33ee, 0x484: 0x30e7, 0x485: 0x33f3, + 0x486: 0x30ec, 0x487: 0x33f8, 0x488: 0x3a0d, 0x489: 0x3b9c, 0x48a: 0x3105, 0x48b: 0x3411, + 0x48c: 0x310f, 0x48d: 0x341b, 0x48e: 0x311e, 0x48f: 0x342a, 0x490: 0x3114, 0x491: 0x3420, + 0x492: 0x3119, 0x493: 0x3425, 0x494: 0x3a30, 0x495: 0x3bbf, 0x496: 0x3a37, 0x497: 0x3bc6, + 0x498: 0x315a, 0x499: 0x3466, 0x49a: 0x315f, 0x49b: 0x346b, 0x49c: 0x3a45, 0x49d: 0x3bd4, + 0x49e: 0x3164, 0x49f: 0x3470, 0x4a0: 0x3173, 0x4a1: 0x347f, 0x4a2: 0x3191, 0x4a3: 0x349d, + 0x4a4: 0x31a0, 0x4a5: 0x34ac, 0x4a6: 0x3196, 0x4a7: 0x34a2, 0x4a8: 0x31a5, 0x4a9: 0x34b1, + 0x4aa: 0x31aa, 0x4ab: 0x34b6, 0x4ac: 0x31f0, 0x4ad: 0x34fc, 0x4ae: 0x3a4c, 0x4af: 0x3bdb, + 0x4b0: 0x31fa, 0x4b1: 0x350b, 0x4b2: 0x3204, 0x4b3: 0x3515, 0x4b4: 0x320e, 0x4b5: 0x351f, + 0x4b6: 0x4805, 0x4b7: 0x4896, 0x4b8: 0x3a53, 0x4b9: 0x3be2, 0x4ba: 0x3227, 0x4bb: 0x3538, + 0x4bc: 0x3222, 0x4bd: 0x3533, 0x4be: 0x322c, 0x4bf: 0x353d, + // Block 0x13, offset 0x4c0 + 0x4c0: 0x3231, 0x4c1: 0x3542, 0x4c2: 0x3236, 0x4c3: 0x3547, 0x4c4: 0x324a, 0x4c5: 0x355b, + 0x4c6: 0x3254, 0x4c7: 0x3565, 0x4c8: 0x3263, 0x4c9: 0x3574, 0x4ca: 0x325e, 0x4cb: 0x356f, + 0x4cc: 0x3a76, 0x4cd: 0x3c05, 0x4ce: 0x3a84, 0x4cf: 0x3c13, 0x4d0: 0x3a8b, 0x4d1: 0x3c1a, + 0x4d2: 0x3a92, 0x4d3: 0x3c21, 0x4d4: 0x3290, 0x4d5: 0x35a1, 0x4d6: 0x3295, 0x4d7: 0x35a6, + 0x4d8: 0x329f, 0x4d9: 0x35b0, 0x4da: 0x4832, 0x4db: 0x48c3, 0x4dc: 0x3ad8, 0x4dd: 0x3c67, + 0x4de: 0x32b8, 0x4df: 0x35c9, 0x4e0: 0x32c2, 0x4e1: 0x35d3, 0x4e2: 0x4841, 0x4e3: 0x48d2, + 0x4e4: 0x3adf, 0x4e5: 0x3c6e, 0x4e6: 0x3ae6, 0x4e7: 0x3c75, 0x4e8: 0x3aed, 0x4e9: 0x3c7c, + 0x4ea: 0x32d1, 0x4eb: 0x35e2, 0x4ec: 0x32db, 0x4ed: 0x35f1, 0x4ee: 0x32ef, 0x4ef: 0x3605, + 0x4f0: 0x32ea, 0x4f1: 0x3600, 0x4f2: 0x332b, 0x4f3: 0x3641, 0x4f4: 0x333a, 0x4f5: 0x3650, + 0x4f6: 0x3335, 0x4f7: 0x364b, 0x4f8: 0x3af4, 0x4f9: 0x3c83, 0x4fa: 0x3afb, 0x4fb: 0x3c8a, + 0x4fc: 0x333f, 0x4fd: 0x3655, 
0x4fe: 0x3344, 0x4ff: 0x365a, + // Block 0x14, offset 0x500 + 0x500: 0x3349, 0x501: 0x365f, 0x502: 0x334e, 0x503: 0x3664, 0x504: 0x335d, 0x505: 0x3673, + 0x506: 0x3358, 0x507: 0x366e, 0x508: 0x3362, 0x509: 0x367d, 0x50a: 0x3367, 0x50b: 0x3682, + 0x50c: 0x336c, 0x50d: 0x3687, 0x50e: 0x338a, 0x50f: 0x36a5, 0x510: 0x33a3, 0x511: 0x36c3, + 0x512: 0x33b2, 0x513: 0x36d2, 0x514: 0x33b7, 0x515: 0x36d7, 0x516: 0x34bb, 0x517: 0x35e7, + 0x518: 0x3678, 0x519: 0x36b4, 0x51b: 0x3712, + 0x520: 0x47e2, 0x521: 0x4873, 0x522: 0x30c4, 0x523: 0x33d0, + 0x524: 0x39b9, 0x525: 0x3b48, 0x526: 0x39b2, 0x527: 0x3b41, 0x528: 0x39c7, 0x529: 0x3b56, + 0x52a: 0x39c0, 0x52b: 0x3b4f, 0x52c: 0x39ff, 0x52d: 0x3b8e, 0x52e: 0x39d5, 0x52f: 0x3b64, + 0x530: 0x39ce, 0x531: 0x3b5d, 0x532: 0x39e3, 0x533: 0x3b72, 0x534: 0x39dc, 0x535: 0x3b6b, + 0x536: 0x3a06, 0x537: 0x3b95, 0x538: 0x47f6, 0x539: 0x4887, 0x53a: 0x3141, 0x53b: 0x344d, + 0x53c: 0x312d, 0x53d: 0x3439, 0x53e: 0x3a1b, 0x53f: 0x3baa, + // Block 0x15, offset 0x540 + 0x540: 0x3a14, 0x541: 0x3ba3, 0x542: 0x3a29, 0x543: 0x3bb8, 0x544: 0x3a22, 0x545: 0x3bb1, + 0x546: 0x3a3e, 0x547: 0x3bcd, 0x548: 0x31d2, 0x549: 0x34de, 0x54a: 0x31e6, 0x54b: 0x34f2, + 0x54c: 0x4828, 0x54d: 0x48b9, 0x54e: 0x3277, 0x54f: 0x3588, 0x550: 0x3a61, 0x551: 0x3bf0, + 0x552: 0x3a5a, 0x553: 0x3be9, 0x554: 0x3a6f, 0x555: 0x3bfe, 0x556: 0x3a68, 0x557: 0x3bf7, + 0x558: 0x3aca, 0x559: 0x3c59, 0x55a: 0x3aae, 0x55b: 0x3c3d, 0x55c: 0x3aa7, 0x55d: 0x3c36, + 0x55e: 0x3abc, 0x55f: 0x3c4b, 0x560: 0x3ab5, 0x561: 0x3c44, 0x562: 0x3ac3, 0x563: 0x3c52, + 0x564: 0x3326, 0x565: 0x363c, 0x566: 0x3308, 0x567: 0x361e, 0x568: 0x3b25, 0x569: 0x3cb4, + 0x56a: 0x3b1e, 0x56b: 0x3cad, 0x56c: 0x3b33, 0x56d: 0x3cc2, 0x56e: 0x3b2c, 0x56f: 0x3cbb, + 0x570: 0x3b3a, 0x571: 0x3cc9, 0x572: 0x3371, 0x573: 0x368c, 0x574: 0x3399, 0x575: 0x36b9, + 0x576: 0x3394, 0x577: 0x36af, 0x578: 0x3380, 0x579: 0x369b, + // Block 0x16, offset 0x580 + 0x580: 0x4945, 0x581: 0x494b, 0x582: 0x4a5f, 0x583: 0x4a77, 0x584: 0x4a67, 0x585: 0x4a7f, + 0x586: 0x4a6f, 0x587: 0x4a87, 0x588: 0x48eb, 0x589: 0x48f1, 0x58a: 0x49cf, 0x58b: 0x49e7, + 0x58c: 0x49d7, 0x58d: 0x49ef, 0x58e: 0x49df, 0x58f: 0x49f7, 0x590: 0x4957, 0x591: 0x495d, + 0x592: 0x3ef9, 0x593: 0x3f09, 0x594: 0x3f01, 0x595: 0x3f11, + 0x598: 0x48f7, 0x599: 0x48fd, 0x59a: 0x3e29, 0x59b: 0x3e39, 0x59c: 0x3e31, 0x59d: 0x3e41, + 0x5a0: 0x496f, 0x5a1: 0x4975, 0x5a2: 0x4a8f, 0x5a3: 0x4aa7, + 0x5a4: 0x4a97, 0x5a5: 0x4aaf, 0x5a6: 0x4a9f, 0x5a7: 0x4ab7, 0x5a8: 0x4903, 0x5a9: 0x4909, + 0x5aa: 0x49ff, 0x5ab: 0x4a17, 0x5ac: 0x4a07, 0x5ad: 0x4a1f, 0x5ae: 0x4a0f, 0x5af: 0x4a27, + 0x5b0: 0x4987, 0x5b1: 0x498d, 0x5b2: 0x3f59, 0x5b3: 0x3f71, 0x5b4: 0x3f61, 0x5b5: 0x3f79, + 0x5b6: 0x3f69, 0x5b7: 0x3f81, 0x5b8: 0x490f, 0x5b9: 0x4915, 0x5ba: 0x3e59, 0x5bb: 0x3e71, + 0x5bc: 0x3e61, 0x5bd: 0x3e79, 0x5be: 0x3e69, 0x5bf: 0x3e81, + // Block 0x17, offset 0x5c0 + 0x5c0: 0x4993, 0x5c1: 0x4999, 0x5c2: 0x3f89, 0x5c3: 0x3f99, 0x5c4: 0x3f91, 0x5c5: 0x3fa1, + 0x5c8: 0x491b, 0x5c9: 0x4921, 0x5ca: 0x3e89, 0x5cb: 0x3e99, + 0x5cc: 0x3e91, 0x5cd: 0x3ea1, 0x5d0: 0x49a5, 0x5d1: 0x49ab, + 0x5d2: 0x3fc1, 0x5d3: 0x3fd9, 0x5d4: 0x3fc9, 0x5d5: 0x3fe1, 0x5d6: 0x3fd1, 0x5d7: 0x3fe9, + 0x5d9: 0x4927, 0x5db: 0x3ea9, 0x5dd: 0x3eb1, + 0x5df: 0x3eb9, 0x5e0: 0x49bd, 0x5e1: 0x49c3, 0x5e2: 0x4abf, 0x5e3: 0x4ad7, + 0x5e4: 0x4ac7, 0x5e5: 0x4adf, 0x5e6: 0x4acf, 0x5e7: 0x4ae7, 0x5e8: 0x492d, 0x5e9: 0x4933, + 0x5ea: 0x4a2f, 0x5eb: 0x4a47, 0x5ec: 0x4a37, 0x5ed: 0x4a4f, 0x5ee: 0x4a3f, 0x5ef: 0x4a57, + 0x5f0: 0x4939, 0x5f1: 0x445f, 0x5f2: 0x37d2, 0x5f3: 0x4465, 0x5f4: 
0x4963, 0x5f5: 0x446b, + 0x5f6: 0x37e4, 0x5f7: 0x4471, 0x5f8: 0x3802, 0x5f9: 0x4477, 0x5fa: 0x381a, 0x5fb: 0x447d, + 0x5fc: 0x49b1, 0x5fd: 0x4483, + // Block 0x18, offset 0x600 + 0x600: 0x3ee1, 0x601: 0x3ee9, 0x602: 0x42c5, 0x603: 0x42e3, 0x604: 0x42cf, 0x605: 0x42ed, + 0x606: 0x42d9, 0x607: 0x42f7, 0x608: 0x3e19, 0x609: 0x3e21, 0x60a: 0x4211, 0x60b: 0x422f, + 0x60c: 0x421b, 0x60d: 0x4239, 0x60e: 0x4225, 0x60f: 0x4243, 0x610: 0x3f29, 0x611: 0x3f31, + 0x612: 0x4301, 0x613: 0x431f, 0x614: 0x430b, 0x615: 0x4329, 0x616: 0x4315, 0x617: 0x4333, + 0x618: 0x3e49, 0x619: 0x3e51, 0x61a: 0x424d, 0x61b: 0x426b, 0x61c: 0x4257, 0x61d: 0x4275, + 0x61e: 0x4261, 0x61f: 0x427f, 0x620: 0x4001, 0x621: 0x4009, 0x622: 0x433d, 0x623: 0x435b, + 0x624: 0x4347, 0x625: 0x4365, 0x626: 0x4351, 0x627: 0x436f, 0x628: 0x3ec1, 0x629: 0x3ec9, + 0x62a: 0x4289, 0x62b: 0x42a7, 0x62c: 0x4293, 0x62d: 0x42b1, 0x62e: 0x429d, 0x62f: 0x42bb, + 0x630: 0x37c6, 0x631: 0x37c0, 0x632: 0x3ed1, 0x633: 0x37cc, 0x634: 0x3ed9, + 0x636: 0x4951, 0x637: 0x3ef1, 0x638: 0x3736, 0x639: 0x3730, 0x63a: 0x3724, 0x63b: 0x442f, + 0x63c: 0x373c, 0x63d: 0x8100, 0x63e: 0x0257, 0x63f: 0xa100, + // Block 0x19, offset 0x640 + 0x640: 0x8100, 0x641: 0x36e8, 0x642: 0x3f19, 0x643: 0x37de, 0x644: 0x3f21, + 0x646: 0x497b, 0x647: 0x3f39, 0x648: 0x3742, 0x649: 0x4435, 0x64a: 0x374e, 0x64b: 0x443b, + 0x64c: 0x375a, 0x64d: 0x3cd0, 0x64e: 0x3cd7, 0x64f: 0x3cde, 0x650: 0x37f6, 0x651: 0x37f0, + 0x652: 0x3f41, 0x653: 0x4625, 0x656: 0x37fc, 0x657: 0x3f51, + 0x658: 0x3772, 0x659: 0x376c, 0x65a: 0x3760, 0x65b: 0x4441, 0x65d: 0x3ce5, + 0x65e: 0x3cec, 0x65f: 0x3cf3, 0x660: 0x382c, 0x661: 0x3826, 0x662: 0x3fa9, 0x663: 0x462d, + 0x664: 0x380e, 0x665: 0x3814, 0x666: 0x3832, 0x667: 0x3fb9, 0x668: 0x37a2, 0x669: 0x379c, + 0x66a: 0x3790, 0x66b: 0x444d, 0x66c: 0x378a, 0x66d: 0x36dc, 0x66e: 0x4429, 0x66f: 0x0081, + 0x672: 0x3ff1, 0x673: 0x3838, 0x674: 0x3ff9, + 0x676: 0x49c9, 0x677: 0x4011, 0x678: 0x377e, 0x679: 0x4447, 0x67a: 0x37ae, 0x67b: 0x4459, + 0x67c: 0x37ba, 0x67d: 0x4397, 0x67e: 0xa100, + // Block 0x1a, offset 0x680 + 0x681: 0x3d47, 0x683: 0xa000, 0x684: 0x3d4e, 0x685: 0xa000, + 0x687: 0x3d55, 0x688: 0xa000, 0x689: 0x3d5c, + 0x68d: 0xa000, + 0x6a0: 0x30a6, 0x6a1: 0xa000, 0x6a2: 0x3d6a, + 0x6a4: 0xa000, 0x6a5: 0xa000, + 0x6ad: 0x3d63, 0x6ae: 0x30a1, 0x6af: 0x30ab, + 0x6b0: 0x3d71, 0x6b1: 0x3d78, 0x6b2: 0xa000, 0x6b3: 0xa000, 0x6b4: 0x3d7f, 0x6b5: 0x3d86, + 0x6b6: 0xa000, 0x6b7: 0xa000, 0x6b8: 0x3d8d, 0x6b9: 0x3d94, 0x6ba: 0xa000, 0x6bb: 0xa000, + 0x6bc: 0xa000, 0x6bd: 0xa000, + // Block 0x1b, offset 0x6c0 + 0x6c0: 0x3d9b, 0x6c1: 0x3da2, 0x6c2: 0xa000, 0x6c3: 0xa000, 0x6c4: 0x3db7, 0x6c5: 0x3dbe, + 0x6c6: 0xa000, 0x6c7: 0xa000, 0x6c8: 0x3dc5, 0x6c9: 0x3dcc, + 0x6d1: 0xa000, + 0x6d2: 0xa000, + 0x6e2: 0xa000, + 0x6e8: 0xa000, 0x6e9: 0xa000, + 0x6eb: 0xa000, 0x6ec: 0x3de1, 0x6ed: 0x3de8, 0x6ee: 0x3def, 0x6ef: 0x3df6, + 0x6f2: 0xa000, 0x6f3: 0xa000, 0x6f4: 0xa000, 0x6f5: 0xa000, + // Block 0x1c, offset 0x700 + 0x706: 0xa000, 0x70b: 0xa000, + 0x70c: 0x4049, 0x70d: 0xa000, 0x70e: 0x4051, 0x70f: 0xa000, 0x710: 0x4059, 0x711: 0xa000, + 0x712: 0x4061, 0x713: 0xa000, 0x714: 0x4069, 0x715: 0xa000, 0x716: 0x4071, 0x717: 0xa000, + 0x718: 0x4079, 0x719: 0xa000, 0x71a: 0x4081, 0x71b: 0xa000, 0x71c: 0x4089, 0x71d: 0xa000, + 0x71e: 0x4091, 0x71f: 0xa000, 0x720: 0x4099, 0x721: 0xa000, 0x722: 0x40a1, + 0x724: 0xa000, 0x725: 0x40a9, 0x726: 0xa000, 0x727: 0x40b1, 0x728: 0xa000, 0x729: 0x40b9, + 0x72f: 0xa000, + 0x730: 0x40c1, 0x731: 0x40c9, 0x732: 0xa000, 0x733: 0x40d1, 0x734: 0x40d9, 0x735: 
0xa000, + 0x736: 0x40e1, 0x737: 0x40e9, 0x738: 0xa000, 0x739: 0x40f1, 0x73a: 0x40f9, 0x73b: 0xa000, + 0x73c: 0x4101, 0x73d: 0x4109, + // Block 0x1d, offset 0x740 + 0x754: 0x4041, + 0x759: 0x9904, 0x75a: 0x9904, 0x75b: 0x8100, 0x75c: 0x8100, 0x75d: 0xa000, + 0x75e: 0x4111, + 0x766: 0xa000, + 0x76b: 0xa000, 0x76c: 0x4121, 0x76d: 0xa000, 0x76e: 0x4129, 0x76f: 0xa000, + 0x770: 0x4131, 0x771: 0xa000, 0x772: 0x4139, 0x773: 0xa000, 0x774: 0x4141, 0x775: 0xa000, + 0x776: 0x4149, 0x777: 0xa000, 0x778: 0x4151, 0x779: 0xa000, 0x77a: 0x4159, 0x77b: 0xa000, + 0x77c: 0x4161, 0x77d: 0xa000, 0x77e: 0x4169, 0x77f: 0xa000, + // Block 0x1e, offset 0x780 + 0x780: 0x4171, 0x781: 0xa000, 0x782: 0x4179, 0x784: 0xa000, 0x785: 0x4181, + 0x786: 0xa000, 0x787: 0x4189, 0x788: 0xa000, 0x789: 0x4191, + 0x78f: 0xa000, 0x790: 0x4199, 0x791: 0x41a1, + 0x792: 0xa000, 0x793: 0x41a9, 0x794: 0x41b1, 0x795: 0xa000, 0x796: 0x41b9, 0x797: 0x41c1, + 0x798: 0xa000, 0x799: 0x41c9, 0x79a: 0x41d1, 0x79b: 0xa000, 0x79c: 0x41d9, 0x79d: 0x41e1, + 0x7af: 0xa000, + 0x7b0: 0xa000, 0x7b1: 0xa000, 0x7b2: 0xa000, 0x7b4: 0x4119, + 0x7b7: 0x41e9, 0x7b8: 0x41f1, 0x7b9: 0x41f9, 0x7ba: 0x4201, + 0x7bd: 0xa000, 0x7be: 0x4209, + // Block 0x1f, offset 0x7c0 + 0x7c0: 0x1472, 0x7c1: 0x0df6, 0x7c2: 0x14ce, 0x7c3: 0x149a, 0x7c4: 0x0f52, 0x7c5: 0x07e6, + 0x7c6: 0x09da, 0x7c7: 0x1726, 0x7c8: 0x1726, 0x7c9: 0x0b06, 0x7ca: 0x155a, 0x7cb: 0x0a3e, + 0x7cc: 0x0b02, 0x7cd: 0x0cea, 0x7ce: 0x10ca, 0x7cf: 0x125a, 0x7d0: 0x1392, 0x7d1: 0x13ce, + 0x7d2: 0x1402, 0x7d3: 0x1516, 0x7d4: 0x0e6e, 0x7d5: 0x0efa, 0x7d6: 0x0fa6, 0x7d7: 0x103e, + 0x7d8: 0x135a, 0x7d9: 0x1542, 0x7da: 0x166e, 0x7db: 0x080a, 0x7dc: 0x09ae, 0x7dd: 0x0e82, + 0x7de: 0x0fca, 0x7df: 0x138e, 0x7e0: 0x16be, 0x7e1: 0x0bae, 0x7e2: 0x0f72, 0x7e3: 0x137e, + 0x7e4: 0x1412, 0x7e5: 0x0d1e, 0x7e6: 0x12b6, 0x7e7: 0x13da, 0x7e8: 0x0c1a, 0x7e9: 0x0e0a, + 0x7ea: 0x0f12, 0x7eb: 0x1016, 0x7ec: 0x1522, 0x7ed: 0x084a, 0x7ee: 0x08e2, 0x7ef: 0x094e, + 0x7f0: 0x0d86, 0x7f1: 0x0e7a, 0x7f2: 0x0fc6, 0x7f3: 0x10ea, 0x7f4: 0x1272, 0x7f5: 0x1386, + 0x7f6: 0x139e, 0x7f7: 0x14c2, 0x7f8: 0x15ea, 0x7f9: 0x169e, 0x7fa: 0x16ba, 0x7fb: 0x1126, + 0x7fc: 0x1166, 0x7fd: 0x121e, 0x7fe: 0x133e, 0x7ff: 0x1576, + // Block 0x20, offset 0x800 + 0x800: 0x16c6, 0x801: 0x1446, 0x802: 0x0ac2, 0x803: 0x0c36, 0x804: 0x11d6, 0x805: 0x1296, + 0x806: 0x0ffa, 0x807: 0x112e, 0x808: 0x1492, 0x809: 0x15e2, 0x80a: 0x0abe, 0x80b: 0x0b8a, + 0x80c: 0x0e72, 0x80d: 0x0f26, 0x80e: 0x0f5a, 0x80f: 0x120e, 0x810: 0x1236, 0x811: 0x15a2, + 0x812: 0x094a, 0x813: 0x12a2, 0x814: 0x08ee, 0x815: 0x08ea, 0x816: 0x1192, 0x817: 0x1222, + 0x818: 0x1356, 0x819: 0x15aa, 0x81a: 0x1462, 0x81b: 0x0d22, 0x81c: 0x0e6e, 0x81d: 0x1452, + 0x81e: 0x07f2, 0x81f: 0x0b5e, 0x820: 0x0c8e, 0x821: 0x102a, 0x822: 0x10aa, 0x823: 0x096e, + 0x824: 0x1136, 0x825: 0x085a, 0x826: 0x0c72, 0x827: 0x07d2, 0x828: 0x0ee6, 0x829: 0x0d9e, + 0x82a: 0x120a, 0x82b: 0x09c2, 0x82c: 0x0aae, 0x82d: 0x10f6, 0x82e: 0x135e, 0x82f: 0x1436, + 0x830: 0x0eb2, 0x831: 0x14f2, 0x832: 0x0ede, 0x833: 0x0d32, 0x834: 0x1316, 0x835: 0x0d52, + 0x836: 0x10a6, 0x837: 0x0826, 0x838: 0x08a2, 0x839: 0x08e6, 0x83a: 0x0e4e, 0x83b: 0x11f6, + 0x83c: 0x12ee, 0x83d: 0x1442, 0x83e: 0x1556, 0x83f: 0x0956, + // Block 0x21, offset 0x840 + 0x840: 0x0a0a, 0x841: 0x0b12, 0x842: 0x0c2a, 0x843: 0x0dba, 0x844: 0x0f76, 0x845: 0x113a, + 0x846: 0x1592, 0x847: 0x1676, 0x848: 0x16ca, 0x849: 0x16e2, 0x84a: 0x0932, 0x84b: 0x0dee, + 0x84c: 0x0e9e, 0x84d: 0x14e6, 0x84e: 0x0bf6, 0x84f: 0x0cd2, 0x850: 0x0cee, 0x851: 0x0d7e, + 0x852: 0x0f66, 0x853: 0x0fb2, 0x854: 
0x1062, 0x855: 0x1186, 0x856: 0x122a, 0x857: 0x128e, + 0x858: 0x14d6, 0x859: 0x1366, 0x85a: 0x14fe, 0x85b: 0x157a, 0x85c: 0x090a, 0x85d: 0x0936, + 0x85e: 0x0a1e, 0x85f: 0x0fa2, 0x860: 0x13ee, 0x861: 0x1436, 0x862: 0x0c16, 0x863: 0x0c86, + 0x864: 0x0d4a, 0x865: 0x0eaa, 0x866: 0x11d2, 0x867: 0x101e, 0x868: 0x0836, 0x869: 0x0a7a, + 0x86a: 0x0b5e, 0x86b: 0x0bc2, 0x86c: 0x0c92, 0x86d: 0x103a, 0x86e: 0x1056, 0x86f: 0x1266, + 0x870: 0x1286, 0x871: 0x155e, 0x872: 0x15de, 0x873: 0x15ee, 0x874: 0x162a, 0x875: 0x084e, + 0x876: 0x117a, 0x877: 0x154a, 0x878: 0x15c6, 0x879: 0x0caa, 0x87a: 0x0812, 0x87b: 0x0872, + 0x87c: 0x0b62, 0x87d: 0x0b82, 0x87e: 0x0daa, 0x87f: 0x0e6e, + // Block 0x22, offset 0x880 + 0x880: 0x0fbe, 0x881: 0x10c6, 0x882: 0x1372, 0x883: 0x1512, 0x884: 0x171e, 0x885: 0x0dde, + 0x886: 0x159e, 0x887: 0x092e, 0x888: 0x0e2a, 0x889: 0x0e36, 0x88a: 0x0f0a, 0x88b: 0x0f42, + 0x88c: 0x1046, 0x88d: 0x10a2, 0x88e: 0x1122, 0x88f: 0x1206, 0x890: 0x1636, 0x891: 0x08aa, + 0x892: 0x0cfe, 0x893: 0x15ae, 0x894: 0x0862, 0x895: 0x0ba6, 0x896: 0x0f2a, 0x897: 0x14da, + 0x898: 0x0c62, 0x899: 0x0cb2, 0x89a: 0x0e3e, 0x89b: 0x102a, 0x89c: 0x15b6, 0x89d: 0x0912, + 0x89e: 0x09fa, 0x89f: 0x0b92, 0x8a0: 0x0dce, 0x8a1: 0x0e1a, 0x8a2: 0x0e5a, 0x8a3: 0x0eee, + 0x8a4: 0x1042, 0x8a5: 0x10b6, 0x8a6: 0x1252, 0x8a7: 0x13f2, 0x8a8: 0x13fe, 0x8a9: 0x1552, + 0x8aa: 0x15d2, 0x8ab: 0x097e, 0x8ac: 0x0f46, 0x8ad: 0x09fe, 0x8ae: 0x0fc2, 0x8af: 0x1066, + 0x8b0: 0x1382, 0x8b1: 0x15ba, 0x8b2: 0x16a6, 0x8b3: 0x16ce, 0x8b4: 0x0e32, 0x8b5: 0x0f22, + 0x8b6: 0x12be, 0x8b7: 0x11b2, 0x8b8: 0x11be, 0x8b9: 0x11e2, 0x8ba: 0x1012, 0x8bb: 0x0f9a, + 0x8bc: 0x145e, 0x8bd: 0x082e, 0x8be: 0x1326, 0x8bf: 0x0916, + // Block 0x23, offset 0x8c0 + 0x8c0: 0x0906, 0x8c1: 0x0c06, 0x8c2: 0x0d26, 0x8c3: 0x11ee, 0x8c4: 0x0b4e, 0x8c5: 0x0efe, + 0x8c6: 0x0dea, 0x8c7: 0x14e2, 0x8c8: 0x13e2, 0x8c9: 0x15a6, 0x8ca: 0x141e, 0x8cb: 0x0c22, + 0x8cc: 0x0882, 0x8cd: 0x0a56, 0x8d0: 0x0aaa, + 0x8d2: 0x0dda, 0x8d5: 0x08f2, 0x8d6: 0x101a, 0x8d7: 0x10de, + 0x8d8: 0x1142, 0x8d9: 0x115e, 0x8da: 0x1162, 0x8db: 0x1176, 0x8dc: 0x15f6, 0x8dd: 0x11e6, + 0x8de: 0x126a, 0x8e0: 0x138a, 0x8e2: 0x144e, + 0x8e5: 0x1502, 0x8e6: 0x152e, + 0x8ea: 0x164a, 0x8eb: 0x164e, 0x8ec: 0x1652, 0x8ed: 0x16b6, 0x8ee: 0x1526, 0x8ef: 0x15c2, + 0x8f0: 0x0852, 0x8f1: 0x0876, 0x8f2: 0x088a, 0x8f3: 0x0946, 0x8f4: 0x0952, 0x8f5: 0x0992, + 0x8f6: 0x0a46, 0x8f7: 0x0a62, 0x8f8: 0x0a6a, 0x8f9: 0x0aa6, 0x8fa: 0x0ab2, 0x8fb: 0x0b8e, + 0x8fc: 0x0b96, 0x8fd: 0x0c9e, 0x8fe: 0x0cc6, 0x8ff: 0x0cce, + // Block 0x24, offset 0x900 + 0x900: 0x0ce6, 0x901: 0x0d92, 0x902: 0x0dc2, 0x903: 0x0de2, 0x904: 0x0e52, 0x905: 0x0f16, + 0x906: 0x0f32, 0x907: 0x0f62, 0x908: 0x0fb6, 0x909: 0x0fd6, 0x90a: 0x104a, 0x90b: 0x112a, + 0x90c: 0x1146, 0x90d: 0x114e, 0x90e: 0x114a, 0x90f: 0x1152, 0x910: 0x1156, 0x911: 0x115a, + 0x912: 0x116e, 0x913: 0x1172, 0x914: 0x1196, 0x915: 0x11aa, 0x916: 0x11c6, 0x917: 0x122a, + 0x918: 0x1232, 0x919: 0x123a, 0x91a: 0x124e, 0x91b: 0x1276, 0x91c: 0x12c6, 0x91d: 0x12fa, + 0x91e: 0x12fa, 0x91f: 0x1362, 0x920: 0x140a, 0x921: 0x1422, 0x922: 0x1456, 0x923: 0x145a, + 0x924: 0x149e, 0x925: 0x14a2, 0x926: 0x14fa, 0x927: 0x1502, 0x928: 0x15d6, 0x929: 0x161a, + 0x92a: 0x1632, 0x92b: 0x0c96, 0x92c: 0x184b, 0x92d: 0x12de, + 0x930: 0x07da, 0x931: 0x08de, 0x932: 0x089e, 0x933: 0x0846, 0x934: 0x0886, 0x935: 0x08b2, + 0x936: 0x0942, 0x937: 0x095e, 0x938: 0x0a46, 0x939: 0x0a32, 0x93a: 0x0a42, 0x93b: 0x0a5e, + 0x93c: 0x0aaa, 0x93d: 0x0aba, 0x93e: 0x0afe, 0x93f: 0x0b0a, + // Block 0x25, offset 0x940 + 0x940: 0x0b26, 0x941: 0x0b36, 
0x942: 0x0c1e, 0x943: 0x0c26, 0x944: 0x0c56, 0x945: 0x0c76, + 0x946: 0x0ca6, 0x947: 0x0cbe, 0x948: 0x0cae, 0x949: 0x0cce, 0x94a: 0x0cc2, 0x94b: 0x0ce6, + 0x94c: 0x0d02, 0x94d: 0x0d5a, 0x94e: 0x0d66, 0x94f: 0x0d6e, 0x950: 0x0d96, 0x951: 0x0dda, + 0x952: 0x0e0a, 0x953: 0x0e0e, 0x954: 0x0e22, 0x955: 0x0ea2, 0x956: 0x0eb2, 0x957: 0x0f0a, + 0x958: 0x0f56, 0x959: 0x0f4e, 0x95a: 0x0f62, 0x95b: 0x0f7e, 0x95c: 0x0fb6, 0x95d: 0x110e, + 0x95e: 0x0fda, 0x95f: 0x100e, 0x960: 0x101a, 0x961: 0x105a, 0x962: 0x1076, 0x963: 0x109a, + 0x964: 0x10be, 0x965: 0x10c2, 0x966: 0x10de, 0x967: 0x10e2, 0x968: 0x10f2, 0x969: 0x1106, + 0x96a: 0x1102, 0x96b: 0x1132, 0x96c: 0x11ae, 0x96d: 0x11c6, 0x96e: 0x11de, 0x96f: 0x1216, + 0x970: 0x122a, 0x971: 0x1246, 0x972: 0x1276, 0x973: 0x132a, 0x974: 0x1352, 0x975: 0x13c6, + 0x976: 0x140e, 0x977: 0x141a, 0x978: 0x1422, 0x979: 0x143a, 0x97a: 0x144e, 0x97b: 0x143e, + 0x97c: 0x1456, 0x97d: 0x1452, 0x97e: 0x144a, 0x97f: 0x145a, + // Block 0x26, offset 0x980 + 0x980: 0x1466, 0x981: 0x14a2, 0x982: 0x14de, 0x983: 0x150e, 0x984: 0x1546, 0x985: 0x1566, + 0x986: 0x15b2, 0x987: 0x15d6, 0x988: 0x15f6, 0x989: 0x160a, 0x98a: 0x161a, 0x98b: 0x1626, + 0x98c: 0x1632, 0x98d: 0x1686, 0x98e: 0x1726, 0x98f: 0x17e2, 0x990: 0x17dd, 0x991: 0x180f, + 0x992: 0x0702, 0x993: 0x072a, 0x994: 0x072e, 0x995: 0x1891, 0x996: 0x18be, 0x997: 0x1936, + 0x998: 0x1712, 0x999: 0x1722, + // Block 0x27, offset 0x9c0 + 0x9c0: 0x07f6, 0x9c1: 0x07ee, 0x9c2: 0x07fe, 0x9c3: 0x1774, 0x9c4: 0x0842, 0x9c5: 0x0852, + 0x9c6: 0x0856, 0x9c7: 0x085e, 0x9c8: 0x0866, 0x9c9: 0x086a, 0x9ca: 0x0876, 0x9cb: 0x086e, + 0x9cc: 0x06ae, 0x9cd: 0x1788, 0x9ce: 0x088a, 0x9cf: 0x088e, 0x9d0: 0x0892, 0x9d1: 0x08ae, + 0x9d2: 0x1779, 0x9d3: 0x06b2, 0x9d4: 0x089a, 0x9d5: 0x08ba, 0x9d6: 0x1783, 0x9d7: 0x08ca, + 0x9d8: 0x08d2, 0x9d9: 0x0832, 0x9da: 0x08da, 0x9db: 0x08de, 0x9dc: 0x195e, 0x9dd: 0x08fa, + 0x9de: 0x0902, 0x9df: 0x06ba, 0x9e0: 0x091a, 0x9e1: 0x091e, 0x9e2: 0x0926, 0x9e3: 0x092a, + 0x9e4: 0x06be, 0x9e5: 0x0942, 0x9e6: 0x0946, 0x9e7: 0x0952, 0x9e8: 0x095e, 0x9e9: 0x0962, + 0x9ea: 0x0966, 0x9eb: 0x096e, 0x9ec: 0x098e, 0x9ed: 0x0992, 0x9ee: 0x099a, 0x9ef: 0x09aa, + 0x9f0: 0x09b2, 0x9f1: 0x09b6, 0x9f2: 0x09b6, 0x9f3: 0x09b6, 0x9f4: 0x1797, 0x9f5: 0x0f8e, + 0x9f6: 0x09ca, 0x9f7: 0x09d2, 0x9f8: 0x179c, 0x9f9: 0x09de, 0x9fa: 0x09e6, 0x9fb: 0x09ee, + 0x9fc: 0x0a16, 0x9fd: 0x0a02, 0x9fe: 0x0a0e, 0x9ff: 0x0a12, + // Block 0x28, offset 0xa00 + 0xa00: 0x0a1a, 0xa01: 0x0a22, 0xa02: 0x0a26, 0xa03: 0x0a2e, 0xa04: 0x0a36, 0xa05: 0x0a3a, + 0xa06: 0x0a3a, 0xa07: 0x0a42, 0xa08: 0x0a4a, 0xa09: 0x0a4e, 0xa0a: 0x0a5a, 0xa0b: 0x0a7e, + 0xa0c: 0x0a62, 0xa0d: 0x0a82, 0xa0e: 0x0a66, 0xa0f: 0x0a6e, 0xa10: 0x0906, 0xa11: 0x0aca, + 0xa12: 0x0a92, 0xa13: 0x0a96, 0xa14: 0x0a9a, 0xa15: 0x0a8e, 0xa16: 0x0aa2, 0xa17: 0x0a9e, + 0xa18: 0x0ab6, 0xa19: 0x17a1, 0xa1a: 0x0ad2, 0xa1b: 0x0ad6, 0xa1c: 0x0ade, 0xa1d: 0x0aea, + 0xa1e: 0x0af2, 0xa1f: 0x0b0e, 0xa20: 0x17a6, 0xa21: 0x17ab, 0xa22: 0x0b1a, 0xa23: 0x0b1e, + 0xa24: 0x0b22, 0xa25: 0x0b16, 0xa26: 0x0b2a, 0xa27: 0x06c2, 0xa28: 0x06c6, 0xa29: 0x0b32, + 0xa2a: 0x0b3a, 0xa2b: 0x0b3a, 0xa2c: 0x17b0, 0xa2d: 0x0b56, 0xa2e: 0x0b5a, 0xa2f: 0x0b5e, + 0xa30: 0x0b66, 0xa31: 0x17b5, 0xa32: 0x0b6e, 0xa33: 0x0b72, 0xa34: 0x0c4a, 0xa35: 0x0b7a, + 0xa36: 0x06ca, 0xa37: 0x0b86, 0xa38: 0x0b96, 0xa39: 0x0ba2, 0xa3a: 0x0b9e, 0xa3b: 0x17bf, + 0xa3c: 0x0baa, 0xa3d: 0x17c4, 0xa3e: 0x0bb6, 0xa3f: 0x0bb2, + // Block 0x29, offset 0xa40 + 0xa40: 0x0bba, 0xa41: 0x0bca, 0xa42: 0x0bce, 0xa43: 0x06ce, 0xa44: 0x0bde, 0xa45: 0x0be6, + 0xa46: 0x0bea, 0xa47: 
0x0bee, 0xa48: 0x06d2, 0xa49: 0x17c9, 0xa4a: 0x06d6, 0xa4b: 0x0c0a, + 0xa4c: 0x0c0e, 0xa4d: 0x0c12, 0xa4e: 0x0c1a, 0xa4f: 0x1990, 0xa50: 0x0c32, 0xa51: 0x17d3, + 0xa52: 0x17d3, 0xa53: 0x12d2, 0xa54: 0x0c42, 0xa55: 0x0c42, 0xa56: 0x06da, 0xa57: 0x17f6, + 0xa58: 0x18c8, 0xa59: 0x0c52, 0xa5a: 0x0c5a, 0xa5b: 0x06de, 0xa5c: 0x0c6e, 0xa5d: 0x0c7e, + 0xa5e: 0x0c82, 0xa5f: 0x0c8a, 0xa60: 0x0c9a, 0xa61: 0x06e6, 0xa62: 0x06e2, 0xa63: 0x0c9e, + 0xa64: 0x17d8, 0xa65: 0x0ca2, 0xa66: 0x0cb6, 0xa67: 0x0cba, 0xa68: 0x0cbe, 0xa69: 0x0cba, + 0xa6a: 0x0cca, 0xa6b: 0x0cce, 0xa6c: 0x0cde, 0xa6d: 0x0cd6, 0xa6e: 0x0cda, 0xa6f: 0x0ce2, + 0xa70: 0x0ce6, 0xa71: 0x0cea, 0xa72: 0x0cf6, 0xa73: 0x0cfa, 0xa74: 0x0d12, 0xa75: 0x0d1a, + 0xa76: 0x0d2a, 0xa77: 0x0d3e, 0xa78: 0x17e7, 0xa79: 0x0d3a, 0xa7a: 0x0d2e, 0xa7b: 0x0d46, + 0xa7c: 0x0d4e, 0xa7d: 0x0d62, 0xa7e: 0x17ec, 0xa7f: 0x0d6a, + // Block 0x2a, offset 0xa80 + 0xa80: 0x0d5e, 0xa81: 0x0d56, 0xa82: 0x06ea, 0xa83: 0x0d72, 0xa84: 0x0d7a, 0xa85: 0x0d82, + 0xa86: 0x0d76, 0xa87: 0x06ee, 0xa88: 0x0d92, 0xa89: 0x0d9a, 0xa8a: 0x17f1, 0xa8b: 0x0dc6, + 0xa8c: 0x0dfa, 0xa8d: 0x0dd6, 0xa8e: 0x06fa, 0xa8f: 0x0de2, 0xa90: 0x06f6, 0xa91: 0x06f2, + 0xa92: 0x08be, 0xa93: 0x08c2, 0xa94: 0x0dfe, 0xa95: 0x0de6, 0xa96: 0x12a6, 0xa97: 0x075e, + 0xa98: 0x0e0a, 0xa99: 0x0e0e, 0xa9a: 0x0e12, 0xa9b: 0x0e26, 0xa9c: 0x0e1e, 0xa9d: 0x180a, + 0xa9e: 0x06fe, 0xa9f: 0x0e3a, 0xaa0: 0x0e2e, 0xaa1: 0x0e4a, 0xaa2: 0x0e52, 0xaa3: 0x1814, + 0xaa4: 0x0e56, 0xaa5: 0x0e42, 0xaa6: 0x0e5e, 0xaa7: 0x0702, 0xaa8: 0x0e62, 0xaa9: 0x0e66, + 0xaaa: 0x0e6a, 0xaab: 0x0e76, 0xaac: 0x1819, 0xaad: 0x0e7e, 0xaae: 0x0706, 0xaaf: 0x0e8a, + 0xab0: 0x181e, 0xab1: 0x0e8e, 0xab2: 0x070a, 0xab3: 0x0e9a, 0xab4: 0x0ea6, 0xab5: 0x0eb2, + 0xab6: 0x0eb6, 0xab7: 0x1823, 0xab8: 0x17ba, 0xab9: 0x1828, 0xaba: 0x0ed6, 0xabb: 0x182d, + 0xabc: 0x0ee2, 0xabd: 0x0eea, 0xabe: 0x0eda, 0xabf: 0x0ef6, + // Block 0x2b, offset 0xac0 + 0xac0: 0x0f06, 0xac1: 0x0f16, 0xac2: 0x0f0a, 0xac3: 0x0f0e, 0xac4: 0x0f1a, 0xac5: 0x0f1e, + 0xac6: 0x1832, 0xac7: 0x0f02, 0xac8: 0x0f36, 0xac9: 0x0f3a, 0xaca: 0x070e, 0xacb: 0x0f4e, + 0xacc: 0x0f4a, 0xacd: 0x1837, 0xace: 0x0f2e, 0xacf: 0x0f6a, 0xad0: 0x183c, 0xad1: 0x1841, + 0xad2: 0x0f6e, 0xad3: 0x0f82, 0xad4: 0x0f7e, 0xad5: 0x0f7a, 0xad6: 0x0712, 0xad7: 0x0f86, + 0xad8: 0x0f96, 0xad9: 0x0f92, 0xada: 0x0f9e, 0xadb: 0x177e, 0xadc: 0x0fae, 0xadd: 0x1846, + 0xade: 0x0fba, 0xadf: 0x1850, 0xae0: 0x0fce, 0xae1: 0x0fda, 0xae2: 0x0fee, 0xae3: 0x1855, + 0xae4: 0x1002, 0xae5: 0x1006, 0xae6: 0x185a, 0xae7: 0x185f, 0xae8: 0x1022, 0xae9: 0x1032, + 0xaea: 0x0716, 0xaeb: 0x1036, 0xaec: 0x071a, 0xaed: 0x071a, 0xaee: 0x104e, 0xaef: 0x1052, + 0xaf0: 0x105a, 0xaf1: 0x105e, 0xaf2: 0x106a, 0xaf3: 0x071e, 0xaf4: 0x1082, 0xaf5: 0x1864, + 0xaf6: 0x109e, 0xaf7: 0x1869, 0xaf8: 0x10aa, 0xaf9: 0x17ce, 0xafa: 0x10ba, 0xafb: 0x186e, + 0xafc: 0x1873, 0xafd: 0x1878, 0xafe: 0x0722, 0xaff: 0x0726, + // Block 0x2c, offset 0xb00 + 0xb00: 0x10f2, 0xb01: 0x1882, 0xb02: 0x187d, 0xb03: 0x1887, 0xb04: 0x188c, 0xb05: 0x10fa, + 0xb06: 0x10fe, 0xb07: 0x10fe, 0xb08: 0x1106, 0xb09: 0x072e, 0xb0a: 0x110a, 0xb0b: 0x0732, + 0xb0c: 0x0736, 0xb0d: 0x1896, 0xb0e: 0x111e, 0xb0f: 0x1126, 0xb10: 0x1132, 0xb11: 0x073a, + 0xb12: 0x189b, 0xb13: 0x1156, 0xb14: 0x18a0, 0xb15: 0x18a5, 0xb16: 0x1176, 0xb17: 0x118e, + 0xb18: 0x073e, 0xb19: 0x1196, 0xb1a: 0x119a, 0xb1b: 0x119e, 0xb1c: 0x18aa, 0xb1d: 0x18af, + 0xb1e: 0x18af, 0xb1f: 0x11b6, 0xb20: 0x0742, 0xb21: 0x18b4, 0xb22: 0x11ca, 0xb23: 0x11ce, + 0xb24: 0x0746, 0xb25: 0x18b9, 0xb26: 0x11ea, 0xb27: 0x074a, 0xb28: 0x11fa, 
0xb29: 0x11f2, + 0xb2a: 0x1202, 0xb2b: 0x18c3, 0xb2c: 0x121a, 0xb2d: 0x074e, 0xb2e: 0x1226, 0xb2f: 0x122e, + 0xb30: 0x123e, 0xb31: 0x0752, 0xb32: 0x18cd, 0xb33: 0x18d2, 0xb34: 0x0756, 0xb35: 0x18d7, + 0xb36: 0x1256, 0xb37: 0x18dc, 0xb38: 0x1262, 0xb39: 0x126e, 0xb3a: 0x1276, 0xb3b: 0x18e1, + 0xb3c: 0x18e6, 0xb3d: 0x128a, 0xb3e: 0x18eb, 0xb3f: 0x1292, + // Block 0x2d, offset 0xb40 + 0xb40: 0x17fb, 0xb41: 0x075a, 0xb42: 0x12aa, 0xb43: 0x12ae, 0xb44: 0x0762, 0xb45: 0x12b2, + 0xb46: 0x0b2e, 0xb47: 0x18f0, 0xb48: 0x18f5, 0xb49: 0x1800, 0xb4a: 0x1805, 0xb4b: 0x12d2, + 0xb4c: 0x12d6, 0xb4d: 0x14ee, 0xb4e: 0x0766, 0xb4f: 0x1302, 0xb50: 0x12fe, 0xb51: 0x1306, + 0xb52: 0x093a, 0xb53: 0x130a, 0xb54: 0x130e, 0xb55: 0x1312, 0xb56: 0x131a, 0xb57: 0x18fa, + 0xb58: 0x1316, 0xb59: 0x131e, 0xb5a: 0x1332, 0xb5b: 0x1336, 0xb5c: 0x1322, 0xb5d: 0x133a, + 0xb5e: 0x134e, 0xb5f: 0x1362, 0xb60: 0x132e, 0xb61: 0x1342, 0xb62: 0x1346, 0xb63: 0x134a, + 0xb64: 0x18ff, 0xb65: 0x1909, 0xb66: 0x1904, 0xb67: 0x076a, 0xb68: 0x136a, 0xb69: 0x136e, + 0xb6a: 0x1376, 0xb6b: 0x191d, 0xb6c: 0x137a, 0xb6d: 0x190e, 0xb6e: 0x076e, 0xb6f: 0x0772, + 0xb70: 0x1913, 0xb71: 0x1918, 0xb72: 0x0776, 0xb73: 0x139a, 0xb74: 0x139e, 0xb75: 0x13a2, + 0xb76: 0x13a6, 0xb77: 0x13b2, 0xb78: 0x13ae, 0xb79: 0x13ba, 0xb7a: 0x13b6, 0xb7b: 0x13c6, + 0xb7c: 0x13be, 0xb7d: 0x13c2, 0xb7e: 0x13ca, 0xb7f: 0x077a, + // Block 0x2e, offset 0xb80 + 0xb80: 0x13d2, 0xb81: 0x13d6, 0xb82: 0x077e, 0xb83: 0x13e6, 0xb84: 0x13ea, 0xb85: 0x1922, + 0xb86: 0x13f6, 0xb87: 0x13fa, 0xb88: 0x0782, 0xb89: 0x1406, 0xb8a: 0x06b6, 0xb8b: 0x1927, + 0xb8c: 0x192c, 0xb8d: 0x0786, 0xb8e: 0x078a, 0xb8f: 0x1432, 0xb90: 0x144a, 0xb91: 0x1466, + 0xb92: 0x1476, 0xb93: 0x1931, 0xb94: 0x148a, 0xb95: 0x148e, 0xb96: 0x14a6, 0xb97: 0x14b2, + 0xb98: 0x193b, 0xb99: 0x178d, 0xb9a: 0x14be, 0xb9b: 0x14ba, 0xb9c: 0x14c6, 0xb9d: 0x1792, + 0xb9e: 0x14d2, 0xb9f: 0x14de, 0xba0: 0x1940, 0xba1: 0x1945, 0xba2: 0x151e, 0xba3: 0x152a, + 0xba4: 0x1532, 0xba5: 0x194a, 0xba6: 0x1536, 0xba7: 0x1562, 0xba8: 0x156e, 0xba9: 0x1572, + 0xbaa: 0x156a, 0xbab: 0x157e, 0xbac: 0x1582, 0xbad: 0x194f, 0xbae: 0x158e, 0xbaf: 0x078e, + 0xbb0: 0x1596, 0xbb1: 0x1954, 0xbb2: 0x0792, 0xbb3: 0x15ce, 0xbb4: 0x0bbe, 0xbb5: 0x15e6, + 0xbb6: 0x1959, 0xbb7: 0x1963, 0xbb8: 0x0796, 0xbb9: 0x079a, 0xbba: 0x160e, 0xbbb: 0x1968, + 0xbbc: 0x079e, 0xbbd: 0x196d, 0xbbe: 0x1626, 0xbbf: 0x1626, + // Block 0x2f, offset 0xbc0 + 0xbc0: 0x162e, 0xbc1: 0x1972, 0xbc2: 0x1646, 0xbc3: 0x07a2, 0xbc4: 0x1656, 0xbc5: 0x1662, + 0xbc6: 0x166a, 0xbc7: 0x1672, 0xbc8: 0x07a6, 0xbc9: 0x1977, 0xbca: 0x1686, 0xbcb: 0x16a2, + 0xbcc: 0x16ae, 0xbcd: 0x07aa, 0xbce: 0x07ae, 0xbcf: 0x16b2, 0xbd0: 0x197c, 0xbd1: 0x07b2, + 0xbd2: 0x1981, 0xbd3: 0x1986, 0xbd4: 0x198b, 0xbd5: 0x16d6, 0xbd6: 0x07b6, 0xbd7: 0x16ea, + 0xbd8: 0x16f2, 0xbd9: 0x16f6, 0xbda: 0x16fe, 0xbdb: 0x1706, 0xbdc: 0x170e, 0xbdd: 0x1995, +} + +// nfcIndex: 22 blocks, 1408 entries, 1408 bytes +// Block 0 is the zero block. 
+var nfcIndex = [1408]uint8{ + // Block 0x0, offset 0x0 + // Block 0x1, offset 0x40 + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc2: 0x2e, 0xc3: 0x01, 0xc4: 0x02, 0xc5: 0x03, 0xc6: 0x2f, 0xc7: 0x04, + 0xc8: 0x05, 0xca: 0x30, 0xcb: 0x31, 0xcc: 0x06, 0xcd: 0x07, 0xce: 0x08, 0xcf: 0x32, + 0xd0: 0x09, 0xd1: 0x33, 0xd2: 0x34, 0xd3: 0x0a, 0xd6: 0x0b, 0xd7: 0x35, + 0xd8: 0x36, 0xd9: 0x0c, 0xdb: 0x37, 0xdc: 0x38, 0xdd: 0x39, 0xdf: 0x3a, + 0xe0: 0x02, 0xe1: 0x03, 0xe2: 0x04, 0xe3: 0x05, + 0xea: 0x06, 0xeb: 0x07, 0xec: 0x08, 0xed: 0x09, 0xef: 0x0a, + 0xf0: 0x13, + // Block 0x4, offset 0x100 + 0x120: 0x3b, 0x121: 0x3c, 0x122: 0x3d, 0x123: 0x0d, 0x124: 0x3e, 0x125: 0x3f, 0x126: 0x40, 0x127: 0x41, + 0x128: 0x42, 0x129: 0x43, 0x12a: 0x44, 0x12b: 0x45, 0x12c: 0x40, 0x12d: 0x46, 0x12e: 0x47, 0x12f: 0x48, + 0x130: 0x44, 0x131: 0x49, 0x132: 0x4a, 0x133: 0x4b, 0x134: 0x4c, 0x135: 0x4d, 0x137: 0x4e, + 0x138: 0x4f, 0x139: 0x50, 0x13a: 0x51, 0x13b: 0x52, 0x13c: 0x53, 0x13d: 0x54, 0x13e: 0x55, 0x13f: 0x56, + // Block 0x5, offset 0x140 + 0x140: 0x57, 0x142: 0x58, 0x144: 0x59, 0x145: 0x5a, 0x146: 0x5b, 0x147: 0x5c, + 0x14d: 0x5d, + 0x15c: 0x5e, 0x15f: 0x5f, + 0x162: 0x60, 0x164: 0x61, + 0x168: 0x62, 0x169: 0x63, 0x16a: 0x64, 0x16b: 0x65, 0x16c: 0x0e, 0x16d: 0x66, 0x16e: 0x67, 0x16f: 0x68, + 0x170: 0x69, 0x173: 0x6a, 0x177: 0x0f, + 0x178: 0x10, 0x179: 0x11, 0x17a: 0x12, 0x17b: 0x13, 0x17c: 0x14, 0x17d: 0x15, 0x17e: 0x16, 0x17f: 0x17, + // Block 0x6, offset 0x180 + 0x180: 0x6b, 0x183: 0x6c, 0x184: 0x6d, 0x186: 0x6e, 0x187: 0x6f, + 0x188: 0x70, 0x189: 0x18, 0x18a: 0x19, 0x18b: 0x71, 0x18c: 0x72, + 0x1ab: 0x73, + 0x1b3: 0x74, 0x1b5: 0x75, 0x1b7: 0x76, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x77, 0x1c1: 0x1a, 0x1c2: 0x1b, 0x1c3: 0x1c, 0x1c4: 0x78, 0x1c5: 0x79, + 0x1c9: 0x7a, 0x1cc: 0x7b, 0x1cd: 0x7c, + // Block 0x8, offset 0x200 + 0x219: 0x7d, 0x21a: 0x7e, 0x21b: 0x7f, + 0x220: 0x80, 0x223: 0x81, 0x224: 0x82, 0x225: 0x83, 0x226: 0x84, 0x227: 0x85, + 0x22a: 0x86, 0x22b: 0x87, 0x22f: 0x88, + 0x230: 0x89, 0x231: 0x8a, 0x232: 0x8b, 0x233: 0x8c, 0x234: 0x8d, 0x235: 0x8e, 0x236: 0x8f, 0x237: 0x89, + 0x238: 0x8a, 0x239: 0x8b, 0x23a: 0x8c, 0x23b: 0x8d, 0x23c: 0x8e, 0x23d: 0x8f, 0x23e: 0x89, 0x23f: 0x8a, + // Block 0x9, offset 0x240 + 0x240: 0x8b, 0x241: 0x8c, 0x242: 0x8d, 0x243: 0x8e, 0x244: 0x8f, 0x245: 0x89, 0x246: 0x8a, 0x247: 0x8b, + 0x248: 0x8c, 0x249: 0x8d, 0x24a: 0x8e, 0x24b: 0x8f, 0x24c: 0x89, 0x24d: 0x8a, 0x24e: 0x8b, 0x24f: 0x8c, + 0x250: 0x8d, 0x251: 0x8e, 0x252: 0x8f, 0x253: 0x89, 0x254: 0x8a, 0x255: 0x8b, 0x256: 0x8c, 0x257: 0x8d, + 0x258: 0x8e, 0x259: 0x8f, 0x25a: 0x89, 0x25b: 0x8a, 0x25c: 0x8b, 0x25d: 0x8c, 0x25e: 0x8d, 0x25f: 0x8e, + 0x260: 0x8f, 0x261: 0x89, 0x262: 0x8a, 0x263: 0x8b, 0x264: 0x8c, 0x265: 0x8d, 0x266: 0x8e, 0x267: 0x8f, + 0x268: 0x89, 0x269: 0x8a, 0x26a: 0x8b, 0x26b: 0x8c, 0x26c: 0x8d, 0x26d: 0x8e, 0x26e: 0x8f, 0x26f: 0x89, + 0x270: 0x8a, 0x271: 0x8b, 0x272: 0x8c, 0x273: 0x8d, 0x274: 0x8e, 0x275: 0x8f, 0x276: 0x89, 0x277: 0x8a, + 0x278: 0x8b, 0x279: 0x8c, 0x27a: 0x8d, 0x27b: 0x8e, 0x27c: 0x8f, 0x27d: 0x89, 0x27e: 0x8a, 0x27f: 0x8b, + // Block 0xa, offset 0x280 + 0x280: 0x8c, 0x281: 0x8d, 0x282: 0x8e, 0x283: 0x8f, 0x284: 0x89, 0x285: 0x8a, 0x286: 0x8b, 0x287: 0x8c, + 0x288: 0x8d, 0x289: 0x8e, 0x28a: 0x8f, 0x28b: 0x89, 0x28c: 0x8a, 0x28d: 0x8b, 0x28e: 0x8c, 0x28f: 0x8d, + 0x290: 0x8e, 0x291: 0x8f, 0x292: 0x89, 0x293: 0x8a, 0x294: 0x8b, 0x295: 0x8c, 0x296: 0x8d, 0x297: 0x8e, + 0x298: 0x8f, 0x299: 0x89, 0x29a: 0x8a, 0x29b: 0x8b, 0x29c: 0x8c, 0x29d: 0x8d, 0x29e: 0x8e, 0x29f: 0x8f, + 
0x2a0: 0x89, 0x2a1: 0x8a, 0x2a2: 0x8b, 0x2a3: 0x8c, 0x2a4: 0x8d, 0x2a5: 0x8e, 0x2a6: 0x8f, 0x2a7: 0x89, + 0x2a8: 0x8a, 0x2a9: 0x8b, 0x2aa: 0x8c, 0x2ab: 0x8d, 0x2ac: 0x8e, 0x2ad: 0x8f, 0x2ae: 0x89, 0x2af: 0x8a, + 0x2b0: 0x8b, 0x2b1: 0x8c, 0x2b2: 0x8d, 0x2b3: 0x8e, 0x2b4: 0x8f, 0x2b5: 0x89, 0x2b6: 0x8a, 0x2b7: 0x8b, + 0x2b8: 0x8c, 0x2b9: 0x8d, 0x2ba: 0x8e, 0x2bb: 0x8f, 0x2bc: 0x89, 0x2bd: 0x8a, 0x2be: 0x8b, 0x2bf: 0x8c, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x8d, 0x2c1: 0x8e, 0x2c2: 0x8f, 0x2c3: 0x89, 0x2c4: 0x8a, 0x2c5: 0x8b, 0x2c6: 0x8c, 0x2c7: 0x8d, + 0x2c8: 0x8e, 0x2c9: 0x8f, 0x2ca: 0x89, 0x2cb: 0x8a, 0x2cc: 0x8b, 0x2cd: 0x8c, 0x2ce: 0x8d, 0x2cf: 0x8e, + 0x2d0: 0x8f, 0x2d1: 0x89, 0x2d2: 0x8a, 0x2d3: 0x8b, 0x2d4: 0x8c, 0x2d5: 0x8d, 0x2d6: 0x8e, 0x2d7: 0x8f, + 0x2d8: 0x89, 0x2d9: 0x8a, 0x2da: 0x8b, 0x2db: 0x8c, 0x2dc: 0x8d, 0x2dd: 0x8e, 0x2de: 0x90, + // Block 0xc, offset 0x300 + 0x324: 0x1d, 0x325: 0x1e, 0x326: 0x1f, 0x327: 0x20, + 0x328: 0x21, 0x329: 0x22, 0x32a: 0x23, 0x32b: 0x24, 0x32c: 0x91, 0x32d: 0x92, 0x32e: 0x93, + 0x331: 0x94, 0x332: 0x95, 0x333: 0x96, 0x334: 0x97, + 0x338: 0x98, 0x339: 0x99, 0x33a: 0x9a, 0x33b: 0x9b, 0x33e: 0x9c, 0x33f: 0x9d, + // Block 0xd, offset 0x340 + 0x347: 0x9e, + 0x34b: 0x9f, 0x34d: 0xa0, + 0x368: 0xa1, 0x36b: 0xa2, + 0x374: 0xa3, + 0x37a: 0xa4, 0x37b: 0xa5, 0x37d: 0xa6, 0x37e: 0xa7, + // Block 0xe, offset 0x380 + 0x381: 0xa8, 0x382: 0xa9, 0x384: 0xaa, 0x385: 0x84, 0x387: 0xab, + 0x388: 0xac, 0x38b: 0xad, 0x38c: 0xae, 0x38d: 0xaf, + 0x391: 0xb0, 0x392: 0xb1, 0x393: 0xb2, 0x396: 0xb3, 0x397: 0xb4, + 0x398: 0x75, 0x39a: 0xb5, 0x39c: 0xb6, + 0x3a0: 0xb7, 0x3a4: 0xb8, 0x3a5: 0xb9, 0x3a7: 0xba, + 0x3a8: 0xbb, 0x3a9: 0xbc, 0x3aa: 0xbd, + 0x3b0: 0x75, 0x3b5: 0xbe, 0x3b6: 0xbf, + 0x3bd: 0xc0, + // Block 0xf, offset 0x3c0 + 0x3eb: 0xc1, 0x3ec: 0xc2, + 0x3ff: 0xc3, + // Block 0x10, offset 0x400 + 0x432: 0xc4, + // Block 0x11, offset 0x440 + 0x445: 0xc5, 0x446: 0xc6, 0x447: 0xc7, + 0x449: 0xc8, + // Block 0x12, offset 0x480 + 0x480: 0xc9, 0x482: 0xca, 0x484: 0xc2, + 0x48a: 0xcb, 0x48b: 0xcc, + 0x493: 0xcd, + 0x4a3: 0xce, 0x4a5: 0xcf, + // Block 0x13, offset 0x4c0 + 0x4c8: 0xd0, + // Block 0x14, offset 0x500 + 0x520: 0x25, 0x521: 0x26, 0x522: 0x27, 0x523: 0x28, 0x524: 0x29, 0x525: 0x2a, 0x526: 0x2b, 0x527: 0x2c, + 0x528: 0x2d, + // Block 0x15, offset 0x540 + 0x550: 0x0b, 0x551: 0x0c, 0x556: 0x0d, + 0x55b: 0x0e, 0x55d: 0x0f, 0x55e: 0x10, 0x55f: 0x11, + 0x56f: 0x12, +} + +// nfcSparseOffset: 163 entries, 326 bytes +var nfcSparseOffset = []uint16{0x0, 0x5, 0x9, 0xb, 0xd, 0x18, 0x28, 0x2a, 0x2f, 0x3a, 0x49, 0x56, 0x5e, 0x63, 0x68, 0x6a, 0x6e, 0x76, 0x7d, 0x80, 0x88, 0x8c, 0x90, 0x92, 0x94, 0x9d, 0xa1, 0xa8, 0xad, 0xb0, 0xba, 0xbd, 0xc4, 0xcc, 0xcf, 0xd1, 0xd4, 0xd6, 0xdb, 0xec, 0xf8, 0xfa, 0x100, 0x102, 0x104, 0x106, 0x108, 0x10a, 0x10c, 0x10f, 0x112, 0x114, 0x117, 0x11a, 0x11e, 0x124, 0x12b, 0x134, 0x136, 0x139, 0x13b, 0x146, 0x14a, 0x158, 0x15b, 0x161, 0x167, 0x172, 0x176, 0x178, 0x17a, 0x17c, 0x17e, 0x180, 0x186, 0x18a, 0x18c, 0x18e, 0x196, 0x19a, 0x19d, 0x19f, 0x1a1, 0x1a4, 0x1a7, 0x1a9, 0x1ab, 0x1ad, 0x1af, 0x1b5, 0x1b8, 0x1ba, 0x1c1, 0x1c7, 0x1cd, 0x1d5, 0x1db, 0x1e1, 0x1e7, 0x1eb, 0x1f9, 0x202, 0x205, 0x208, 0x20a, 0x20d, 0x20f, 0x213, 0x218, 0x21a, 0x21c, 0x221, 0x227, 0x229, 0x22b, 0x22d, 0x233, 0x236, 0x238, 0x23a, 0x23c, 0x242, 0x246, 0x24a, 0x252, 0x259, 0x25c, 0x25f, 0x261, 0x264, 0x26c, 0x270, 0x277, 0x27a, 0x280, 0x282, 0x285, 0x287, 0x28a, 0x28f, 0x291, 0x293, 0x295, 0x297, 0x299, 0x29c, 0x29e, 0x2a0, 0x2a2, 0x2a4, 0x2a6, 0x2a8, 0x2b5, 0x2bf, 0x2c1, 
0x2c3, 0x2c9, 0x2cb, 0x2cd, 0x2cf, 0x2d3, 0x2d5, 0x2d8} + +// nfcSparseValues: 730 entries, 2920 bytes +var nfcSparseValues = [730]valueRange{ + // Block 0x0, offset 0x0 + {value: 0x0000, lo: 0x04}, + {value: 0xa100, lo: 0xa8, hi: 0xa8}, + {value: 0x8100, lo: 0xaf, hi: 0xaf}, + {value: 0x8100, lo: 0xb4, hi: 0xb4}, + {value: 0x8100, lo: 0xb8, hi: 0xb8}, + // Block 0x1, offset 0x5 + {value: 0x0091, lo: 0x03}, + {value: 0x4823, lo: 0xa0, hi: 0xa1}, + {value: 0x4855, lo: 0xaf, hi: 0xb0}, + {value: 0xa000, lo: 0xb7, hi: 0xb7}, + // Block 0x2, offset 0x9 + {value: 0x0000, lo: 0x01}, + {value: 0xa000, lo: 0x92, hi: 0x92}, + // Block 0x3, offset 0xb + {value: 0x0000, lo: 0x01}, + {value: 0x8100, lo: 0x98, hi: 0x9d}, + // Block 0x4, offset 0xd + {value: 0x0006, lo: 0x0a}, + {value: 0xa000, lo: 0x81, hi: 0x81}, + {value: 0xa000, lo: 0x85, hi: 0x85}, + {value: 0xa000, lo: 0x89, hi: 0x89}, + {value: 0x4981, lo: 0x8a, hi: 0x8a}, + {value: 0x499f, lo: 0x8b, hi: 0x8b}, + {value: 0x3808, lo: 0x8c, hi: 0x8c}, + {value: 0x3820, lo: 0x8d, hi: 0x8d}, + {value: 0x49b7, lo: 0x8e, hi: 0x8e}, + {value: 0xa000, lo: 0x92, hi: 0x92}, + {value: 0x383e, lo: 0x93, hi: 0x94}, + // Block 0x5, offset 0x18 + {value: 0x0000, lo: 0x0f}, + {value: 0xa000, lo: 0x83, hi: 0x83}, + {value: 0xa000, lo: 0x87, hi: 0x87}, + {value: 0xa000, lo: 0x8b, hi: 0x8b}, + {value: 0xa000, lo: 0x8d, hi: 0x8d}, + {value: 0x38e6, lo: 0x90, hi: 0x90}, + {value: 0x38f2, lo: 0x91, hi: 0x91}, + {value: 0x38e0, lo: 0x93, hi: 0x93}, + {value: 0xa000, lo: 0x96, hi: 0x96}, + {value: 0x3958, lo: 0x97, hi: 0x97}, + {value: 0x3922, lo: 0x9c, hi: 0x9c}, + {value: 0x390a, lo: 0x9d, hi: 0x9d}, + {value: 0x3934, lo: 0x9e, hi: 0x9e}, + {value: 0xa000, lo: 0xb4, hi: 0xb5}, + {value: 0x395e, lo: 0xb6, hi: 0xb6}, + {value: 0x3964, lo: 0xb7, hi: 0xb7}, + // Block 0x6, offset 0x28 + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0x83, hi: 0x87}, + // Block 0x7, offset 0x2a + {value: 0x0001, lo: 0x04}, + {value: 0x8114, lo: 0x81, hi: 0x82}, + {value: 0x8133, lo: 0x84, hi: 0x84}, + {value: 0x812e, lo: 0x85, hi: 0x85}, + {value: 0x810e, lo: 0x87, hi: 0x87}, + // Block 0x8, offset 0x2f + {value: 0x0000, lo: 0x0a}, + {value: 0x8133, lo: 0x90, hi: 0x97}, + {value: 0x811a, lo: 0x98, hi: 0x98}, + {value: 0x811b, lo: 0x99, hi: 0x99}, + {value: 0x811c, lo: 0x9a, hi: 0x9a}, + {value: 0x3982, lo: 0xa2, hi: 0xa2}, + {value: 0x3988, lo: 0xa3, hi: 0xa3}, + {value: 0x3994, lo: 0xa4, hi: 0xa4}, + {value: 0x398e, lo: 0xa5, hi: 0xa5}, + {value: 0x399a, lo: 0xa6, hi: 0xa6}, + {value: 0xa000, lo: 0xa7, hi: 0xa7}, + // Block 0x9, offset 0x3a + {value: 0x0000, lo: 0x0e}, + {value: 0x39ac, lo: 0x80, hi: 0x80}, + {value: 0xa000, lo: 0x81, hi: 0x81}, + {value: 0x39a0, lo: 0x82, hi: 0x82}, + {value: 0xa000, lo: 0x92, hi: 0x92}, + {value: 0x39a6, lo: 0x93, hi: 0x93}, + {value: 0xa000, lo: 0x95, hi: 0x95}, + {value: 0x8133, lo: 0x96, hi: 0x9c}, + {value: 0x8133, lo: 0x9f, hi: 0xa2}, + {value: 0x812e, lo: 0xa3, hi: 0xa3}, + {value: 0x8133, lo: 0xa4, hi: 0xa4}, + {value: 0x8133, lo: 0xa7, hi: 0xa8}, + {value: 0x812e, lo: 0xaa, hi: 0xaa}, + {value: 0x8133, lo: 0xab, hi: 0xac}, + {value: 0x812e, lo: 0xad, hi: 0xad}, + // Block 0xa, offset 0x49 + {value: 0x0000, lo: 0x0c}, + {value: 0x8120, lo: 0x91, hi: 0x91}, + {value: 0x8133, lo: 0xb0, hi: 0xb0}, + {value: 0x812e, lo: 0xb1, hi: 0xb1}, + {value: 0x8133, lo: 0xb2, hi: 0xb3}, + {value: 0x812e, lo: 0xb4, hi: 0xb4}, + {value: 0x8133, lo: 0xb5, hi: 0xb6}, + {value: 0x812e, lo: 0xb7, hi: 0xb9}, + {value: 0x8133, lo: 0xba, hi: 0xba}, + {value: 0x812e, 
lo: 0xbb, hi: 0xbc}, + {value: 0x8133, lo: 0xbd, hi: 0xbd}, + {value: 0x812e, lo: 0xbe, hi: 0xbe}, + {value: 0x8133, lo: 0xbf, hi: 0xbf}, + // Block 0xb, offset 0x56 + {value: 0x0005, lo: 0x07}, + {value: 0x8133, lo: 0x80, hi: 0x80}, + {value: 0x8133, lo: 0x81, hi: 0x81}, + {value: 0x812e, lo: 0x82, hi: 0x83}, + {value: 0x812e, lo: 0x84, hi: 0x85}, + {value: 0x812e, lo: 0x86, hi: 0x87}, + {value: 0x812e, lo: 0x88, hi: 0x89}, + {value: 0x8133, lo: 0x8a, hi: 0x8a}, + // Block 0xc, offset 0x5e + {value: 0x0000, lo: 0x04}, + {value: 0x8133, lo: 0xab, hi: 0xb1}, + {value: 0x812e, lo: 0xb2, hi: 0xb2}, + {value: 0x8133, lo: 0xb3, hi: 0xb3}, + {value: 0x812e, lo: 0xbd, hi: 0xbd}, + // Block 0xd, offset 0x63 + {value: 0x0000, lo: 0x04}, + {value: 0x8133, lo: 0x96, hi: 0x99}, + {value: 0x8133, lo: 0x9b, hi: 0xa3}, + {value: 0x8133, lo: 0xa5, hi: 0xa7}, + {value: 0x8133, lo: 0xa9, hi: 0xad}, + // Block 0xe, offset 0x68 + {value: 0x0000, lo: 0x01}, + {value: 0x812e, lo: 0x99, hi: 0x9b}, + // Block 0xf, offset 0x6a + {value: 0x0000, lo: 0x03}, + {value: 0x8133, lo: 0x98, hi: 0x98}, + {value: 0x812e, lo: 0x99, hi: 0x9b}, + {value: 0x8133, lo: 0x9c, hi: 0x9f}, + // Block 0x10, offset 0x6e + {value: 0x0000, lo: 0x07}, + {value: 0xa000, lo: 0xa8, hi: 0xa8}, + {value: 0x4019, lo: 0xa9, hi: 0xa9}, + {value: 0xa000, lo: 0xb0, hi: 0xb0}, + {value: 0x4021, lo: 0xb1, hi: 0xb1}, + {value: 0xa000, lo: 0xb3, hi: 0xb3}, + {value: 0x4029, lo: 0xb4, hi: 0xb4}, + {value: 0x9903, lo: 0xbc, hi: 0xbc}, + // Block 0x11, offset 0x76 + {value: 0x0008, lo: 0x06}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x8133, lo: 0x91, hi: 0x91}, + {value: 0x812e, lo: 0x92, hi: 0x92}, + {value: 0x8133, lo: 0x93, hi: 0x93}, + {value: 0x8133, lo: 0x94, hi: 0x94}, + {value: 0x465d, lo: 0x98, hi: 0x9f}, + // Block 0x12, offset 0x7d + {value: 0x0000, lo: 0x02}, + {value: 0x8103, lo: 0xbc, hi: 0xbc}, + {value: 0x9900, lo: 0xbe, hi: 0xbe}, + // Block 0x13, offset 0x80 + {value: 0x0008, lo: 0x07}, + {value: 0xa000, lo: 0x87, hi: 0x87}, + {value: 0x2dd5, lo: 0x8b, hi: 0x8c}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x9900, lo: 0x97, hi: 0x97}, + {value: 0x469d, lo: 0x9c, hi: 0x9d}, + {value: 0x46ad, lo: 0x9f, hi: 0x9f}, + {value: 0x8133, lo: 0xbe, hi: 0xbe}, + // Block 0x14, offset 0x88 + {value: 0x0000, lo: 0x03}, + {value: 0x46d5, lo: 0xb3, hi: 0xb3}, + {value: 0x46dd, lo: 0xb6, hi: 0xb6}, + {value: 0x8103, lo: 0xbc, hi: 0xbc}, + // Block 0x15, offset 0x8c + {value: 0x0008, lo: 0x03}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x46b5, lo: 0x99, hi: 0x9b}, + {value: 0x46cd, lo: 0x9e, hi: 0x9e}, + // Block 0x16, offset 0x90 + {value: 0x0000, lo: 0x01}, + {value: 0x8103, lo: 0xbc, hi: 0xbc}, + // Block 0x17, offset 0x92 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + // Block 0x18, offset 0x94 + {value: 0x0000, lo: 0x08}, + {value: 0xa000, lo: 0x87, hi: 0x87}, + {value: 0x2ded, lo: 0x88, hi: 0x88}, + {value: 0x2de5, lo: 0x8b, hi: 0x8b}, + {value: 0x2df5, lo: 0x8c, hi: 0x8c}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x9900, lo: 0x96, hi: 0x97}, + {value: 0x46e5, lo: 0x9c, hi: 0x9c}, + {value: 0x46ed, lo: 0x9d, hi: 0x9d}, + // Block 0x19, offset 0x9d + {value: 0x0000, lo: 0x03}, + {value: 0xa000, lo: 0x92, hi: 0x92}, + {value: 0x2dfd, lo: 0x94, hi: 0x94}, + {value: 0x9900, lo: 0xbe, hi: 0xbe}, + // Block 0x1a, offset 0xa1 + {value: 0x0000, lo: 0x06}, + {value: 0xa000, lo: 0x86, hi: 0x87}, + {value: 0x2e05, lo: 0x8a, hi: 0x8a}, + {value: 0x2e15, lo: 0x8b, hi: 0x8b}, + {value: 0x2e0d, lo: 0x8c, hi: 
0x8c}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x9900, lo: 0x97, hi: 0x97}, + // Block 0x1b, offset 0xa8 + {value: 0x1801, lo: 0x04}, + {value: 0xa000, lo: 0x86, hi: 0x86}, + {value: 0x4031, lo: 0x88, hi: 0x88}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x8121, lo: 0x95, hi: 0x96}, + // Block 0x1c, offset 0xad + {value: 0x0000, lo: 0x02}, + {value: 0x8103, lo: 0xbc, hi: 0xbc}, + {value: 0xa000, lo: 0xbf, hi: 0xbf}, + // Block 0x1d, offset 0xb0 + {value: 0x0000, lo: 0x09}, + {value: 0x2e1d, lo: 0x80, hi: 0x80}, + {value: 0x9900, lo: 0x82, hi: 0x82}, + {value: 0xa000, lo: 0x86, hi: 0x86}, + {value: 0x2e25, lo: 0x87, hi: 0x87}, + {value: 0x2e2d, lo: 0x88, hi: 0x88}, + {value: 0x3091, lo: 0x8a, hi: 0x8a}, + {value: 0x2f19, lo: 0x8b, hi: 0x8b}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x9900, lo: 0x95, hi: 0x96}, + // Block 0x1e, offset 0xba + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0xbb, hi: 0xbc}, + {value: 0x9900, lo: 0xbe, hi: 0xbe}, + // Block 0x1f, offset 0xbd + {value: 0x0000, lo: 0x06}, + {value: 0xa000, lo: 0x86, hi: 0x87}, + {value: 0x2e35, lo: 0x8a, hi: 0x8a}, + {value: 0x2e45, lo: 0x8b, hi: 0x8b}, + {value: 0x2e3d, lo: 0x8c, hi: 0x8c}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x9900, lo: 0x97, hi: 0x97}, + // Block 0x20, offset 0xc4 + {value: 0x6ab3, lo: 0x07}, + {value: 0x9905, lo: 0x8a, hi: 0x8a}, + {value: 0x9900, lo: 0x8f, hi: 0x8f}, + {value: 0xa000, lo: 0x99, hi: 0x99}, + {value: 0x4039, lo: 0x9a, hi: 0x9a}, + {value: 0x3099, lo: 0x9c, hi: 0x9c}, + {value: 0x2f24, lo: 0x9d, hi: 0x9d}, + {value: 0x2e4d, lo: 0x9e, hi: 0x9f}, + // Block 0x21, offset 0xcc + {value: 0x0000, lo: 0x02}, + {value: 0x8123, lo: 0xb8, hi: 0xb9}, + {value: 0x8105, lo: 0xba, hi: 0xba}, + // Block 0x22, offset 0xcf + {value: 0x0000, lo: 0x01}, + {value: 0x8124, lo: 0x88, hi: 0x8b}, + // Block 0x23, offset 0xd1 + {value: 0x0000, lo: 0x02}, + {value: 0x8125, lo: 0xb8, hi: 0xb9}, + {value: 0x8105, lo: 0xba, hi: 0xba}, + // Block 0x24, offset 0xd4 + {value: 0x0000, lo: 0x01}, + {value: 0x8126, lo: 0x88, hi: 0x8b}, + // Block 0x25, offset 0xd6 + {value: 0x0000, lo: 0x04}, + {value: 0x812e, lo: 0x98, hi: 0x99}, + {value: 0x812e, lo: 0xb5, hi: 0xb5}, + {value: 0x812e, lo: 0xb7, hi: 0xb7}, + {value: 0x812c, lo: 0xb9, hi: 0xb9}, + // Block 0x26, offset 0xdb + {value: 0x0000, lo: 0x10}, + {value: 0x2774, lo: 0x83, hi: 0x83}, + {value: 0x277b, lo: 0x8d, hi: 0x8d}, + {value: 0x2782, lo: 0x92, hi: 0x92}, + {value: 0x2789, lo: 0x97, hi: 0x97}, + {value: 0x2790, lo: 0x9c, hi: 0x9c}, + {value: 0x276d, lo: 0xa9, hi: 0xa9}, + {value: 0x8127, lo: 0xb1, hi: 0xb1}, + {value: 0x8128, lo: 0xb2, hi: 0xb2}, + {value: 0x4bc5, lo: 0xb3, hi: 0xb3}, + {value: 0x8129, lo: 0xb4, hi: 0xb4}, + {value: 0x4bce, lo: 0xb5, hi: 0xb5}, + {value: 0x46f5, lo: 0xb6, hi: 0xb6}, + {value: 0x8200, lo: 0xb7, hi: 0xb7}, + {value: 0x46fd, lo: 0xb8, hi: 0xb8}, + {value: 0x8200, lo: 0xb9, hi: 0xb9}, + {value: 0x8128, lo: 0xba, hi: 0xbd}, + // Block 0x27, offset 0xec + {value: 0x0000, lo: 0x0b}, + {value: 0x8128, lo: 0x80, hi: 0x80}, + {value: 0x4bd7, lo: 0x81, hi: 0x81}, + {value: 0x8133, lo: 0x82, hi: 0x83}, + {value: 0x8105, lo: 0x84, hi: 0x84}, + {value: 0x8133, lo: 0x86, hi: 0x87}, + {value: 0x279e, lo: 0x93, hi: 0x93}, + {value: 0x27a5, lo: 0x9d, hi: 0x9d}, + {value: 0x27ac, lo: 0xa2, hi: 0xa2}, + {value: 0x27b3, lo: 0xa7, hi: 0xa7}, + {value: 0x27ba, lo: 0xac, hi: 0xac}, + {value: 0x2797, lo: 0xb9, hi: 0xb9}, + // Block 0x28, offset 0xf8 + {value: 0x0000, lo: 0x01}, + {value: 0x812e, lo: 0x86, hi: 0x86}, + // 
Block 0x29, offset 0xfa + {value: 0x0000, lo: 0x05}, + {value: 0xa000, lo: 0xa5, hi: 0xa5}, + {value: 0x2e55, lo: 0xa6, hi: 0xa6}, + {value: 0x9900, lo: 0xae, hi: 0xae}, + {value: 0x8103, lo: 0xb7, hi: 0xb7}, + {value: 0x8105, lo: 0xb9, hi: 0xba}, + // Block 0x2a, offset 0x100 + {value: 0x0000, lo: 0x01}, + {value: 0x812e, lo: 0x8d, hi: 0x8d}, + // Block 0x2b, offset 0x102 + {value: 0x0000, lo: 0x01}, + {value: 0xa000, lo: 0x80, hi: 0x92}, + // Block 0x2c, offset 0x104 + {value: 0x0000, lo: 0x01}, + {value: 0xb900, lo: 0xa1, hi: 0xb5}, + // Block 0x2d, offset 0x106 + {value: 0x0000, lo: 0x01}, + {value: 0x9900, lo: 0xa8, hi: 0xbf}, + // Block 0x2e, offset 0x108 + {value: 0x0000, lo: 0x01}, + {value: 0x9900, lo: 0x80, hi: 0x82}, + // Block 0x2f, offset 0x10a + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0x9d, hi: 0x9f}, + // Block 0x30, offset 0x10c + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0x94, hi: 0x95}, + {value: 0x8105, lo: 0xb4, hi: 0xb4}, + // Block 0x31, offset 0x10f + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0x92, hi: 0x92}, + {value: 0x8133, lo: 0x9d, hi: 0x9d}, + // Block 0x32, offset 0x112 + {value: 0x0000, lo: 0x01}, + {value: 0x8132, lo: 0xa9, hi: 0xa9}, + // Block 0x33, offset 0x114 + {value: 0x0004, lo: 0x02}, + {value: 0x812f, lo: 0xb9, hi: 0xba}, + {value: 0x812e, lo: 0xbb, hi: 0xbb}, + // Block 0x34, offset 0x117 + {value: 0x0000, lo: 0x02}, + {value: 0x8133, lo: 0x97, hi: 0x97}, + {value: 0x812e, lo: 0x98, hi: 0x98}, + // Block 0x35, offset 0x11a + {value: 0x0000, lo: 0x03}, + {value: 0x8105, lo: 0xa0, hi: 0xa0}, + {value: 0x8133, lo: 0xb5, hi: 0xbc}, + {value: 0x812e, lo: 0xbf, hi: 0xbf}, + // Block 0x36, offset 0x11e + {value: 0x0000, lo: 0x05}, + {value: 0x8133, lo: 0xb0, hi: 0xb4}, + {value: 0x812e, lo: 0xb5, hi: 0xba}, + {value: 0x8133, lo: 0xbb, hi: 0xbc}, + {value: 0x812e, lo: 0xbd, hi: 0xbd}, + {value: 0x812e, lo: 0xbf, hi: 0xbf}, + // Block 0x37, offset 0x124 + {value: 0x0000, lo: 0x06}, + {value: 0x812e, lo: 0x80, hi: 0x80}, + {value: 0x8133, lo: 0x81, hi: 0x82}, + {value: 0x812e, lo: 0x83, hi: 0x84}, + {value: 0x8133, lo: 0x85, hi: 0x89}, + {value: 0x812e, lo: 0x8a, hi: 0x8a}, + {value: 0x8133, lo: 0x8b, hi: 0x8e}, + // Block 0x38, offset 0x12b + {value: 0x0000, lo: 0x08}, + {value: 0x2e9d, lo: 0x80, hi: 0x80}, + {value: 0x2ea5, lo: 0x81, hi: 0x81}, + {value: 0xa000, lo: 0x82, hi: 0x82}, + {value: 0x2ead, lo: 0x83, hi: 0x83}, + {value: 0x8105, lo: 0x84, hi: 0x84}, + {value: 0x8133, lo: 0xab, hi: 0xab}, + {value: 0x812e, lo: 0xac, hi: 0xac}, + {value: 0x8133, lo: 0xad, hi: 0xb3}, + // Block 0x39, offset 0x134 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0xaa, hi: 0xab}, + // Block 0x3a, offset 0x136 + {value: 0x0000, lo: 0x02}, + {value: 0x8103, lo: 0xa6, hi: 0xa6}, + {value: 0x8105, lo: 0xb2, hi: 0xb3}, + // Block 0x3b, offset 0x139 + {value: 0x0000, lo: 0x01}, + {value: 0x8103, lo: 0xb7, hi: 0xb7}, + // Block 0x3c, offset 0x13b + {value: 0x0000, lo: 0x0a}, + {value: 0x8133, lo: 0x90, hi: 0x92}, + {value: 0x8101, lo: 0x94, hi: 0x94}, + {value: 0x812e, lo: 0x95, hi: 0x99}, + {value: 0x8133, lo: 0x9a, hi: 0x9b}, + {value: 0x812e, lo: 0x9c, hi: 0x9f}, + {value: 0x8133, lo: 0xa0, hi: 0xa0}, + {value: 0x8101, lo: 0xa2, hi: 0xa8}, + {value: 0x812e, lo: 0xad, hi: 0xad}, + {value: 0x8133, lo: 0xb4, hi: 0xb4}, + {value: 0x8133, lo: 0xb8, hi: 0xb9}, + // Block 0x3d, offset 0x146 + {value: 0x0004, lo: 0x03}, + {value: 0x052a, lo: 0x80, hi: 0x81}, + {value: 0x8100, lo: 0x97, hi: 0x97}, + {value: 0x8100, lo: 0xbe, hi: 0xbe}, + // Block 0x3e, 
offset 0x14a + {value: 0x0000, lo: 0x0d}, + {value: 0x8133, lo: 0x90, hi: 0x91}, + {value: 0x8101, lo: 0x92, hi: 0x93}, + {value: 0x8133, lo: 0x94, hi: 0x97}, + {value: 0x8101, lo: 0x98, hi: 0x9a}, + {value: 0x8133, lo: 0x9b, hi: 0x9c}, + {value: 0x8133, lo: 0xa1, hi: 0xa1}, + {value: 0x8101, lo: 0xa5, hi: 0xa6}, + {value: 0x8133, lo: 0xa7, hi: 0xa7}, + {value: 0x812e, lo: 0xa8, hi: 0xa8}, + {value: 0x8133, lo: 0xa9, hi: 0xa9}, + {value: 0x8101, lo: 0xaa, hi: 0xab}, + {value: 0x812e, lo: 0xac, hi: 0xaf}, + {value: 0x8133, lo: 0xb0, hi: 0xb0}, + // Block 0x3f, offset 0x158 + {value: 0x43bc, lo: 0x02}, + {value: 0x023c, lo: 0xa6, hi: 0xa6}, + {value: 0x0057, lo: 0xaa, hi: 0xab}, + // Block 0x40, offset 0x15b + {value: 0x0007, lo: 0x05}, + {value: 0xa000, lo: 0x90, hi: 0x90}, + {value: 0xa000, lo: 0x92, hi: 0x92}, + {value: 0xa000, lo: 0x94, hi: 0x94}, + {value: 0x3cfa, lo: 0x9a, hi: 0x9b}, + {value: 0x3d08, lo: 0xae, hi: 0xae}, + // Block 0x41, offset 0x161 + {value: 0x000e, lo: 0x05}, + {value: 0x3d0f, lo: 0x8d, hi: 0x8e}, + {value: 0x3d16, lo: 0x8f, hi: 0x8f}, + {value: 0xa000, lo: 0x90, hi: 0x90}, + {value: 0xa000, lo: 0x92, hi: 0x92}, + {value: 0xa000, lo: 0x94, hi: 0x94}, + // Block 0x42, offset 0x167 + {value: 0x62c7, lo: 0x0a}, + {value: 0xa000, lo: 0x83, hi: 0x83}, + {value: 0x3d24, lo: 0x84, hi: 0x84}, + {value: 0xa000, lo: 0x88, hi: 0x88}, + {value: 0x3d2b, lo: 0x89, hi: 0x89}, + {value: 0xa000, lo: 0x8b, hi: 0x8b}, + {value: 0x3d32, lo: 0x8c, hi: 0x8c}, + {value: 0xa000, lo: 0xa3, hi: 0xa3}, + {value: 0x3d39, lo: 0xa4, hi: 0xa5}, + {value: 0x3d40, lo: 0xa6, hi: 0xa6}, + {value: 0xa000, lo: 0xbc, hi: 0xbc}, + // Block 0x43, offset 0x172 + {value: 0x0007, lo: 0x03}, + {value: 0x3da9, lo: 0xa0, hi: 0xa1}, + {value: 0x3dd3, lo: 0xa2, hi: 0xa3}, + {value: 0x3dfd, lo: 0xaa, hi: 0xad}, + // Block 0x44, offset 0x176 + {value: 0x0004, lo: 0x01}, + {value: 0x0586, lo: 0xa9, hi: 0xaa}, + // Block 0x45, offset 0x178 + {value: 0x0000, lo: 0x01}, + {value: 0x461e, lo: 0x9c, hi: 0x9c}, + // Block 0x46, offset 0x17a + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xaf, hi: 0xb1}, + // Block 0x47, offset 0x17c + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0xbf, hi: 0xbf}, + // Block 0x48, offset 0x17e + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xa0, hi: 0xbf}, + // Block 0x49, offset 0x180 + {value: 0x0000, lo: 0x05}, + {value: 0x812d, lo: 0xaa, hi: 0xaa}, + {value: 0x8132, lo: 0xab, hi: 0xab}, + {value: 0x8134, lo: 0xac, hi: 0xac}, + {value: 0x812f, lo: 0xad, hi: 0xad}, + {value: 0x8130, lo: 0xae, hi: 0xaf}, + // Block 0x4a, offset 0x186 + {value: 0x0000, lo: 0x03}, + {value: 0x4be0, lo: 0xb3, hi: 0xb3}, + {value: 0x4be0, lo: 0xb5, hi: 0xb6}, + {value: 0x4be0, lo: 0xba, hi: 0xbf}, + // Block 0x4b, offset 0x18a + {value: 0x0000, lo: 0x01}, + {value: 0x4be0, lo: 0x8f, hi: 0xa3}, + // Block 0x4c, offset 0x18c + {value: 0x0000, lo: 0x01}, + {value: 0x8100, lo: 0xae, hi: 0xbe}, + // Block 0x4d, offset 0x18e + {value: 0x0000, lo: 0x07}, + {value: 0x8100, lo: 0x84, hi: 0x84}, + {value: 0x8100, lo: 0x87, hi: 0x87}, + {value: 0x8100, lo: 0x90, hi: 0x90}, + {value: 0x8100, lo: 0x9e, hi: 0x9e}, + {value: 0x8100, lo: 0xa1, hi: 0xa1}, + {value: 0x8100, lo: 0xb2, hi: 0xb2}, + {value: 0x8100, lo: 0xbb, hi: 0xbb}, + // Block 0x4e, offset 0x196 + {value: 0x0000, lo: 0x03}, + {value: 0x8100, lo: 0x80, hi: 0x80}, + {value: 0x8100, lo: 0x8b, hi: 0x8b}, + {value: 0x8100, lo: 0x8e, hi: 0x8e}, + // Block 0x4f, offset 0x19a + {value: 0x0000, lo: 0x02}, + {value: 0x8133, lo: 0xaf, hi: 0xaf}, + {value: 
0x8133, lo: 0xb4, hi: 0xbd}, + // Block 0x50, offset 0x19d + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0x9e, hi: 0x9f}, + // Block 0x51, offset 0x19f + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xb0, hi: 0xb1}, + // Block 0x52, offset 0x1a1 + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0x86, hi: 0x86}, + {value: 0x8105, lo: 0xac, hi: 0xac}, + // Block 0x53, offset 0x1a4 + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0x84, hi: 0x84}, + {value: 0x8133, lo: 0xa0, hi: 0xb1}, + // Block 0x54, offset 0x1a7 + {value: 0x0000, lo: 0x01}, + {value: 0x812e, lo: 0xab, hi: 0xad}, + // Block 0x55, offset 0x1a9 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0x93, hi: 0x93}, + // Block 0x56, offset 0x1ab + {value: 0x0000, lo: 0x01}, + {value: 0x8103, lo: 0xb3, hi: 0xb3}, + // Block 0x57, offset 0x1ad + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0x80, hi: 0x80}, + // Block 0x58, offset 0x1af + {value: 0x0000, lo: 0x05}, + {value: 0x8133, lo: 0xb0, hi: 0xb0}, + {value: 0x8133, lo: 0xb2, hi: 0xb3}, + {value: 0x812e, lo: 0xb4, hi: 0xb4}, + {value: 0x8133, lo: 0xb7, hi: 0xb8}, + {value: 0x8133, lo: 0xbe, hi: 0xbf}, + // Block 0x59, offset 0x1b5 + {value: 0x0000, lo: 0x02}, + {value: 0x8133, lo: 0x81, hi: 0x81}, + {value: 0x8105, lo: 0xb6, hi: 0xb6}, + // Block 0x5a, offset 0x1b8 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0xad, hi: 0xad}, + // Block 0x5b, offset 0x1ba + {value: 0x0000, lo: 0x06}, + {value: 0xe500, lo: 0x80, hi: 0x80}, + {value: 0xc600, lo: 0x81, hi: 0x9b}, + {value: 0xe500, lo: 0x9c, hi: 0x9c}, + {value: 0xc600, lo: 0x9d, hi: 0xb7}, + {value: 0xe500, lo: 0xb8, hi: 0xb8}, + {value: 0xc600, lo: 0xb9, hi: 0xbf}, + // Block 0x5c, offset 0x1c1 + {value: 0x0000, lo: 0x05}, + {value: 0xc600, lo: 0x80, hi: 0x93}, + {value: 0xe500, lo: 0x94, hi: 0x94}, + {value: 0xc600, lo: 0x95, hi: 0xaf}, + {value: 0xe500, lo: 0xb0, hi: 0xb0}, + {value: 0xc600, lo: 0xb1, hi: 0xbf}, + // Block 0x5d, offset 0x1c7 + {value: 0x0000, lo: 0x05}, + {value: 0xc600, lo: 0x80, hi: 0x8b}, + {value: 0xe500, lo: 0x8c, hi: 0x8c}, + {value: 0xc600, lo: 0x8d, hi: 0xa7}, + {value: 0xe500, lo: 0xa8, hi: 0xa8}, + {value: 0xc600, lo: 0xa9, hi: 0xbf}, + // Block 0x5e, offset 0x1cd + {value: 0x0000, lo: 0x07}, + {value: 0xc600, lo: 0x80, hi: 0x83}, + {value: 0xe500, lo: 0x84, hi: 0x84}, + {value: 0xc600, lo: 0x85, hi: 0x9f}, + {value: 0xe500, lo: 0xa0, hi: 0xa0}, + {value: 0xc600, lo: 0xa1, hi: 0xbb}, + {value: 0xe500, lo: 0xbc, hi: 0xbc}, + {value: 0xc600, lo: 0xbd, hi: 0xbf}, + // Block 0x5f, offset 0x1d5 + {value: 0x0000, lo: 0x05}, + {value: 0xc600, lo: 0x80, hi: 0x97}, + {value: 0xe500, lo: 0x98, hi: 0x98}, + {value: 0xc600, lo: 0x99, hi: 0xb3}, + {value: 0xe500, lo: 0xb4, hi: 0xb4}, + {value: 0xc600, lo: 0xb5, hi: 0xbf}, + // Block 0x60, offset 0x1db + {value: 0x0000, lo: 0x05}, + {value: 0xc600, lo: 0x80, hi: 0x8f}, + {value: 0xe500, lo: 0x90, hi: 0x90}, + {value: 0xc600, lo: 0x91, hi: 0xab}, + {value: 0xe500, lo: 0xac, hi: 0xac}, + {value: 0xc600, lo: 0xad, hi: 0xbf}, + // Block 0x61, offset 0x1e1 + {value: 0x0000, lo: 0x05}, + {value: 0xc600, lo: 0x80, hi: 0x87}, + {value: 0xe500, lo: 0x88, hi: 0x88}, + {value: 0xc600, lo: 0x89, hi: 0xa3}, + {value: 0xe500, lo: 0xa4, hi: 0xa4}, + {value: 0xc600, lo: 0xa5, hi: 0xbf}, + // Block 0x62, offset 0x1e7 + {value: 0x0000, lo: 0x03}, + {value: 0xc600, lo: 0x80, hi: 0x87}, + {value: 0xe500, lo: 0x88, hi: 0x88}, + {value: 0xc600, lo: 0x89, hi: 0xa3}, + // Block 0x63, offset 0x1eb + {value: 0x0006, lo: 0x0d}, + {value: 0x44d1, lo: 0x9d, hi: 0x9d}, + 
{value: 0x8116, lo: 0x9e, hi: 0x9e}, + {value: 0x4543, lo: 0x9f, hi: 0x9f}, + {value: 0x4531, lo: 0xaa, hi: 0xab}, + {value: 0x4635, lo: 0xac, hi: 0xac}, + {value: 0x463d, lo: 0xad, hi: 0xad}, + {value: 0x4489, lo: 0xae, hi: 0xb1}, + {value: 0x44a7, lo: 0xb2, hi: 0xb4}, + {value: 0x44bf, lo: 0xb5, hi: 0xb6}, + {value: 0x44cb, lo: 0xb8, hi: 0xb8}, + {value: 0x44d7, lo: 0xb9, hi: 0xbb}, + {value: 0x44ef, lo: 0xbc, hi: 0xbc}, + {value: 0x44f5, lo: 0xbe, hi: 0xbe}, + // Block 0x64, offset 0x1f9 + {value: 0x0006, lo: 0x08}, + {value: 0x44fb, lo: 0x80, hi: 0x81}, + {value: 0x4507, lo: 0x83, hi: 0x84}, + {value: 0x4519, lo: 0x86, hi: 0x89}, + {value: 0x453d, lo: 0x8a, hi: 0x8a}, + {value: 0x44b9, lo: 0x8b, hi: 0x8b}, + {value: 0x44a1, lo: 0x8c, hi: 0x8c}, + {value: 0x44e9, lo: 0x8d, hi: 0x8d}, + {value: 0x4513, lo: 0x8e, hi: 0x8e}, + // Block 0x65, offset 0x202 + {value: 0x0000, lo: 0x02}, + {value: 0x8100, lo: 0xa4, hi: 0xa5}, + {value: 0x8100, lo: 0xb0, hi: 0xb1}, + // Block 0x66, offset 0x205 + {value: 0x0000, lo: 0x02}, + {value: 0x8100, lo: 0x9b, hi: 0x9d}, + {value: 0x8200, lo: 0x9e, hi: 0xa3}, + // Block 0x67, offset 0x208 + {value: 0x0000, lo: 0x01}, + {value: 0x8100, lo: 0x90, hi: 0x90}, + // Block 0x68, offset 0x20a + {value: 0x0000, lo: 0x02}, + {value: 0x8100, lo: 0x99, hi: 0x99}, + {value: 0x8200, lo: 0xb2, hi: 0xb4}, + // Block 0x69, offset 0x20d + {value: 0x0000, lo: 0x01}, + {value: 0x8100, lo: 0xbc, hi: 0xbd}, + // Block 0x6a, offset 0x20f + {value: 0x0000, lo: 0x03}, + {value: 0x8133, lo: 0xa0, hi: 0xa6}, + {value: 0x812e, lo: 0xa7, hi: 0xad}, + {value: 0x8133, lo: 0xae, hi: 0xaf}, + // Block 0x6b, offset 0x213 + {value: 0x0000, lo: 0x04}, + {value: 0x8100, lo: 0x89, hi: 0x8c}, + {value: 0x8100, lo: 0xb0, hi: 0xb2}, + {value: 0x8100, lo: 0xb4, hi: 0xb4}, + {value: 0x8100, lo: 0xb6, hi: 0xbf}, + // Block 0x6c, offset 0x218 + {value: 0x0000, lo: 0x01}, + {value: 0x8100, lo: 0x81, hi: 0x8c}, + // Block 0x6d, offset 0x21a + {value: 0x0000, lo: 0x01}, + {value: 0x8100, lo: 0xb5, hi: 0xba}, + // Block 0x6e, offset 0x21c + {value: 0x0000, lo: 0x04}, + {value: 0x4be0, lo: 0x9e, hi: 0x9f}, + {value: 0x4be0, lo: 0xa3, hi: 0xa3}, + {value: 0x4be0, lo: 0xa5, hi: 0xa6}, + {value: 0x4be0, lo: 0xaa, hi: 0xaf}, + // Block 0x6f, offset 0x221 + {value: 0x0000, lo: 0x05}, + {value: 0x4be0, lo: 0x82, hi: 0x87}, + {value: 0x4be0, lo: 0x8a, hi: 0x8f}, + {value: 0x4be0, lo: 0x92, hi: 0x97}, + {value: 0x4be0, lo: 0x9a, hi: 0x9c}, + {value: 0x8100, lo: 0xa3, hi: 0xa3}, + // Block 0x70, offset 0x227 + {value: 0x0000, lo: 0x01}, + {value: 0x812e, lo: 0xbd, hi: 0xbd}, + // Block 0x71, offset 0x229 + {value: 0x0000, lo: 0x01}, + {value: 0x812e, lo: 0xa0, hi: 0xa0}, + // Block 0x72, offset 0x22b + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xb6, hi: 0xba}, + // Block 0x73, offset 0x22d + {value: 0x002d, lo: 0x05}, + {value: 0x812e, lo: 0x8d, hi: 0x8d}, + {value: 0x8133, lo: 0x8f, hi: 0x8f}, + {value: 0x8133, lo: 0xb8, hi: 0xb8}, + {value: 0x8101, lo: 0xb9, hi: 0xba}, + {value: 0x8105, lo: 0xbf, hi: 0xbf}, + // Block 0x74, offset 0x233 + {value: 0x0000, lo: 0x02}, + {value: 0x8133, lo: 0xa5, hi: 0xa5}, + {value: 0x812e, lo: 0xa6, hi: 0xa6}, + // Block 0x75, offset 0x236 + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xa4, hi: 0xa7}, + // Block 0x76, offset 0x238 + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xab, hi: 0xac}, + // Block 0x77, offset 0x23a + {value: 0x0000, lo: 0x01}, + {value: 0x812e, lo: 0xbd, hi: 0xbf}, + // Block 0x78, offset 0x23c + {value: 0x0000, lo: 0x05}, + {value: 
0x812e, lo: 0x86, hi: 0x87}, + {value: 0x8133, lo: 0x88, hi: 0x8a}, + {value: 0x812e, lo: 0x8b, hi: 0x8b}, + {value: 0x8133, lo: 0x8c, hi: 0x8c}, + {value: 0x812e, lo: 0x8d, hi: 0x90}, + // Block 0x79, offset 0x242 + {value: 0x0005, lo: 0x03}, + {value: 0x8133, lo: 0x82, hi: 0x82}, + {value: 0x812e, lo: 0x83, hi: 0x84}, + {value: 0x812e, lo: 0x85, hi: 0x85}, + // Block 0x7a, offset 0x246 + {value: 0x0000, lo: 0x03}, + {value: 0x8105, lo: 0x86, hi: 0x86}, + {value: 0x8105, lo: 0xb0, hi: 0xb0}, + {value: 0x8105, lo: 0xbf, hi: 0xbf}, + // Block 0x7b, offset 0x24a + {value: 0x17fe, lo: 0x07}, + {value: 0xa000, lo: 0x99, hi: 0x99}, + {value: 0x4379, lo: 0x9a, hi: 0x9a}, + {value: 0xa000, lo: 0x9b, hi: 0x9b}, + {value: 0x4383, lo: 0x9c, hi: 0x9c}, + {value: 0xa000, lo: 0xa5, hi: 0xa5}, + {value: 0x438d, lo: 0xab, hi: 0xab}, + {value: 0x8105, lo: 0xb9, hi: 0xba}, + // Block 0x7c, offset 0x252 + {value: 0x0000, lo: 0x06}, + {value: 0x8133, lo: 0x80, hi: 0x82}, + {value: 0x9900, lo: 0xa7, hi: 0xa7}, + {value: 0x2eb5, lo: 0xae, hi: 0xae}, + {value: 0x2ebf, lo: 0xaf, hi: 0xaf}, + {value: 0xa000, lo: 0xb1, hi: 0xb2}, + {value: 0x8105, lo: 0xb3, hi: 0xb4}, + // Block 0x7d, offset 0x259 + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0x80, hi: 0x80}, + {value: 0x8103, lo: 0x8a, hi: 0x8a}, + // Block 0x7e, offset 0x25c + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0xb5, hi: 0xb5}, + {value: 0x8103, lo: 0xb6, hi: 0xb6}, + // Block 0x7f, offset 0x25f + {value: 0x0002, lo: 0x01}, + {value: 0x8103, lo: 0xa9, hi: 0xaa}, + // Block 0x80, offset 0x261 + {value: 0x0000, lo: 0x02}, + {value: 0x8103, lo: 0xbb, hi: 0xbc}, + {value: 0x9900, lo: 0xbe, hi: 0xbe}, + // Block 0x81, offset 0x264 + {value: 0x0000, lo: 0x07}, + {value: 0xa000, lo: 0x87, hi: 0x87}, + {value: 0x2ec9, lo: 0x8b, hi: 0x8b}, + {value: 0x2ed3, lo: 0x8c, hi: 0x8c}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x9900, lo: 0x97, hi: 0x97}, + {value: 0x8133, lo: 0xa6, hi: 0xac}, + {value: 0x8133, lo: 0xb0, hi: 0xb4}, + // Block 0x82, offset 0x26c + {value: 0x0000, lo: 0x03}, + {value: 0x8105, lo: 0x82, hi: 0x82}, + {value: 0x8103, lo: 0x86, hi: 0x86}, + {value: 0x8133, lo: 0x9e, hi: 0x9e}, + // Block 0x83, offset 0x270 + {value: 0x6a23, lo: 0x06}, + {value: 0x9900, lo: 0xb0, hi: 0xb0}, + {value: 0xa000, lo: 0xb9, hi: 0xb9}, + {value: 0x9900, lo: 0xba, hi: 0xba}, + {value: 0x2ee7, lo: 0xbb, hi: 0xbb}, + {value: 0x2edd, lo: 0xbc, hi: 0xbd}, + {value: 0x2ef1, lo: 0xbe, hi: 0xbe}, + // Block 0x84, offset 0x277 + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0x82, hi: 0x82}, + {value: 0x8103, lo: 0x83, hi: 0x83}, + // Block 0x85, offset 0x27a + {value: 0x0000, lo: 0x05}, + {value: 0x9900, lo: 0xaf, hi: 0xaf}, + {value: 0xa000, lo: 0xb8, hi: 0xb9}, + {value: 0x2efb, lo: 0xba, hi: 0xba}, + {value: 0x2f05, lo: 0xbb, hi: 0xbb}, + {value: 0x8105, lo: 0xbf, hi: 0xbf}, + // Block 0x86, offset 0x280 + {value: 0x0000, lo: 0x01}, + {value: 0x8103, lo: 0x80, hi: 0x80}, + // Block 0x87, offset 0x282 + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0xb6, hi: 0xb6}, + {value: 0x8103, lo: 0xb7, hi: 0xb7}, + // Block 0x88, offset 0x285 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0xab, hi: 0xab}, + // Block 0x89, offset 0x287 + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0xb9, hi: 0xb9}, + {value: 0x8103, lo: 0xba, hi: 0xba}, + // Block 0x8a, offset 0x28a + {value: 0x0000, lo: 0x04}, + {value: 0x9900, lo: 0xb0, hi: 0xb0}, + {value: 0xa000, lo: 0xb5, hi: 0xb5}, + {value: 0x2f0f, lo: 0xb8, hi: 0xb8}, + {value: 0x8105, lo: 0xbd, hi: 0xbe}, + // 
Block 0x8b, offset 0x28f + {value: 0x0000, lo: 0x01}, + {value: 0x8103, lo: 0x83, hi: 0x83}, + // Block 0x8c, offset 0x291 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0xa0, hi: 0xa0}, + // Block 0x8d, offset 0x293 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0xb4, hi: 0xb4}, + // Block 0x8e, offset 0x295 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0x87, hi: 0x87}, + // Block 0x8f, offset 0x297 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0x99, hi: 0x99}, + // Block 0x90, offset 0x299 + {value: 0x0000, lo: 0x02}, + {value: 0x8103, lo: 0x82, hi: 0x82}, + {value: 0x8105, lo: 0x84, hi: 0x85}, + // Block 0x91, offset 0x29c + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0x97, hi: 0x97}, + // Block 0x92, offset 0x29e + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0x81, hi: 0x82}, + // Block 0x93, offset 0x2a0 + {value: 0x0000, lo: 0x01}, + {value: 0x8101, lo: 0xb0, hi: 0xb4}, + // Block 0x94, offset 0x2a2 + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xb0, hi: 0xb6}, + // Block 0x95, offset 0x2a4 + {value: 0x0000, lo: 0x01}, + {value: 0x8102, lo: 0xb0, hi: 0xb1}, + // Block 0x96, offset 0x2a6 + {value: 0x0000, lo: 0x01}, + {value: 0x8101, lo: 0x9e, hi: 0x9e}, + // Block 0x97, offset 0x2a8 + {value: 0x0000, lo: 0x0c}, + {value: 0x470d, lo: 0x9e, hi: 0x9e}, + {value: 0x4717, lo: 0x9f, hi: 0x9f}, + {value: 0x474b, lo: 0xa0, hi: 0xa0}, + {value: 0x4759, lo: 0xa1, hi: 0xa1}, + {value: 0x4767, lo: 0xa2, hi: 0xa2}, + {value: 0x4775, lo: 0xa3, hi: 0xa3}, + {value: 0x4783, lo: 0xa4, hi: 0xa4}, + {value: 0x812c, lo: 0xa5, hi: 0xa6}, + {value: 0x8101, lo: 0xa7, hi: 0xa9}, + {value: 0x8131, lo: 0xad, hi: 0xad}, + {value: 0x812c, lo: 0xae, hi: 0xb2}, + {value: 0x812e, lo: 0xbb, hi: 0xbf}, + // Block 0x98, offset 0x2b5 + {value: 0x0000, lo: 0x09}, + {value: 0x812e, lo: 0x80, hi: 0x82}, + {value: 0x8133, lo: 0x85, hi: 0x89}, + {value: 0x812e, lo: 0x8a, hi: 0x8b}, + {value: 0x8133, lo: 0xaa, hi: 0xad}, + {value: 0x4721, lo: 0xbb, hi: 0xbb}, + {value: 0x472b, lo: 0xbc, hi: 0xbc}, + {value: 0x4791, lo: 0xbd, hi: 0xbd}, + {value: 0x47ad, lo: 0xbe, hi: 0xbe}, + {value: 0x479f, lo: 0xbf, hi: 0xbf}, + // Block 0x99, offset 0x2bf + {value: 0x0000, lo: 0x01}, + {value: 0x47bb, lo: 0x80, hi: 0x80}, + // Block 0x9a, offset 0x2c1 + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0x82, hi: 0x84}, + // Block 0x9b, offset 0x2c3 + {value: 0x0000, lo: 0x05}, + {value: 0x8133, lo: 0x80, hi: 0x86}, + {value: 0x8133, lo: 0x88, hi: 0x98}, + {value: 0x8133, lo: 0x9b, hi: 0xa1}, + {value: 0x8133, lo: 0xa3, hi: 0xa4}, + {value: 0x8133, lo: 0xa6, hi: 0xaa}, + // Block 0x9c, offset 0x2c9 + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0x8f, hi: 0x8f}, + // Block 0x9d, offset 0x2cb + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xae, hi: 0xae}, + // Block 0x9e, offset 0x2cd + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xac, hi: 0xaf}, + // Block 0x9f, offset 0x2cf + {value: 0x0000, lo: 0x03}, + {value: 0x8134, lo: 0xac, hi: 0xad}, + {value: 0x812e, lo: 0xae, hi: 0xae}, + {value: 0x8133, lo: 0xaf, hi: 0xaf}, + // Block 0xa0, offset 0x2d3 + {value: 0x0000, lo: 0x01}, + {value: 0x812e, lo: 0x90, hi: 0x96}, + // Block 0xa1, offset 0x2d5 + {value: 0x0000, lo: 0x02}, + {value: 0x8133, lo: 0x84, hi: 0x89}, + {value: 0x8103, lo: 0x8a, hi: 0x8a}, + // Block 0xa2, offset 0x2d8 + {value: 0x0000, lo: 0x01}, + {value: 0x8100, lo: 0x93, hi: 0x93}, +} + +// lookup returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. 
The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *nfkcTrie) lookup(s []byte) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return nfkcValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := nfkcIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := nfkcIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = nfkcIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := nfkcIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = nfkcIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = nfkcIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *nfkcTrie) lookupUnsafe(s []byte) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return nfkcValues[c0] + } + i := nfkcIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = nfkcIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = nfkcIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// lookupString returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *nfkcTrie) lookupString(s string) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return nfkcValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := nfkcIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := nfkcIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = nfkcIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := nfkcIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. 
+ } + o := uint32(i)<<6 + uint32(c1) + i = nfkcIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = nfkcIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *nfkcTrie) lookupStringUnsafe(s string) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return nfkcValues[c0] + } + i := nfkcIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = nfkcIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = nfkcIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// nfkcTrie. Total size: 19260 bytes (18.81 KiB). Checksum: 1a0bbc4c8c24da49. +type nfkcTrie struct{} + +func newNfkcTrie(i int) *nfkcTrie { + return &nfkcTrie{} +} + +// lookupValue determines the type of block n and looks up the value for b. +func (t *nfkcTrie) lookupValue(n uint32, b byte) uint16 { + switch { + case n < 95: + return uint16(nfkcValues[n<<6+uint32(b)]) + default: + n -= 95 + return uint16(nfkcSparse.lookup(n, b)) + } +} + +// nfkcValues: 97 blocks, 6208 entries, 12416 bytes +// The third block is the zero block. +var nfkcValues = [6208]uint16{ + // Block 0x0, offset 0x0 + 0x3c: 0xa000, 0x3d: 0xa000, 0x3e: 0xa000, + // Block 0x1, offset 0x40 + 0x41: 0xa000, 0x42: 0xa000, 0x43: 0xa000, 0x44: 0xa000, 0x45: 0xa000, + 0x46: 0xa000, 0x47: 0xa000, 0x48: 0xa000, 0x49: 0xa000, 0x4a: 0xa000, 0x4b: 0xa000, + 0x4c: 0xa000, 0x4d: 0xa000, 0x4e: 0xa000, 0x4f: 0xa000, 0x50: 0xa000, + 0x52: 0xa000, 0x53: 0xa000, 0x54: 0xa000, 0x55: 0xa000, 0x56: 0xa000, 0x57: 0xa000, + 0x58: 0xa000, 0x59: 0xa000, 0x5a: 0xa000, + 0x61: 0xa000, 0x62: 0xa000, 0x63: 0xa000, + 0x64: 0xa000, 0x65: 0xa000, 0x66: 0xa000, 0x67: 0xa000, 0x68: 0xa000, 0x69: 0xa000, + 0x6a: 0xa000, 0x6b: 0xa000, 0x6c: 0xa000, 0x6d: 0xa000, 0x6e: 0xa000, 0x6f: 0xa000, + 0x70: 0xa000, 0x72: 0xa000, 0x73: 0xa000, 0x74: 0xa000, 0x75: 0xa000, + 0x76: 0xa000, 0x77: 0xa000, 0x78: 0xa000, 0x79: 0xa000, 0x7a: 0xa000, + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc0: 0x30b0, 0xc1: 0x30b5, 0xc2: 0x47c9, 0xc3: 0x30ba, 0xc4: 0x47d8, 0xc5: 0x47dd, + 0xc6: 0xa000, 0xc7: 0x47e7, 0xc8: 0x3123, 0xc9: 0x3128, 0xca: 0x47ec, 0xcb: 0x313c, + 0xcc: 0x31af, 0xcd: 0x31b4, 0xce: 0x31b9, 0xcf: 0x4800, 0xd1: 0x3245, + 0xd2: 0x3268, 0xd3: 0x326d, 0xd4: 0x480a, 0xd5: 0x480f, 0xd6: 0x481e, + 0xd8: 0xa000, 0xd9: 0x32f4, 0xda: 0x32f9, 0xdb: 0x32fe, 0xdc: 0x4850, 0xdd: 0x3376, + 0xe0: 0x33bc, 0xe1: 0x33c1, 0xe2: 0x485a, 0xe3: 0x33c6, + 0xe4: 0x4869, 0xe5: 0x486e, 0xe6: 0xa000, 0xe7: 0x4878, 0xe8: 0x342f, 0xe9: 0x3434, + 0xea: 0x487d, 0xeb: 0x3448, 0xec: 0x34c0, 0xed: 0x34c5, 0xee: 0x34ca, 0xef: 0x4891, + 0xf1: 0x3556, 0xf2: 0x3579, 0xf3: 0x357e, 0xf4: 0x489b, 0xf5: 0x48a0, + 0xf6: 0x48af, 0xf8: 0xa000, 0xf9: 0x360a, 0xfa: 0x360f, 0xfb: 0x3614, + 0xfc: 0x48e1, 0xfd: 0x3691, 0xff: 0x36aa, + // Block 0x4, offset 0x100 + 0x100: 0x30bf, 0x101: 0x33cb, 0x102: 0x47ce, 0x103: 0x485f, 0x104: 0x30dd, 0x105: 0x33e9, + 0x106: 0x30f1, 0x107: 0x33fd, 0x108: 0x30f6, 0x109: 0x3402, 0x10a: 0x30fb, 0x10b: 0x3407, + 0x10c: 0x3100, 0x10d: 0x340c, 0x10e: 0x310a, 0x10f: 
0x3416, + 0x112: 0x47f1, 0x113: 0x4882, 0x114: 0x3132, 0x115: 0x343e, 0x116: 0x3137, 0x117: 0x3443, + 0x118: 0x3155, 0x119: 0x3461, 0x11a: 0x3146, 0x11b: 0x3452, 0x11c: 0x316e, 0x11d: 0x347a, + 0x11e: 0x3178, 0x11f: 0x3484, 0x120: 0x317d, 0x121: 0x3489, 0x122: 0x3187, 0x123: 0x3493, + 0x124: 0x318c, 0x125: 0x3498, 0x128: 0x31be, 0x129: 0x34cf, + 0x12a: 0x31c3, 0x12b: 0x34d4, 0x12c: 0x31c8, 0x12d: 0x34d9, 0x12e: 0x31eb, 0x12f: 0x34f7, + 0x130: 0x31cd, 0x132: 0x1a8a, 0x133: 0x1b17, 0x134: 0x31f5, 0x135: 0x3501, + 0x136: 0x3209, 0x137: 0x351a, 0x139: 0x3213, 0x13a: 0x3524, 0x13b: 0x321d, + 0x13c: 0x352e, 0x13d: 0x3218, 0x13e: 0x3529, 0x13f: 0x1cdc, + // Block 0x5, offset 0x140 + 0x140: 0x1d64, 0x143: 0x3240, 0x144: 0x3551, 0x145: 0x3259, + 0x146: 0x356a, 0x147: 0x324f, 0x148: 0x3560, 0x149: 0x1d8c, + 0x14c: 0x4814, 0x14d: 0x48a5, 0x14e: 0x3272, 0x14f: 0x3583, 0x150: 0x327c, 0x151: 0x358d, + 0x154: 0x329a, 0x155: 0x35ab, 0x156: 0x32b3, 0x157: 0x35c4, + 0x158: 0x32a4, 0x159: 0x35b5, 0x15a: 0x4837, 0x15b: 0x48c8, 0x15c: 0x32bd, 0x15d: 0x35ce, + 0x15e: 0x32cc, 0x15f: 0x35dd, 0x160: 0x483c, 0x161: 0x48cd, 0x162: 0x32e5, 0x163: 0x35fb, + 0x164: 0x32d6, 0x165: 0x35ec, 0x168: 0x4846, 0x169: 0x48d7, + 0x16a: 0x484b, 0x16b: 0x48dc, 0x16c: 0x3303, 0x16d: 0x3619, 0x16e: 0x330d, 0x16f: 0x3623, + 0x170: 0x3312, 0x171: 0x3628, 0x172: 0x3330, 0x173: 0x3646, 0x174: 0x3353, 0x175: 0x3669, + 0x176: 0x337b, 0x177: 0x3696, 0x178: 0x338f, 0x179: 0x339e, 0x17a: 0x36be, 0x17b: 0x33a8, + 0x17c: 0x36c8, 0x17d: 0x33ad, 0x17e: 0x36cd, 0x17f: 0x00a7, + // Block 0x6, offset 0x180 + 0x184: 0x2f2f, 0x185: 0x2f35, + 0x186: 0x2f3b, 0x187: 0x1a9f, 0x188: 0x1aa2, 0x189: 0x1b38, 0x18a: 0x1ab7, 0x18b: 0x1aba, + 0x18c: 0x1b6e, 0x18d: 0x30c9, 0x18e: 0x33d5, 0x18f: 0x31d7, 0x190: 0x34e3, 0x191: 0x3281, + 0x192: 0x3592, 0x193: 0x3317, 0x194: 0x362d, 0x195: 0x3b10, 0x196: 0x3c9f, 0x197: 0x3b09, + 0x198: 0x3c98, 0x199: 0x3b17, 0x19a: 0x3ca6, 0x19b: 0x3b02, 0x19c: 0x3c91, + 0x19e: 0x39f1, 0x19f: 0x3b80, 0x1a0: 0x39ea, 0x1a1: 0x3b79, 0x1a2: 0x36f4, 0x1a3: 0x3706, + 0x1a6: 0x3182, 0x1a7: 0x348e, 0x1a8: 0x31ff, 0x1a9: 0x3510, + 0x1aa: 0x482d, 0x1ab: 0x48be, 0x1ac: 0x3ad1, 0x1ad: 0x3c60, 0x1ae: 0x3718, 0x1af: 0x371e, + 0x1b0: 0x3506, 0x1b1: 0x1a6f, 0x1b2: 0x1a72, 0x1b3: 0x1aff, 0x1b4: 0x3169, 0x1b5: 0x3475, + 0x1b8: 0x323b, 0x1b9: 0x354c, 0x1ba: 0x39f8, 0x1bb: 0x3b87, + 0x1bc: 0x36ee, 0x1bd: 0x3700, 0x1be: 0x36fa, 0x1bf: 0x370c, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x30ce, 0x1c1: 0x33da, 0x1c2: 0x30d3, 0x1c3: 0x33df, 0x1c4: 0x314b, 0x1c5: 0x3457, + 0x1c6: 0x3150, 0x1c7: 0x345c, 0x1c8: 0x31dc, 0x1c9: 0x34e8, 0x1ca: 0x31e1, 0x1cb: 0x34ed, + 0x1cc: 0x3286, 0x1cd: 0x3597, 0x1ce: 0x328b, 0x1cf: 0x359c, 0x1d0: 0x32a9, 0x1d1: 0x35ba, + 0x1d2: 0x32ae, 0x1d3: 0x35bf, 0x1d4: 0x331c, 0x1d5: 0x3632, 0x1d6: 0x3321, 0x1d7: 0x3637, + 0x1d8: 0x32c7, 0x1d9: 0x35d8, 0x1da: 0x32e0, 0x1db: 0x35f6, + 0x1de: 0x319b, 0x1df: 0x34a7, + 0x1e6: 0x47d3, 0x1e7: 0x4864, 0x1e8: 0x47fb, 0x1e9: 0x488c, + 0x1ea: 0x3aa0, 0x1eb: 0x3c2f, 0x1ec: 0x3a7d, 0x1ed: 0x3c0c, 0x1ee: 0x4819, 0x1ef: 0x48aa, + 0x1f0: 0x3a99, 0x1f1: 0x3c28, 0x1f2: 0x3385, 0x1f3: 0x36a0, + // Block 0x8, offset 0x200 + 0x200: 0x9933, 0x201: 0x9933, 0x202: 0x9933, 0x203: 0x9933, 0x204: 0x9933, 0x205: 0x8133, + 0x206: 0x9933, 0x207: 0x9933, 0x208: 0x9933, 0x209: 0x9933, 0x20a: 0x9933, 0x20b: 0x9933, + 0x20c: 0x9933, 0x20d: 0x8133, 0x20e: 0x8133, 0x20f: 0x9933, 0x210: 0x8133, 0x211: 0x9933, + 0x212: 0x8133, 0x213: 0x9933, 0x214: 0x9933, 0x215: 0x8134, 0x216: 0x812e, 0x217: 0x812e, + 0x218: 0x812e, 0x219: 
0x812e, 0x21a: 0x8134, 0x21b: 0x992c, 0x21c: 0x812e, 0x21d: 0x812e, + 0x21e: 0x812e, 0x21f: 0x812e, 0x220: 0x812e, 0x221: 0x812a, 0x222: 0x812a, 0x223: 0x992e, + 0x224: 0x992e, 0x225: 0x992e, 0x226: 0x992e, 0x227: 0x992a, 0x228: 0x992a, 0x229: 0x812e, + 0x22a: 0x812e, 0x22b: 0x812e, 0x22c: 0x812e, 0x22d: 0x992e, 0x22e: 0x992e, 0x22f: 0x812e, + 0x230: 0x992e, 0x231: 0x992e, 0x232: 0x812e, 0x233: 0x812e, 0x234: 0x8101, 0x235: 0x8101, + 0x236: 0x8101, 0x237: 0x8101, 0x238: 0x9901, 0x239: 0x812e, 0x23a: 0x812e, 0x23b: 0x812e, + 0x23c: 0x812e, 0x23d: 0x8133, 0x23e: 0x8133, 0x23f: 0x8133, + // Block 0x9, offset 0x240 + 0x240: 0x4aef, 0x241: 0x4af4, 0x242: 0x9933, 0x243: 0x4af9, 0x244: 0x4bb2, 0x245: 0x9937, + 0x246: 0x8133, 0x247: 0x812e, 0x248: 0x812e, 0x249: 0x812e, 0x24a: 0x8133, 0x24b: 0x8133, + 0x24c: 0x8133, 0x24d: 0x812e, 0x24e: 0x812e, 0x250: 0x8133, 0x251: 0x8133, + 0x252: 0x8133, 0x253: 0x812e, 0x254: 0x812e, 0x255: 0x812e, 0x256: 0x812e, 0x257: 0x8133, + 0x258: 0x8134, 0x259: 0x812e, 0x25a: 0x812e, 0x25b: 0x8133, 0x25c: 0x8135, 0x25d: 0x8136, + 0x25e: 0x8136, 0x25f: 0x8135, 0x260: 0x8136, 0x261: 0x8136, 0x262: 0x8135, 0x263: 0x8133, + 0x264: 0x8133, 0x265: 0x8133, 0x266: 0x8133, 0x267: 0x8133, 0x268: 0x8133, 0x269: 0x8133, + 0x26a: 0x8133, 0x26b: 0x8133, 0x26c: 0x8133, 0x26d: 0x8133, 0x26e: 0x8133, 0x26f: 0x8133, + 0x274: 0x01ee, + 0x27a: 0x43e6, + 0x27e: 0x0037, + // Block 0xa, offset 0x280 + 0x284: 0x439b, 0x285: 0x45bc, + 0x286: 0x372a, 0x287: 0x00ce, 0x288: 0x3748, 0x289: 0x3754, 0x28a: 0x3766, + 0x28c: 0x3784, 0x28e: 0x3796, 0x28f: 0x37b4, 0x290: 0x3f49, 0x291: 0xa000, + 0x295: 0xa000, 0x297: 0xa000, + 0x299: 0xa000, + 0x29f: 0xa000, 0x2a1: 0xa000, + 0x2a5: 0xa000, 0x2a9: 0xa000, + 0x2aa: 0x3778, 0x2ab: 0x37a8, 0x2ac: 0x493f, 0x2ad: 0x37d8, 0x2ae: 0x4969, 0x2af: 0x37ea, + 0x2b0: 0x3fb1, 0x2b1: 0xa000, 0x2b5: 0xa000, + 0x2b7: 0xa000, 0x2b9: 0xa000, + 0x2bf: 0xa000, + // Block 0xb, offset 0x2c0 + 0x2c1: 0xa000, 0x2c5: 0xa000, + 0x2c9: 0xa000, 0x2ca: 0x4981, 0x2cb: 0x499f, + 0x2cc: 0x3808, 0x2cd: 0x3820, 0x2ce: 0x49b7, 0x2d0: 0x0242, 0x2d1: 0x0254, + 0x2d2: 0x0230, 0x2d3: 0x444d, 0x2d4: 0x4453, 0x2d5: 0x027e, 0x2d6: 0x026c, + 0x2f0: 0x025a, 0x2f1: 0x026f, 0x2f2: 0x0272, 0x2f4: 0x020c, 0x2f5: 0x024b, + 0x2f9: 0x022a, + // Block 0xc, offset 0x300 + 0x300: 0x3862, 0x301: 0x386e, 0x303: 0x385c, + 0x306: 0xa000, 0x307: 0x384a, + 0x30c: 0x389e, 0x30d: 0x3886, 0x30e: 0x38b0, 0x310: 0xa000, + 0x313: 0xa000, 0x315: 0xa000, 0x316: 0xa000, 0x317: 0xa000, + 0x318: 0xa000, 0x319: 0x3892, 0x31a: 0xa000, + 0x31e: 0xa000, 0x323: 0xa000, + 0x327: 0xa000, + 0x32b: 0xa000, 0x32d: 0xa000, + 0x330: 0xa000, 0x333: 0xa000, 0x335: 0xa000, + 0x336: 0xa000, 0x337: 0xa000, 0x338: 0xa000, 0x339: 0x3916, 0x33a: 0xa000, + 0x33e: 0xa000, + // Block 0xd, offset 0x340 + 0x341: 0x3874, 0x342: 0x38f8, + 0x350: 0x3850, 0x351: 0x38d4, + 0x352: 0x3856, 0x353: 0x38da, 0x356: 0x3868, 0x357: 0x38ec, + 0x358: 0xa000, 0x359: 0xa000, 0x35a: 0x396a, 0x35b: 0x3970, 0x35c: 0x387a, 0x35d: 0x38fe, + 0x35e: 0x3880, 0x35f: 0x3904, 0x362: 0x388c, 0x363: 0x3910, + 0x364: 0x3898, 0x365: 0x391c, 0x366: 0x38a4, 0x367: 0x3928, 0x368: 0xa000, 0x369: 0xa000, + 0x36a: 0x3976, 0x36b: 0x397c, 0x36c: 0x38ce, 0x36d: 0x3952, 0x36e: 0x38aa, 0x36f: 0x392e, + 0x370: 0x38b6, 0x371: 0x393a, 0x372: 0x38bc, 0x373: 0x3940, 0x374: 0x38c2, 0x375: 0x3946, + 0x378: 0x38c8, 0x379: 0x394c, + // Block 0xe, offset 0x380 + 0x387: 0x1e91, + 0x391: 0x812e, + 0x392: 0x8133, 0x393: 0x8133, 0x394: 0x8133, 0x395: 0x8133, 0x396: 0x812e, 0x397: 0x8133, + 0x398: 0x8133, 
0x399: 0x8133, 0x39a: 0x812f, 0x39b: 0x812e, 0x39c: 0x8133, 0x39d: 0x8133, + 0x39e: 0x8133, 0x39f: 0x8133, 0x3a0: 0x8133, 0x3a1: 0x8133, 0x3a2: 0x812e, 0x3a3: 0x812e, + 0x3a4: 0x812e, 0x3a5: 0x812e, 0x3a6: 0x812e, 0x3a7: 0x812e, 0x3a8: 0x8133, 0x3a9: 0x8133, + 0x3aa: 0x812e, 0x3ab: 0x8133, 0x3ac: 0x8133, 0x3ad: 0x812f, 0x3ae: 0x8132, 0x3af: 0x8133, + 0x3b0: 0x8106, 0x3b1: 0x8107, 0x3b2: 0x8108, 0x3b3: 0x8109, 0x3b4: 0x810a, 0x3b5: 0x810b, + 0x3b6: 0x810c, 0x3b7: 0x810d, 0x3b8: 0x810e, 0x3b9: 0x810f, 0x3ba: 0x810f, 0x3bb: 0x8110, + 0x3bc: 0x8111, 0x3bd: 0x8112, 0x3bf: 0x8113, + // Block 0xf, offset 0x3c0 + 0x3c8: 0xa000, 0x3ca: 0xa000, 0x3cb: 0x8117, + 0x3cc: 0x8118, 0x3cd: 0x8119, 0x3ce: 0x811a, 0x3cf: 0x811b, 0x3d0: 0x811c, 0x3d1: 0x811d, + 0x3d2: 0x811e, 0x3d3: 0x9933, 0x3d4: 0x9933, 0x3d5: 0x992e, 0x3d6: 0x812e, 0x3d7: 0x8133, + 0x3d8: 0x8133, 0x3d9: 0x8133, 0x3da: 0x8133, 0x3db: 0x8133, 0x3dc: 0x812e, 0x3dd: 0x8133, + 0x3de: 0x8133, 0x3df: 0x812e, + 0x3f0: 0x811f, 0x3f5: 0x1eb4, + 0x3f6: 0x2143, 0x3f7: 0x217f, 0x3f8: 0x217a, + // Block 0x10, offset 0x400 + 0x40a: 0x8133, 0x40b: 0x8133, + 0x40c: 0x8133, 0x40d: 0x8133, 0x40e: 0x8133, 0x40f: 0x812e, 0x410: 0x812e, 0x411: 0x812e, + 0x412: 0x812e, 0x413: 0x812e, 0x414: 0x8133, 0x415: 0x8133, 0x416: 0x8133, 0x417: 0x8133, + 0x418: 0x8133, 0x419: 0x8133, 0x41a: 0x8133, 0x41b: 0x8133, 0x41c: 0x8133, 0x41d: 0x8133, + 0x41e: 0x8133, 0x41f: 0x8133, 0x420: 0x8133, 0x421: 0x8133, 0x423: 0x812e, + 0x424: 0x8133, 0x425: 0x8133, 0x426: 0x812e, 0x427: 0x8133, 0x428: 0x8133, 0x429: 0x812e, + 0x42a: 0x8133, 0x42b: 0x8133, 0x42c: 0x8133, 0x42d: 0x812e, 0x42e: 0x812e, 0x42f: 0x812e, + 0x430: 0x8117, 0x431: 0x8118, 0x432: 0x8119, 0x433: 0x8133, 0x434: 0x8133, 0x435: 0x8133, + 0x436: 0x812e, 0x437: 0x8133, 0x438: 0x8133, 0x439: 0x812e, 0x43a: 0x812e, 0x43b: 0x8133, + 0x43c: 0x8133, 0x43d: 0x8133, 0x43e: 0x8133, 0x43f: 0x8133, + // Block 0x11, offset 0x440 + 0x445: 0xa000, + 0x446: 0x2e5d, 0x447: 0xa000, 0x448: 0x2e65, 0x449: 0xa000, 0x44a: 0x2e6d, 0x44b: 0xa000, + 0x44c: 0x2e75, 0x44d: 0xa000, 0x44e: 0x2e7d, 0x451: 0xa000, + 0x452: 0x2e85, + 0x474: 0x8103, 0x475: 0x9900, + 0x47a: 0xa000, 0x47b: 0x2e8d, + 0x47c: 0xa000, 0x47d: 0x2e95, 0x47e: 0xa000, 0x47f: 0xa000, + // Block 0x12, offset 0x480 + 0x480: 0x0069, 0x481: 0x006b, 0x482: 0x006f, 0x483: 0x0083, 0x484: 0x0104, 0x485: 0x0107, + 0x486: 0x0506, 0x487: 0x0085, 0x488: 0x0089, 0x489: 0x008b, 0x48a: 0x011f, 0x48b: 0x0122, + 0x48c: 0x0125, 0x48d: 0x008f, 0x48f: 0x0097, 0x490: 0x009b, 0x491: 0x00e6, + 0x492: 0x009f, 0x493: 0x0110, 0x494: 0x050a, 0x495: 0x050e, 0x496: 0x00a1, 0x497: 0x00a9, + 0x498: 0x00ab, 0x499: 0x0516, 0x49a: 0x015b, 0x49b: 0x00ad, 0x49c: 0x051a, 0x49d: 0x0242, + 0x49e: 0x0245, 0x49f: 0x0248, 0x4a0: 0x027e, 0x4a1: 0x0281, 0x4a2: 0x0093, 0x4a3: 0x00a5, + 0x4a4: 0x00ab, 0x4a5: 0x00ad, 0x4a6: 0x0242, 0x4a7: 0x0245, 0x4a8: 0x026f, 0x4a9: 0x027e, + 0x4aa: 0x0281, + 0x4b8: 0x02b4, + // Block 0x13, offset 0x4c0 + 0x4db: 0x010a, 0x4dc: 0x0087, 0x4dd: 0x0113, + 0x4de: 0x00d7, 0x4df: 0x0125, 0x4e0: 0x008d, 0x4e1: 0x012b, 0x4e2: 0x0131, 0x4e3: 0x013d, + 0x4e4: 0x0146, 0x4e5: 0x0149, 0x4e6: 0x014c, 0x4e7: 0x051e, 0x4e8: 0x01c7, 0x4e9: 0x0155, + 0x4ea: 0x0522, 0x4eb: 0x01ca, 0x4ec: 0x0161, 0x4ed: 0x015e, 0x4ee: 0x0164, 0x4ef: 0x0167, + 0x4f0: 0x016a, 0x4f1: 0x016d, 0x4f2: 0x0176, 0x4f3: 0x018e, 0x4f4: 0x0191, 0x4f5: 0x00f2, + 0x4f6: 0x019a, 0x4f7: 0x019d, 0x4f8: 0x0512, 0x4f9: 0x01a0, 0x4fa: 0x01a3, 0x4fb: 0x00b5, + 0x4fc: 0x01af, 0x4fd: 0x01b2, 0x4fe: 0x01b5, 0x4ff: 0x0254, + // Block 0x14, offset 
0x500 + 0x500: 0x8133, 0x501: 0x8133, 0x502: 0x812e, 0x503: 0x8133, 0x504: 0x8133, 0x505: 0x8133, + 0x506: 0x8133, 0x507: 0x8133, 0x508: 0x8133, 0x509: 0x8133, 0x50a: 0x812e, 0x50b: 0x8133, + 0x50c: 0x8133, 0x50d: 0x8136, 0x50e: 0x812b, 0x50f: 0x812e, 0x510: 0x812a, 0x511: 0x8133, + 0x512: 0x8133, 0x513: 0x8133, 0x514: 0x8133, 0x515: 0x8133, 0x516: 0x8133, 0x517: 0x8133, + 0x518: 0x8133, 0x519: 0x8133, 0x51a: 0x8133, 0x51b: 0x8133, 0x51c: 0x8133, 0x51d: 0x8133, + 0x51e: 0x8133, 0x51f: 0x8133, 0x520: 0x8133, 0x521: 0x8133, 0x522: 0x8133, 0x523: 0x8133, + 0x524: 0x8133, 0x525: 0x8133, 0x526: 0x8133, 0x527: 0x8133, 0x528: 0x8133, 0x529: 0x8133, + 0x52a: 0x8133, 0x52b: 0x8133, 0x52c: 0x8133, 0x52d: 0x8133, 0x52e: 0x8133, 0x52f: 0x8133, + 0x530: 0x8133, 0x531: 0x8133, 0x532: 0x8133, 0x533: 0x8133, 0x534: 0x8133, 0x535: 0x8133, + 0x536: 0x8134, 0x537: 0x8132, 0x538: 0x8132, 0x539: 0x812e, 0x53a: 0x812d, 0x53b: 0x8133, + 0x53c: 0x8135, 0x53d: 0x812e, 0x53e: 0x8133, 0x53f: 0x812e, + // Block 0x15, offset 0x540 + 0x540: 0x30d8, 0x541: 0x33e4, 0x542: 0x30e2, 0x543: 0x33ee, 0x544: 0x30e7, 0x545: 0x33f3, + 0x546: 0x30ec, 0x547: 0x33f8, 0x548: 0x3a0d, 0x549: 0x3b9c, 0x54a: 0x3105, 0x54b: 0x3411, + 0x54c: 0x310f, 0x54d: 0x341b, 0x54e: 0x311e, 0x54f: 0x342a, 0x550: 0x3114, 0x551: 0x3420, + 0x552: 0x3119, 0x553: 0x3425, 0x554: 0x3a30, 0x555: 0x3bbf, 0x556: 0x3a37, 0x557: 0x3bc6, + 0x558: 0x315a, 0x559: 0x3466, 0x55a: 0x315f, 0x55b: 0x346b, 0x55c: 0x3a45, 0x55d: 0x3bd4, + 0x55e: 0x3164, 0x55f: 0x3470, 0x560: 0x3173, 0x561: 0x347f, 0x562: 0x3191, 0x563: 0x349d, + 0x564: 0x31a0, 0x565: 0x34ac, 0x566: 0x3196, 0x567: 0x34a2, 0x568: 0x31a5, 0x569: 0x34b1, + 0x56a: 0x31aa, 0x56b: 0x34b6, 0x56c: 0x31f0, 0x56d: 0x34fc, 0x56e: 0x3a4c, 0x56f: 0x3bdb, + 0x570: 0x31fa, 0x571: 0x350b, 0x572: 0x3204, 0x573: 0x3515, 0x574: 0x320e, 0x575: 0x351f, + 0x576: 0x4805, 0x577: 0x4896, 0x578: 0x3a53, 0x579: 0x3be2, 0x57a: 0x3227, 0x57b: 0x3538, + 0x57c: 0x3222, 0x57d: 0x3533, 0x57e: 0x322c, 0x57f: 0x353d, + // Block 0x16, offset 0x580 + 0x580: 0x3231, 0x581: 0x3542, 0x582: 0x3236, 0x583: 0x3547, 0x584: 0x324a, 0x585: 0x355b, + 0x586: 0x3254, 0x587: 0x3565, 0x588: 0x3263, 0x589: 0x3574, 0x58a: 0x325e, 0x58b: 0x356f, + 0x58c: 0x3a76, 0x58d: 0x3c05, 0x58e: 0x3a84, 0x58f: 0x3c13, 0x590: 0x3a8b, 0x591: 0x3c1a, + 0x592: 0x3a92, 0x593: 0x3c21, 0x594: 0x3290, 0x595: 0x35a1, 0x596: 0x3295, 0x597: 0x35a6, + 0x598: 0x329f, 0x599: 0x35b0, 0x59a: 0x4832, 0x59b: 0x48c3, 0x59c: 0x3ad8, 0x59d: 0x3c67, + 0x59e: 0x32b8, 0x59f: 0x35c9, 0x5a0: 0x32c2, 0x5a1: 0x35d3, 0x5a2: 0x4841, 0x5a3: 0x48d2, + 0x5a4: 0x3adf, 0x5a5: 0x3c6e, 0x5a6: 0x3ae6, 0x5a7: 0x3c75, 0x5a8: 0x3aed, 0x5a9: 0x3c7c, + 0x5aa: 0x32d1, 0x5ab: 0x35e2, 0x5ac: 0x32db, 0x5ad: 0x35f1, 0x5ae: 0x32ef, 0x5af: 0x3605, + 0x5b0: 0x32ea, 0x5b1: 0x3600, 0x5b2: 0x332b, 0x5b3: 0x3641, 0x5b4: 0x333a, 0x5b5: 0x3650, + 0x5b6: 0x3335, 0x5b7: 0x364b, 0x5b8: 0x3af4, 0x5b9: 0x3c83, 0x5ba: 0x3afb, 0x5bb: 0x3c8a, + 0x5bc: 0x333f, 0x5bd: 0x3655, 0x5be: 0x3344, 0x5bf: 0x365a, + // Block 0x17, offset 0x5c0 + 0x5c0: 0x3349, 0x5c1: 0x365f, 0x5c2: 0x334e, 0x5c3: 0x3664, 0x5c4: 0x335d, 0x5c5: 0x3673, + 0x5c6: 0x3358, 0x5c7: 0x366e, 0x5c8: 0x3362, 0x5c9: 0x367d, 0x5ca: 0x3367, 0x5cb: 0x3682, + 0x5cc: 0x336c, 0x5cd: 0x3687, 0x5ce: 0x338a, 0x5cf: 0x36a5, 0x5d0: 0x33a3, 0x5d1: 0x36c3, + 0x5d2: 0x33b2, 0x5d3: 0x36d2, 0x5d4: 0x33b7, 0x5d5: 0x36d7, 0x5d6: 0x34bb, 0x5d7: 0x35e7, + 0x5d8: 0x3678, 0x5d9: 0x36b4, 0x5da: 0x1d10, 0x5db: 0x4418, + 0x5e0: 0x47e2, 0x5e1: 0x4873, 0x5e2: 0x30c4, 0x5e3: 0x33d0, + 0x5e4: 0x39b9, 
0x5e5: 0x3b48, 0x5e6: 0x39b2, 0x5e7: 0x3b41, 0x5e8: 0x39c7, 0x5e9: 0x3b56, + 0x5ea: 0x39c0, 0x5eb: 0x3b4f, 0x5ec: 0x39ff, 0x5ed: 0x3b8e, 0x5ee: 0x39d5, 0x5ef: 0x3b64, + 0x5f0: 0x39ce, 0x5f1: 0x3b5d, 0x5f2: 0x39e3, 0x5f3: 0x3b72, 0x5f4: 0x39dc, 0x5f5: 0x3b6b, + 0x5f6: 0x3a06, 0x5f7: 0x3b95, 0x5f8: 0x47f6, 0x5f9: 0x4887, 0x5fa: 0x3141, 0x5fb: 0x344d, + 0x5fc: 0x312d, 0x5fd: 0x3439, 0x5fe: 0x3a1b, 0x5ff: 0x3baa, + // Block 0x18, offset 0x600 + 0x600: 0x3a14, 0x601: 0x3ba3, 0x602: 0x3a29, 0x603: 0x3bb8, 0x604: 0x3a22, 0x605: 0x3bb1, + 0x606: 0x3a3e, 0x607: 0x3bcd, 0x608: 0x31d2, 0x609: 0x34de, 0x60a: 0x31e6, 0x60b: 0x34f2, + 0x60c: 0x4828, 0x60d: 0x48b9, 0x60e: 0x3277, 0x60f: 0x3588, 0x610: 0x3a61, 0x611: 0x3bf0, + 0x612: 0x3a5a, 0x613: 0x3be9, 0x614: 0x3a6f, 0x615: 0x3bfe, 0x616: 0x3a68, 0x617: 0x3bf7, + 0x618: 0x3aca, 0x619: 0x3c59, 0x61a: 0x3aae, 0x61b: 0x3c3d, 0x61c: 0x3aa7, 0x61d: 0x3c36, + 0x61e: 0x3abc, 0x61f: 0x3c4b, 0x620: 0x3ab5, 0x621: 0x3c44, 0x622: 0x3ac3, 0x623: 0x3c52, + 0x624: 0x3326, 0x625: 0x363c, 0x626: 0x3308, 0x627: 0x361e, 0x628: 0x3b25, 0x629: 0x3cb4, + 0x62a: 0x3b1e, 0x62b: 0x3cad, 0x62c: 0x3b33, 0x62d: 0x3cc2, 0x62e: 0x3b2c, 0x62f: 0x3cbb, + 0x630: 0x3b3a, 0x631: 0x3cc9, 0x632: 0x3371, 0x633: 0x368c, 0x634: 0x3399, 0x635: 0x36b9, + 0x636: 0x3394, 0x637: 0x36af, 0x638: 0x3380, 0x639: 0x369b, + // Block 0x19, offset 0x640 + 0x640: 0x4945, 0x641: 0x494b, 0x642: 0x4a5f, 0x643: 0x4a77, 0x644: 0x4a67, 0x645: 0x4a7f, + 0x646: 0x4a6f, 0x647: 0x4a87, 0x648: 0x48eb, 0x649: 0x48f1, 0x64a: 0x49cf, 0x64b: 0x49e7, + 0x64c: 0x49d7, 0x64d: 0x49ef, 0x64e: 0x49df, 0x64f: 0x49f7, 0x650: 0x4957, 0x651: 0x495d, + 0x652: 0x3ef9, 0x653: 0x3f09, 0x654: 0x3f01, 0x655: 0x3f11, + 0x658: 0x48f7, 0x659: 0x48fd, 0x65a: 0x3e29, 0x65b: 0x3e39, 0x65c: 0x3e31, 0x65d: 0x3e41, + 0x660: 0x496f, 0x661: 0x4975, 0x662: 0x4a8f, 0x663: 0x4aa7, + 0x664: 0x4a97, 0x665: 0x4aaf, 0x666: 0x4a9f, 0x667: 0x4ab7, 0x668: 0x4903, 0x669: 0x4909, + 0x66a: 0x49ff, 0x66b: 0x4a17, 0x66c: 0x4a07, 0x66d: 0x4a1f, 0x66e: 0x4a0f, 0x66f: 0x4a27, + 0x670: 0x4987, 0x671: 0x498d, 0x672: 0x3f59, 0x673: 0x3f71, 0x674: 0x3f61, 0x675: 0x3f79, + 0x676: 0x3f69, 0x677: 0x3f81, 0x678: 0x490f, 0x679: 0x4915, 0x67a: 0x3e59, 0x67b: 0x3e71, + 0x67c: 0x3e61, 0x67d: 0x3e79, 0x67e: 0x3e69, 0x67f: 0x3e81, + // Block 0x1a, offset 0x680 + 0x680: 0x4993, 0x681: 0x4999, 0x682: 0x3f89, 0x683: 0x3f99, 0x684: 0x3f91, 0x685: 0x3fa1, + 0x688: 0x491b, 0x689: 0x4921, 0x68a: 0x3e89, 0x68b: 0x3e99, + 0x68c: 0x3e91, 0x68d: 0x3ea1, 0x690: 0x49a5, 0x691: 0x49ab, + 0x692: 0x3fc1, 0x693: 0x3fd9, 0x694: 0x3fc9, 0x695: 0x3fe1, 0x696: 0x3fd1, 0x697: 0x3fe9, + 0x699: 0x4927, 0x69b: 0x3ea9, 0x69d: 0x3eb1, + 0x69f: 0x3eb9, 0x6a0: 0x49bd, 0x6a1: 0x49c3, 0x6a2: 0x4abf, 0x6a3: 0x4ad7, + 0x6a4: 0x4ac7, 0x6a5: 0x4adf, 0x6a6: 0x4acf, 0x6a7: 0x4ae7, 0x6a8: 0x492d, 0x6a9: 0x4933, + 0x6aa: 0x4a2f, 0x6ab: 0x4a47, 0x6ac: 0x4a37, 0x6ad: 0x4a4f, 0x6ae: 0x4a3f, 0x6af: 0x4a57, + 0x6b0: 0x4939, 0x6b1: 0x445f, 0x6b2: 0x37d2, 0x6b3: 0x4465, 0x6b4: 0x4963, 0x6b5: 0x446b, + 0x6b6: 0x37e4, 0x6b7: 0x4471, 0x6b8: 0x3802, 0x6b9: 0x4477, 0x6ba: 0x381a, 0x6bb: 0x447d, + 0x6bc: 0x49b1, 0x6bd: 0x4483, + // Block 0x1b, offset 0x6c0 + 0x6c0: 0x3ee1, 0x6c1: 0x3ee9, 0x6c2: 0x42c5, 0x6c3: 0x42e3, 0x6c4: 0x42cf, 0x6c5: 0x42ed, + 0x6c6: 0x42d9, 0x6c7: 0x42f7, 0x6c8: 0x3e19, 0x6c9: 0x3e21, 0x6ca: 0x4211, 0x6cb: 0x422f, + 0x6cc: 0x421b, 0x6cd: 0x4239, 0x6ce: 0x4225, 0x6cf: 0x4243, 0x6d0: 0x3f29, 0x6d1: 0x3f31, + 0x6d2: 0x4301, 0x6d3: 0x431f, 0x6d4: 0x430b, 0x6d5: 0x4329, 0x6d6: 0x4315, 0x6d7: 0x4333, + 0x6d8: 
0x3e49, 0x6d9: 0x3e51, 0x6da: 0x424d, 0x6db: 0x426b, 0x6dc: 0x4257, 0x6dd: 0x4275, + 0x6de: 0x4261, 0x6df: 0x427f, 0x6e0: 0x4001, 0x6e1: 0x4009, 0x6e2: 0x433d, 0x6e3: 0x435b, + 0x6e4: 0x4347, 0x6e5: 0x4365, 0x6e6: 0x4351, 0x6e7: 0x436f, 0x6e8: 0x3ec1, 0x6e9: 0x3ec9, + 0x6ea: 0x4289, 0x6eb: 0x42a7, 0x6ec: 0x4293, 0x6ed: 0x42b1, 0x6ee: 0x429d, 0x6ef: 0x42bb, + 0x6f0: 0x37c6, 0x6f1: 0x37c0, 0x6f2: 0x3ed1, 0x6f3: 0x37cc, 0x6f4: 0x3ed9, + 0x6f6: 0x4951, 0x6f7: 0x3ef1, 0x6f8: 0x3736, 0x6f9: 0x3730, 0x6fa: 0x3724, 0x6fb: 0x442f, + 0x6fc: 0x373c, 0x6fd: 0x43c8, 0x6fe: 0x0257, 0x6ff: 0x43c8, + // Block 0x1c, offset 0x700 + 0x700: 0x43e1, 0x701: 0x45c3, 0x702: 0x3f19, 0x703: 0x37de, 0x704: 0x3f21, + 0x706: 0x497b, 0x707: 0x3f39, 0x708: 0x3742, 0x709: 0x4435, 0x70a: 0x374e, 0x70b: 0x443b, + 0x70c: 0x375a, 0x70d: 0x45ca, 0x70e: 0x45d1, 0x70f: 0x45d8, 0x710: 0x37f6, 0x711: 0x37f0, + 0x712: 0x3f41, 0x713: 0x4625, 0x716: 0x37fc, 0x717: 0x3f51, + 0x718: 0x3772, 0x719: 0x376c, 0x71a: 0x3760, 0x71b: 0x4441, 0x71d: 0x45df, + 0x71e: 0x45e6, 0x71f: 0x45ed, 0x720: 0x382c, 0x721: 0x3826, 0x722: 0x3fa9, 0x723: 0x462d, + 0x724: 0x380e, 0x725: 0x3814, 0x726: 0x3832, 0x727: 0x3fb9, 0x728: 0x37a2, 0x729: 0x379c, + 0x72a: 0x3790, 0x72b: 0x444d, 0x72c: 0x378a, 0x72d: 0x45b5, 0x72e: 0x45bc, 0x72f: 0x0081, + 0x732: 0x3ff1, 0x733: 0x3838, 0x734: 0x3ff9, + 0x736: 0x49c9, 0x737: 0x4011, 0x738: 0x377e, 0x739: 0x4447, 0x73a: 0x37ae, 0x73b: 0x4459, + 0x73c: 0x37ba, 0x73d: 0x439b, 0x73e: 0x43cd, + // Block 0x1d, offset 0x740 + 0x740: 0x1d08, 0x741: 0x1d0c, 0x742: 0x0047, 0x743: 0x1d84, 0x745: 0x1d18, + 0x746: 0x1d1c, 0x747: 0x00ef, 0x749: 0x1d88, 0x74a: 0x008f, 0x74b: 0x0051, + 0x74c: 0x0051, 0x74d: 0x0051, 0x74e: 0x0091, 0x74f: 0x00e0, 0x750: 0x0053, 0x751: 0x0053, + 0x752: 0x0059, 0x753: 0x0099, 0x755: 0x005d, 0x756: 0x1abd, + 0x759: 0x0061, 0x75a: 0x0063, 0x75b: 0x0065, 0x75c: 0x0065, 0x75d: 0x0065, + 0x760: 0x1acf, 0x761: 0x1cf8, 0x762: 0x1ad8, + 0x764: 0x0075, 0x766: 0x023c, 0x768: 0x0075, + 0x76a: 0x0057, 0x76b: 0x4413, 0x76c: 0x0045, 0x76d: 0x0047, 0x76f: 0x008b, + 0x770: 0x004b, 0x771: 0x004d, 0x773: 0x005b, 0x774: 0x009f, 0x775: 0x0308, + 0x776: 0x030b, 0x777: 0x030e, 0x778: 0x0311, 0x779: 0x0093, 0x77b: 0x1cc8, + 0x77c: 0x026c, 0x77d: 0x0245, 0x77e: 0x01fd, 0x77f: 0x0224, + // Block 0x1e, offset 0x780 + 0x780: 0x055a, 0x785: 0x0049, + 0x786: 0x0089, 0x787: 0x008b, 0x788: 0x0093, 0x789: 0x0095, + 0x790: 0x235e, 0x791: 0x236a, + 0x792: 0x241e, 0x793: 0x2346, 0x794: 0x23ca, 0x795: 0x2352, 0x796: 0x23d0, 0x797: 0x23e8, + 0x798: 0x23f4, 0x799: 0x2358, 0x79a: 0x23fa, 0x79b: 0x2364, 0x79c: 0x23ee, 0x79d: 0x2400, + 0x79e: 0x2406, 0x79f: 0x1dec, 0x7a0: 0x0053, 0x7a1: 0x1a87, 0x7a2: 0x1cd4, 0x7a3: 0x1a90, + 0x7a4: 0x006d, 0x7a5: 0x1adb, 0x7a6: 0x1d00, 0x7a7: 0x1e78, 0x7a8: 0x1a93, 0x7a9: 0x0071, + 0x7aa: 0x1ae7, 0x7ab: 0x1d04, 0x7ac: 0x0059, 0x7ad: 0x0047, 0x7ae: 0x0049, 0x7af: 0x005b, + 0x7b0: 0x0093, 0x7b1: 0x1b14, 0x7b2: 0x1d48, 0x7b3: 0x1b1d, 0x7b4: 0x00ad, 0x7b5: 0x1b92, + 0x7b6: 0x1d7c, 0x7b7: 0x1e8c, 0x7b8: 0x1b20, 0x7b9: 0x00b1, 0x7ba: 0x1b95, 0x7bb: 0x1d80, + 0x7bc: 0x0099, 0x7bd: 0x0087, 0x7be: 0x0089, 0x7bf: 0x009b, + // Block 0x1f, offset 0x7c0 + 0x7c1: 0x3d47, 0x7c3: 0xa000, 0x7c4: 0x3d4e, 0x7c5: 0xa000, + 0x7c7: 0x3d55, 0x7c8: 0xa000, 0x7c9: 0x3d5c, + 0x7cd: 0xa000, + 0x7e0: 0x30a6, 0x7e1: 0xa000, 0x7e2: 0x3d6a, + 0x7e4: 0xa000, 0x7e5: 0xa000, + 0x7ed: 0x3d63, 0x7ee: 0x30a1, 0x7ef: 0x30ab, + 0x7f0: 0x3d71, 0x7f1: 0x3d78, 0x7f2: 0xa000, 0x7f3: 0xa000, 0x7f4: 0x3d7f, 0x7f5: 0x3d86, + 0x7f6: 0xa000, 0x7f7: 0xa000, 
0x7f8: 0x3d8d, 0x7f9: 0x3d94, 0x7fa: 0xa000, 0x7fb: 0xa000, + 0x7fc: 0xa000, 0x7fd: 0xa000, + // Block 0x20, offset 0x800 + 0x800: 0x3d9b, 0x801: 0x3da2, 0x802: 0xa000, 0x803: 0xa000, 0x804: 0x3db7, 0x805: 0x3dbe, + 0x806: 0xa000, 0x807: 0xa000, 0x808: 0x3dc5, 0x809: 0x3dcc, + 0x811: 0xa000, + 0x812: 0xa000, + 0x822: 0xa000, + 0x828: 0xa000, 0x829: 0xa000, + 0x82b: 0xa000, 0x82c: 0x3de1, 0x82d: 0x3de8, 0x82e: 0x3def, 0x82f: 0x3df6, + 0x832: 0xa000, 0x833: 0xa000, 0x834: 0xa000, 0x835: 0xa000, + // Block 0x21, offset 0x840 + 0x860: 0x0023, 0x861: 0x0025, 0x862: 0x0027, 0x863: 0x0029, + 0x864: 0x002b, 0x865: 0x002d, 0x866: 0x002f, 0x867: 0x0031, 0x868: 0x0033, 0x869: 0x19af, + 0x86a: 0x19b2, 0x86b: 0x19b5, 0x86c: 0x19b8, 0x86d: 0x19bb, 0x86e: 0x19be, 0x86f: 0x19c1, + 0x870: 0x19c4, 0x871: 0x19c7, 0x872: 0x19ca, 0x873: 0x19d3, 0x874: 0x1b98, 0x875: 0x1b9c, + 0x876: 0x1ba0, 0x877: 0x1ba4, 0x878: 0x1ba8, 0x879: 0x1bac, 0x87a: 0x1bb0, 0x87b: 0x1bb4, + 0x87c: 0x1bb8, 0x87d: 0x1db0, 0x87e: 0x1db5, 0x87f: 0x1dba, + // Block 0x22, offset 0x880 + 0x880: 0x1dbf, 0x881: 0x1dc4, 0x882: 0x1dc9, 0x883: 0x1dce, 0x884: 0x1dd3, 0x885: 0x1dd8, + 0x886: 0x1ddd, 0x887: 0x1de2, 0x888: 0x19ac, 0x889: 0x19d0, 0x88a: 0x19f4, 0x88b: 0x1a18, + 0x88c: 0x1a3c, 0x88d: 0x1a45, 0x88e: 0x1a4b, 0x88f: 0x1a51, 0x890: 0x1a57, 0x891: 0x1c90, + 0x892: 0x1c94, 0x893: 0x1c98, 0x894: 0x1c9c, 0x895: 0x1ca0, 0x896: 0x1ca4, 0x897: 0x1ca8, + 0x898: 0x1cac, 0x899: 0x1cb0, 0x89a: 0x1cb4, 0x89b: 0x1cb8, 0x89c: 0x1c24, 0x89d: 0x1c28, + 0x89e: 0x1c2c, 0x89f: 0x1c30, 0x8a0: 0x1c34, 0x8a1: 0x1c38, 0x8a2: 0x1c3c, 0x8a3: 0x1c40, + 0x8a4: 0x1c44, 0x8a5: 0x1c48, 0x8a6: 0x1c4c, 0x8a7: 0x1c50, 0x8a8: 0x1c54, 0x8a9: 0x1c58, + 0x8aa: 0x1c5c, 0x8ab: 0x1c60, 0x8ac: 0x1c64, 0x8ad: 0x1c68, 0x8ae: 0x1c6c, 0x8af: 0x1c70, + 0x8b0: 0x1c74, 0x8b1: 0x1c78, 0x8b2: 0x1c7c, 0x8b3: 0x1c80, 0x8b4: 0x1c84, 0x8b5: 0x1c88, + 0x8b6: 0x0043, 0x8b7: 0x0045, 0x8b8: 0x0047, 0x8b9: 0x0049, 0x8ba: 0x004b, 0x8bb: 0x004d, + 0x8bc: 0x004f, 0x8bd: 0x0051, 0x8be: 0x0053, 0x8bf: 0x0055, + // Block 0x23, offset 0x8c0 + 0x8c0: 0x07ba, 0x8c1: 0x07de, 0x8c2: 0x07ea, 0x8c3: 0x07fa, 0x8c4: 0x0802, 0x8c5: 0x080e, + 0x8c6: 0x0816, 0x8c7: 0x081e, 0x8c8: 0x082a, 0x8c9: 0x087e, 0x8ca: 0x0896, 0x8cb: 0x08a6, + 0x8cc: 0x08b6, 0x8cd: 0x08c6, 0x8ce: 0x08d6, 0x8cf: 0x08f6, 0x8d0: 0x08fa, 0x8d1: 0x08fe, + 0x8d2: 0x0932, 0x8d3: 0x095a, 0x8d4: 0x096a, 0x8d5: 0x0972, 0x8d6: 0x0976, 0x8d7: 0x0982, + 0x8d8: 0x099e, 0x8d9: 0x09a2, 0x8da: 0x09ba, 0x8db: 0x09be, 0x8dc: 0x09c6, 0x8dd: 0x09d6, + 0x8de: 0x0a72, 0x8df: 0x0a86, 0x8e0: 0x0ac6, 0x8e1: 0x0ada, 0x8e2: 0x0ae2, 0x8e3: 0x0ae6, + 0x8e4: 0x0af6, 0x8e5: 0x0b12, 0x8e6: 0x0b3e, 0x8e7: 0x0b4a, 0x8e8: 0x0b6a, 0x8e9: 0x0b76, + 0x8ea: 0x0b7a, 0x8eb: 0x0b7e, 0x8ec: 0x0b96, 0x8ed: 0x0b9a, 0x8ee: 0x0bc6, 0x8ef: 0x0bd2, + 0x8f0: 0x0bda, 0x8f1: 0x0be2, 0x8f2: 0x0bf2, 0x8f3: 0x0bfa, 0x8f4: 0x0c02, 0x8f5: 0x0c2e, + 0x8f6: 0x0c32, 0x8f7: 0x0c3a, 0x8f8: 0x0c3e, 0x8f9: 0x0c46, 0x8fa: 0x0c4e, 0x8fb: 0x0c5e, + 0x8fc: 0x0c7a, 0x8fd: 0x0cf2, 0x8fe: 0x0d06, 0x8ff: 0x0d0a, + // Block 0x24, offset 0x900 + 0x900: 0x0d8a, 0x901: 0x0d8e, 0x902: 0x0da2, 0x903: 0x0da6, 0x904: 0x0dae, 0x905: 0x0db6, + 0x906: 0x0dbe, 0x907: 0x0dca, 0x908: 0x0df2, 0x909: 0x0e02, 0x90a: 0x0e16, 0x90b: 0x0e86, + 0x90c: 0x0e92, 0x90d: 0x0ea2, 0x90e: 0x0eae, 0x90f: 0x0eba, 0x910: 0x0ec2, 0x911: 0x0ec6, + 0x912: 0x0eca, 0x913: 0x0ece, 0x914: 0x0ed2, 0x915: 0x0f8a, 0x916: 0x0fd2, 0x917: 0x0fde, + 0x918: 0x0fe2, 0x919: 0x0fe6, 0x91a: 0x0fea, 0x91b: 0x0ff2, 0x91c: 0x0ff6, 0x91d: 0x100a, + 0x91e: 0x1026, 
0x91f: 0x102e, 0x920: 0x106e, 0x921: 0x1072, 0x922: 0x107a, 0x923: 0x107e, + 0x924: 0x1086, 0x925: 0x108a, 0x926: 0x10ae, 0x927: 0x10b2, 0x928: 0x10ce, 0x929: 0x10d2, + 0x92a: 0x10d6, 0x92b: 0x10da, 0x92c: 0x10ee, 0x92d: 0x1112, 0x92e: 0x1116, 0x92f: 0x111a, + 0x930: 0x113e, 0x931: 0x117e, 0x932: 0x1182, 0x933: 0x11a2, 0x934: 0x11b2, 0x935: 0x11ba, + 0x936: 0x11da, 0x937: 0x11fe, 0x938: 0x1242, 0x939: 0x124a, 0x93a: 0x125e, 0x93b: 0x126a, + 0x93c: 0x1272, 0x93d: 0x127a, 0x93e: 0x127e, 0x93f: 0x1282, + // Block 0x25, offset 0x940 + 0x940: 0x129a, 0x941: 0x129e, 0x942: 0x12ba, 0x943: 0x12c2, 0x944: 0x12ca, 0x945: 0x12ce, + 0x946: 0x12da, 0x947: 0x12e2, 0x948: 0x12e6, 0x949: 0x12ea, 0x94a: 0x12f2, 0x94b: 0x12f6, + 0x94c: 0x1396, 0x94d: 0x13aa, 0x94e: 0x13de, 0x94f: 0x13e2, 0x950: 0x13ea, 0x951: 0x1416, + 0x952: 0x141e, 0x953: 0x1426, 0x954: 0x142e, 0x955: 0x146a, 0x956: 0x146e, 0x957: 0x1476, + 0x958: 0x147a, 0x959: 0x147e, 0x95a: 0x14aa, 0x95b: 0x14ae, 0x95c: 0x14b6, 0x95d: 0x14ca, + 0x95e: 0x14ce, 0x95f: 0x14ea, 0x960: 0x14f2, 0x961: 0x14f6, 0x962: 0x151a, 0x963: 0x153a, + 0x964: 0x154e, 0x965: 0x1552, 0x966: 0x155a, 0x967: 0x1586, 0x968: 0x158a, 0x969: 0x159a, + 0x96a: 0x15be, 0x96b: 0x15ca, 0x96c: 0x15da, 0x96d: 0x15f2, 0x96e: 0x15fa, 0x96f: 0x15fe, + 0x970: 0x1602, 0x971: 0x1606, 0x972: 0x1612, 0x973: 0x1616, 0x974: 0x161e, 0x975: 0x163a, + 0x976: 0x163e, 0x977: 0x1642, 0x978: 0x165a, 0x979: 0x165e, 0x97a: 0x1666, 0x97b: 0x167a, + 0x97c: 0x167e, 0x97d: 0x1682, 0x97e: 0x168a, 0x97f: 0x168e, + // Block 0x26, offset 0x980 + 0x986: 0xa000, 0x98b: 0xa000, + 0x98c: 0x4049, 0x98d: 0xa000, 0x98e: 0x4051, 0x98f: 0xa000, 0x990: 0x4059, 0x991: 0xa000, + 0x992: 0x4061, 0x993: 0xa000, 0x994: 0x4069, 0x995: 0xa000, 0x996: 0x4071, 0x997: 0xa000, + 0x998: 0x4079, 0x999: 0xa000, 0x99a: 0x4081, 0x99b: 0xa000, 0x99c: 0x4089, 0x99d: 0xa000, + 0x99e: 0x4091, 0x99f: 0xa000, 0x9a0: 0x4099, 0x9a1: 0xa000, 0x9a2: 0x40a1, + 0x9a4: 0xa000, 0x9a5: 0x40a9, 0x9a6: 0xa000, 0x9a7: 0x40b1, 0x9a8: 0xa000, 0x9a9: 0x40b9, + 0x9af: 0xa000, + 0x9b0: 0x40c1, 0x9b1: 0x40c9, 0x9b2: 0xa000, 0x9b3: 0x40d1, 0x9b4: 0x40d9, 0x9b5: 0xa000, + 0x9b6: 0x40e1, 0x9b7: 0x40e9, 0x9b8: 0xa000, 0x9b9: 0x40f1, 0x9ba: 0x40f9, 0x9bb: 0xa000, + 0x9bc: 0x4101, 0x9bd: 0x4109, + // Block 0x27, offset 0x9c0 + 0x9d4: 0x4041, + 0x9d9: 0x9904, 0x9da: 0x9904, 0x9db: 0x441d, 0x9dc: 0x4423, 0x9dd: 0xa000, + 0x9de: 0x4111, 0x9df: 0x27e4, + 0x9e6: 0xa000, + 0x9eb: 0xa000, 0x9ec: 0x4121, 0x9ed: 0xa000, 0x9ee: 0x4129, 0x9ef: 0xa000, + 0x9f0: 0x4131, 0x9f1: 0xa000, 0x9f2: 0x4139, 0x9f3: 0xa000, 0x9f4: 0x4141, 0x9f5: 0xa000, + 0x9f6: 0x4149, 0x9f7: 0xa000, 0x9f8: 0x4151, 0x9f9: 0xa000, 0x9fa: 0x4159, 0x9fb: 0xa000, + 0x9fc: 0x4161, 0x9fd: 0xa000, 0x9fe: 0x4169, 0x9ff: 0xa000, + // Block 0x28, offset 0xa00 + 0xa00: 0x4171, 0xa01: 0xa000, 0xa02: 0x4179, 0xa04: 0xa000, 0xa05: 0x4181, + 0xa06: 0xa000, 0xa07: 0x4189, 0xa08: 0xa000, 0xa09: 0x4191, + 0xa0f: 0xa000, 0xa10: 0x4199, 0xa11: 0x41a1, + 0xa12: 0xa000, 0xa13: 0x41a9, 0xa14: 0x41b1, 0xa15: 0xa000, 0xa16: 0x41b9, 0xa17: 0x41c1, + 0xa18: 0xa000, 0xa19: 0x41c9, 0xa1a: 0x41d1, 0xa1b: 0xa000, 0xa1c: 0x41d9, 0xa1d: 0x41e1, + 0xa2f: 0xa000, + 0xa30: 0xa000, 0xa31: 0xa000, 0xa32: 0xa000, 0xa34: 0x4119, + 0xa37: 0x41e9, 0xa38: 0x41f1, 0xa39: 0x41f9, 0xa3a: 0x4201, + 0xa3d: 0xa000, 0xa3e: 0x4209, 0xa3f: 0x27f9, + // Block 0x29, offset 0xa40 + 0xa40: 0x045a, 0xa41: 0x041e, 0xa42: 0x0422, 0xa43: 0x0426, 0xa44: 0x046e, 0xa45: 0x042a, + 0xa46: 0x042e, 0xa47: 0x0432, 0xa48: 0x0436, 0xa49: 0x043a, 0xa4a: 0x043e, 0xa4b: 0x0442, 
+ 0xa4c: 0x0446, 0xa4d: 0x044a, 0xa4e: 0x044e, 0xa4f: 0x4afe, 0xa50: 0x4b04, 0xa51: 0x4b0a, + 0xa52: 0x4b10, 0xa53: 0x4b16, 0xa54: 0x4b1c, 0xa55: 0x4b22, 0xa56: 0x4b28, 0xa57: 0x4b2e, + 0xa58: 0x4b34, 0xa59: 0x4b3a, 0xa5a: 0x4b40, 0xa5b: 0x4b46, 0xa5c: 0x4b4c, 0xa5d: 0x4b52, + 0xa5e: 0x4b58, 0xa5f: 0x4b5e, 0xa60: 0x4b64, 0xa61: 0x4b6a, 0xa62: 0x4b70, 0xa63: 0x4b76, + 0xa64: 0x04b6, 0xa65: 0x0452, 0xa66: 0x0456, 0xa67: 0x04da, 0xa68: 0x04de, 0xa69: 0x04e2, + 0xa6a: 0x04e6, 0xa6b: 0x04ea, 0xa6c: 0x04ee, 0xa6d: 0x04f2, 0xa6e: 0x045e, 0xa6f: 0x04f6, + 0xa70: 0x04fa, 0xa71: 0x0462, 0xa72: 0x0466, 0xa73: 0x046a, 0xa74: 0x0472, 0xa75: 0x0476, + 0xa76: 0x047a, 0xa77: 0x047e, 0xa78: 0x0482, 0xa79: 0x0486, 0xa7a: 0x048a, 0xa7b: 0x048e, + 0xa7c: 0x0492, 0xa7d: 0x0496, 0xa7e: 0x049a, 0xa7f: 0x049e, + // Block 0x2a, offset 0xa80 + 0xa80: 0x04a2, 0xa81: 0x04a6, 0xa82: 0x04fe, 0xa83: 0x0502, 0xa84: 0x04aa, 0xa85: 0x04ae, + 0xa86: 0x04b2, 0xa87: 0x04ba, 0xa88: 0x04be, 0xa89: 0x04c2, 0xa8a: 0x04c6, 0xa8b: 0x04ca, + 0xa8c: 0x04ce, 0xa8d: 0x04d2, 0xa8e: 0x04d6, + 0xa92: 0x07ba, 0xa93: 0x0816, 0xa94: 0x07c6, 0xa95: 0x0a76, 0xa96: 0x07ca, 0xa97: 0x07e2, + 0xa98: 0x07ce, 0xa99: 0x108e, 0xa9a: 0x0802, 0xa9b: 0x07d6, 0xa9c: 0x07be, 0xa9d: 0x0afa, + 0xa9e: 0x0a8a, 0xa9f: 0x082a, + // Block 0x2b, offset 0xac0 + 0xac0: 0x2184, 0xac1: 0x218a, 0xac2: 0x2190, 0xac3: 0x2196, 0xac4: 0x219c, 0xac5: 0x21a2, + 0xac6: 0x21a8, 0xac7: 0x21ae, 0xac8: 0x21b4, 0xac9: 0x21ba, 0xaca: 0x21c0, 0xacb: 0x21c6, + 0xacc: 0x21cc, 0xacd: 0x21d2, 0xace: 0x285d, 0xacf: 0x2866, 0xad0: 0x286f, 0xad1: 0x2878, + 0xad2: 0x2881, 0xad3: 0x288a, 0xad4: 0x2893, 0xad5: 0x289c, 0xad6: 0x28a5, 0xad7: 0x28b7, + 0xad8: 0x28c0, 0xad9: 0x28c9, 0xada: 0x28d2, 0xadb: 0x28db, 0xadc: 0x28ae, 0xadd: 0x2ce3, + 0xade: 0x2c24, 0xae0: 0x21d8, 0xae1: 0x21f0, 0xae2: 0x21e4, 0xae3: 0x2238, + 0xae4: 0x21f6, 0xae5: 0x2214, 0xae6: 0x21de, 0xae7: 0x220e, 0xae8: 0x21ea, 0xae9: 0x2220, + 0xaea: 0x2250, 0xaeb: 0x226e, 0xaec: 0x2268, 0xaed: 0x225c, 0xaee: 0x22aa, 0xaef: 0x223e, + 0xaf0: 0x224a, 0xaf1: 0x2262, 0xaf2: 0x2256, 0xaf3: 0x2280, 0xaf4: 0x222c, 0xaf5: 0x2274, + 0xaf6: 0x229e, 0xaf7: 0x2286, 0xaf8: 0x221a, 0xaf9: 0x21fc, 0xafa: 0x2232, 0xafb: 0x2244, + 0xafc: 0x227a, 0xafd: 0x2202, 0xafe: 0x22a4, 0xaff: 0x2226, + // Block 0x2c, offset 0xb00 + 0xb00: 0x228c, 0xb01: 0x2208, 0xb02: 0x2292, 0xb03: 0x2298, 0xb04: 0x0a2a, 0xb05: 0x0bfe, + 0xb06: 0x0da2, 0xb07: 0x11c2, + 0xb10: 0x1cf4, 0xb11: 0x19d6, + 0xb12: 0x19d9, 0xb13: 0x19dc, 0xb14: 0x19df, 0xb15: 0x19e2, 0xb16: 0x19e5, 0xb17: 0x19e8, + 0xb18: 0x19eb, 0xb19: 0x19ee, 0xb1a: 0x19f7, 0xb1b: 0x19fa, 0xb1c: 0x19fd, 0xb1d: 0x1a00, + 0xb1e: 0x1a03, 0xb1f: 0x1a06, 0xb20: 0x0406, 0xb21: 0x040e, 0xb22: 0x0412, 0xb23: 0x041a, + 0xb24: 0x041e, 0xb25: 0x0422, 0xb26: 0x042a, 0xb27: 0x0432, 0xb28: 0x0436, 0xb29: 0x043e, + 0xb2a: 0x0442, 0xb2b: 0x0446, 0xb2c: 0x044a, 0xb2d: 0x044e, 0xb2e: 0x2f59, 0xb2f: 0x2f61, + 0xb30: 0x2f69, 0xb31: 0x2f71, 0xb32: 0x2f79, 0xb33: 0x2f81, 0xb34: 0x2f89, 0xb35: 0x2f91, + 0xb36: 0x2fa1, 0xb37: 0x2fa9, 0xb38: 0x2fb1, 0xb39: 0x2fb9, 0xb3a: 0x2fc1, 0xb3b: 0x2fc9, + 0xb3c: 0x3014, 0xb3d: 0x2fdc, 0xb3e: 0x2f99, + // Block 0x2d, offset 0xb40 + 0xb40: 0x07ba, 0xb41: 0x0816, 0xb42: 0x07c6, 0xb43: 0x0a76, 0xb44: 0x081a, 0xb45: 0x08aa, + 0xb46: 0x07c2, 0xb47: 0x08a6, 0xb48: 0x0806, 0xb49: 0x0982, 0xb4a: 0x0e02, 0xb4b: 0x0f8a, + 0xb4c: 0x0ed2, 0xb4d: 0x0e16, 0xb4e: 0x155a, 0xb4f: 0x0a86, 0xb50: 0x0dca, 0xb51: 0x0e46, + 0xb52: 0x0e06, 0xb53: 0x1146, 0xb54: 0x09f6, 0xb55: 0x0ffe, 0xb56: 0x1482, 0xb57: 0x115a, + 
0xb58: 0x093e, 0xb59: 0x118a, 0xb5a: 0x1096, 0xb5b: 0x0b12, 0xb5c: 0x150a, 0xb5d: 0x087a, + 0xb5e: 0x09a6, 0xb5f: 0x0ef2, 0xb60: 0x1622, 0xb61: 0x083e, 0xb62: 0x08ce, 0xb63: 0x0e96, + 0xb64: 0x07ca, 0xb65: 0x07e2, 0xb66: 0x07ce, 0xb67: 0x0bd6, 0xb68: 0x09ea, 0xb69: 0x097a, + 0xb6a: 0x0b52, 0xb6b: 0x0b46, 0xb6c: 0x10e6, 0xb6d: 0x083a, 0xb6e: 0x1496, 0xb6f: 0x0996, + 0xb70: 0x0aee, 0xb71: 0x1a09, 0xb72: 0x1a0c, 0xb73: 0x1a0f, 0xb74: 0x1a12, 0xb75: 0x1a1b, + 0xb76: 0x1a1e, 0xb77: 0x1a21, 0xb78: 0x1a24, 0xb79: 0x1a27, 0xb7a: 0x1a2a, 0xb7b: 0x1a2d, + 0xb7c: 0x1a30, 0xb7d: 0x1a33, 0xb7e: 0x1a36, 0xb7f: 0x1a3f, + // Block 0x2e, offset 0xb80 + 0xb80: 0x1df6, 0xb81: 0x1e05, 0xb82: 0x1e14, 0xb83: 0x1e23, 0xb84: 0x1e32, 0xb85: 0x1e41, + 0xb86: 0x1e50, 0xb87: 0x1e5f, 0xb88: 0x1e6e, 0xb89: 0x22bc, 0xb8a: 0x22ce, 0xb8b: 0x22e0, + 0xb8c: 0x1a81, 0xb8d: 0x1d34, 0xb8e: 0x1b02, 0xb8f: 0x1cd8, 0xb90: 0x05c6, 0xb91: 0x05ce, + 0xb92: 0x05d6, 0xb93: 0x05de, 0xb94: 0x05e6, 0xb95: 0x05ea, 0xb96: 0x05ee, 0xb97: 0x05f2, + 0xb98: 0x05f6, 0xb99: 0x05fa, 0xb9a: 0x05fe, 0xb9b: 0x0602, 0xb9c: 0x0606, 0xb9d: 0x060a, + 0xb9e: 0x060e, 0xb9f: 0x0612, 0xba0: 0x0616, 0xba1: 0x061e, 0xba2: 0x0622, 0xba3: 0x0626, + 0xba4: 0x062a, 0xba5: 0x062e, 0xba6: 0x0632, 0xba7: 0x0636, 0xba8: 0x063a, 0xba9: 0x063e, + 0xbaa: 0x0642, 0xbab: 0x0646, 0xbac: 0x064a, 0xbad: 0x064e, 0xbae: 0x0652, 0xbaf: 0x0656, + 0xbb0: 0x065a, 0xbb1: 0x065e, 0xbb2: 0x0662, 0xbb3: 0x066a, 0xbb4: 0x0672, 0xbb5: 0x067a, + 0xbb6: 0x067e, 0xbb7: 0x0682, 0xbb8: 0x0686, 0xbb9: 0x068a, 0xbba: 0x068e, 0xbbb: 0x0692, + 0xbbc: 0x0696, 0xbbd: 0x069a, 0xbbe: 0x069e, 0xbbf: 0x282a, + // Block 0x2f, offset 0xbc0 + 0xbc0: 0x2c43, 0xbc1: 0x2adf, 0xbc2: 0x2c53, 0xbc3: 0x29b7, 0xbc4: 0x3025, 0xbc5: 0x29c1, + 0xbc6: 0x29cb, 0xbc7: 0x3069, 0xbc8: 0x2aec, 0xbc9: 0x29d5, 0xbca: 0x29df, 0xbcb: 0x29e9, + 0xbcc: 0x2b13, 0xbcd: 0x2b20, 0xbce: 0x2af9, 0xbcf: 0x2b06, 0xbd0: 0x2fea, 0xbd1: 0x2b2d, + 0xbd2: 0x2b3a, 0xbd3: 0x2cf5, 0xbd4: 0x27eb, 0xbd5: 0x2d08, 0xbd6: 0x2d1b, 0xbd7: 0x2c63, + 0xbd8: 0x2b47, 0xbd9: 0x2d2e, 0xbda: 0x2d41, 0xbdb: 0x2b54, 0xbdc: 0x29f3, 0xbdd: 0x29fd, + 0xbde: 0x2ff8, 0xbdf: 0x2b61, 0xbe0: 0x2c73, 0xbe1: 0x3036, 0xbe2: 0x2a07, 0xbe3: 0x2a11, + 0xbe4: 0x2b6e, 0xbe5: 0x2a1b, 0xbe6: 0x2a25, 0xbe7: 0x2800, 0xbe8: 0x2807, 0xbe9: 0x2a2f, + 0xbea: 0x2a39, 0xbeb: 0x2d54, 0xbec: 0x2b7b, 0xbed: 0x2c83, 0xbee: 0x2d67, 0xbef: 0x2b88, + 0xbf0: 0x2a4d, 0xbf1: 0x2a43, 0xbf2: 0x307d, 0xbf3: 0x2b95, 0xbf4: 0x2d7a, 0xbf5: 0x2a57, + 0xbf6: 0x2c93, 0xbf7: 0x2a61, 0xbf8: 0x2baf, 0xbf9: 0x2a6b, 0xbfa: 0x2bbc, 0xbfb: 0x3047, + 0xbfc: 0x2ba2, 0xbfd: 0x2ca3, 0xbfe: 0x2bc9, 0xbff: 0x280e, + // Block 0x30, offset 0xc00 + 0xc00: 0x3058, 0xc01: 0x2a75, 0xc02: 0x2a7f, 0xc03: 0x2bd6, 0xc04: 0x2a89, 0xc05: 0x2a93, + 0xc06: 0x2a9d, 0xc07: 0x2cb3, 0xc08: 0x2be3, 0xc09: 0x2815, 0xc0a: 0x2d8d, 0xc0b: 0x2fd1, + 0xc0c: 0x2cc3, 0xc0d: 0x2bf0, 0xc0e: 0x3006, 0xc0f: 0x2aa7, 0xc10: 0x2ab1, 0xc11: 0x2bfd, + 0xc12: 0x281c, 0xc13: 0x2c0a, 0xc14: 0x2cd3, 0xc15: 0x2823, 0xc16: 0x2da0, 0xc17: 0x2abb, + 0xc18: 0x1de7, 0xc19: 0x1dfb, 0xc1a: 0x1e0a, 0xc1b: 0x1e19, 0xc1c: 0x1e28, 0xc1d: 0x1e37, + 0xc1e: 0x1e46, 0xc1f: 0x1e55, 0xc20: 0x1e64, 0xc21: 0x1e73, 0xc22: 0x22c2, 0xc23: 0x22d4, + 0xc24: 0x22e6, 0xc25: 0x22f2, 0xc26: 0x22fe, 0xc27: 0x230a, 0xc28: 0x2316, 0xc29: 0x2322, + 0xc2a: 0x232e, 0xc2b: 0x233a, 0xc2c: 0x2376, 0xc2d: 0x2382, 0xc2e: 0x238e, 0xc2f: 0x239a, + 0xc30: 0x23a6, 0xc31: 0x1d44, 0xc32: 0x1af6, 0xc33: 0x1a63, 0xc34: 0x1d14, 0xc35: 0x1b77, + 0xc36: 0x1b86, 0xc37: 0x1afc, 0xc38: 0x1d2c, 0xc39: 
0x1d30, 0xc3a: 0x1a8d, 0xc3b: 0x2838, + 0xc3c: 0x2846, 0xc3d: 0x2831, 0xc3e: 0x283f, 0xc3f: 0x2c17, + // Block 0x31, offset 0xc40 + 0xc40: 0x1b7a, 0xc41: 0x1b62, 0xc42: 0x1d90, 0xc43: 0x1b4a, 0xc44: 0x1b23, 0xc45: 0x1a96, + 0xc46: 0x1aa5, 0xc47: 0x1a75, 0xc48: 0x1d20, 0xc49: 0x1e82, 0xc4a: 0x1b7d, 0xc4b: 0x1b65, + 0xc4c: 0x1d94, 0xc4d: 0x1da0, 0xc4e: 0x1b56, 0xc4f: 0x1b2c, 0xc50: 0x1a84, 0xc51: 0x1d4c, + 0xc52: 0x1ce0, 0xc53: 0x1ccc, 0xc54: 0x1cfc, 0xc55: 0x1da4, 0xc56: 0x1b59, 0xc57: 0x1af9, + 0xc58: 0x1b2f, 0xc59: 0x1b0e, 0xc5a: 0x1b71, 0xc5b: 0x1da8, 0xc5c: 0x1b5c, 0xc5d: 0x1af0, + 0xc5e: 0x1b32, 0xc5f: 0x1d6c, 0xc60: 0x1d24, 0xc61: 0x1b44, 0xc62: 0x1d54, 0xc63: 0x1d70, + 0xc64: 0x1d28, 0xc65: 0x1b47, 0xc66: 0x1d58, 0xc67: 0x2418, 0xc68: 0x242c, 0xc69: 0x1ac6, + 0xc6a: 0x1d50, 0xc6b: 0x1ce4, 0xc6c: 0x1cd0, 0xc6d: 0x1d78, 0xc6e: 0x284d, 0xc6f: 0x28e4, + 0xc70: 0x1b89, 0xc71: 0x1b74, 0xc72: 0x1dac, 0xc73: 0x1b5f, 0xc74: 0x1b80, 0xc75: 0x1b68, + 0xc76: 0x1d98, 0xc77: 0x1b4d, 0xc78: 0x1b26, 0xc79: 0x1ab1, 0xc7a: 0x1b83, 0xc7b: 0x1b6b, + 0xc7c: 0x1d9c, 0xc7d: 0x1b50, 0xc7e: 0x1b29, 0xc7f: 0x1ab4, + // Block 0x32, offset 0xc80 + 0xc80: 0x1d5c, 0xc81: 0x1ce8, 0xc82: 0x1e7d, 0xc83: 0x1a66, 0xc84: 0x1aea, 0xc85: 0x1aed, + 0xc86: 0x2425, 0xc87: 0x1cc4, 0xc88: 0x1af3, 0xc89: 0x1a78, 0xc8a: 0x1b11, 0xc8b: 0x1a7b, + 0xc8c: 0x1b1a, 0xc8d: 0x1a99, 0xc8e: 0x1a9c, 0xc8f: 0x1b35, 0xc90: 0x1b3b, 0xc91: 0x1b3e, + 0xc92: 0x1d60, 0xc93: 0x1b41, 0xc94: 0x1b53, 0xc95: 0x1d68, 0xc96: 0x1d74, 0xc97: 0x1ac0, + 0xc98: 0x1e87, 0xc99: 0x1cec, 0xc9a: 0x1ac3, 0xc9b: 0x1b8c, 0xc9c: 0x1ad5, 0xc9d: 0x1ae4, + 0xc9e: 0x2412, 0xc9f: 0x240c, 0xca0: 0x1df1, 0xca1: 0x1e00, 0xca2: 0x1e0f, 0xca3: 0x1e1e, + 0xca4: 0x1e2d, 0xca5: 0x1e3c, 0xca6: 0x1e4b, 0xca7: 0x1e5a, 0xca8: 0x1e69, 0xca9: 0x22b6, + 0xcaa: 0x22c8, 0xcab: 0x22da, 0xcac: 0x22ec, 0xcad: 0x22f8, 0xcae: 0x2304, 0xcaf: 0x2310, + 0xcb0: 0x231c, 0xcb1: 0x2328, 0xcb2: 0x2334, 0xcb3: 0x2370, 0xcb4: 0x237c, 0xcb5: 0x2388, + 0xcb6: 0x2394, 0xcb7: 0x23a0, 0xcb8: 0x23ac, 0xcb9: 0x23b2, 0xcba: 0x23b8, 0xcbb: 0x23be, + 0xcbc: 0x23c4, 0xcbd: 0x23d6, 0xcbe: 0x23dc, 0xcbf: 0x1d40, + // Block 0x33, offset 0xcc0 + 0xcc0: 0x1472, 0xcc1: 0x0df6, 0xcc2: 0x14ce, 0xcc3: 0x149a, 0xcc4: 0x0f52, 0xcc5: 0x07e6, + 0xcc6: 0x09da, 0xcc7: 0x1726, 0xcc8: 0x1726, 0xcc9: 0x0b06, 0xcca: 0x155a, 0xccb: 0x0a3e, + 0xccc: 0x0b02, 0xccd: 0x0cea, 0xcce: 0x10ca, 0xccf: 0x125a, 0xcd0: 0x1392, 0xcd1: 0x13ce, + 0xcd2: 0x1402, 0xcd3: 0x1516, 0xcd4: 0x0e6e, 0xcd5: 0x0efa, 0xcd6: 0x0fa6, 0xcd7: 0x103e, + 0xcd8: 0x135a, 0xcd9: 0x1542, 0xcda: 0x166e, 0xcdb: 0x080a, 0xcdc: 0x09ae, 0xcdd: 0x0e82, + 0xcde: 0x0fca, 0xcdf: 0x138e, 0xce0: 0x16be, 0xce1: 0x0bae, 0xce2: 0x0f72, 0xce3: 0x137e, + 0xce4: 0x1412, 0xce5: 0x0d1e, 0xce6: 0x12b6, 0xce7: 0x13da, 0xce8: 0x0c1a, 0xce9: 0x0e0a, + 0xcea: 0x0f12, 0xceb: 0x1016, 0xcec: 0x1522, 0xced: 0x084a, 0xcee: 0x08e2, 0xcef: 0x094e, + 0xcf0: 0x0d86, 0xcf1: 0x0e7a, 0xcf2: 0x0fc6, 0xcf3: 0x10ea, 0xcf4: 0x1272, 0xcf5: 0x1386, + 0xcf6: 0x139e, 0xcf7: 0x14c2, 0xcf8: 0x15ea, 0xcf9: 0x169e, 0xcfa: 0x16ba, 0xcfb: 0x1126, + 0xcfc: 0x1166, 0xcfd: 0x121e, 0xcfe: 0x133e, 0xcff: 0x1576, + // Block 0x34, offset 0xd00 + 0xd00: 0x16c6, 0xd01: 0x1446, 0xd02: 0x0ac2, 0xd03: 0x0c36, 0xd04: 0x11d6, 0xd05: 0x1296, + 0xd06: 0x0ffa, 0xd07: 0x112e, 0xd08: 0x1492, 0xd09: 0x15e2, 0xd0a: 0x0abe, 0xd0b: 0x0b8a, + 0xd0c: 0x0e72, 0xd0d: 0x0f26, 0xd0e: 0x0f5a, 0xd0f: 0x120e, 0xd10: 0x1236, 0xd11: 0x15a2, + 0xd12: 0x094a, 0xd13: 0x12a2, 0xd14: 0x08ee, 0xd15: 0x08ea, 0xd16: 0x1192, 0xd17: 0x1222, + 0xd18: 0x1356, 
0xd19: 0x15aa, 0xd1a: 0x1462, 0xd1b: 0x0d22, 0xd1c: 0x0e6e, 0xd1d: 0x1452, + 0xd1e: 0x07f2, 0xd1f: 0x0b5e, 0xd20: 0x0c8e, 0xd21: 0x102a, 0xd22: 0x10aa, 0xd23: 0x096e, + 0xd24: 0x1136, 0xd25: 0x085a, 0xd26: 0x0c72, 0xd27: 0x07d2, 0xd28: 0x0ee6, 0xd29: 0x0d9e, + 0xd2a: 0x120a, 0xd2b: 0x09c2, 0xd2c: 0x0aae, 0xd2d: 0x10f6, 0xd2e: 0x135e, 0xd2f: 0x1436, + 0xd30: 0x0eb2, 0xd31: 0x14f2, 0xd32: 0x0ede, 0xd33: 0x0d32, 0xd34: 0x1316, 0xd35: 0x0d52, + 0xd36: 0x10a6, 0xd37: 0x0826, 0xd38: 0x08a2, 0xd39: 0x08e6, 0xd3a: 0x0e4e, 0xd3b: 0x11f6, + 0xd3c: 0x12ee, 0xd3d: 0x1442, 0xd3e: 0x1556, 0xd3f: 0x0956, + // Block 0x35, offset 0xd40 + 0xd40: 0x0a0a, 0xd41: 0x0b12, 0xd42: 0x0c2a, 0xd43: 0x0dba, 0xd44: 0x0f76, 0xd45: 0x113a, + 0xd46: 0x1592, 0xd47: 0x1676, 0xd48: 0x16ca, 0xd49: 0x16e2, 0xd4a: 0x0932, 0xd4b: 0x0dee, + 0xd4c: 0x0e9e, 0xd4d: 0x14e6, 0xd4e: 0x0bf6, 0xd4f: 0x0cd2, 0xd50: 0x0cee, 0xd51: 0x0d7e, + 0xd52: 0x0f66, 0xd53: 0x0fb2, 0xd54: 0x1062, 0xd55: 0x1186, 0xd56: 0x122a, 0xd57: 0x128e, + 0xd58: 0x14d6, 0xd59: 0x1366, 0xd5a: 0x14fe, 0xd5b: 0x157a, 0xd5c: 0x090a, 0xd5d: 0x0936, + 0xd5e: 0x0a1e, 0xd5f: 0x0fa2, 0xd60: 0x13ee, 0xd61: 0x1436, 0xd62: 0x0c16, 0xd63: 0x0c86, + 0xd64: 0x0d4a, 0xd65: 0x0eaa, 0xd66: 0x11d2, 0xd67: 0x101e, 0xd68: 0x0836, 0xd69: 0x0a7a, + 0xd6a: 0x0b5e, 0xd6b: 0x0bc2, 0xd6c: 0x0c92, 0xd6d: 0x103a, 0xd6e: 0x1056, 0xd6f: 0x1266, + 0xd70: 0x1286, 0xd71: 0x155e, 0xd72: 0x15de, 0xd73: 0x15ee, 0xd74: 0x162a, 0xd75: 0x084e, + 0xd76: 0x117a, 0xd77: 0x154a, 0xd78: 0x15c6, 0xd79: 0x0caa, 0xd7a: 0x0812, 0xd7b: 0x0872, + 0xd7c: 0x0b62, 0xd7d: 0x0b82, 0xd7e: 0x0daa, 0xd7f: 0x0e6e, + // Block 0x36, offset 0xd80 + 0xd80: 0x0fbe, 0xd81: 0x10c6, 0xd82: 0x1372, 0xd83: 0x1512, 0xd84: 0x171e, 0xd85: 0x0dde, + 0xd86: 0x159e, 0xd87: 0x092e, 0xd88: 0x0e2a, 0xd89: 0x0e36, 0xd8a: 0x0f0a, 0xd8b: 0x0f42, + 0xd8c: 0x1046, 0xd8d: 0x10a2, 0xd8e: 0x1122, 0xd8f: 0x1206, 0xd90: 0x1636, 0xd91: 0x08aa, + 0xd92: 0x0cfe, 0xd93: 0x15ae, 0xd94: 0x0862, 0xd95: 0x0ba6, 0xd96: 0x0f2a, 0xd97: 0x14da, + 0xd98: 0x0c62, 0xd99: 0x0cb2, 0xd9a: 0x0e3e, 0xd9b: 0x102a, 0xd9c: 0x15b6, 0xd9d: 0x0912, + 0xd9e: 0x09fa, 0xd9f: 0x0b92, 0xda0: 0x0dce, 0xda1: 0x0e1a, 0xda2: 0x0e5a, 0xda3: 0x0eee, + 0xda4: 0x1042, 0xda5: 0x10b6, 0xda6: 0x1252, 0xda7: 0x13f2, 0xda8: 0x13fe, 0xda9: 0x1552, + 0xdaa: 0x15d2, 0xdab: 0x097e, 0xdac: 0x0f46, 0xdad: 0x09fe, 0xdae: 0x0fc2, 0xdaf: 0x1066, + 0xdb0: 0x1382, 0xdb1: 0x15ba, 0xdb2: 0x16a6, 0xdb3: 0x16ce, 0xdb4: 0x0e32, 0xdb5: 0x0f22, + 0xdb6: 0x12be, 0xdb7: 0x11b2, 0xdb8: 0x11be, 0xdb9: 0x11e2, 0xdba: 0x1012, 0xdbb: 0x0f9a, + 0xdbc: 0x145e, 0xdbd: 0x082e, 0xdbe: 0x1326, 0xdbf: 0x0916, + // Block 0x37, offset 0xdc0 + 0xdc0: 0x0906, 0xdc1: 0x0c06, 0xdc2: 0x0d26, 0xdc3: 0x11ee, 0xdc4: 0x0b4e, 0xdc5: 0x0efe, + 0xdc6: 0x0dea, 0xdc7: 0x14e2, 0xdc8: 0x13e2, 0xdc9: 0x15a6, 0xdca: 0x141e, 0xdcb: 0x0c22, + 0xdcc: 0x0882, 0xdcd: 0x0a56, 0xdd0: 0x0aaa, + 0xdd2: 0x0dda, 0xdd5: 0x08f2, 0xdd6: 0x101a, 0xdd7: 0x10de, + 0xdd8: 0x1142, 0xdd9: 0x115e, 0xdda: 0x1162, 0xddb: 0x1176, 0xddc: 0x15f6, 0xddd: 0x11e6, + 0xdde: 0x126a, 0xde0: 0x138a, 0xde2: 0x144e, + 0xde5: 0x1502, 0xde6: 0x152e, + 0xdea: 0x164a, 0xdeb: 0x164e, 0xdec: 0x1652, 0xded: 0x16b6, 0xdee: 0x1526, 0xdef: 0x15c2, + 0xdf0: 0x0852, 0xdf1: 0x0876, 0xdf2: 0x088a, 0xdf3: 0x0946, 0xdf4: 0x0952, 0xdf5: 0x0992, + 0xdf6: 0x0a46, 0xdf7: 0x0a62, 0xdf8: 0x0a6a, 0xdf9: 0x0aa6, 0xdfa: 0x0ab2, 0xdfb: 0x0b8e, + 0xdfc: 0x0b96, 0xdfd: 0x0c9e, 0xdfe: 0x0cc6, 0xdff: 0x0cce, + // Block 0x38, offset 0xe00 + 0xe00: 0x0ce6, 0xe01: 0x0d92, 0xe02: 0x0dc2, 0xe03: 0x0de2, 0xe04: 
0x0e52, 0xe05: 0x0f16, + 0xe06: 0x0f32, 0xe07: 0x0f62, 0xe08: 0x0fb6, 0xe09: 0x0fd6, 0xe0a: 0x104a, 0xe0b: 0x112a, + 0xe0c: 0x1146, 0xe0d: 0x114e, 0xe0e: 0x114a, 0xe0f: 0x1152, 0xe10: 0x1156, 0xe11: 0x115a, + 0xe12: 0x116e, 0xe13: 0x1172, 0xe14: 0x1196, 0xe15: 0x11aa, 0xe16: 0x11c6, 0xe17: 0x122a, + 0xe18: 0x1232, 0xe19: 0x123a, 0xe1a: 0x124e, 0xe1b: 0x1276, 0xe1c: 0x12c6, 0xe1d: 0x12fa, + 0xe1e: 0x12fa, 0xe1f: 0x1362, 0xe20: 0x140a, 0xe21: 0x1422, 0xe22: 0x1456, 0xe23: 0x145a, + 0xe24: 0x149e, 0xe25: 0x14a2, 0xe26: 0x14fa, 0xe27: 0x1502, 0xe28: 0x15d6, 0xe29: 0x161a, + 0xe2a: 0x1632, 0xe2b: 0x0c96, 0xe2c: 0x184b, 0xe2d: 0x12de, + 0xe30: 0x07da, 0xe31: 0x08de, 0xe32: 0x089e, 0xe33: 0x0846, 0xe34: 0x0886, 0xe35: 0x08b2, + 0xe36: 0x0942, 0xe37: 0x095e, 0xe38: 0x0a46, 0xe39: 0x0a32, 0xe3a: 0x0a42, 0xe3b: 0x0a5e, + 0xe3c: 0x0aaa, 0xe3d: 0x0aba, 0xe3e: 0x0afe, 0xe3f: 0x0b0a, + // Block 0x39, offset 0xe40 + 0xe40: 0x0b26, 0xe41: 0x0b36, 0xe42: 0x0c1e, 0xe43: 0x0c26, 0xe44: 0x0c56, 0xe45: 0x0c76, + 0xe46: 0x0ca6, 0xe47: 0x0cbe, 0xe48: 0x0cae, 0xe49: 0x0cce, 0xe4a: 0x0cc2, 0xe4b: 0x0ce6, + 0xe4c: 0x0d02, 0xe4d: 0x0d5a, 0xe4e: 0x0d66, 0xe4f: 0x0d6e, 0xe50: 0x0d96, 0xe51: 0x0dda, + 0xe52: 0x0e0a, 0xe53: 0x0e0e, 0xe54: 0x0e22, 0xe55: 0x0ea2, 0xe56: 0x0eb2, 0xe57: 0x0f0a, + 0xe58: 0x0f56, 0xe59: 0x0f4e, 0xe5a: 0x0f62, 0xe5b: 0x0f7e, 0xe5c: 0x0fb6, 0xe5d: 0x110e, + 0xe5e: 0x0fda, 0xe5f: 0x100e, 0xe60: 0x101a, 0xe61: 0x105a, 0xe62: 0x1076, 0xe63: 0x109a, + 0xe64: 0x10be, 0xe65: 0x10c2, 0xe66: 0x10de, 0xe67: 0x10e2, 0xe68: 0x10f2, 0xe69: 0x1106, + 0xe6a: 0x1102, 0xe6b: 0x1132, 0xe6c: 0x11ae, 0xe6d: 0x11c6, 0xe6e: 0x11de, 0xe6f: 0x1216, + 0xe70: 0x122a, 0xe71: 0x1246, 0xe72: 0x1276, 0xe73: 0x132a, 0xe74: 0x1352, 0xe75: 0x13c6, + 0xe76: 0x140e, 0xe77: 0x141a, 0xe78: 0x1422, 0xe79: 0x143a, 0xe7a: 0x144e, 0xe7b: 0x143e, + 0xe7c: 0x1456, 0xe7d: 0x1452, 0xe7e: 0x144a, 0xe7f: 0x145a, + // Block 0x3a, offset 0xe80 + 0xe80: 0x1466, 0xe81: 0x14a2, 0xe82: 0x14de, 0xe83: 0x150e, 0xe84: 0x1546, 0xe85: 0x1566, + 0xe86: 0x15b2, 0xe87: 0x15d6, 0xe88: 0x15f6, 0xe89: 0x160a, 0xe8a: 0x161a, 0xe8b: 0x1626, + 0xe8c: 0x1632, 0xe8d: 0x1686, 0xe8e: 0x1726, 0xe8f: 0x17e2, 0xe90: 0x17dd, 0xe91: 0x180f, + 0xe92: 0x0702, 0xe93: 0x072a, 0xe94: 0x072e, 0xe95: 0x1891, 0xe96: 0x18be, 0xe97: 0x1936, + 0xe98: 0x1712, 0xe99: 0x1722, + // Block 0x3b, offset 0xec0 + 0xec0: 0x1b05, 0xec1: 0x1b08, 0xec2: 0x1b0b, 0xec3: 0x1d38, 0xec4: 0x1d3c, 0xec5: 0x1b8f, + 0xec6: 0x1b8f, + 0xed3: 0x1ea5, 0xed4: 0x1e96, 0xed5: 0x1e9b, 0xed6: 0x1eaa, 0xed7: 0x1ea0, + 0xedd: 0x44d1, + 0xede: 0x8116, 0xedf: 0x4543, 0xee0: 0x0320, 0xee1: 0x0308, 0xee2: 0x0311, 0xee3: 0x0314, + 0xee4: 0x0317, 0xee5: 0x031a, 0xee6: 0x031d, 0xee7: 0x0323, 0xee8: 0x0326, 0xee9: 0x0017, + 0xeea: 0x4531, 0xeeb: 0x4537, 0xeec: 0x4635, 0xeed: 0x463d, 0xeee: 0x4489, 0xeef: 0x448f, + 0xef0: 0x4495, 0xef1: 0x449b, 0xef2: 0x44a7, 0xef3: 0x44ad, 0xef4: 0x44b3, 0xef5: 0x44bf, + 0xef6: 0x44c5, 0xef8: 0x44cb, 0xef9: 0x44d7, 0xefa: 0x44dd, 0xefb: 0x44e3, + 0xefc: 0x44ef, 0xefe: 0x44f5, + // Block 0x3c, offset 0xf00 + 0xf00: 0x44fb, 0xf01: 0x4501, 0xf03: 0x4507, 0xf04: 0x450d, + 0xf06: 0x4519, 0xf07: 0x451f, 0xf08: 0x4525, 0xf09: 0x452b, 0xf0a: 0x453d, 0xf0b: 0x44b9, + 0xf0c: 0x44a1, 0xf0d: 0x44e9, 0xf0e: 0x4513, 0xf0f: 0x1eaf, 0xf10: 0x038c, 0xf11: 0x038c, + 0xf12: 0x0395, 0xf13: 0x0395, 0xf14: 0x0395, 0xf15: 0x0395, 0xf16: 0x0398, 0xf17: 0x0398, + 0xf18: 0x0398, 0xf19: 0x0398, 0xf1a: 0x039e, 0xf1b: 0x039e, 0xf1c: 0x039e, 0xf1d: 0x039e, + 0xf1e: 0x0392, 0xf1f: 0x0392, 0xf20: 0x0392, 0xf21: 
0x0392, 0xf22: 0x039b, 0xf23: 0x039b, + 0xf24: 0x039b, 0xf25: 0x039b, 0xf26: 0x038f, 0xf27: 0x038f, 0xf28: 0x038f, 0xf29: 0x038f, + 0xf2a: 0x03c2, 0xf2b: 0x03c2, 0xf2c: 0x03c2, 0xf2d: 0x03c2, 0xf2e: 0x03c5, 0xf2f: 0x03c5, + 0xf30: 0x03c5, 0xf31: 0x03c5, 0xf32: 0x03a4, 0xf33: 0x03a4, 0xf34: 0x03a4, 0xf35: 0x03a4, + 0xf36: 0x03a1, 0xf37: 0x03a1, 0xf38: 0x03a1, 0xf39: 0x03a1, 0xf3a: 0x03a7, 0xf3b: 0x03a7, + 0xf3c: 0x03a7, 0xf3d: 0x03a7, 0xf3e: 0x03aa, 0xf3f: 0x03aa, + // Block 0x3d, offset 0xf40 + 0xf40: 0x03aa, 0xf41: 0x03aa, 0xf42: 0x03b3, 0xf43: 0x03b3, 0xf44: 0x03b0, 0xf45: 0x03b0, + 0xf46: 0x03b6, 0xf47: 0x03b6, 0xf48: 0x03ad, 0xf49: 0x03ad, 0xf4a: 0x03bc, 0xf4b: 0x03bc, + 0xf4c: 0x03b9, 0xf4d: 0x03b9, 0xf4e: 0x03c8, 0xf4f: 0x03c8, 0xf50: 0x03c8, 0xf51: 0x03c8, + 0xf52: 0x03ce, 0xf53: 0x03ce, 0xf54: 0x03ce, 0xf55: 0x03ce, 0xf56: 0x03d4, 0xf57: 0x03d4, + 0xf58: 0x03d4, 0xf59: 0x03d4, 0xf5a: 0x03d1, 0xf5b: 0x03d1, 0xf5c: 0x03d1, 0xf5d: 0x03d1, + 0xf5e: 0x03d7, 0xf5f: 0x03d7, 0xf60: 0x03da, 0xf61: 0x03da, 0xf62: 0x03da, 0xf63: 0x03da, + 0xf64: 0x45af, 0xf65: 0x45af, 0xf66: 0x03e0, 0xf67: 0x03e0, 0xf68: 0x03e0, 0xf69: 0x03e0, + 0xf6a: 0x03dd, 0xf6b: 0x03dd, 0xf6c: 0x03dd, 0xf6d: 0x03dd, 0xf6e: 0x03fb, 0xf6f: 0x03fb, + 0xf70: 0x45a9, 0xf71: 0x45a9, + // Block 0x3e, offset 0xf80 + 0xf93: 0x03cb, 0xf94: 0x03cb, 0xf95: 0x03cb, 0xf96: 0x03cb, 0xf97: 0x03e9, + 0xf98: 0x03e9, 0xf99: 0x03e6, 0xf9a: 0x03e6, 0xf9b: 0x03ec, 0xf9c: 0x03ec, 0xf9d: 0x217f, + 0xf9e: 0x03f2, 0xf9f: 0x03f2, 0xfa0: 0x03e3, 0xfa1: 0x03e3, 0xfa2: 0x03ef, 0xfa3: 0x03ef, + 0xfa4: 0x03f8, 0xfa5: 0x03f8, 0xfa6: 0x03f8, 0xfa7: 0x03f8, 0xfa8: 0x0380, 0xfa9: 0x0380, + 0xfaa: 0x26da, 0xfab: 0x26da, 0xfac: 0x274a, 0xfad: 0x274a, 0xfae: 0x2719, 0xfaf: 0x2719, + 0xfb0: 0x2735, 0xfb1: 0x2735, 0xfb2: 0x272e, 0xfb3: 0x272e, 0xfb4: 0x273c, 0xfb5: 0x273c, + 0xfb6: 0x2743, 0xfb7: 0x2743, 0xfb8: 0x2743, 0xfb9: 0x2720, 0xfba: 0x2720, 0xfbb: 0x2720, + 0xfbc: 0x03f5, 0xfbd: 0x03f5, 0xfbe: 0x03f5, 0xfbf: 0x03f5, + // Block 0x3f, offset 0xfc0 + 0xfc0: 0x26e1, 0xfc1: 0x26e8, 0xfc2: 0x2704, 0xfc3: 0x2720, 0xfc4: 0x2727, 0xfc5: 0x1eb9, + 0xfc6: 0x1ebe, 0xfc7: 0x1ec3, 0xfc8: 0x1ed2, 0xfc9: 0x1ee1, 0xfca: 0x1ee6, 0xfcb: 0x1eeb, + 0xfcc: 0x1ef0, 0xfcd: 0x1ef5, 0xfce: 0x1f04, 0xfcf: 0x1f13, 0xfd0: 0x1f18, 0xfd1: 0x1f1d, + 0xfd2: 0x1f2c, 0xfd3: 0x1f3b, 0xfd4: 0x1f40, 0xfd5: 0x1f45, 0xfd6: 0x1f4a, 0xfd7: 0x1f59, + 0xfd8: 0x1f5e, 0xfd9: 0x1f6d, 0xfda: 0x1f72, 0xfdb: 0x1f77, 0xfdc: 0x1f86, 0xfdd: 0x1f8b, + 0xfde: 0x1f90, 0xfdf: 0x1f9a, 0xfe0: 0x1fd6, 0xfe1: 0x1fe5, 0xfe2: 0x1ff4, 0xfe3: 0x1ff9, + 0xfe4: 0x1ffe, 0xfe5: 0x2008, 0xfe6: 0x2017, 0xfe7: 0x201c, 0xfe8: 0x202b, 0xfe9: 0x2030, + 0xfea: 0x2035, 0xfeb: 0x2044, 0xfec: 0x2049, 0xfed: 0x2058, 0xfee: 0x205d, 0xfef: 0x2062, + 0xff0: 0x2067, 0xff1: 0x206c, 0xff2: 0x2071, 0xff3: 0x2076, 0xff4: 0x207b, 0xff5: 0x2080, + 0xff6: 0x2085, 0xff7: 0x208a, 0xff8: 0x208f, 0xff9: 0x2094, 0xffa: 0x2099, 0xffb: 0x209e, + 0xffc: 0x20a3, 0xffd: 0x20a8, 0xffe: 0x20ad, 0xfff: 0x20b7, + // Block 0x40, offset 0x1000 + 0x1000: 0x20bc, 0x1001: 0x20c1, 0x1002: 0x20c6, 0x1003: 0x20d0, 0x1004: 0x20d5, 0x1005: 0x20df, + 0x1006: 0x20e4, 0x1007: 0x20e9, 0x1008: 0x20ee, 0x1009: 0x20f3, 0x100a: 0x20f8, 0x100b: 0x20fd, + 0x100c: 0x2102, 0x100d: 0x2107, 0x100e: 0x2116, 0x100f: 0x2125, 0x1010: 0x212a, 0x1011: 0x212f, + 0x1012: 0x2134, 0x1013: 0x2139, 0x1014: 0x213e, 0x1015: 0x2148, 0x1016: 0x214d, 0x1017: 0x2152, + 0x1018: 0x2161, 0x1019: 0x2170, 0x101a: 0x2175, 0x101b: 0x4561, 0x101c: 0x4567, 0x101d: 0x459d, + 0x101e: 0x45f4, 0x101f: 0x45fb, 
0x1020: 0x4602, 0x1021: 0x4609, 0x1022: 0x4610, 0x1023: 0x4617, + 0x1024: 0x26f6, 0x1025: 0x26fd, 0x1026: 0x2704, 0x1027: 0x270b, 0x1028: 0x2720, 0x1029: 0x2727, + 0x102a: 0x1ec8, 0x102b: 0x1ecd, 0x102c: 0x1ed2, 0x102d: 0x1ed7, 0x102e: 0x1ee1, 0x102f: 0x1ee6, + 0x1030: 0x1efa, 0x1031: 0x1eff, 0x1032: 0x1f04, 0x1033: 0x1f09, 0x1034: 0x1f13, 0x1035: 0x1f18, + 0x1036: 0x1f22, 0x1037: 0x1f27, 0x1038: 0x1f2c, 0x1039: 0x1f31, 0x103a: 0x1f3b, 0x103b: 0x1f40, + 0x103c: 0x206c, 0x103d: 0x2071, 0x103e: 0x2080, 0x103f: 0x2085, + // Block 0x41, offset 0x1040 + 0x1040: 0x208a, 0x1041: 0x209e, 0x1042: 0x20a3, 0x1043: 0x20a8, 0x1044: 0x20ad, 0x1045: 0x20c6, + 0x1046: 0x20d0, 0x1047: 0x20d5, 0x1048: 0x20da, 0x1049: 0x20ee, 0x104a: 0x210c, 0x104b: 0x2111, + 0x104c: 0x2116, 0x104d: 0x211b, 0x104e: 0x2125, 0x104f: 0x212a, 0x1050: 0x459d, 0x1051: 0x2157, + 0x1052: 0x215c, 0x1053: 0x2161, 0x1054: 0x2166, 0x1055: 0x2170, 0x1056: 0x2175, 0x1057: 0x26e1, + 0x1058: 0x26e8, 0x1059: 0x26ef, 0x105a: 0x2704, 0x105b: 0x2712, 0x105c: 0x1eb9, 0x105d: 0x1ebe, + 0x105e: 0x1ec3, 0x105f: 0x1ed2, 0x1060: 0x1edc, 0x1061: 0x1eeb, 0x1062: 0x1ef0, 0x1063: 0x1ef5, + 0x1064: 0x1f04, 0x1065: 0x1f0e, 0x1066: 0x1f2c, 0x1067: 0x1f45, 0x1068: 0x1f4a, 0x1069: 0x1f59, + 0x106a: 0x1f5e, 0x106b: 0x1f6d, 0x106c: 0x1f77, 0x106d: 0x1f86, 0x106e: 0x1f8b, 0x106f: 0x1f90, + 0x1070: 0x1f9a, 0x1071: 0x1fd6, 0x1072: 0x1fdb, 0x1073: 0x1fe5, 0x1074: 0x1ff4, 0x1075: 0x1ff9, + 0x1076: 0x1ffe, 0x1077: 0x2008, 0x1078: 0x2017, 0x1079: 0x202b, 0x107a: 0x2030, 0x107b: 0x2035, + 0x107c: 0x2044, 0x107d: 0x2049, 0x107e: 0x2058, 0x107f: 0x205d, + // Block 0x42, offset 0x1080 + 0x1080: 0x2062, 0x1081: 0x2067, 0x1082: 0x2076, 0x1083: 0x207b, 0x1084: 0x208f, 0x1085: 0x2094, + 0x1086: 0x2099, 0x1087: 0x209e, 0x1088: 0x20a3, 0x1089: 0x20b7, 0x108a: 0x20bc, 0x108b: 0x20c1, + 0x108c: 0x20c6, 0x108d: 0x20cb, 0x108e: 0x20df, 0x108f: 0x20e4, 0x1090: 0x20e9, 0x1091: 0x20ee, + 0x1092: 0x20fd, 0x1093: 0x2102, 0x1094: 0x2107, 0x1095: 0x2116, 0x1096: 0x2120, 0x1097: 0x212f, + 0x1098: 0x2134, 0x1099: 0x4591, 0x109a: 0x2148, 0x109b: 0x214d, 0x109c: 0x2152, 0x109d: 0x2161, + 0x109e: 0x216b, 0x109f: 0x2704, 0x10a0: 0x2712, 0x10a1: 0x1ed2, 0x10a2: 0x1edc, 0x10a3: 0x1f04, + 0x10a4: 0x1f0e, 0x10a5: 0x1f2c, 0x10a6: 0x1f36, 0x10a7: 0x1f9a, 0x10a8: 0x1f9f, 0x10a9: 0x1fc2, + 0x10aa: 0x1fc7, 0x10ab: 0x209e, 0x10ac: 0x20a3, 0x10ad: 0x20c6, 0x10ae: 0x2116, 0x10af: 0x2120, + 0x10b0: 0x2161, 0x10b1: 0x216b, 0x10b2: 0x4645, 0x10b3: 0x464d, 0x10b4: 0x4655, 0x10b5: 0x2021, + 0x10b6: 0x2026, 0x10b7: 0x203a, 0x10b8: 0x203f, 0x10b9: 0x204e, 0x10ba: 0x2053, 0x10bb: 0x1fa4, + 0x10bc: 0x1fa9, 0x10bd: 0x1fcc, 0x10be: 0x1fd1, 0x10bf: 0x1f63, + // Block 0x43, offset 0x10c0 + 0x10c0: 0x1f68, 0x10c1: 0x1f4f, 0x10c2: 0x1f54, 0x10c3: 0x1f7c, 0x10c4: 0x1f81, 0x10c5: 0x1fea, + 0x10c6: 0x1fef, 0x10c7: 0x200d, 0x10c8: 0x2012, 0x10c9: 0x1fae, 0x10ca: 0x1fb3, 0x10cb: 0x1fb8, + 0x10cc: 0x1fc2, 0x10cd: 0x1fbd, 0x10ce: 0x1f95, 0x10cf: 0x1fe0, 0x10d0: 0x2003, 0x10d1: 0x2021, + 0x10d2: 0x2026, 0x10d3: 0x203a, 0x10d4: 0x203f, 0x10d5: 0x204e, 0x10d6: 0x2053, 0x10d7: 0x1fa4, + 0x10d8: 0x1fa9, 0x10d9: 0x1fcc, 0x10da: 0x1fd1, 0x10db: 0x1f63, 0x10dc: 0x1f68, 0x10dd: 0x1f4f, + 0x10de: 0x1f54, 0x10df: 0x1f7c, 0x10e0: 0x1f81, 0x10e1: 0x1fea, 0x10e2: 0x1fef, 0x10e3: 0x200d, + 0x10e4: 0x2012, 0x10e5: 0x1fae, 0x10e6: 0x1fb3, 0x10e7: 0x1fb8, 0x10e8: 0x1fc2, 0x10e9: 0x1fbd, + 0x10ea: 0x1f95, 0x10eb: 0x1fe0, 0x10ec: 0x2003, 0x10ed: 0x1fae, 0x10ee: 0x1fb3, 0x10ef: 0x1fb8, + 0x10f0: 0x1fc2, 0x10f1: 0x1f9f, 0x10f2: 0x1fc7, 0x10f3: 
0x201c, 0x10f4: 0x1f86, 0x10f5: 0x1f8b, + 0x10f6: 0x1f90, 0x10f7: 0x1fae, 0x10f8: 0x1fb3, 0x10f9: 0x1fb8, 0x10fa: 0x201c, 0x10fb: 0x202b, + 0x10fc: 0x4549, 0x10fd: 0x4549, + // Block 0x44, offset 0x1100 + 0x1110: 0x2441, 0x1111: 0x2456, + 0x1112: 0x2456, 0x1113: 0x245d, 0x1114: 0x2464, 0x1115: 0x2479, 0x1116: 0x2480, 0x1117: 0x2487, + 0x1118: 0x24aa, 0x1119: 0x24aa, 0x111a: 0x24cd, 0x111b: 0x24c6, 0x111c: 0x24e2, 0x111d: 0x24d4, + 0x111e: 0x24db, 0x111f: 0x24fe, 0x1120: 0x24fe, 0x1121: 0x24f7, 0x1122: 0x2505, 0x1123: 0x2505, + 0x1124: 0x252f, 0x1125: 0x252f, 0x1126: 0x254b, 0x1127: 0x2513, 0x1128: 0x2513, 0x1129: 0x250c, + 0x112a: 0x2521, 0x112b: 0x2521, 0x112c: 0x2528, 0x112d: 0x2528, 0x112e: 0x2552, 0x112f: 0x2560, + 0x1130: 0x2560, 0x1131: 0x2567, 0x1132: 0x2567, 0x1133: 0x256e, 0x1134: 0x2575, 0x1135: 0x257c, + 0x1136: 0x2583, 0x1137: 0x2583, 0x1138: 0x258a, 0x1139: 0x2598, 0x113a: 0x25a6, 0x113b: 0x259f, + 0x113c: 0x25ad, 0x113d: 0x25ad, 0x113e: 0x25c2, 0x113f: 0x25c9, + // Block 0x45, offset 0x1140 + 0x1140: 0x25fa, 0x1141: 0x2608, 0x1142: 0x2601, 0x1143: 0x25e5, 0x1144: 0x25e5, 0x1145: 0x260f, + 0x1146: 0x260f, 0x1147: 0x2616, 0x1148: 0x2616, 0x1149: 0x2640, 0x114a: 0x2647, 0x114b: 0x264e, + 0x114c: 0x2624, 0x114d: 0x2632, 0x114e: 0x2655, 0x114f: 0x265c, + 0x1152: 0x262b, 0x1153: 0x26b0, 0x1154: 0x26b7, 0x1155: 0x268d, 0x1156: 0x2694, 0x1157: 0x2678, + 0x1158: 0x2678, 0x1159: 0x267f, 0x115a: 0x26a9, 0x115b: 0x26a2, 0x115c: 0x26cc, 0x115d: 0x26cc, + 0x115e: 0x243a, 0x115f: 0x244f, 0x1160: 0x2448, 0x1161: 0x2472, 0x1162: 0x246b, 0x1163: 0x2495, + 0x1164: 0x248e, 0x1165: 0x24b8, 0x1166: 0x249c, 0x1167: 0x24b1, 0x1168: 0x24e9, 0x1169: 0x2536, + 0x116a: 0x251a, 0x116b: 0x2559, 0x116c: 0x25f3, 0x116d: 0x261d, 0x116e: 0x26c5, 0x116f: 0x26be, + 0x1170: 0x26d3, 0x1171: 0x266a, 0x1172: 0x25d0, 0x1173: 0x269b, 0x1174: 0x25c2, 0x1175: 0x25fa, + 0x1176: 0x2591, 0x1177: 0x25de, 0x1178: 0x2671, 0x1179: 0x2663, 0x117a: 0x25ec, 0x117b: 0x25d7, + 0x117c: 0x25ec, 0x117d: 0x2671, 0x117e: 0x24a3, 0x117f: 0x24bf, + // Block 0x46, offset 0x1180 + 0x1180: 0x2639, 0x1181: 0x25b4, 0x1182: 0x2433, 0x1183: 0x25d7, 0x1184: 0x257c, 0x1185: 0x254b, + 0x1186: 0x24f0, 0x1187: 0x2686, + 0x11b0: 0x2544, 0x11b1: 0x25bb, 0x11b2: 0x28f6, 0x11b3: 0x28ed, 0x11b4: 0x2923, 0x11b5: 0x2911, + 0x11b6: 0x28ff, 0x11b7: 0x291a, 0x11b8: 0x292c, 0x11b9: 0x253d, 0x11ba: 0x2db3, 0x11bb: 0x2c33, + 0x11bc: 0x2908, + // Block 0x47, offset 0x11c0 + 0x11d0: 0x0019, 0x11d1: 0x057e, + 0x11d2: 0x0582, 0x11d3: 0x0035, 0x11d4: 0x0037, 0x11d5: 0x0003, 0x11d6: 0x003f, 0x11d7: 0x05ba, + 0x11d8: 0x05be, 0x11d9: 0x1c8c, + 0x11e0: 0x8133, 0x11e1: 0x8133, 0x11e2: 0x8133, 0x11e3: 0x8133, + 0x11e4: 0x8133, 0x11e5: 0x8133, 0x11e6: 0x8133, 0x11e7: 0x812e, 0x11e8: 0x812e, 0x11e9: 0x812e, + 0x11ea: 0x812e, 0x11eb: 0x812e, 0x11ec: 0x812e, 0x11ed: 0x812e, 0x11ee: 0x8133, 0x11ef: 0x8133, + 0x11f0: 0x19a0, 0x11f1: 0x053a, 0x11f2: 0x0536, 0x11f3: 0x007f, 0x11f4: 0x007f, 0x11f5: 0x0011, + 0x11f6: 0x0013, 0x11f7: 0x00b7, 0x11f8: 0x00bb, 0x11f9: 0x05b2, 0x11fa: 0x05b6, 0x11fb: 0x05a6, + 0x11fc: 0x05aa, 0x11fd: 0x058e, 0x11fe: 0x0592, 0x11ff: 0x0586, + // Block 0x48, offset 0x1200 + 0x1200: 0x058a, 0x1201: 0x0596, 0x1202: 0x059a, 0x1203: 0x059e, 0x1204: 0x05a2, + 0x1207: 0x0077, 0x1208: 0x007b, 0x1209: 0x43aa, 0x120a: 0x43aa, 0x120b: 0x43aa, + 0x120c: 0x43aa, 0x120d: 0x007f, 0x120e: 0x007f, 0x120f: 0x007f, 0x1210: 0x0019, 0x1211: 0x057e, + 0x1212: 0x001d, 0x1214: 0x0037, 0x1215: 0x0035, 0x1216: 0x003f, 0x1217: 0x0003, + 0x1218: 0x053a, 0x1219: 0x0011, 0x121a: 
0x0013, 0x121b: 0x00b7, 0x121c: 0x00bb, 0x121d: 0x05b2, + 0x121e: 0x05b6, 0x121f: 0x0007, 0x1220: 0x000d, 0x1221: 0x0015, 0x1222: 0x0017, 0x1223: 0x001b, + 0x1224: 0x0039, 0x1225: 0x003d, 0x1226: 0x003b, 0x1228: 0x0079, 0x1229: 0x0009, + 0x122a: 0x000b, 0x122b: 0x0041, + 0x1230: 0x43eb, 0x1231: 0x456d, 0x1232: 0x43f0, 0x1234: 0x43f5, + 0x1236: 0x43fa, 0x1237: 0x4573, 0x1238: 0x43ff, 0x1239: 0x4579, 0x123a: 0x4404, 0x123b: 0x457f, + 0x123c: 0x4409, 0x123d: 0x4585, 0x123e: 0x440e, 0x123f: 0x458b, + // Block 0x49, offset 0x1240 + 0x1240: 0x0329, 0x1241: 0x454f, 0x1242: 0x454f, 0x1243: 0x4555, 0x1244: 0x4555, 0x1245: 0x4597, + 0x1246: 0x4597, 0x1247: 0x455b, 0x1248: 0x455b, 0x1249: 0x45a3, 0x124a: 0x45a3, 0x124b: 0x45a3, + 0x124c: 0x45a3, 0x124d: 0x032c, 0x124e: 0x032c, 0x124f: 0x032f, 0x1250: 0x032f, 0x1251: 0x032f, + 0x1252: 0x032f, 0x1253: 0x0332, 0x1254: 0x0332, 0x1255: 0x0335, 0x1256: 0x0335, 0x1257: 0x0335, + 0x1258: 0x0335, 0x1259: 0x0338, 0x125a: 0x0338, 0x125b: 0x0338, 0x125c: 0x0338, 0x125d: 0x033b, + 0x125e: 0x033b, 0x125f: 0x033b, 0x1260: 0x033b, 0x1261: 0x033e, 0x1262: 0x033e, 0x1263: 0x033e, + 0x1264: 0x033e, 0x1265: 0x0341, 0x1266: 0x0341, 0x1267: 0x0341, 0x1268: 0x0341, 0x1269: 0x0344, + 0x126a: 0x0344, 0x126b: 0x0347, 0x126c: 0x0347, 0x126d: 0x034a, 0x126e: 0x034a, 0x126f: 0x034d, + 0x1270: 0x034d, 0x1271: 0x0350, 0x1272: 0x0350, 0x1273: 0x0350, 0x1274: 0x0350, 0x1275: 0x0353, + 0x1276: 0x0353, 0x1277: 0x0353, 0x1278: 0x0353, 0x1279: 0x0356, 0x127a: 0x0356, 0x127b: 0x0356, + 0x127c: 0x0356, 0x127d: 0x0359, 0x127e: 0x0359, 0x127f: 0x0359, + // Block 0x4a, offset 0x1280 + 0x1280: 0x0359, 0x1281: 0x035c, 0x1282: 0x035c, 0x1283: 0x035c, 0x1284: 0x035c, 0x1285: 0x035f, + 0x1286: 0x035f, 0x1287: 0x035f, 0x1288: 0x035f, 0x1289: 0x0362, 0x128a: 0x0362, 0x128b: 0x0362, + 0x128c: 0x0362, 0x128d: 0x0365, 0x128e: 0x0365, 0x128f: 0x0365, 0x1290: 0x0365, 0x1291: 0x0368, + 0x1292: 0x0368, 0x1293: 0x0368, 0x1294: 0x0368, 0x1295: 0x036b, 0x1296: 0x036b, 0x1297: 0x036b, + 0x1298: 0x036b, 0x1299: 0x036e, 0x129a: 0x036e, 0x129b: 0x036e, 0x129c: 0x036e, 0x129d: 0x0371, + 0x129e: 0x0371, 0x129f: 0x0371, 0x12a0: 0x0371, 0x12a1: 0x0374, 0x12a2: 0x0374, 0x12a3: 0x0374, + 0x12a4: 0x0374, 0x12a5: 0x0377, 0x12a6: 0x0377, 0x12a7: 0x0377, 0x12a8: 0x0377, 0x12a9: 0x037a, + 0x12aa: 0x037a, 0x12ab: 0x037a, 0x12ac: 0x037a, 0x12ad: 0x037d, 0x12ae: 0x037d, 0x12af: 0x0380, + 0x12b0: 0x0380, 0x12b1: 0x0383, 0x12b2: 0x0383, 0x12b3: 0x0383, 0x12b4: 0x0383, 0x12b5: 0x2f41, + 0x12b6: 0x2f41, 0x12b7: 0x2f49, 0x12b8: 0x2f49, 0x12b9: 0x2f51, 0x12ba: 0x2f51, 0x12bb: 0x20b2, + 0x12bc: 0x20b2, + // Block 0x4b, offset 0x12c0 + 0x12c0: 0x0081, 0x12c1: 0x0083, 0x12c2: 0x0085, 0x12c3: 0x0087, 0x12c4: 0x0089, 0x12c5: 0x008b, + 0x12c6: 0x008d, 0x12c7: 0x008f, 0x12c8: 0x0091, 0x12c9: 0x0093, 0x12ca: 0x0095, 0x12cb: 0x0097, + 0x12cc: 0x0099, 0x12cd: 0x009b, 0x12ce: 0x009d, 0x12cf: 0x009f, 0x12d0: 0x00a1, 0x12d1: 0x00a3, + 0x12d2: 0x00a5, 0x12d3: 0x00a7, 0x12d4: 0x00a9, 0x12d5: 0x00ab, 0x12d6: 0x00ad, 0x12d7: 0x00af, + 0x12d8: 0x00b1, 0x12d9: 0x00b3, 0x12da: 0x00b5, 0x12db: 0x00b7, 0x12dc: 0x00b9, 0x12dd: 0x00bb, + 0x12de: 0x00bd, 0x12df: 0x056e, 0x12e0: 0x0572, 0x12e1: 0x0582, 0x12e2: 0x0596, 0x12e3: 0x059a, + 0x12e4: 0x057e, 0x12e5: 0x06a6, 0x12e6: 0x069e, 0x12e7: 0x05c2, 0x12e8: 0x05ca, 0x12e9: 0x05d2, + 0x12ea: 0x05da, 0x12eb: 0x05e2, 0x12ec: 0x0666, 0x12ed: 0x066e, 0x12ee: 0x0676, 0x12ef: 0x061a, + 0x12f0: 0x06aa, 0x12f1: 0x05c6, 0x12f2: 0x05ce, 0x12f3: 0x05d6, 0x12f4: 0x05de, 0x12f5: 0x05e6, + 0x12f6: 0x05ea, 0x12f7: 0x05ee, 
0x12f8: 0x05f2, 0x12f9: 0x05f6, 0x12fa: 0x05fa, 0x12fb: 0x05fe, + 0x12fc: 0x0602, 0x12fd: 0x0606, 0x12fe: 0x060a, 0x12ff: 0x060e, + // Block 0x4c, offset 0x1300 + 0x1300: 0x0612, 0x1301: 0x0616, 0x1302: 0x061e, 0x1303: 0x0622, 0x1304: 0x0626, 0x1305: 0x062a, + 0x1306: 0x062e, 0x1307: 0x0632, 0x1308: 0x0636, 0x1309: 0x063a, 0x130a: 0x063e, 0x130b: 0x0642, + 0x130c: 0x0646, 0x130d: 0x064a, 0x130e: 0x064e, 0x130f: 0x0652, 0x1310: 0x0656, 0x1311: 0x065a, + 0x1312: 0x065e, 0x1313: 0x0662, 0x1314: 0x066a, 0x1315: 0x0672, 0x1316: 0x067a, 0x1317: 0x067e, + 0x1318: 0x0682, 0x1319: 0x0686, 0x131a: 0x068a, 0x131b: 0x068e, 0x131c: 0x0692, 0x131d: 0x06a2, + 0x131e: 0x4bb9, 0x131f: 0x4bbf, 0x1320: 0x04b6, 0x1321: 0x0406, 0x1322: 0x040a, 0x1323: 0x4b7c, + 0x1324: 0x040e, 0x1325: 0x4b82, 0x1326: 0x4b88, 0x1327: 0x0412, 0x1328: 0x0416, 0x1329: 0x041a, + 0x132a: 0x4b8e, 0x132b: 0x4b94, 0x132c: 0x4b9a, 0x132d: 0x4ba0, 0x132e: 0x4ba6, 0x132f: 0x4bac, + 0x1330: 0x045a, 0x1331: 0x041e, 0x1332: 0x0422, 0x1333: 0x0426, 0x1334: 0x046e, 0x1335: 0x042a, + 0x1336: 0x042e, 0x1337: 0x0432, 0x1338: 0x0436, 0x1339: 0x043a, 0x133a: 0x043e, 0x133b: 0x0442, + 0x133c: 0x0446, 0x133d: 0x044a, 0x133e: 0x044e, + // Block 0x4d, offset 0x1340 + 0x1342: 0x4afe, 0x1343: 0x4b04, 0x1344: 0x4b0a, 0x1345: 0x4b10, + 0x1346: 0x4b16, 0x1347: 0x4b1c, 0x134a: 0x4b22, 0x134b: 0x4b28, + 0x134c: 0x4b2e, 0x134d: 0x4b34, 0x134e: 0x4b3a, 0x134f: 0x4b40, + 0x1352: 0x4b46, 0x1353: 0x4b4c, 0x1354: 0x4b52, 0x1355: 0x4b58, 0x1356: 0x4b5e, 0x1357: 0x4b64, + 0x135a: 0x4b6a, 0x135b: 0x4b70, 0x135c: 0x4b76, + 0x1360: 0x00bf, 0x1361: 0x00c2, 0x1362: 0x00cb, 0x1363: 0x43a5, + 0x1364: 0x00c8, 0x1365: 0x00c5, 0x1366: 0x053e, 0x1368: 0x0562, 0x1369: 0x0542, + 0x136a: 0x0546, 0x136b: 0x054a, 0x136c: 0x054e, 0x136d: 0x0566, 0x136e: 0x056a, + // Block 0x4e, offset 0x1380 + 0x1381: 0x01f1, 0x1382: 0x01f4, 0x1383: 0x00d4, 0x1384: 0x01be, 0x1385: 0x010d, + 0x1387: 0x01d3, 0x1388: 0x174e, 0x1389: 0x01d9, 0x138a: 0x01d6, 0x138b: 0x0116, + 0x138c: 0x0119, 0x138d: 0x0526, 0x138e: 0x011c, 0x138f: 0x0128, 0x1390: 0x01e5, 0x1391: 0x013a, + 0x1392: 0x0134, 0x1393: 0x012e, 0x1394: 0x01c1, 0x1395: 0x00e0, 0x1396: 0x01c4, 0x1397: 0x0143, + 0x1398: 0x0194, 0x1399: 0x01e8, 0x139a: 0x01eb, 0x139b: 0x0152, 0x139c: 0x1756, 0x139d: 0x1742, + 0x139e: 0x0158, 0x139f: 0x175b, 0x13a0: 0x01a9, 0x13a1: 0x1760, 0x13a2: 0x00da, 0x13a3: 0x0170, + 0x13a4: 0x0173, 0x13a5: 0x00a3, 0x13a6: 0x017c, 0x13a7: 0x1765, 0x13a8: 0x0182, 0x13a9: 0x0185, + 0x13aa: 0x0188, 0x13ab: 0x01e2, 0x13ac: 0x01dc, 0x13ad: 0x1752, 0x13ae: 0x01df, 0x13af: 0x0197, + 0x13b0: 0x0576, 0x13b2: 0x01ac, 0x13b3: 0x01cd, 0x13b4: 0x01d0, 0x13b5: 0x01bb, + 0x13b6: 0x00f5, 0x13b7: 0x00f8, 0x13b8: 0x00fb, 0x13b9: 0x176a, 0x13ba: 0x176f, + // Block 0x4f, offset 0x13c0 + 0x13c0: 0x0063, 0x13c1: 0x0065, 0x13c2: 0x0067, 0x13c3: 0x0069, 0x13c4: 0x006b, 0x13c5: 0x006d, + 0x13c6: 0x006f, 0x13c7: 0x0071, 0x13c8: 0x0073, 0x13c9: 0x0075, 0x13ca: 0x0083, 0x13cb: 0x0085, + 0x13cc: 0x0087, 0x13cd: 0x0089, 0x13ce: 0x008b, 0x13cf: 0x008d, 0x13d0: 0x008f, 0x13d1: 0x0091, + 0x13d2: 0x0093, 0x13d3: 0x0095, 0x13d4: 0x0097, 0x13d5: 0x0099, 0x13d6: 0x009b, 0x13d7: 0x009d, + 0x13d8: 0x009f, 0x13d9: 0x00a1, 0x13da: 0x00a3, 0x13db: 0x00a5, 0x13dc: 0x00a7, 0x13dd: 0x00a9, + 0x13de: 0x00ab, 0x13df: 0x00ad, 0x13e0: 0x00af, 0x13e1: 0x00b1, 0x13e2: 0x00b3, 0x13e3: 0x00b5, + 0x13e4: 0x00e3, 0x13e5: 0x0101, 0x13e8: 0x01f7, 0x13e9: 0x01fa, + 0x13ea: 0x01fd, 0x13eb: 0x0200, 0x13ec: 0x0203, 0x13ed: 0x0206, 0x13ee: 0x0209, 0x13ef: 0x020c, + 0x13f0: 0x020f, 0x13f1: 
0x0212, 0x13f2: 0x0215, 0x13f3: 0x0218, 0x13f4: 0x021b, 0x13f5: 0x021e, + 0x13f6: 0x0221, 0x13f7: 0x0224, 0x13f8: 0x0227, 0x13f9: 0x020c, 0x13fa: 0x022a, 0x13fb: 0x022d, + 0x13fc: 0x0230, 0x13fd: 0x0233, 0x13fe: 0x0236, 0x13ff: 0x0239, + // Block 0x50, offset 0x1400 + 0x1400: 0x0281, 0x1401: 0x0284, 0x1402: 0x0287, 0x1403: 0x0552, 0x1404: 0x024b, 0x1405: 0x0254, + 0x1406: 0x025a, 0x1407: 0x027e, 0x1408: 0x026f, 0x1409: 0x026c, 0x140a: 0x028a, 0x140b: 0x028d, + 0x140e: 0x0021, 0x140f: 0x0023, 0x1410: 0x0025, 0x1411: 0x0027, + 0x1412: 0x0029, 0x1413: 0x002b, 0x1414: 0x002d, 0x1415: 0x002f, 0x1416: 0x0031, 0x1417: 0x0033, + 0x1418: 0x0021, 0x1419: 0x0023, 0x141a: 0x0025, 0x141b: 0x0027, 0x141c: 0x0029, 0x141d: 0x002b, + 0x141e: 0x002d, 0x141f: 0x002f, 0x1420: 0x0031, 0x1421: 0x0033, 0x1422: 0x0021, 0x1423: 0x0023, + 0x1424: 0x0025, 0x1425: 0x0027, 0x1426: 0x0029, 0x1427: 0x002b, 0x1428: 0x002d, 0x1429: 0x002f, + 0x142a: 0x0031, 0x142b: 0x0033, 0x142c: 0x0021, 0x142d: 0x0023, 0x142e: 0x0025, 0x142f: 0x0027, + 0x1430: 0x0029, 0x1431: 0x002b, 0x1432: 0x002d, 0x1433: 0x002f, 0x1434: 0x0031, 0x1435: 0x0033, + 0x1436: 0x0021, 0x1437: 0x0023, 0x1438: 0x0025, 0x1439: 0x0027, 0x143a: 0x0029, 0x143b: 0x002b, + 0x143c: 0x002d, 0x143d: 0x002f, 0x143e: 0x0031, 0x143f: 0x0033, + // Block 0x51, offset 0x1440 + 0x1440: 0x8133, 0x1441: 0x8133, 0x1442: 0x8133, 0x1443: 0x8133, 0x1444: 0x8133, 0x1445: 0x8133, + 0x1446: 0x8133, 0x1448: 0x8133, 0x1449: 0x8133, 0x144a: 0x8133, 0x144b: 0x8133, + 0x144c: 0x8133, 0x144d: 0x8133, 0x144e: 0x8133, 0x144f: 0x8133, 0x1450: 0x8133, 0x1451: 0x8133, + 0x1452: 0x8133, 0x1453: 0x8133, 0x1454: 0x8133, 0x1455: 0x8133, 0x1456: 0x8133, 0x1457: 0x8133, + 0x1458: 0x8133, 0x145b: 0x8133, 0x145c: 0x8133, 0x145d: 0x8133, + 0x145e: 0x8133, 0x145f: 0x8133, 0x1460: 0x8133, 0x1461: 0x8133, 0x1463: 0x8133, + 0x1464: 0x8133, 0x1466: 0x8133, 0x1467: 0x8133, 0x1468: 0x8133, 0x1469: 0x8133, + 0x146a: 0x8133, + 0x1470: 0x0290, 0x1471: 0x0293, 0x1472: 0x0296, 0x1473: 0x0299, 0x1474: 0x029c, 0x1475: 0x029f, + 0x1476: 0x02a2, 0x1477: 0x02a5, 0x1478: 0x02a8, 0x1479: 0x02ab, 0x147a: 0x02ae, 0x147b: 0x02b1, + 0x147c: 0x02b7, 0x147d: 0x02ba, 0x147e: 0x02bd, 0x147f: 0x02c0, + // Block 0x52, offset 0x1480 + 0x1480: 0x02c3, 0x1481: 0x02c6, 0x1482: 0x02c9, 0x1483: 0x02cc, 0x1484: 0x02cf, 0x1485: 0x02d2, + 0x1486: 0x02d5, 0x1487: 0x02db, 0x1488: 0x02e1, 0x1489: 0x02e4, 0x148a: 0x1736, 0x148b: 0x0302, + 0x148c: 0x02ea, 0x148d: 0x02ed, 0x148e: 0x0305, 0x148f: 0x02f9, 0x1490: 0x02ff, 0x1491: 0x0290, + 0x1492: 0x0293, 0x1493: 0x0296, 0x1494: 0x0299, 0x1495: 0x029c, 0x1496: 0x029f, 0x1497: 0x02a2, + 0x1498: 0x02a5, 0x1499: 0x02a8, 0x149a: 0x02ab, 0x149b: 0x02ae, 0x149c: 0x02b7, 0x149d: 0x02ba, + 0x149e: 0x02c0, 0x149f: 0x02c6, 0x14a0: 0x02c9, 0x14a1: 0x02cc, 0x14a2: 0x02cf, 0x14a3: 0x02d2, + 0x14a4: 0x02d5, 0x14a5: 0x02d8, 0x14a6: 0x02db, 0x14a7: 0x02f3, 0x14a8: 0x02ea, 0x14a9: 0x02e7, + 0x14aa: 0x02f0, 0x14ab: 0x02f6, 0x14ac: 0x1732, 0x14ad: 0x02fc, + // Block 0x53, offset 0x14c0 + 0x14c0: 0x032c, 0x14c1: 0x032f, 0x14c2: 0x033b, 0x14c3: 0x0344, 0x14c5: 0x037d, + 0x14c6: 0x034d, 0x14c7: 0x033e, 0x14c8: 0x035c, 0x14c9: 0x0383, 0x14ca: 0x036e, 0x14cb: 0x0371, + 0x14cc: 0x0374, 0x14cd: 0x0377, 0x14ce: 0x0350, 0x14cf: 0x0362, 0x14d0: 0x0368, 0x14d1: 0x0356, + 0x14d2: 0x036b, 0x14d3: 0x034a, 0x14d4: 0x0353, 0x14d5: 0x0335, 0x14d6: 0x0338, 0x14d7: 0x0341, + 0x14d8: 0x0347, 0x14d9: 0x0359, 0x14da: 0x035f, 0x14db: 0x0365, 0x14dc: 0x0386, 0x14dd: 0x03d7, + 0x14de: 0x03bf, 0x14df: 0x0389, 0x14e1: 0x032f, 0x14e2: 0x033b, + 
0x14e4: 0x037a, 0x14e7: 0x033e, 0x14e9: 0x0383, + 0x14ea: 0x036e, 0x14eb: 0x0371, 0x14ec: 0x0374, 0x14ed: 0x0377, 0x14ee: 0x0350, 0x14ef: 0x0362, + 0x14f0: 0x0368, 0x14f1: 0x0356, 0x14f2: 0x036b, 0x14f4: 0x0353, 0x14f5: 0x0335, + 0x14f6: 0x0338, 0x14f7: 0x0341, 0x14f9: 0x0359, 0x14fb: 0x0365, + // Block 0x54, offset 0x1500 + 0x1502: 0x033b, + 0x1507: 0x033e, 0x1509: 0x0383, 0x150b: 0x0371, + 0x150d: 0x0377, 0x150e: 0x0350, 0x150f: 0x0362, 0x1511: 0x0356, + 0x1512: 0x036b, 0x1514: 0x0353, 0x1517: 0x0341, + 0x1519: 0x0359, 0x151b: 0x0365, 0x151d: 0x03d7, + 0x151f: 0x0389, 0x1521: 0x032f, 0x1522: 0x033b, + 0x1524: 0x037a, 0x1527: 0x033e, 0x1528: 0x035c, 0x1529: 0x0383, + 0x152a: 0x036e, 0x152c: 0x0374, 0x152d: 0x0377, 0x152e: 0x0350, 0x152f: 0x0362, + 0x1530: 0x0368, 0x1531: 0x0356, 0x1532: 0x036b, 0x1534: 0x0353, 0x1535: 0x0335, + 0x1536: 0x0338, 0x1537: 0x0341, 0x1539: 0x0359, 0x153a: 0x035f, 0x153b: 0x0365, + 0x153c: 0x0386, 0x153e: 0x03bf, + // Block 0x55, offset 0x1540 + 0x1540: 0x032c, 0x1541: 0x032f, 0x1542: 0x033b, 0x1543: 0x0344, 0x1544: 0x037a, 0x1545: 0x037d, + 0x1546: 0x034d, 0x1547: 0x033e, 0x1548: 0x035c, 0x1549: 0x0383, 0x154b: 0x0371, + 0x154c: 0x0374, 0x154d: 0x0377, 0x154e: 0x0350, 0x154f: 0x0362, 0x1550: 0x0368, 0x1551: 0x0356, + 0x1552: 0x036b, 0x1553: 0x034a, 0x1554: 0x0353, 0x1555: 0x0335, 0x1556: 0x0338, 0x1557: 0x0341, + 0x1558: 0x0347, 0x1559: 0x0359, 0x155a: 0x035f, 0x155b: 0x0365, + 0x1561: 0x032f, 0x1562: 0x033b, 0x1563: 0x0344, + 0x1565: 0x037d, 0x1566: 0x034d, 0x1567: 0x033e, 0x1568: 0x035c, 0x1569: 0x0383, + 0x156b: 0x0371, 0x156c: 0x0374, 0x156d: 0x0377, 0x156e: 0x0350, 0x156f: 0x0362, + 0x1570: 0x0368, 0x1571: 0x0356, 0x1572: 0x036b, 0x1573: 0x034a, 0x1574: 0x0353, 0x1575: 0x0335, + 0x1576: 0x0338, 0x1577: 0x0341, 0x1578: 0x0347, 0x1579: 0x0359, 0x157a: 0x035f, 0x157b: 0x0365, + // Block 0x56, offset 0x1580 + 0x1580: 0x19a6, 0x1581: 0x19a3, 0x1582: 0x19a9, 0x1583: 0x19cd, 0x1584: 0x19f1, 0x1585: 0x1a15, + 0x1586: 0x1a39, 0x1587: 0x1a42, 0x1588: 0x1a48, 0x1589: 0x1a4e, 0x158a: 0x1a54, + 0x1590: 0x1bbc, 0x1591: 0x1bc0, + 0x1592: 0x1bc4, 0x1593: 0x1bc8, 0x1594: 0x1bcc, 0x1595: 0x1bd0, 0x1596: 0x1bd4, 0x1597: 0x1bd8, + 0x1598: 0x1bdc, 0x1599: 0x1be0, 0x159a: 0x1be4, 0x159b: 0x1be8, 0x159c: 0x1bec, 0x159d: 0x1bf0, + 0x159e: 0x1bf4, 0x159f: 0x1bf8, 0x15a0: 0x1bfc, 0x15a1: 0x1c00, 0x15a2: 0x1c04, 0x15a3: 0x1c08, + 0x15a4: 0x1c0c, 0x15a5: 0x1c10, 0x15a6: 0x1c14, 0x15a7: 0x1c18, 0x15a8: 0x1c1c, 0x15a9: 0x1c20, + 0x15aa: 0x2855, 0x15ab: 0x0047, 0x15ac: 0x0065, 0x15ad: 0x1a69, 0x15ae: 0x1ae1, + 0x15b0: 0x0043, 0x15b1: 0x0045, 0x15b2: 0x0047, 0x15b3: 0x0049, 0x15b4: 0x004b, 0x15b5: 0x004d, + 0x15b6: 0x004f, 0x15b7: 0x0051, 0x15b8: 0x0053, 0x15b9: 0x0055, 0x15ba: 0x0057, 0x15bb: 0x0059, + 0x15bc: 0x005b, 0x15bd: 0x005d, 0x15be: 0x005f, 0x15bf: 0x0061, + // Block 0x57, offset 0x15c0 + 0x15c0: 0x27dd, 0x15c1: 0x27f2, 0x15c2: 0x05fe, + 0x15d0: 0x0d0a, 0x15d1: 0x0b42, + 0x15d2: 0x09ce, 0x15d3: 0x4705, 0x15d4: 0x0816, 0x15d5: 0x0aea, 0x15d6: 0x142a, 0x15d7: 0x0afa, + 0x15d8: 0x0822, 0x15d9: 0x0dd2, 0x15da: 0x0faa, 0x15db: 0x0daa, 0x15dc: 0x0922, 0x15dd: 0x0c66, + 0x15de: 0x08ba, 0x15df: 0x0db2, 0x15e0: 0x090e, 0x15e1: 0x1212, 0x15e2: 0x107e, 0x15e3: 0x1486, + 0x15e4: 0x0ace, 0x15e5: 0x0a06, 0x15e6: 0x0f5e, 0x15e7: 0x0d16, 0x15e8: 0x0d42, 0x15e9: 0x07ba, + 0x15ea: 0x07c6, 0x15eb: 0x1506, 0x15ec: 0x0bd6, 0x15ed: 0x07e2, 0x15ee: 0x09ea, 0x15ef: 0x0d36, + 0x15f0: 0x14ae, 0x15f1: 0x0d0e, 0x15f2: 0x116a, 0x15f3: 0x11a6, 0x15f4: 0x09f2, 0x15f5: 0x0f3e, + 0x15f6: 0x0e06, 0x15f7: 
0x0e02, 0x15f8: 0x1092, 0x15f9: 0x0926, 0x15fa: 0x0a52, 0x15fb: 0x153e, + // Block 0x58, offset 0x1600 + 0x1600: 0x07f6, 0x1601: 0x07ee, 0x1602: 0x07fe, 0x1603: 0x1774, 0x1604: 0x0842, 0x1605: 0x0852, + 0x1606: 0x0856, 0x1607: 0x085e, 0x1608: 0x0866, 0x1609: 0x086a, 0x160a: 0x0876, 0x160b: 0x086e, + 0x160c: 0x06ae, 0x160d: 0x1788, 0x160e: 0x088a, 0x160f: 0x088e, 0x1610: 0x0892, 0x1611: 0x08ae, + 0x1612: 0x1779, 0x1613: 0x06b2, 0x1614: 0x089a, 0x1615: 0x08ba, 0x1616: 0x1783, 0x1617: 0x08ca, + 0x1618: 0x08d2, 0x1619: 0x0832, 0x161a: 0x08da, 0x161b: 0x08de, 0x161c: 0x195e, 0x161d: 0x08fa, + 0x161e: 0x0902, 0x161f: 0x06ba, 0x1620: 0x091a, 0x1621: 0x091e, 0x1622: 0x0926, 0x1623: 0x092a, + 0x1624: 0x06be, 0x1625: 0x0942, 0x1626: 0x0946, 0x1627: 0x0952, 0x1628: 0x095e, 0x1629: 0x0962, + 0x162a: 0x0966, 0x162b: 0x096e, 0x162c: 0x098e, 0x162d: 0x0992, 0x162e: 0x099a, 0x162f: 0x09aa, + 0x1630: 0x09b2, 0x1631: 0x09b6, 0x1632: 0x09b6, 0x1633: 0x09b6, 0x1634: 0x1797, 0x1635: 0x0f8e, + 0x1636: 0x09ca, 0x1637: 0x09d2, 0x1638: 0x179c, 0x1639: 0x09de, 0x163a: 0x09e6, 0x163b: 0x09ee, + 0x163c: 0x0a16, 0x163d: 0x0a02, 0x163e: 0x0a0e, 0x163f: 0x0a12, + // Block 0x59, offset 0x1640 + 0x1640: 0x0a1a, 0x1641: 0x0a22, 0x1642: 0x0a26, 0x1643: 0x0a2e, 0x1644: 0x0a36, 0x1645: 0x0a3a, + 0x1646: 0x0a3a, 0x1647: 0x0a42, 0x1648: 0x0a4a, 0x1649: 0x0a4e, 0x164a: 0x0a5a, 0x164b: 0x0a7e, + 0x164c: 0x0a62, 0x164d: 0x0a82, 0x164e: 0x0a66, 0x164f: 0x0a6e, 0x1650: 0x0906, 0x1651: 0x0aca, + 0x1652: 0x0a92, 0x1653: 0x0a96, 0x1654: 0x0a9a, 0x1655: 0x0a8e, 0x1656: 0x0aa2, 0x1657: 0x0a9e, + 0x1658: 0x0ab6, 0x1659: 0x17a1, 0x165a: 0x0ad2, 0x165b: 0x0ad6, 0x165c: 0x0ade, 0x165d: 0x0aea, + 0x165e: 0x0af2, 0x165f: 0x0b0e, 0x1660: 0x17a6, 0x1661: 0x17ab, 0x1662: 0x0b1a, 0x1663: 0x0b1e, + 0x1664: 0x0b22, 0x1665: 0x0b16, 0x1666: 0x0b2a, 0x1667: 0x06c2, 0x1668: 0x06c6, 0x1669: 0x0b32, + 0x166a: 0x0b3a, 0x166b: 0x0b3a, 0x166c: 0x17b0, 0x166d: 0x0b56, 0x166e: 0x0b5a, 0x166f: 0x0b5e, + 0x1670: 0x0b66, 0x1671: 0x17b5, 0x1672: 0x0b6e, 0x1673: 0x0b72, 0x1674: 0x0c4a, 0x1675: 0x0b7a, + 0x1676: 0x06ca, 0x1677: 0x0b86, 0x1678: 0x0b96, 0x1679: 0x0ba2, 0x167a: 0x0b9e, 0x167b: 0x17bf, + 0x167c: 0x0baa, 0x167d: 0x17c4, 0x167e: 0x0bb6, 0x167f: 0x0bb2, + // Block 0x5a, offset 0x1680 + 0x1680: 0x0bba, 0x1681: 0x0bca, 0x1682: 0x0bce, 0x1683: 0x06ce, 0x1684: 0x0bde, 0x1685: 0x0be6, + 0x1686: 0x0bea, 0x1687: 0x0bee, 0x1688: 0x06d2, 0x1689: 0x17c9, 0x168a: 0x06d6, 0x168b: 0x0c0a, + 0x168c: 0x0c0e, 0x168d: 0x0c12, 0x168e: 0x0c1a, 0x168f: 0x1990, 0x1690: 0x0c32, 0x1691: 0x17d3, + 0x1692: 0x17d3, 0x1693: 0x12d2, 0x1694: 0x0c42, 0x1695: 0x0c42, 0x1696: 0x06da, 0x1697: 0x17f6, + 0x1698: 0x18c8, 0x1699: 0x0c52, 0x169a: 0x0c5a, 0x169b: 0x06de, 0x169c: 0x0c6e, 0x169d: 0x0c7e, + 0x169e: 0x0c82, 0x169f: 0x0c8a, 0x16a0: 0x0c9a, 0x16a1: 0x06e6, 0x16a2: 0x06e2, 0x16a3: 0x0c9e, + 0x16a4: 0x17d8, 0x16a5: 0x0ca2, 0x16a6: 0x0cb6, 0x16a7: 0x0cba, 0x16a8: 0x0cbe, 0x16a9: 0x0cba, + 0x16aa: 0x0cca, 0x16ab: 0x0cce, 0x16ac: 0x0cde, 0x16ad: 0x0cd6, 0x16ae: 0x0cda, 0x16af: 0x0ce2, + 0x16b0: 0x0ce6, 0x16b1: 0x0cea, 0x16b2: 0x0cf6, 0x16b3: 0x0cfa, 0x16b4: 0x0d12, 0x16b5: 0x0d1a, + 0x16b6: 0x0d2a, 0x16b7: 0x0d3e, 0x16b8: 0x17e7, 0x16b9: 0x0d3a, 0x16ba: 0x0d2e, 0x16bb: 0x0d46, + 0x16bc: 0x0d4e, 0x16bd: 0x0d62, 0x16be: 0x17ec, 0x16bf: 0x0d6a, + // Block 0x5b, offset 0x16c0 + 0x16c0: 0x0d5e, 0x16c1: 0x0d56, 0x16c2: 0x06ea, 0x16c3: 0x0d72, 0x16c4: 0x0d7a, 0x16c5: 0x0d82, + 0x16c6: 0x0d76, 0x16c7: 0x06ee, 0x16c8: 0x0d92, 0x16c9: 0x0d9a, 0x16ca: 0x17f1, 0x16cb: 0x0dc6, + 0x16cc: 0x0dfa, 
0x16cd: 0x0dd6, 0x16ce: 0x06fa, 0x16cf: 0x0de2, 0x16d0: 0x06f6, 0x16d1: 0x06f2, + 0x16d2: 0x08be, 0x16d3: 0x08c2, 0x16d4: 0x0dfe, 0x16d5: 0x0de6, 0x16d6: 0x12a6, 0x16d7: 0x075e, + 0x16d8: 0x0e0a, 0x16d9: 0x0e0e, 0x16da: 0x0e12, 0x16db: 0x0e26, 0x16dc: 0x0e1e, 0x16dd: 0x180a, + 0x16de: 0x06fe, 0x16df: 0x0e3a, 0x16e0: 0x0e2e, 0x16e1: 0x0e4a, 0x16e2: 0x0e52, 0x16e3: 0x1814, + 0x16e4: 0x0e56, 0x16e5: 0x0e42, 0x16e6: 0x0e5e, 0x16e7: 0x0702, 0x16e8: 0x0e62, 0x16e9: 0x0e66, + 0x16ea: 0x0e6a, 0x16eb: 0x0e76, 0x16ec: 0x1819, 0x16ed: 0x0e7e, 0x16ee: 0x0706, 0x16ef: 0x0e8a, + 0x16f0: 0x181e, 0x16f1: 0x0e8e, 0x16f2: 0x070a, 0x16f3: 0x0e9a, 0x16f4: 0x0ea6, 0x16f5: 0x0eb2, + 0x16f6: 0x0eb6, 0x16f7: 0x1823, 0x16f8: 0x17ba, 0x16f9: 0x1828, 0x16fa: 0x0ed6, 0x16fb: 0x182d, + 0x16fc: 0x0ee2, 0x16fd: 0x0eea, 0x16fe: 0x0eda, 0x16ff: 0x0ef6, + // Block 0x5c, offset 0x1700 + 0x1700: 0x0f06, 0x1701: 0x0f16, 0x1702: 0x0f0a, 0x1703: 0x0f0e, 0x1704: 0x0f1a, 0x1705: 0x0f1e, + 0x1706: 0x1832, 0x1707: 0x0f02, 0x1708: 0x0f36, 0x1709: 0x0f3a, 0x170a: 0x070e, 0x170b: 0x0f4e, + 0x170c: 0x0f4a, 0x170d: 0x1837, 0x170e: 0x0f2e, 0x170f: 0x0f6a, 0x1710: 0x183c, 0x1711: 0x1841, + 0x1712: 0x0f6e, 0x1713: 0x0f82, 0x1714: 0x0f7e, 0x1715: 0x0f7a, 0x1716: 0x0712, 0x1717: 0x0f86, + 0x1718: 0x0f96, 0x1719: 0x0f92, 0x171a: 0x0f9e, 0x171b: 0x177e, 0x171c: 0x0fae, 0x171d: 0x1846, + 0x171e: 0x0fba, 0x171f: 0x1850, 0x1720: 0x0fce, 0x1721: 0x0fda, 0x1722: 0x0fee, 0x1723: 0x1855, + 0x1724: 0x1002, 0x1725: 0x1006, 0x1726: 0x185a, 0x1727: 0x185f, 0x1728: 0x1022, 0x1729: 0x1032, + 0x172a: 0x0716, 0x172b: 0x1036, 0x172c: 0x071a, 0x172d: 0x071a, 0x172e: 0x104e, 0x172f: 0x1052, + 0x1730: 0x105a, 0x1731: 0x105e, 0x1732: 0x106a, 0x1733: 0x071e, 0x1734: 0x1082, 0x1735: 0x1864, + 0x1736: 0x109e, 0x1737: 0x1869, 0x1738: 0x10aa, 0x1739: 0x17ce, 0x173a: 0x10ba, 0x173b: 0x186e, + 0x173c: 0x1873, 0x173d: 0x1878, 0x173e: 0x0722, 0x173f: 0x0726, + // Block 0x5d, offset 0x1740 + 0x1740: 0x10f2, 0x1741: 0x1882, 0x1742: 0x187d, 0x1743: 0x1887, 0x1744: 0x188c, 0x1745: 0x10fa, + 0x1746: 0x10fe, 0x1747: 0x10fe, 0x1748: 0x1106, 0x1749: 0x072e, 0x174a: 0x110a, 0x174b: 0x0732, + 0x174c: 0x0736, 0x174d: 0x1896, 0x174e: 0x111e, 0x174f: 0x1126, 0x1750: 0x1132, 0x1751: 0x073a, + 0x1752: 0x189b, 0x1753: 0x1156, 0x1754: 0x18a0, 0x1755: 0x18a5, 0x1756: 0x1176, 0x1757: 0x118e, + 0x1758: 0x073e, 0x1759: 0x1196, 0x175a: 0x119a, 0x175b: 0x119e, 0x175c: 0x18aa, 0x175d: 0x18af, + 0x175e: 0x18af, 0x175f: 0x11b6, 0x1760: 0x0742, 0x1761: 0x18b4, 0x1762: 0x11ca, 0x1763: 0x11ce, + 0x1764: 0x0746, 0x1765: 0x18b9, 0x1766: 0x11ea, 0x1767: 0x074a, 0x1768: 0x11fa, 0x1769: 0x11f2, + 0x176a: 0x1202, 0x176b: 0x18c3, 0x176c: 0x121a, 0x176d: 0x074e, 0x176e: 0x1226, 0x176f: 0x122e, + 0x1770: 0x123e, 0x1771: 0x0752, 0x1772: 0x18cd, 0x1773: 0x18d2, 0x1774: 0x0756, 0x1775: 0x18d7, + 0x1776: 0x1256, 0x1777: 0x18dc, 0x1778: 0x1262, 0x1779: 0x126e, 0x177a: 0x1276, 0x177b: 0x18e1, + 0x177c: 0x18e6, 0x177d: 0x128a, 0x177e: 0x18eb, 0x177f: 0x1292, + // Block 0x5e, offset 0x1780 + 0x1780: 0x17fb, 0x1781: 0x075a, 0x1782: 0x12aa, 0x1783: 0x12ae, 0x1784: 0x0762, 0x1785: 0x12b2, + 0x1786: 0x0b2e, 0x1787: 0x18f0, 0x1788: 0x18f5, 0x1789: 0x1800, 0x178a: 0x1805, 0x178b: 0x12d2, + 0x178c: 0x12d6, 0x178d: 0x14ee, 0x178e: 0x0766, 0x178f: 0x1302, 0x1790: 0x12fe, 0x1791: 0x1306, + 0x1792: 0x093a, 0x1793: 0x130a, 0x1794: 0x130e, 0x1795: 0x1312, 0x1796: 0x131a, 0x1797: 0x18fa, + 0x1798: 0x1316, 0x1799: 0x131e, 0x179a: 0x1332, 0x179b: 0x1336, 0x179c: 0x1322, 0x179d: 0x133a, + 0x179e: 0x134e, 0x179f: 0x1362, 0x17a0: 
0x132e, 0x17a1: 0x1342, 0x17a2: 0x1346, 0x17a3: 0x134a, + 0x17a4: 0x18ff, 0x17a5: 0x1909, 0x17a6: 0x1904, 0x17a7: 0x076a, 0x17a8: 0x136a, 0x17a9: 0x136e, + 0x17aa: 0x1376, 0x17ab: 0x191d, 0x17ac: 0x137a, 0x17ad: 0x190e, 0x17ae: 0x076e, 0x17af: 0x0772, + 0x17b0: 0x1913, 0x17b1: 0x1918, 0x17b2: 0x0776, 0x17b3: 0x139a, 0x17b4: 0x139e, 0x17b5: 0x13a2, + 0x17b6: 0x13a6, 0x17b7: 0x13b2, 0x17b8: 0x13ae, 0x17b9: 0x13ba, 0x17ba: 0x13b6, 0x17bb: 0x13c6, + 0x17bc: 0x13be, 0x17bd: 0x13c2, 0x17be: 0x13ca, 0x17bf: 0x077a, + // Block 0x5f, offset 0x17c0 + 0x17c0: 0x13d2, 0x17c1: 0x13d6, 0x17c2: 0x077e, 0x17c3: 0x13e6, 0x17c4: 0x13ea, 0x17c5: 0x1922, + 0x17c6: 0x13f6, 0x17c7: 0x13fa, 0x17c8: 0x0782, 0x17c9: 0x1406, 0x17ca: 0x06b6, 0x17cb: 0x1927, + 0x17cc: 0x192c, 0x17cd: 0x0786, 0x17ce: 0x078a, 0x17cf: 0x1432, 0x17d0: 0x144a, 0x17d1: 0x1466, + 0x17d2: 0x1476, 0x17d3: 0x1931, 0x17d4: 0x148a, 0x17d5: 0x148e, 0x17d6: 0x14a6, 0x17d7: 0x14b2, + 0x17d8: 0x193b, 0x17d9: 0x178d, 0x17da: 0x14be, 0x17db: 0x14ba, 0x17dc: 0x14c6, 0x17dd: 0x1792, + 0x17de: 0x14d2, 0x17df: 0x14de, 0x17e0: 0x1940, 0x17e1: 0x1945, 0x17e2: 0x151e, 0x17e3: 0x152a, + 0x17e4: 0x1532, 0x17e5: 0x194a, 0x17e6: 0x1536, 0x17e7: 0x1562, 0x17e8: 0x156e, 0x17e9: 0x1572, + 0x17ea: 0x156a, 0x17eb: 0x157e, 0x17ec: 0x1582, 0x17ed: 0x194f, 0x17ee: 0x158e, 0x17ef: 0x078e, + 0x17f0: 0x1596, 0x17f1: 0x1954, 0x17f2: 0x0792, 0x17f3: 0x15ce, 0x17f4: 0x0bbe, 0x17f5: 0x15e6, + 0x17f6: 0x1959, 0x17f7: 0x1963, 0x17f8: 0x0796, 0x17f9: 0x079a, 0x17fa: 0x160e, 0x17fb: 0x1968, + 0x17fc: 0x079e, 0x17fd: 0x196d, 0x17fe: 0x1626, 0x17ff: 0x1626, + // Block 0x60, offset 0x1800 + 0x1800: 0x162e, 0x1801: 0x1972, 0x1802: 0x1646, 0x1803: 0x07a2, 0x1804: 0x1656, 0x1805: 0x1662, + 0x1806: 0x166a, 0x1807: 0x1672, 0x1808: 0x07a6, 0x1809: 0x1977, 0x180a: 0x1686, 0x180b: 0x16a2, + 0x180c: 0x16ae, 0x180d: 0x07aa, 0x180e: 0x07ae, 0x180f: 0x16b2, 0x1810: 0x197c, 0x1811: 0x07b2, + 0x1812: 0x1981, 0x1813: 0x1986, 0x1814: 0x198b, 0x1815: 0x16d6, 0x1816: 0x07b6, 0x1817: 0x16ea, + 0x1818: 0x16f2, 0x1819: 0x16f6, 0x181a: 0x16fe, 0x181b: 0x1706, 0x181c: 0x170e, 0x181d: 0x1995, +} + +// nfkcIndex: 22 blocks, 1408 entries, 2816 bytes +// Block 0 is the zero block. 
+var nfkcIndex = [1408]uint16{ + // Block 0x0, offset 0x0 + // Block 0x1, offset 0x40 + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc2: 0x5f, 0xc3: 0x01, 0xc4: 0x02, 0xc5: 0x03, 0xc6: 0x60, 0xc7: 0x04, + 0xc8: 0x05, 0xca: 0x61, 0xcb: 0x62, 0xcc: 0x06, 0xcd: 0x07, 0xce: 0x08, 0xcf: 0x09, + 0xd0: 0x0a, 0xd1: 0x63, 0xd2: 0x64, 0xd3: 0x0b, 0xd6: 0x0c, 0xd7: 0x65, + 0xd8: 0x66, 0xd9: 0x0d, 0xdb: 0x67, 0xdc: 0x68, 0xdd: 0x69, 0xdf: 0x6a, + 0xe0: 0x02, 0xe1: 0x03, 0xe2: 0x04, 0xe3: 0x05, + 0xea: 0x06, 0xeb: 0x07, 0xec: 0x08, 0xed: 0x09, 0xef: 0x0a, + 0xf0: 0x13, + // Block 0x4, offset 0x100 + 0x120: 0x6b, 0x121: 0x6c, 0x122: 0x6d, 0x123: 0x0e, 0x124: 0x6e, 0x125: 0x6f, 0x126: 0x70, 0x127: 0x71, + 0x128: 0x72, 0x129: 0x73, 0x12a: 0x74, 0x12b: 0x75, 0x12c: 0x70, 0x12d: 0x76, 0x12e: 0x77, 0x12f: 0x78, + 0x130: 0x74, 0x131: 0x79, 0x132: 0x7a, 0x133: 0x7b, 0x134: 0x7c, 0x135: 0x7d, 0x137: 0x7e, + 0x138: 0x7f, 0x139: 0x80, 0x13a: 0x81, 0x13b: 0x82, 0x13c: 0x83, 0x13d: 0x84, 0x13e: 0x85, 0x13f: 0x86, + // Block 0x5, offset 0x140 + 0x140: 0x87, 0x142: 0x88, 0x143: 0x89, 0x144: 0x8a, 0x145: 0x8b, 0x146: 0x8c, 0x147: 0x8d, + 0x14d: 0x8e, + 0x15c: 0x8f, 0x15f: 0x90, + 0x162: 0x91, 0x164: 0x92, + 0x168: 0x93, 0x169: 0x94, 0x16a: 0x95, 0x16b: 0x96, 0x16c: 0x0f, 0x16d: 0x97, 0x16e: 0x98, 0x16f: 0x99, + 0x170: 0x9a, 0x173: 0x9b, 0x174: 0x9c, 0x175: 0x10, 0x176: 0x11, 0x177: 0x12, + 0x178: 0x13, 0x179: 0x14, 0x17a: 0x15, 0x17b: 0x16, 0x17c: 0x17, 0x17d: 0x18, 0x17e: 0x19, 0x17f: 0x1a, + // Block 0x6, offset 0x180 + 0x180: 0x9d, 0x181: 0x9e, 0x182: 0x9f, 0x183: 0xa0, 0x184: 0x1b, 0x185: 0x1c, 0x186: 0xa1, 0x187: 0xa2, + 0x188: 0xa3, 0x189: 0x1d, 0x18a: 0x1e, 0x18b: 0xa4, 0x18c: 0xa5, + 0x191: 0x1f, 0x192: 0x20, 0x193: 0xa6, + 0x1a8: 0xa7, 0x1a9: 0xa8, 0x1ab: 0xa9, + 0x1b1: 0xaa, 0x1b3: 0xab, 0x1b5: 0xac, 0x1b7: 0xad, + 0x1ba: 0xae, 0x1bb: 0xaf, 0x1bc: 0x21, 0x1bd: 0x22, 0x1be: 0x23, 0x1bf: 0xb0, + // Block 0x7, offset 0x1c0 + 0x1c0: 0xb1, 0x1c1: 0x24, 0x1c2: 0x25, 0x1c3: 0x26, 0x1c4: 0xb2, 0x1c5: 0x27, 0x1c6: 0x28, + 0x1c8: 0x29, 0x1c9: 0x2a, 0x1ca: 0x2b, 0x1cb: 0x2c, 0x1cc: 0x2d, 0x1cd: 0x2e, 0x1ce: 0x2f, 0x1cf: 0x30, + // Block 0x8, offset 0x200 + 0x219: 0xb3, 0x21a: 0xb4, 0x21b: 0xb5, 0x21d: 0xb6, 0x21f: 0xb7, + 0x220: 0xb8, 0x223: 0xb9, 0x224: 0xba, 0x225: 0xbb, 0x226: 0xbc, 0x227: 0xbd, + 0x22a: 0xbe, 0x22b: 0xbf, 0x22d: 0xc0, 0x22f: 0xc1, + 0x230: 0xc2, 0x231: 0xc3, 0x232: 0xc4, 0x233: 0xc5, 0x234: 0xc6, 0x235: 0xc7, 0x236: 0xc8, 0x237: 0xc2, + 0x238: 0xc3, 0x239: 0xc4, 0x23a: 0xc5, 0x23b: 0xc6, 0x23c: 0xc7, 0x23d: 0xc8, 0x23e: 0xc2, 0x23f: 0xc3, + // Block 0x9, offset 0x240 + 0x240: 0xc4, 0x241: 0xc5, 0x242: 0xc6, 0x243: 0xc7, 0x244: 0xc8, 0x245: 0xc2, 0x246: 0xc3, 0x247: 0xc4, + 0x248: 0xc5, 0x249: 0xc6, 0x24a: 0xc7, 0x24b: 0xc8, 0x24c: 0xc2, 0x24d: 0xc3, 0x24e: 0xc4, 0x24f: 0xc5, + 0x250: 0xc6, 0x251: 0xc7, 0x252: 0xc8, 0x253: 0xc2, 0x254: 0xc3, 0x255: 0xc4, 0x256: 0xc5, 0x257: 0xc6, + 0x258: 0xc7, 0x259: 0xc8, 0x25a: 0xc2, 0x25b: 0xc3, 0x25c: 0xc4, 0x25d: 0xc5, 0x25e: 0xc6, 0x25f: 0xc7, + 0x260: 0xc8, 0x261: 0xc2, 0x262: 0xc3, 0x263: 0xc4, 0x264: 0xc5, 0x265: 0xc6, 0x266: 0xc7, 0x267: 0xc8, + 0x268: 0xc2, 0x269: 0xc3, 0x26a: 0xc4, 0x26b: 0xc5, 0x26c: 0xc6, 0x26d: 0xc7, 0x26e: 0xc8, 0x26f: 0xc2, + 0x270: 0xc3, 0x271: 0xc4, 0x272: 0xc5, 0x273: 0xc6, 0x274: 0xc7, 0x275: 0xc8, 0x276: 0xc2, 0x277: 0xc3, + 0x278: 0xc4, 0x279: 0xc5, 0x27a: 0xc6, 0x27b: 0xc7, 0x27c: 0xc8, 0x27d: 0xc2, 0x27e: 0xc3, 0x27f: 0xc4, + // Block 0xa, offset 0x280 + 0x280: 0xc5, 0x281: 0xc6, 0x282: 0xc7, 0x283: 0xc8, 
0x284: 0xc2, 0x285: 0xc3, 0x286: 0xc4, 0x287: 0xc5, + 0x288: 0xc6, 0x289: 0xc7, 0x28a: 0xc8, 0x28b: 0xc2, 0x28c: 0xc3, 0x28d: 0xc4, 0x28e: 0xc5, 0x28f: 0xc6, + 0x290: 0xc7, 0x291: 0xc8, 0x292: 0xc2, 0x293: 0xc3, 0x294: 0xc4, 0x295: 0xc5, 0x296: 0xc6, 0x297: 0xc7, + 0x298: 0xc8, 0x299: 0xc2, 0x29a: 0xc3, 0x29b: 0xc4, 0x29c: 0xc5, 0x29d: 0xc6, 0x29e: 0xc7, 0x29f: 0xc8, + 0x2a0: 0xc2, 0x2a1: 0xc3, 0x2a2: 0xc4, 0x2a3: 0xc5, 0x2a4: 0xc6, 0x2a5: 0xc7, 0x2a6: 0xc8, 0x2a7: 0xc2, + 0x2a8: 0xc3, 0x2a9: 0xc4, 0x2aa: 0xc5, 0x2ab: 0xc6, 0x2ac: 0xc7, 0x2ad: 0xc8, 0x2ae: 0xc2, 0x2af: 0xc3, + 0x2b0: 0xc4, 0x2b1: 0xc5, 0x2b2: 0xc6, 0x2b3: 0xc7, 0x2b4: 0xc8, 0x2b5: 0xc2, 0x2b6: 0xc3, 0x2b7: 0xc4, + 0x2b8: 0xc5, 0x2b9: 0xc6, 0x2ba: 0xc7, 0x2bb: 0xc8, 0x2bc: 0xc2, 0x2bd: 0xc3, 0x2be: 0xc4, 0x2bf: 0xc5, + // Block 0xb, offset 0x2c0 + 0x2c0: 0xc6, 0x2c1: 0xc7, 0x2c2: 0xc8, 0x2c3: 0xc2, 0x2c4: 0xc3, 0x2c5: 0xc4, 0x2c6: 0xc5, 0x2c7: 0xc6, + 0x2c8: 0xc7, 0x2c9: 0xc8, 0x2ca: 0xc2, 0x2cb: 0xc3, 0x2cc: 0xc4, 0x2cd: 0xc5, 0x2ce: 0xc6, 0x2cf: 0xc7, + 0x2d0: 0xc8, 0x2d1: 0xc2, 0x2d2: 0xc3, 0x2d3: 0xc4, 0x2d4: 0xc5, 0x2d5: 0xc6, 0x2d6: 0xc7, 0x2d7: 0xc8, + 0x2d8: 0xc2, 0x2d9: 0xc3, 0x2da: 0xc4, 0x2db: 0xc5, 0x2dc: 0xc6, 0x2dd: 0xc7, 0x2de: 0xc9, + // Block 0xc, offset 0x300 + 0x324: 0x31, 0x325: 0x32, 0x326: 0x33, 0x327: 0x34, + 0x328: 0x35, 0x329: 0x36, 0x32a: 0x37, 0x32b: 0x38, 0x32c: 0x39, 0x32d: 0x3a, 0x32e: 0x3b, 0x32f: 0x3c, + 0x330: 0x3d, 0x331: 0x3e, 0x332: 0x3f, 0x333: 0x40, 0x334: 0x41, 0x335: 0x42, 0x336: 0x43, 0x337: 0x44, + 0x338: 0x45, 0x339: 0x46, 0x33a: 0x47, 0x33b: 0x48, 0x33c: 0xca, 0x33d: 0x49, 0x33e: 0x4a, 0x33f: 0x4b, + // Block 0xd, offset 0x340 + 0x347: 0xcb, + 0x34b: 0xcc, 0x34d: 0xcd, + 0x35e: 0x4c, + 0x368: 0xce, 0x36b: 0xcf, + 0x374: 0xd0, + 0x37a: 0xd1, 0x37b: 0xd2, 0x37d: 0xd3, 0x37e: 0xd4, + // Block 0xe, offset 0x380 + 0x381: 0xd5, 0x382: 0xd6, 0x384: 0xd7, 0x385: 0xbc, 0x387: 0xd8, + 0x388: 0xd9, 0x38b: 0xda, 0x38c: 0xdb, 0x38d: 0xdc, + 0x391: 0xdd, 0x392: 0xde, 0x393: 0xdf, 0x396: 0xe0, 0x397: 0xe1, + 0x398: 0xe2, 0x39a: 0xe3, 0x39c: 0xe4, + 0x3a0: 0xe5, 0x3a4: 0xe6, 0x3a5: 0xe7, 0x3a7: 0xe8, + 0x3a8: 0xe9, 0x3a9: 0xea, 0x3aa: 0xeb, + 0x3b0: 0xe2, 0x3b5: 0xec, 0x3b6: 0xed, + 0x3bd: 0xee, + // Block 0xf, offset 0x3c0 + 0x3eb: 0xef, 0x3ec: 0xf0, + 0x3ff: 0xf1, + // Block 0x10, offset 0x400 + 0x432: 0xf2, + // Block 0x11, offset 0x440 + 0x445: 0xf3, 0x446: 0xf4, 0x447: 0xf5, + 0x449: 0xf6, + 0x450: 0xf7, 0x451: 0xf8, 0x452: 0xf9, 0x453: 0xfa, 0x454: 0xfb, 0x455: 0xfc, 0x456: 0xfd, 0x457: 0xfe, + 0x458: 0xff, 0x459: 0x100, 0x45a: 0x4d, 0x45b: 0x101, 0x45c: 0x102, 0x45d: 0x103, 0x45e: 0x104, 0x45f: 0x4e, + // Block 0x12, offset 0x480 + 0x480: 0x4f, 0x481: 0x50, 0x482: 0x105, 0x484: 0xf0, + 0x48a: 0x106, 0x48b: 0x107, + 0x493: 0x108, + 0x4a3: 0x109, 0x4a5: 0x10a, + 0x4b8: 0x51, 0x4b9: 0x52, 0x4ba: 0x53, + // Block 0x13, offset 0x4c0 + 0x4c4: 0x54, 0x4c5: 0x10b, 0x4c6: 0x10c, + 0x4c8: 0x55, 0x4c9: 0x10d, + 0x4ef: 0x10e, + // Block 0x14, offset 0x500 + 0x520: 0x56, 0x521: 0x57, 0x522: 0x58, 0x523: 0x59, 0x524: 0x5a, 0x525: 0x5b, 0x526: 0x5c, 0x527: 0x5d, + 0x528: 0x5e, + // Block 0x15, offset 0x540 + 0x550: 0x0b, 0x551: 0x0c, 0x556: 0x0d, + 0x55b: 0x0e, 0x55d: 0x0f, 0x55e: 0x10, 0x55f: 0x11, + 0x56f: 0x12, +} + +// nfkcSparseOffset: 176 entries, 352 bytes +var nfkcSparseOffset = []uint16{0x0, 0xe, 0x12, 0x1c, 0x26, 0x36, 0x38, 0x3d, 0x48, 0x57, 0x64, 0x6c, 0x71, 0x76, 0x78, 0x7c, 0x84, 0x8b, 0x8e, 0x96, 0x9a, 0x9e, 0xa0, 0xa2, 0xab, 0xaf, 0xb6, 0xbb, 0xbe, 0xc8, 0xcb, 0xd2, 0xda, 0xde, 0xe0, 
0xe4, 0xe8, 0xee, 0xff, 0x10b, 0x10d, 0x113, 0x115, 0x117, 0x119, 0x11b, 0x11d, 0x11f, 0x121, 0x124, 0x127, 0x129, 0x12c, 0x12f, 0x133, 0x139, 0x140, 0x149, 0x14b, 0x14e, 0x150, 0x15b, 0x166, 0x174, 0x182, 0x192, 0x1a0, 0x1a7, 0x1ad, 0x1bc, 0x1c0, 0x1c2, 0x1c6, 0x1c8, 0x1cb, 0x1cd, 0x1d0, 0x1d2, 0x1d5, 0x1d7, 0x1d9, 0x1db, 0x1e7, 0x1f1, 0x1fb, 0x1fe, 0x202, 0x204, 0x206, 0x20b, 0x20e, 0x211, 0x213, 0x215, 0x217, 0x219, 0x21f, 0x222, 0x227, 0x229, 0x230, 0x236, 0x23c, 0x244, 0x24a, 0x250, 0x256, 0x25a, 0x25c, 0x25e, 0x260, 0x262, 0x268, 0x26b, 0x26d, 0x26f, 0x271, 0x277, 0x27b, 0x27f, 0x287, 0x28e, 0x291, 0x294, 0x296, 0x299, 0x2a1, 0x2a5, 0x2ac, 0x2af, 0x2b5, 0x2b7, 0x2b9, 0x2bc, 0x2be, 0x2c1, 0x2c6, 0x2c8, 0x2ca, 0x2cc, 0x2ce, 0x2d0, 0x2d3, 0x2d5, 0x2d7, 0x2d9, 0x2db, 0x2dd, 0x2df, 0x2ec, 0x2f6, 0x2f8, 0x2fa, 0x2fe, 0x303, 0x30f, 0x314, 0x31d, 0x323, 0x328, 0x32c, 0x331, 0x335, 0x345, 0x353, 0x361, 0x36f, 0x371, 0x373, 0x375, 0x379, 0x37b, 0x37e, 0x389, 0x38b, 0x395} + +// nfkcSparseValues: 919 entries, 3676 bytes +var nfkcSparseValues = [919]valueRange{ + // Block 0x0, offset 0x0 + {value: 0x0002, lo: 0x0d}, + {value: 0x0001, lo: 0xa0, hi: 0xa0}, + {value: 0x43b9, lo: 0xa8, hi: 0xa8}, + {value: 0x0083, lo: 0xaa, hi: 0xaa}, + {value: 0x43a5, lo: 0xaf, hi: 0xaf}, + {value: 0x0025, lo: 0xb2, hi: 0xb3}, + {value: 0x439b, lo: 0xb4, hi: 0xb4}, + {value: 0x0260, lo: 0xb5, hi: 0xb5}, + {value: 0x43d2, lo: 0xb8, hi: 0xb8}, + {value: 0x0023, lo: 0xb9, hi: 0xb9}, + {value: 0x009f, lo: 0xba, hi: 0xba}, + {value: 0x234c, lo: 0xbc, hi: 0xbc}, + {value: 0x2340, lo: 0xbd, hi: 0xbd}, + {value: 0x23e2, lo: 0xbe, hi: 0xbe}, + // Block 0x1, offset 0xe + {value: 0x0091, lo: 0x03}, + {value: 0x4823, lo: 0xa0, hi: 0xa1}, + {value: 0x4855, lo: 0xaf, hi: 0xb0}, + {value: 0xa000, lo: 0xb7, hi: 0xb7}, + // Block 0x2, offset 0x12 + {value: 0x0004, lo: 0x09}, + {value: 0xa000, lo: 0x92, hi: 0x92}, + {value: 0x0091, lo: 0xb0, hi: 0xb0}, + {value: 0x0140, lo: 0xb1, hi: 0xb1}, + {value: 0x0095, lo: 0xb2, hi: 0xb2}, + {value: 0x00a5, lo: 0xb3, hi: 0xb3}, + {value: 0x0179, lo: 0xb4, hi: 0xb4}, + {value: 0x017f, lo: 0xb5, hi: 0xb5}, + {value: 0x018b, lo: 0xb6, hi: 0xb6}, + {value: 0x00af, lo: 0xb7, hi: 0xb8}, + // Block 0x3, offset 0x1c + {value: 0x000a, lo: 0x09}, + {value: 0x43af, lo: 0x98, hi: 0x98}, + {value: 0x43b4, lo: 0x99, hi: 0x9a}, + {value: 0x43d7, lo: 0x9b, hi: 0x9b}, + {value: 0x43a0, lo: 0x9c, hi: 0x9c}, + {value: 0x43c3, lo: 0x9d, hi: 0x9d}, + {value: 0x0137, lo: 0xa0, hi: 0xa0}, + {value: 0x0099, lo: 0xa1, hi: 0xa1}, + {value: 0x00a7, lo: 0xa2, hi: 0xa3}, + {value: 0x01b8, lo: 0xa4, hi: 0xa4}, + // Block 0x4, offset 0x26 + {value: 0x0000, lo: 0x0f}, + {value: 0xa000, lo: 0x83, hi: 0x83}, + {value: 0xa000, lo: 0x87, hi: 0x87}, + {value: 0xa000, lo: 0x8b, hi: 0x8b}, + {value: 0xa000, lo: 0x8d, hi: 0x8d}, + {value: 0x38e6, lo: 0x90, hi: 0x90}, + {value: 0x38f2, lo: 0x91, hi: 0x91}, + {value: 0x38e0, lo: 0x93, hi: 0x93}, + {value: 0xa000, lo: 0x96, hi: 0x96}, + {value: 0x3958, lo: 0x97, hi: 0x97}, + {value: 0x3922, lo: 0x9c, hi: 0x9c}, + {value: 0x390a, lo: 0x9d, hi: 0x9d}, + {value: 0x3934, lo: 0x9e, hi: 0x9e}, + {value: 0xa000, lo: 0xb4, hi: 0xb5}, + {value: 0x395e, lo: 0xb6, hi: 0xb6}, + {value: 0x3964, lo: 0xb7, hi: 0xb7}, + // Block 0x5, offset 0x36 + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0x83, hi: 0x87}, + // Block 0x6, offset 0x38 + {value: 0x0001, lo: 0x04}, + {value: 0x8114, lo: 0x81, hi: 0x82}, + {value: 0x8133, lo: 0x84, hi: 0x84}, + {value: 0x812e, lo: 0x85, hi: 0x85}, + {value: 0x810e, 
lo: 0x87, hi: 0x87}, + // Block 0x7, offset 0x3d + {value: 0x0000, lo: 0x0a}, + {value: 0x8133, lo: 0x90, hi: 0x97}, + {value: 0x811a, lo: 0x98, hi: 0x98}, + {value: 0x811b, lo: 0x99, hi: 0x99}, + {value: 0x811c, lo: 0x9a, hi: 0x9a}, + {value: 0x3982, lo: 0xa2, hi: 0xa2}, + {value: 0x3988, lo: 0xa3, hi: 0xa3}, + {value: 0x3994, lo: 0xa4, hi: 0xa4}, + {value: 0x398e, lo: 0xa5, hi: 0xa5}, + {value: 0x399a, lo: 0xa6, hi: 0xa6}, + {value: 0xa000, lo: 0xa7, hi: 0xa7}, + // Block 0x8, offset 0x48 + {value: 0x0000, lo: 0x0e}, + {value: 0x39ac, lo: 0x80, hi: 0x80}, + {value: 0xa000, lo: 0x81, hi: 0x81}, + {value: 0x39a0, lo: 0x82, hi: 0x82}, + {value: 0xa000, lo: 0x92, hi: 0x92}, + {value: 0x39a6, lo: 0x93, hi: 0x93}, + {value: 0xa000, lo: 0x95, hi: 0x95}, + {value: 0x8133, lo: 0x96, hi: 0x9c}, + {value: 0x8133, lo: 0x9f, hi: 0xa2}, + {value: 0x812e, lo: 0xa3, hi: 0xa3}, + {value: 0x8133, lo: 0xa4, hi: 0xa4}, + {value: 0x8133, lo: 0xa7, hi: 0xa8}, + {value: 0x812e, lo: 0xaa, hi: 0xaa}, + {value: 0x8133, lo: 0xab, hi: 0xac}, + {value: 0x812e, lo: 0xad, hi: 0xad}, + // Block 0x9, offset 0x57 + {value: 0x0000, lo: 0x0c}, + {value: 0x8120, lo: 0x91, hi: 0x91}, + {value: 0x8133, lo: 0xb0, hi: 0xb0}, + {value: 0x812e, lo: 0xb1, hi: 0xb1}, + {value: 0x8133, lo: 0xb2, hi: 0xb3}, + {value: 0x812e, lo: 0xb4, hi: 0xb4}, + {value: 0x8133, lo: 0xb5, hi: 0xb6}, + {value: 0x812e, lo: 0xb7, hi: 0xb9}, + {value: 0x8133, lo: 0xba, hi: 0xba}, + {value: 0x812e, lo: 0xbb, hi: 0xbc}, + {value: 0x8133, lo: 0xbd, hi: 0xbd}, + {value: 0x812e, lo: 0xbe, hi: 0xbe}, + {value: 0x8133, lo: 0xbf, hi: 0xbf}, + // Block 0xa, offset 0x64 + {value: 0x0005, lo: 0x07}, + {value: 0x8133, lo: 0x80, hi: 0x80}, + {value: 0x8133, lo: 0x81, hi: 0x81}, + {value: 0x812e, lo: 0x82, hi: 0x83}, + {value: 0x812e, lo: 0x84, hi: 0x85}, + {value: 0x812e, lo: 0x86, hi: 0x87}, + {value: 0x812e, lo: 0x88, hi: 0x89}, + {value: 0x8133, lo: 0x8a, hi: 0x8a}, + // Block 0xb, offset 0x6c + {value: 0x0000, lo: 0x04}, + {value: 0x8133, lo: 0xab, hi: 0xb1}, + {value: 0x812e, lo: 0xb2, hi: 0xb2}, + {value: 0x8133, lo: 0xb3, hi: 0xb3}, + {value: 0x812e, lo: 0xbd, hi: 0xbd}, + // Block 0xc, offset 0x71 + {value: 0x0000, lo: 0x04}, + {value: 0x8133, lo: 0x96, hi: 0x99}, + {value: 0x8133, lo: 0x9b, hi: 0xa3}, + {value: 0x8133, lo: 0xa5, hi: 0xa7}, + {value: 0x8133, lo: 0xa9, hi: 0xad}, + // Block 0xd, offset 0x76 + {value: 0x0000, lo: 0x01}, + {value: 0x812e, lo: 0x99, hi: 0x9b}, + // Block 0xe, offset 0x78 + {value: 0x0000, lo: 0x03}, + {value: 0x8133, lo: 0x98, hi: 0x98}, + {value: 0x812e, lo: 0x99, hi: 0x9b}, + {value: 0x8133, lo: 0x9c, hi: 0x9f}, + // Block 0xf, offset 0x7c + {value: 0x0000, lo: 0x07}, + {value: 0xa000, lo: 0xa8, hi: 0xa8}, + {value: 0x4019, lo: 0xa9, hi: 0xa9}, + {value: 0xa000, lo: 0xb0, hi: 0xb0}, + {value: 0x4021, lo: 0xb1, hi: 0xb1}, + {value: 0xa000, lo: 0xb3, hi: 0xb3}, + {value: 0x4029, lo: 0xb4, hi: 0xb4}, + {value: 0x9903, lo: 0xbc, hi: 0xbc}, + // Block 0x10, offset 0x84 + {value: 0x0008, lo: 0x06}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x8133, lo: 0x91, hi: 0x91}, + {value: 0x812e, lo: 0x92, hi: 0x92}, + {value: 0x8133, lo: 0x93, hi: 0x93}, + {value: 0x8133, lo: 0x94, hi: 0x94}, + {value: 0x465d, lo: 0x98, hi: 0x9f}, + // Block 0x11, offset 0x8b + {value: 0x0000, lo: 0x02}, + {value: 0x8103, lo: 0xbc, hi: 0xbc}, + {value: 0x9900, lo: 0xbe, hi: 0xbe}, + // Block 0x12, offset 0x8e + {value: 0x0008, lo: 0x07}, + {value: 0xa000, lo: 0x87, hi: 0x87}, + {value: 0x2dd5, lo: 0x8b, hi: 0x8c}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, 
+ {value: 0x9900, lo: 0x97, hi: 0x97}, + {value: 0x469d, lo: 0x9c, hi: 0x9d}, + {value: 0x46ad, lo: 0x9f, hi: 0x9f}, + {value: 0x8133, lo: 0xbe, hi: 0xbe}, + // Block 0x13, offset 0x96 + {value: 0x0000, lo: 0x03}, + {value: 0x46d5, lo: 0xb3, hi: 0xb3}, + {value: 0x46dd, lo: 0xb6, hi: 0xb6}, + {value: 0x8103, lo: 0xbc, hi: 0xbc}, + // Block 0x14, offset 0x9a + {value: 0x0008, lo: 0x03}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x46b5, lo: 0x99, hi: 0x9b}, + {value: 0x46cd, lo: 0x9e, hi: 0x9e}, + // Block 0x15, offset 0x9e + {value: 0x0000, lo: 0x01}, + {value: 0x8103, lo: 0xbc, hi: 0xbc}, + // Block 0x16, offset 0xa0 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + // Block 0x17, offset 0xa2 + {value: 0x0000, lo: 0x08}, + {value: 0xa000, lo: 0x87, hi: 0x87}, + {value: 0x2ded, lo: 0x88, hi: 0x88}, + {value: 0x2de5, lo: 0x8b, hi: 0x8b}, + {value: 0x2df5, lo: 0x8c, hi: 0x8c}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x9900, lo: 0x96, hi: 0x97}, + {value: 0x46e5, lo: 0x9c, hi: 0x9c}, + {value: 0x46ed, lo: 0x9d, hi: 0x9d}, + // Block 0x18, offset 0xab + {value: 0x0000, lo: 0x03}, + {value: 0xa000, lo: 0x92, hi: 0x92}, + {value: 0x2dfd, lo: 0x94, hi: 0x94}, + {value: 0x9900, lo: 0xbe, hi: 0xbe}, + // Block 0x19, offset 0xaf + {value: 0x0000, lo: 0x06}, + {value: 0xa000, lo: 0x86, hi: 0x87}, + {value: 0x2e05, lo: 0x8a, hi: 0x8a}, + {value: 0x2e15, lo: 0x8b, hi: 0x8b}, + {value: 0x2e0d, lo: 0x8c, hi: 0x8c}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x9900, lo: 0x97, hi: 0x97}, + // Block 0x1a, offset 0xb6 + {value: 0x1801, lo: 0x04}, + {value: 0xa000, lo: 0x86, hi: 0x86}, + {value: 0x4031, lo: 0x88, hi: 0x88}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x8121, lo: 0x95, hi: 0x96}, + // Block 0x1b, offset 0xbb + {value: 0x0000, lo: 0x02}, + {value: 0x8103, lo: 0xbc, hi: 0xbc}, + {value: 0xa000, lo: 0xbf, hi: 0xbf}, + // Block 0x1c, offset 0xbe + {value: 0x0000, lo: 0x09}, + {value: 0x2e1d, lo: 0x80, hi: 0x80}, + {value: 0x9900, lo: 0x82, hi: 0x82}, + {value: 0xa000, lo: 0x86, hi: 0x86}, + {value: 0x2e25, lo: 0x87, hi: 0x87}, + {value: 0x2e2d, lo: 0x88, hi: 0x88}, + {value: 0x3091, lo: 0x8a, hi: 0x8a}, + {value: 0x2f19, lo: 0x8b, hi: 0x8b}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x9900, lo: 0x95, hi: 0x96}, + // Block 0x1d, offset 0xc8 + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0xbb, hi: 0xbc}, + {value: 0x9900, lo: 0xbe, hi: 0xbe}, + // Block 0x1e, offset 0xcb + {value: 0x0000, lo: 0x06}, + {value: 0xa000, lo: 0x86, hi: 0x87}, + {value: 0x2e35, lo: 0x8a, hi: 0x8a}, + {value: 0x2e45, lo: 0x8b, hi: 0x8b}, + {value: 0x2e3d, lo: 0x8c, hi: 0x8c}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x9900, lo: 0x97, hi: 0x97}, + // Block 0x1f, offset 0xd2 + {value: 0x6ab3, lo: 0x07}, + {value: 0x9905, lo: 0x8a, hi: 0x8a}, + {value: 0x9900, lo: 0x8f, hi: 0x8f}, + {value: 0xa000, lo: 0x99, hi: 0x99}, + {value: 0x4039, lo: 0x9a, hi: 0x9a}, + {value: 0x3099, lo: 0x9c, hi: 0x9c}, + {value: 0x2f24, lo: 0x9d, hi: 0x9d}, + {value: 0x2e4d, lo: 0x9e, hi: 0x9f}, + // Block 0x20, offset 0xda + {value: 0x0000, lo: 0x03}, + {value: 0x2751, lo: 0xb3, hi: 0xb3}, + {value: 0x8123, lo: 0xb8, hi: 0xb9}, + {value: 0x8105, lo: 0xba, hi: 0xba}, + // Block 0x21, offset 0xde + {value: 0x0000, lo: 0x01}, + {value: 0x8124, lo: 0x88, hi: 0x8b}, + // Block 0x22, offset 0xe0 + {value: 0x0000, lo: 0x03}, + {value: 0x2766, lo: 0xb3, hi: 0xb3}, + {value: 0x8125, lo: 0xb8, hi: 0xb9}, + {value: 0x8105, lo: 0xba, hi: 0xba}, + // Block 0x23, offset 0xe4 + {value: 0x0000, lo: 
0x03}, + {value: 0x8126, lo: 0x88, hi: 0x8b}, + {value: 0x2758, lo: 0x9c, hi: 0x9c}, + {value: 0x275f, lo: 0x9d, hi: 0x9d}, + // Block 0x24, offset 0xe8 + {value: 0x0000, lo: 0x05}, + {value: 0x03fe, lo: 0x8c, hi: 0x8c}, + {value: 0x812e, lo: 0x98, hi: 0x99}, + {value: 0x812e, lo: 0xb5, hi: 0xb5}, + {value: 0x812e, lo: 0xb7, hi: 0xb7}, + {value: 0x812c, lo: 0xb9, hi: 0xb9}, + // Block 0x25, offset 0xee + {value: 0x0000, lo: 0x10}, + {value: 0x2774, lo: 0x83, hi: 0x83}, + {value: 0x277b, lo: 0x8d, hi: 0x8d}, + {value: 0x2782, lo: 0x92, hi: 0x92}, + {value: 0x2789, lo: 0x97, hi: 0x97}, + {value: 0x2790, lo: 0x9c, hi: 0x9c}, + {value: 0x276d, lo: 0xa9, hi: 0xa9}, + {value: 0x8127, lo: 0xb1, hi: 0xb1}, + {value: 0x8128, lo: 0xb2, hi: 0xb2}, + {value: 0x4bc5, lo: 0xb3, hi: 0xb3}, + {value: 0x8129, lo: 0xb4, hi: 0xb4}, + {value: 0x4bce, lo: 0xb5, hi: 0xb5}, + {value: 0x46f5, lo: 0xb6, hi: 0xb6}, + {value: 0x4735, lo: 0xb7, hi: 0xb7}, + {value: 0x46fd, lo: 0xb8, hi: 0xb8}, + {value: 0x4740, lo: 0xb9, hi: 0xb9}, + {value: 0x8128, lo: 0xba, hi: 0xbd}, + // Block 0x26, offset 0xff + {value: 0x0000, lo: 0x0b}, + {value: 0x8128, lo: 0x80, hi: 0x80}, + {value: 0x4bd7, lo: 0x81, hi: 0x81}, + {value: 0x8133, lo: 0x82, hi: 0x83}, + {value: 0x8105, lo: 0x84, hi: 0x84}, + {value: 0x8133, lo: 0x86, hi: 0x87}, + {value: 0x279e, lo: 0x93, hi: 0x93}, + {value: 0x27a5, lo: 0x9d, hi: 0x9d}, + {value: 0x27ac, lo: 0xa2, hi: 0xa2}, + {value: 0x27b3, lo: 0xa7, hi: 0xa7}, + {value: 0x27ba, lo: 0xac, hi: 0xac}, + {value: 0x2797, lo: 0xb9, hi: 0xb9}, + // Block 0x27, offset 0x10b + {value: 0x0000, lo: 0x01}, + {value: 0x812e, lo: 0x86, hi: 0x86}, + // Block 0x28, offset 0x10d + {value: 0x0000, lo: 0x05}, + {value: 0xa000, lo: 0xa5, hi: 0xa5}, + {value: 0x2e55, lo: 0xa6, hi: 0xa6}, + {value: 0x9900, lo: 0xae, hi: 0xae}, + {value: 0x8103, lo: 0xb7, hi: 0xb7}, + {value: 0x8105, lo: 0xb9, hi: 0xba}, + // Block 0x29, offset 0x113 + {value: 0x0000, lo: 0x01}, + {value: 0x812e, lo: 0x8d, hi: 0x8d}, + // Block 0x2a, offset 0x115 + {value: 0x0000, lo: 0x01}, + {value: 0x0402, lo: 0xbc, hi: 0xbc}, + // Block 0x2b, offset 0x117 + {value: 0x0000, lo: 0x01}, + {value: 0xa000, lo: 0x80, hi: 0x92}, + // Block 0x2c, offset 0x119 + {value: 0x0000, lo: 0x01}, + {value: 0xb900, lo: 0xa1, hi: 0xb5}, + // Block 0x2d, offset 0x11b + {value: 0x0000, lo: 0x01}, + {value: 0x9900, lo: 0xa8, hi: 0xbf}, + // Block 0x2e, offset 0x11d + {value: 0x0000, lo: 0x01}, + {value: 0x9900, lo: 0x80, hi: 0x82}, + // Block 0x2f, offset 0x11f + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0x9d, hi: 0x9f}, + // Block 0x30, offset 0x121 + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0x94, hi: 0x95}, + {value: 0x8105, lo: 0xb4, hi: 0xb4}, + // Block 0x31, offset 0x124 + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0x92, hi: 0x92}, + {value: 0x8133, lo: 0x9d, hi: 0x9d}, + // Block 0x32, offset 0x127 + {value: 0x0000, lo: 0x01}, + {value: 0x8132, lo: 0xa9, hi: 0xa9}, + // Block 0x33, offset 0x129 + {value: 0x0004, lo: 0x02}, + {value: 0x812f, lo: 0xb9, hi: 0xba}, + {value: 0x812e, lo: 0xbb, hi: 0xbb}, + // Block 0x34, offset 0x12c + {value: 0x0000, lo: 0x02}, + {value: 0x8133, lo: 0x97, hi: 0x97}, + {value: 0x812e, lo: 0x98, hi: 0x98}, + // Block 0x35, offset 0x12f + {value: 0x0000, lo: 0x03}, + {value: 0x8105, lo: 0xa0, hi: 0xa0}, + {value: 0x8133, lo: 0xb5, hi: 0xbc}, + {value: 0x812e, lo: 0xbf, hi: 0xbf}, + // Block 0x36, offset 0x133 + {value: 0x0000, lo: 0x05}, + {value: 0x8133, lo: 0xb0, hi: 0xb4}, + {value: 0x812e, lo: 0xb5, hi: 0xba}, + {value: 
0x8133, lo: 0xbb, hi: 0xbc}, + {value: 0x812e, lo: 0xbd, hi: 0xbd}, + {value: 0x812e, lo: 0xbf, hi: 0xbf}, + // Block 0x37, offset 0x139 + {value: 0x0000, lo: 0x06}, + {value: 0x812e, lo: 0x80, hi: 0x80}, + {value: 0x8133, lo: 0x81, hi: 0x82}, + {value: 0x812e, lo: 0x83, hi: 0x84}, + {value: 0x8133, lo: 0x85, hi: 0x89}, + {value: 0x812e, lo: 0x8a, hi: 0x8a}, + {value: 0x8133, lo: 0x8b, hi: 0x8e}, + // Block 0x38, offset 0x140 + {value: 0x0000, lo: 0x08}, + {value: 0x2e9d, lo: 0x80, hi: 0x80}, + {value: 0x2ea5, lo: 0x81, hi: 0x81}, + {value: 0xa000, lo: 0x82, hi: 0x82}, + {value: 0x2ead, lo: 0x83, hi: 0x83}, + {value: 0x8105, lo: 0x84, hi: 0x84}, + {value: 0x8133, lo: 0xab, hi: 0xab}, + {value: 0x812e, lo: 0xac, hi: 0xac}, + {value: 0x8133, lo: 0xad, hi: 0xb3}, + // Block 0x39, offset 0x149 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0xaa, hi: 0xab}, + // Block 0x3a, offset 0x14b + {value: 0x0000, lo: 0x02}, + {value: 0x8103, lo: 0xa6, hi: 0xa6}, + {value: 0x8105, lo: 0xb2, hi: 0xb3}, + // Block 0x3b, offset 0x14e + {value: 0x0000, lo: 0x01}, + {value: 0x8103, lo: 0xb7, hi: 0xb7}, + // Block 0x3c, offset 0x150 + {value: 0x0000, lo: 0x0a}, + {value: 0x8133, lo: 0x90, hi: 0x92}, + {value: 0x8101, lo: 0x94, hi: 0x94}, + {value: 0x812e, lo: 0x95, hi: 0x99}, + {value: 0x8133, lo: 0x9a, hi: 0x9b}, + {value: 0x812e, lo: 0x9c, hi: 0x9f}, + {value: 0x8133, lo: 0xa0, hi: 0xa0}, + {value: 0x8101, lo: 0xa2, hi: 0xa8}, + {value: 0x812e, lo: 0xad, hi: 0xad}, + {value: 0x8133, lo: 0xb4, hi: 0xb4}, + {value: 0x8133, lo: 0xb8, hi: 0xb9}, + // Block 0x3d, offset 0x15b + {value: 0x0002, lo: 0x0a}, + {value: 0x0043, lo: 0xac, hi: 0xac}, + {value: 0x00d1, lo: 0xad, hi: 0xad}, + {value: 0x0045, lo: 0xae, hi: 0xae}, + {value: 0x0049, lo: 0xb0, hi: 0xb1}, + {value: 0x00ec, lo: 0xb2, hi: 0xb2}, + {value: 0x004f, lo: 0xb3, hi: 0xba}, + {value: 0x005f, lo: 0xbc, hi: 0xbc}, + {value: 0x00fe, lo: 0xbd, hi: 0xbd}, + {value: 0x0061, lo: 0xbe, hi: 0xbe}, + {value: 0x0065, lo: 0xbf, hi: 0xbf}, + // Block 0x3e, offset 0x166 + {value: 0x0000, lo: 0x0d}, + {value: 0x0001, lo: 0x80, hi: 0x8a}, + {value: 0x0532, lo: 0x91, hi: 0x91}, + {value: 0x43dc, lo: 0x97, hi: 0x97}, + {value: 0x001d, lo: 0xa4, hi: 0xa4}, + {value: 0x19a0, lo: 0xa5, hi: 0xa5}, + {value: 0x1c8c, lo: 0xa6, hi: 0xa6}, + {value: 0x0001, lo: 0xaf, hi: 0xaf}, + {value: 0x27c1, lo: 0xb3, hi: 0xb3}, + {value: 0x2935, lo: 0xb4, hi: 0xb4}, + {value: 0x27c8, lo: 0xb6, hi: 0xb6}, + {value: 0x293f, lo: 0xb7, hi: 0xb7}, + {value: 0x199a, lo: 0xbc, hi: 0xbc}, + {value: 0x43aa, lo: 0xbe, hi: 0xbe}, + // Block 0x3f, offset 0x174 + {value: 0x0002, lo: 0x0d}, + {value: 0x1a60, lo: 0x87, hi: 0x87}, + {value: 0x1a5d, lo: 0x88, hi: 0x88}, + {value: 0x199d, lo: 0x89, hi: 0x89}, + {value: 0x2ac5, lo: 0x97, hi: 0x97}, + {value: 0x0001, lo: 0x9f, hi: 0x9f}, + {value: 0x0021, lo: 0xb0, hi: 0xb0}, + {value: 0x0093, lo: 0xb1, hi: 0xb1}, + {value: 0x0029, lo: 0xb4, hi: 0xb9}, + {value: 0x0017, lo: 0xba, hi: 0xba}, + {value: 0x055e, lo: 0xbb, hi: 0xbb}, + {value: 0x003b, lo: 0xbc, hi: 0xbc}, + {value: 0x0011, lo: 0xbd, hi: 0xbe}, + {value: 0x009d, lo: 0xbf, hi: 0xbf}, + // Block 0x40, offset 0x182 + {value: 0x0002, lo: 0x0f}, + {value: 0x0021, lo: 0x80, hi: 0x89}, + {value: 0x0017, lo: 0x8a, hi: 0x8a}, + {value: 0x055e, lo: 0x8b, hi: 0x8b}, + {value: 0x003b, lo: 0x8c, hi: 0x8c}, + {value: 0x0011, lo: 0x8d, hi: 0x8e}, + {value: 0x0083, lo: 0x90, hi: 0x90}, + {value: 0x008b, lo: 0x91, hi: 0x91}, + {value: 0x009f, lo: 0x92, hi: 0x92}, + {value: 0x00b1, lo: 0x93, hi: 0x93}, + {value: 
0x011f, lo: 0x94, hi: 0x94}, + {value: 0x0091, lo: 0x95, hi: 0x95}, + {value: 0x0097, lo: 0x96, hi: 0x99}, + {value: 0x00a1, lo: 0x9a, hi: 0x9a}, + {value: 0x00a7, lo: 0x9b, hi: 0x9c}, + {value: 0x1ac9, lo: 0xa8, hi: 0xa8}, + // Block 0x41, offset 0x192 + {value: 0x0000, lo: 0x0d}, + {value: 0x8133, lo: 0x90, hi: 0x91}, + {value: 0x8101, lo: 0x92, hi: 0x93}, + {value: 0x8133, lo: 0x94, hi: 0x97}, + {value: 0x8101, lo: 0x98, hi: 0x9a}, + {value: 0x8133, lo: 0x9b, hi: 0x9c}, + {value: 0x8133, lo: 0xa1, hi: 0xa1}, + {value: 0x8101, lo: 0xa5, hi: 0xa6}, + {value: 0x8133, lo: 0xa7, hi: 0xa7}, + {value: 0x812e, lo: 0xa8, hi: 0xa8}, + {value: 0x8133, lo: 0xa9, hi: 0xa9}, + {value: 0x8101, lo: 0xaa, hi: 0xab}, + {value: 0x812e, lo: 0xac, hi: 0xaf}, + {value: 0x8133, lo: 0xb0, hi: 0xb0}, + // Block 0x42, offset 0x1a0 + {value: 0x0007, lo: 0x06}, + {value: 0x22b0, lo: 0x89, hi: 0x89}, + {value: 0xa000, lo: 0x90, hi: 0x90}, + {value: 0xa000, lo: 0x92, hi: 0x92}, + {value: 0xa000, lo: 0x94, hi: 0x94}, + {value: 0x3cfa, lo: 0x9a, hi: 0x9b}, + {value: 0x3d08, lo: 0xae, hi: 0xae}, + // Block 0x43, offset 0x1a7 + {value: 0x000e, lo: 0x05}, + {value: 0x3d0f, lo: 0x8d, hi: 0x8e}, + {value: 0x3d16, lo: 0x8f, hi: 0x8f}, + {value: 0xa000, lo: 0x90, hi: 0x90}, + {value: 0xa000, lo: 0x92, hi: 0x92}, + {value: 0xa000, lo: 0x94, hi: 0x94}, + // Block 0x44, offset 0x1ad + {value: 0x017a, lo: 0x0e}, + {value: 0xa000, lo: 0x83, hi: 0x83}, + {value: 0x3d24, lo: 0x84, hi: 0x84}, + {value: 0xa000, lo: 0x88, hi: 0x88}, + {value: 0x3d2b, lo: 0x89, hi: 0x89}, + {value: 0xa000, lo: 0x8b, hi: 0x8b}, + {value: 0x3d32, lo: 0x8c, hi: 0x8c}, + {value: 0xa000, lo: 0xa3, hi: 0xa3}, + {value: 0x3d39, lo: 0xa4, hi: 0xa4}, + {value: 0xa000, lo: 0xa5, hi: 0xa5}, + {value: 0x3d40, lo: 0xa6, hi: 0xa6}, + {value: 0x27cf, lo: 0xac, hi: 0xad}, + {value: 0x27d6, lo: 0xaf, hi: 0xaf}, + {value: 0x2953, lo: 0xb0, hi: 0xb0}, + {value: 0xa000, lo: 0xbc, hi: 0xbc}, + // Block 0x45, offset 0x1bc + {value: 0x0007, lo: 0x03}, + {value: 0x3da9, lo: 0xa0, hi: 0xa1}, + {value: 0x3dd3, lo: 0xa2, hi: 0xa3}, + {value: 0x3dfd, lo: 0xaa, hi: 0xad}, + // Block 0x46, offset 0x1c0 + {value: 0x0004, lo: 0x01}, + {value: 0x0586, lo: 0xa9, hi: 0xaa}, + // Block 0x47, offset 0x1c2 + {value: 0x0002, lo: 0x03}, + {value: 0x0057, lo: 0x80, hi: 0x8f}, + {value: 0x0083, lo: 0x90, hi: 0xa9}, + {value: 0x0021, lo: 0xaa, hi: 0xaa}, + // Block 0x48, offset 0x1c6 + {value: 0x0000, lo: 0x01}, + {value: 0x2ad2, lo: 0x8c, hi: 0x8c}, + // Block 0x49, offset 0x1c8 + {value: 0x0266, lo: 0x02}, + {value: 0x1cbc, lo: 0xb4, hi: 0xb4}, + {value: 0x1a5a, lo: 0xb5, hi: 0xb6}, + // Block 0x4a, offset 0x1cb + {value: 0x0000, lo: 0x01}, + {value: 0x461e, lo: 0x9c, hi: 0x9c}, + // Block 0x4b, offset 0x1cd + {value: 0x0000, lo: 0x02}, + {value: 0x0095, lo: 0xbc, hi: 0xbc}, + {value: 0x006d, lo: 0xbd, hi: 0xbd}, + // Block 0x4c, offset 0x1d0 + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xaf, hi: 0xb1}, + // Block 0x4d, offset 0x1d2 + {value: 0x0000, lo: 0x02}, + {value: 0x057a, lo: 0xaf, hi: 0xaf}, + {value: 0x8105, lo: 0xbf, hi: 0xbf}, + // Block 0x4e, offset 0x1d5 + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xa0, hi: 0xbf}, + // Block 0x4f, offset 0x1d7 + {value: 0x0000, lo: 0x01}, + {value: 0x0ebe, lo: 0x9f, hi: 0x9f}, + // Block 0x50, offset 0x1d9 + {value: 0x0000, lo: 0x01}, + {value: 0x172a, lo: 0xb3, hi: 0xb3}, + // Block 0x51, offset 0x1db + {value: 0x0004, lo: 0x0b}, + {value: 0x1692, lo: 0x80, hi: 0x82}, + {value: 0x16aa, lo: 0x83, hi: 0x83}, + {value: 0x16c2, lo: 
0x84, hi: 0x85}, + {value: 0x16d2, lo: 0x86, hi: 0x89}, + {value: 0x16e6, lo: 0x8a, hi: 0x8c}, + {value: 0x16fa, lo: 0x8d, hi: 0x8d}, + {value: 0x1702, lo: 0x8e, hi: 0x8e}, + {value: 0x170a, lo: 0x8f, hi: 0x90}, + {value: 0x1716, lo: 0x91, hi: 0x93}, + {value: 0x1726, lo: 0x94, hi: 0x94}, + {value: 0x172e, lo: 0x95, hi: 0x95}, + // Block 0x52, offset 0x1e7 + {value: 0x0004, lo: 0x09}, + {value: 0x0001, lo: 0x80, hi: 0x80}, + {value: 0x812d, lo: 0xaa, hi: 0xaa}, + {value: 0x8132, lo: 0xab, hi: 0xab}, + {value: 0x8134, lo: 0xac, hi: 0xac}, + {value: 0x812f, lo: 0xad, hi: 0xad}, + {value: 0x8130, lo: 0xae, hi: 0xae}, + {value: 0x8130, lo: 0xaf, hi: 0xaf}, + {value: 0x05ae, lo: 0xb6, hi: 0xb6}, + {value: 0x0982, lo: 0xb8, hi: 0xba}, + // Block 0x53, offset 0x1f1 + {value: 0x0006, lo: 0x09}, + {value: 0x0406, lo: 0xb1, hi: 0xb1}, + {value: 0x040a, lo: 0xb2, hi: 0xb2}, + {value: 0x4b7c, lo: 0xb3, hi: 0xb3}, + {value: 0x040e, lo: 0xb4, hi: 0xb4}, + {value: 0x4b82, lo: 0xb5, hi: 0xb6}, + {value: 0x0412, lo: 0xb7, hi: 0xb7}, + {value: 0x0416, lo: 0xb8, hi: 0xb8}, + {value: 0x041a, lo: 0xb9, hi: 0xb9}, + {value: 0x4b8e, lo: 0xba, hi: 0xbf}, + // Block 0x54, offset 0x1fb + {value: 0x0000, lo: 0x02}, + {value: 0x8133, lo: 0xaf, hi: 0xaf}, + {value: 0x8133, lo: 0xb4, hi: 0xbd}, + // Block 0x55, offset 0x1fe + {value: 0x0000, lo: 0x03}, + {value: 0x02d8, lo: 0x9c, hi: 0x9c}, + {value: 0x02de, lo: 0x9d, hi: 0x9d}, + {value: 0x8133, lo: 0x9e, hi: 0x9f}, + // Block 0x56, offset 0x202 + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xb0, hi: 0xb1}, + // Block 0x57, offset 0x204 + {value: 0x0000, lo: 0x01}, + {value: 0x173e, lo: 0xb0, hi: 0xb0}, + // Block 0x58, offset 0x206 + {value: 0x0006, lo: 0x04}, + {value: 0x0047, lo: 0xb2, hi: 0xb3}, + {value: 0x0063, lo: 0xb4, hi: 0xb4}, + {value: 0x00dd, lo: 0xb8, hi: 0xb8}, + {value: 0x00e9, lo: 0xb9, hi: 0xb9}, + // Block 0x59, offset 0x20b + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0x86, hi: 0x86}, + {value: 0x8105, lo: 0xac, hi: 0xac}, + // Block 0x5a, offset 0x20e + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0x84, hi: 0x84}, + {value: 0x8133, lo: 0xa0, hi: 0xb1}, + // Block 0x5b, offset 0x211 + {value: 0x0000, lo: 0x01}, + {value: 0x812e, lo: 0xab, hi: 0xad}, + // Block 0x5c, offset 0x213 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0x93, hi: 0x93}, + // Block 0x5d, offset 0x215 + {value: 0x0000, lo: 0x01}, + {value: 0x8103, lo: 0xb3, hi: 0xb3}, + // Block 0x5e, offset 0x217 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0x80, hi: 0x80}, + // Block 0x5f, offset 0x219 + {value: 0x0000, lo: 0x05}, + {value: 0x8133, lo: 0xb0, hi: 0xb0}, + {value: 0x8133, lo: 0xb2, hi: 0xb3}, + {value: 0x812e, lo: 0xb4, hi: 0xb4}, + {value: 0x8133, lo: 0xb7, hi: 0xb8}, + {value: 0x8133, lo: 0xbe, hi: 0xbf}, + // Block 0x60, offset 0x21f + {value: 0x0000, lo: 0x02}, + {value: 0x8133, lo: 0x81, hi: 0x81}, + {value: 0x8105, lo: 0xb6, hi: 0xb6}, + // Block 0x61, offset 0x222 + {value: 0x000c, lo: 0x04}, + {value: 0x173a, lo: 0x9c, hi: 0x9d}, + {value: 0x014f, lo: 0x9e, hi: 0x9e}, + {value: 0x174a, lo: 0x9f, hi: 0x9f}, + {value: 0x01a6, lo: 0xa9, hi: 0xa9}, + // Block 0x62, offset 0x227 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0xad, hi: 0xad}, + // Block 0x63, offset 0x229 + {value: 0x0000, lo: 0x06}, + {value: 0xe500, lo: 0x80, hi: 0x80}, + {value: 0xc600, lo: 0x81, hi: 0x9b}, + {value: 0xe500, lo: 0x9c, hi: 0x9c}, + {value: 0xc600, lo: 0x9d, hi: 0xb7}, + {value: 0xe500, lo: 0xb8, hi: 0xb8}, + {value: 0xc600, lo: 0xb9, hi: 0xbf}, + // Block 0x64, 
offset 0x230 + {value: 0x0000, lo: 0x05}, + {value: 0xc600, lo: 0x80, hi: 0x93}, + {value: 0xe500, lo: 0x94, hi: 0x94}, + {value: 0xc600, lo: 0x95, hi: 0xaf}, + {value: 0xe500, lo: 0xb0, hi: 0xb0}, + {value: 0xc600, lo: 0xb1, hi: 0xbf}, + // Block 0x65, offset 0x236 + {value: 0x0000, lo: 0x05}, + {value: 0xc600, lo: 0x80, hi: 0x8b}, + {value: 0xe500, lo: 0x8c, hi: 0x8c}, + {value: 0xc600, lo: 0x8d, hi: 0xa7}, + {value: 0xe500, lo: 0xa8, hi: 0xa8}, + {value: 0xc600, lo: 0xa9, hi: 0xbf}, + // Block 0x66, offset 0x23c + {value: 0x0000, lo: 0x07}, + {value: 0xc600, lo: 0x80, hi: 0x83}, + {value: 0xe500, lo: 0x84, hi: 0x84}, + {value: 0xc600, lo: 0x85, hi: 0x9f}, + {value: 0xe500, lo: 0xa0, hi: 0xa0}, + {value: 0xc600, lo: 0xa1, hi: 0xbb}, + {value: 0xe500, lo: 0xbc, hi: 0xbc}, + {value: 0xc600, lo: 0xbd, hi: 0xbf}, + // Block 0x67, offset 0x244 + {value: 0x0000, lo: 0x05}, + {value: 0xc600, lo: 0x80, hi: 0x97}, + {value: 0xe500, lo: 0x98, hi: 0x98}, + {value: 0xc600, lo: 0x99, hi: 0xb3}, + {value: 0xe500, lo: 0xb4, hi: 0xb4}, + {value: 0xc600, lo: 0xb5, hi: 0xbf}, + // Block 0x68, offset 0x24a + {value: 0x0000, lo: 0x05}, + {value: 0xc600, lo: 0x80, hi: 0x8f}, + {value: 0xe500, lo: 0x90, hi: 0x90}, + {value: 0xc600, lo: 0x91, hi: 0xab}, + {value: 0xe500, lo: 0xac, hi: 0xac}, + {value: 0xc600, lo: 0xad, hi: 0xbf}, + // Block 0x69, offset 0x250 + {value: 0x0000, lo: 0x05}, + {value: 0xc600, lo: 0x80, hi: 0x87}, + {value: 0xe500, lo: 0x88, hi: 0x88}, + {value: 0xc600, lo: 0x89, hi: 0xa3}, + {value: 0xe500, lo: 0xa4, hi: 0xa4}, + {value: 0xc600, lo: 0xa5, hi: 0xbf}, + // Block 0x6a, offset 0x256 + {value: 0x0000, lo: 0x03}, + {value: 0xc600, lo: 0x80, hi: 0x87}, + {value: 0xe500, lo: 0x88, hi: 0x88}, + {value: 0xc600, lo: 0x89, hi: 0xa3}, + // Block 0x6b, offset 0x25a + {value: 0x0002, lo: 0x01}, + {value: 0x0003, lo: 0x81, hi: 0xbf}, + // Block 0x6c, offset 0x25c + {value: 0x0000, lo: 0x01}, + {value: 0x812e, lo: 0xbd, hi: 0xbd}, + // Block 0x6d, offset 0x25e + {value: 0x0000, lo: 0x01}, + {value: 0x812e, lo: 0xa0, hi: 0xa0}, + // Block 0x6e, offset 0x260 + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xb6, hi: 0xba}, + // Block 0x6f, offset 0x262 + {value: 0x002d, lo: 0x05}, + {value: 0x812e, lo: 0x8d, hi: 0x8d}, + {value: 0x8133, lo: 0x8f, hi: 0x8f}, + {value: 0x8133, lo: 0xb8, hi: 0xb8}, + {value: 0x8101, lo: 0xb9, hi: 0xba}, + {value: 0x8105, lo: 0xbf, hi: 0xbf}, + // Block 0x70, offset 0x268 + {value: 0x0000, lo: 0x02}, + {value: 0x8133, lo: 0xa5, hi: 0xa5}, + {value: 0x812e, lo: 0xa6, hi: 0xa6}, + // Block 0x71, offset 0x26b + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xa4, hi: 0xa7}, + // Block 0x72, offset 0x26d + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xab, hi: 0xac}, + // Block 0x73, offset 0x26f + {value: 0x0000, lo: 0x01}, + {value: 0x812e, lo: 0xbd, hi: 0xbf}, + // Block 0x74, offset 0x271 + {value: 0x0000, lo: 0x05}, + {value: 0x812e, lo: 0x86, hi: 0x87}, + {value: 0x8133, lo: 0x88, hi: 0x8a}, + {value: 0x812e, lo: 0x8b, hi: 0x8b}, + {value: 0x8133, lo: 0x8c, hi: 0x8c}, + {value: 0x812e, lo: 0x8d, hi: 0x90}, + // Block 0x75, offset 0x277 + {value: 0x0005, lo: 0x03}, + {value: 0x8133, lo: 0x82, hi: 0x82}, + {value: 0x812e, lo: 0x83, hi: 0x84}, + {value: 0x812e, lo: 0x85, hi: 0x85}, + // Block 0x76, offset 0x27b + {value: 0x0000, lo: 0x03}, + {value: 0x8105, lo: 0x86, hi: 0x86}, + {value: 0x8105, lo: 0xb0, hi: 0xb0}, + {value: 0x8105, lo: 0xbf, hi: 0xbf}, + // Block 0x77, offset 0x27f + {value: 0x17fe, lo: 0x07}, + {value: 0xa000, lo: 0x99, hi: 0x99}, + {value: 
0x4379, lo: 0x9a, hi: 0x9a}, + {value: 0xa000, lo: 0x9b, hi: 0x9b}, + {value: 0x4383, lo: 0x9c, hi: 0x9c}, + {value: 0xa000, lo: 0xa5, hi: 0xa5}, + {value: 0x438d, lo: 0xab, hi: 0xab}, + {value: 0x8105, lo: 0xb9, hi: 0xba}, + // Block 0x78, offset 0x287 + {value: 0x0000, lo: 0x06}, + {value: 0x8133, lo: 0x80, hi: 0x82}, + {value: 0x9900, lo: 0xa7, hi: 0xa7}, + {value: 0x2eb5, lo: 0xae, hi: 0xae}, + {value: 0x2ebf, lo: 0xaf, hi: 0xaf}, + {value: 0xa000, lo: 0xb1, hi: 0xb2}, + {value: 0x8105, lo: 0xb3, hi: 0xb4}, + // Block 0x79, offset 0x28e + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0x80, hi: 0x80}, + {value: 0x8103, lo: 0x8a, hi: 0x8a}, + // Block 0x7a, offset 0x291 + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0xb5, hi: 0xb5}, + {value: 0x8103, lo: 0xb6, hi: 0xb6}, + // Block 0x7b, offset 0x294 + {value: 0x0002, lo: 0x01}, + {value: 0x8103, lo: 0xa9, hi: 0xaa}, + // Block 0x7c, offset 0x296 + {value: 0x0000, lo: 0x02}, + {value: 0x8103, lo: 0xbb, hi: 0xbc}, + {value: 0x9900, lo: 0xbe, hi: 0xbe}, + // Block 0x7d, offset 0x299 + {value: 0x0000, lo: 0x07}, + {value: 0xa000, lo: 0x87, hi: 0x87}, + {value: 0x2ec9, lo: 0x8b, hi: 0x8b}, + {value: 0x2ed3, lo: 0x8c, hi: 0x8c}, + {value: 0x8105, lo: 0x8d, hi: 0x8d}, + {value: 0x9900, lo: 0x97, hi: 0x97}, + {value: 0x8133, lo: 0xa6, hi: 0xac}, + {value: 0x8133, lo: 0xb0, hi: 0xb4}, + // Block 0x7e, offset 0x2a1 + {value: 0x0000, lo: 0x03}, + {value: 0x8105, lo: 0x82, hi: 0x82}, + {value: 0x8103, lo: 0x86, hi: 0x86}, + {value: 0x8133, lo: 0x9e, hi: 0x9e}, + // Block 0x7f, offset 0x2a5 + {value: 0x6a23, lo: 0x06}, + {value: 0x9900, lo: 0xb0, hi: 0xb0}, + {value: 0xa000, lo: 0xb9, hi: 0xb9}, + {value: 0x9900, lo: 0xba, hi: 0xba}, + {value: 0x2ee7, lo: 0xbb, hi: 0xbb}, + {value: 0x2edd, lo: 0xbc, hi: 0xbd}, + {value: 0x2ef1, lo: 0xbe, hi: 0xbe}, + // Block 0x80, offset 0x2ac + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0x82, hi: 0x82}, + {value: 0x8103, lo: 0x83, hi: 0x83}, + // Block 0x81, offset 0x2af + {value: 0x0000, lo: 0x05}, + {value: 0x9900, lo: 0xaf, hi: 0xaf}, + {value: 0xa000, lo: 0xb8, hi: 0xb9}, + {value: 0x2efb, lo: 0xba, hi: 0xba}, + {value: 0x2f05, lo: 0xbb, hi: 0xbb}, + {value: 0x8105, lo: 0xbf, hi: 0xbf}, + // Block 0x82, offset 0x2b5 + {value: 0x0000, lo: 0x01}, + {value: 0x8103, lo: 0x80, hi: 0x80}, + // Block 0x83, offset 0x2b7 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0xbf, hi: 0xbf}, + // Block 0x84, offset 0x2b9 + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0xb6, hi: 0xb6}, + {value: 0x8103, lo: 0xb7, hi: 0xb7}, + // Block 0x85, offset 0x2bc + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0xab, hi: 0xab}, + // Block 0x86, offset 0x2be + {value: 0x0000, lo: 0x02}, + {value: 0x8105, lo: 0xb9, hi: 0xb9}, + {value: 0x8103, lo: 0xba, hi: 0xba}, + // Block 0x87, offset 0x2c1 + {value: 0x0000, lo: 0x04}, + {value: 0x9900, lo: 0xb0, hi: 0xb0}, + {value: 0xa000, lo: 0xb5, hi: 0xb5}, + {value: 0x2f0f, lo: 0xb8, hi: 0xb8}, + {value: 0x8105, lo: 0xbd, hi: 0xbe}, + // Block 0x88, offset 0x2c6 + {value: 0x0000, lo: 0x01}, + {value: 0x8103, lo: 0x83, hi: 0x83}, + // Block 0x89, offset 0x2c8 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0xa0, hi: 0xa0}, + // Block 0x8a, offset 0x2ca + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0xb4, hi: 0xb4}, + // Block 0x8b, offset 0x2cc + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0x87, hi: 0x87}, + // Block 0x8c, offset 0x2ce + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0x99, hi: 0x99}, + // Block 0x8d, offset 0x2d0 + {value: 0x0000, lo: 0x02}, + 
{value: 0x8103, lo: 0x82, hi: 0x82}, + {value: 0x8105, lo: 0x84, hi: 0x85}, + // Block 0x8e, offset 0x2d3 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0x97, hi: 0x97}, + // Block 0x8f, offset 0x2d5 + {value: 0x0000, lo: 0x01}, + {value: 0x8105, lo: 0x81, hi: 0x82}, + // Block 0x90, offset 0x2d7 + {value: 0x0000, lo: 0x01}, + {value: 0x8101, lo: 0xb0, hi: 0xb4}, + // Block 0x91, offset 0x2d9 + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xb0, hi: 0xb6}, + // Block 0x92, offset 0x2db + {value: 0x0000, lo: 0x01}, + {value: 0x8102, lo: 0xb0, hi: 0xb1}, + // Block 0x93, offset 0x2dd + {value: 0x0000, lo: 0x01}, + {value: 0x8101, lo: 0x9e, hi: 0x9e}, + // Block 0x94, offset 0x2df + {value: 0x0000, lo: 0x0c}, + {value: 0x470d, lo: 0x9e, hi: 0x9e}, + {value: 0x4717, lo: 0x9f, hi: 0x9f}, + {value: 0x474b, lo: 0xa0, hi: 0xa0}, + {value: 0x4759, lo: 0xa1, hi: 0xa1}, + {value: 0x4767, lo: 0xa2, hi: 0xa2}, + {value: 0x4775, lo: 0xa3, hi: 0xa3}, + {value: 0x4783, lo: 0xa4, hi: 0xa4}, + {value: 0x812c, lo: 0xa5, hi: 0xa6}, + {value: 0x8101, lo: 0xa7, hi: 0xa9}, + {value: 0x8131, lo: 0xad, hi: 0xad}, + {value: 0x812c, lo: 0xae, hi: 0xb2}, + {value: 0x812e, lo: 0xbb, hi: 0xbf}, + // Block 0x95, offset 0x2ec + {value: 0x0000, lo: 0x09}, + {value: 0x812e, lo: 0x80, hi: 0x82}, + {value: 0x8133, lo: 0x85, hi: 0x89}, + {value: 0x812e, lo: 0x8a, hi: 0x8b}, + {value: 0x8133, lo: 0xaa, hi: 0xad}, + {value: 0x4721, lo: 0xbb, hi: 0xbb}, + {value: 0x472b, lo: 0xbc, hi: 0xbc}, + {value: 0x4791, lo: 0xbd, hi: 0xbd}, + {value: 0x47ad, lo: 0xbe, hi: 0xbe}, + {value: 0x479f, lo: 0xbf, hi: 0xbf}, + // Block 0x96, offset 0x2f6 + {value: 0x0000, lo: 0x01}, + {value: 0x47bb, lo: 0x80, hi: 0x80}, + // Block 0x97, offset 0x2f8 + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0x82, hi: 0x84}, + // Block 0x98, offset 0x2fa + {value: 0x0002, lo: 0x03}, + {value: 0x0043, lo: 0x80, hi: 0x99}, + {value: 0x0083, lo: 0x9a, hi: 0xb3}, + {value: 0x0043, lo: 0xb4, hi: 0xbf}, + // Block 0x99, offset 0x2fe + {value: 0x0002, lo: 0x04}, + {value: 0x005b, lo: 0x80, hi: 0x8d}, + {value: 0x0083, lo: 0x8e, hi: 0x94}, + {value: 0x0093, lo: 0x96, hi: 0xa7}, + {value: 0x0043, lo: 0xa8, hi: 0xbf}, + // Block 0x9a, offset 0x303 + {value: 0x0002, lo: 0x0b}, + {value: 0x0073, lo: 0x80, hi: 0x81}, + {value: 0x0083, lo: 0x82, hi: 0x9b}, + {value: 0x0043, lo: 0x9c, hi: 0x9c}, + {value: 0x0047, lo: 0x9e, hi: 0x9f}, + {value: 0x004f, lo: 0xa2, hi: 0xa2}, + {value: 0x0055, lo: 0xa5, hi: 0xa6}, + {value: 0x005d, lo: 0xa9, hi: 0xac}, + {value: 0x0067, lo: 0xae, hi: 0xb5}, + {value: 0x0083, lo: 0xb6, hi: 0xb9}, + {value: 0x008d, lo: 0xbb, hi: 0xbb}, + {value: 0x0091, lo: 0xbd, hi: 0xbf}, + // Block 0x9b, offset 0x30f + {value: 0x0002, lo: 0x04}, + {value: 0x0097, lo: 0x80, hi: 0x83}, + {value: 0x00a1, lo: 0x85, hi: 0x8f}, + {value: 0x0043, lo: 0x90, hi: 0xa9}, + {value: 0x0083, lo: 0xaa, hi: 0xbf}, + // Block 0x9c, offset 0x314 + {value: 0x0002, lo: 0x08}, + {value: 0x00af, lo: 0x80, hi: 0x83}, + {value: 0x0043, lo: 0x84, hi: 0x85}, + {value: 0x0049, lo: 0x87, hi: 0x8a}, + {value: 0x0055, lo: 0x8d, hi: 0x94}, + {value: 0x0067, lo: 0x96, hi: 0x9c}, + {value: 0x0083, lo: 0x9e, hi: 0xb7}, + {value: 0x0043, lo: 0xb8, hi: 0xb9}, + {value: 0x0049, lo: 0xbb, hi: 0xbe}, + // Block 0x9d, offset 0x31d + {value: 0x0002, lo: 0x05}, + {value: 0x0053, lo: 0x80, hi: 0x84}, + {value: 0x005f, lo: 0x86, hi: 0x86}, + {value: 0x0067, lo: 0x8a, hi: 0x90}, + {value: 0x0083, lo: 0x92, hi: 0xab}, + {value: 0x0043, lo: 0xac, hi: 0xbf}, + // Block 0x9e, offset 0x323 + 
{value: 0x0002, lo: 0x04}, + {value: 0x006b, lo: 0x80, hi: 0x85}, + {value: 0x0083, lo: 0x86, hi: 0x9f}, + {value: 0x0043, lo: 0xa0, hi: 0xb9}, + {value: 0x0083, lo: 0xba, hi: 0xbf}, + // Block 0x9f, offset 0x328 + {value: 0x0002, lo: 0x03}, + {value: 0x008f, lo: 0x80, hi: 0x93}, + {value: 0x0043, lo: 0x94, hi: 0xad}, + {value: 0x0083, lo: 0xae, hi: 0xbf}, + // Block 0xa0, offset 0x32c + {value: 0x0002, lo: 0x04}, + {value: 0x00a7, lo: 0x80, hi: 0x87}, + {value: 0x0043, lo: 0x88, hi: 0xa1}, + {value: 0x0083, lo: 0xa2, hi: 0xbb}, + {value: 0x0043, lo: 0xbc, hi: 0xbf}, + // Block 0xa1, offset 0x331 + {value: 0x0002, lo: 0x03}, + {value: 0x004b, lo: 0x80, hi: 0x95}, + {value: 0x0083, lo: 0x96, hi: 0xaf}, + {value: 0x0043, lo: 0xb0, hi: 0xbf}, + // Block 0xa2, offset 0x335 + {value: 0x0003, lo: 0x0f}, + {value: 0x023c, lo: 0x80, hi: 0x80}, + {value: 0x0556, lo: 0x81, hi: 0x81}, + {value: 0x023f, lo: 0x82, hi: 0x9a}, + {value: 0x0552, lo: 0x9b, hi: 0x9b}, + {value: 0x024b, lo: 0x9c, hi: 0x9c}, + {value: 0x0254, lo: 0x9d, hi: 0x9d}, + {value: 0x025a, lo: 0x9e, hi: 0x9e}, + {value: 0x027e, lo: 0x9f, hi: 0x9f}, + {value: 0x026f, lo: 0xa0, hi: 0xa0}, + {value: 0x026c, lo: 0xa1, hi: 0xa1}, + {value: 0x01f7, lo: 0xa2, hi: 0xb2}, + {value: 0x020c, lo: 0xb3, hi: 0xb3}, + {value: 0x022a, lo: 0xb4, hi: 0xba}, + {value: 0x0556, lo: 0xbb, hi: 0xbb}, + {value: 0x023f, lo: 0xbc, hi: 0xbf}, + // Block 0xa3, offset 0x345 + {value: 0x0003, lo: 0x0d}, + {value: 0x024b, lo: 0x80, hi: 0x94}, + {value: 0x0552, lo: 0x95, hi: 0x95}, + {value: 0x024b, lo: 0x96, hi: 0x96}, + {value: 0x0254, lo: 0x97, hi: 0x97}, + {value: 0x025a, lo: 0x98, hi: 0x98}, + {value: 0x027e, lo: 0x99, hi: 0x99}, + {value: 0x026f, lo: 0x9a, hi: 0x9a}, + {value: 0x026c, lo: 0x9b, hi: 0x9b}, + {value: 0x01f7, lo: 0x9c, hi: 0xac}, + {value: 0x020c, lo: 0xad, hi: 0xad}, + {value: 0x022a, lo: 0xae, hi: 0xb4}, + {value: 0x0556, lo: 0xb5, hi: 0xb5}, + {value: 0x023f, lo: 0xb6, hi: 0xbf}, + // Block 0xa4, offset 0x353 + {value: 0x0003, lo: 0x0d}, + {value: 0x025d, lo: 0x80, hi: 0x8e}, + {value: 0x0552, lo: 0x8f, hi: 0x8f}, + {value: 0x024b, lo: 0x90, hi: 0x90}, + {value: 0x0254, lo: 0x91, hi: 0x91}, + {value: 0x025a, lo: 0x92, hi: 0x92}, + {value: 0x027e, lo: 0x93, hi: 0x93}, + {value: 0x026f, lo: 0x94, hi: 0x94}, + {value: 0x026c, lo: 0x95, hi: 0x95}, + {value: 0x01f7, lo: 0x96, hi: 0xa6}, + {value: 0x020c, lo: 0xa7, hi: 0xa7}, + {value: 0x022a, lo: 0xa8, hi: 0xae}, + {value: 0x0556, lo: 0xaf, hi: 0xaf}, + {value: 0x023f, lo: 0xb0, hi: 0xbf}, + // Block 0xa5, offset 0x361 + {value: 0x0003, lo: 0x0d}, + {value: 0x026f, lo: 0x80, hi: 0x88}, + {value: 0x0552, lo: 0x89, hi: 0x89}, + {value: 0x024b, lo: 0x8a, hi: 0x8a}, + {value: 0x0254, lo: 0x8b, hi: 0x8b}, + {value: 0x025a, lo: 0x8c, hi: 0x8c}, + {value: 0x027e, lo: 0x8d, hi: 0x8d}, + {value: 0x026f, lo: 0x8e, hi: 0x8e}, + {value: 0x026c, lo: 0x8f, hi: 0x8f}, + {value: 0x01f7, lo: 0x90, hi: 0xa0}, + {value: 0x020c, lo: 0xa1, hi: 0xa1}, + {value: 0x022a, lo: 0xa2, hi: 0xa8}, + {value: 0x0556, lo: 0xa9, hi: 0xa9}, + {value: 0x023f, lo: 0xaa, hi: 0xbf}, + // Block 0xa6, offset 0x36f + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0x8f, hi: 0x8f}, + // Block 0xa7, offset 0x371 + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xae, hi: 0xae}, + // Block 0xa8, offset 0x373 + {value: 0x0000, lo: 0x01}, + {value: 0x8133, lo: 0xac, hi: 0xaf}, + // Block 0xa9, offset 0x375 + {value: 0x0000, lo: 0x03}, + {value: 0x8134, lo: 0xac, hi: 0xad}, + {value: 0x812e, lo: 0xae, hi: 0xae}, + {value: 0x8133, lo: 0xaf, hi: 
0xaf}, + // Block 0xaa, offset 0x379 + {value: 0x0000, lo: 0x01}, + {value: 0x812e, lo: 0x90, hi: 0x96}, + // Block 0xab, offset 0x37b + {value: 0x0000, lo: 0x02}, + {value: 0x8133, lo: 0x84, hi: 0x89}, + {value: 0x8103, lo: 0x8a, hi: 0x8a}, + // Block 0xac, offset 0x37e + {value: 0x0002, lo: 0x0a}, + {value: 0x0063, lo: 0x80, hi: 0x89}, + {value: 0x1a7e, lo: 0x8a, hi: 0x8a}, + {value: 0x1ab1, lo: 0x8b, hi: 0x8b}, + {value: 0x1acc, lo: 0x8c, hi: 0x8c}, + {value: 0x1ad2, lo: 0x8d, hi: 0x8d}, + {value: 0x1cf0, lo: 0x8e, hi: 0x8e}, + {value: 0x1ade, lo: 0x8f, hi: 0x8f}, + {value: 0x1aa8, lo: 0xaa, hi: 0xaa}, + {value: 0x1aab, lo: 0xab, hi: 0xab}, + {value: 0x1aae, lo: 0xac, hi: 0xac}, + // Block 0xad, offset 0x389 + {value: 0x0000, lo: 0x01}, + {value: 0x1a6c, lo: 0x90, hi: 0x90}, + // Block 0xae, offset 0x38b + {value: 0x0028, lo: 0x09}, + {value: 0x2999, lo: 0x80, hi: 0x80}, + {value: 0x295d, lo: 0x81, hi: 0x81}, + {value: 0x2967, lo: 0x82, hi: 0x82}, + {value: 0x297b, lo: 0x83, hi: 0x84}, + {value: 0x2985, lo: 0x85, hi: 0x86}, + {value: 0x2971, lo: 0x87, hi: 0x87}, + {value: 0x298f, lo: 0x88, hi: 0x88}, + {value: 0x0c6a, lo: 0x90, hi: 0x90}, + {value: 0x09e2, lo: 0x91, hi: 0x91}, + // Block 0xaf, offset 0x395 + {value: 0x0002, lo: 0x01}, + {value: 0x0021, lo: 0xb0, hi: 0xb9}, +} + +// recompMap: 7528 bytes (entries only) +var recompMap map[uint32]rune +var recompMapOnce sync.Once + +const recompMapPacked = "" + + "\x00A\x03\x00\x00\x00\x00\xc0" + // 0x00410300: 0x000000C0 + "\x00A\x03\x01\x00\x00\x00\xc1" + // 0x00410301: 0x000000C1 + "\x00A\x03\x02\x00\x00\x00\xc2" + // 0x00410302: 0x000000C2 + "\x00A\x03\x03\x00\x00\x00\xc3" + // 0x00410303: 0x000000C3 + "\x00A\x03\b\x00\x00\x00\xc4" + // 0x00410308: 0x000000C4 + "\x00A\x03\n\x00\x00\x00\xc5" + // 0x0041030A: 0x000000C5 + "\x00C\x03'\x00\x00\x00\xc7" + // 0x00430327: 0x000000C7 + "\x00E\x03\x00\x00\x00\x00\xc8" + // 0x00450300: 0x000000C8 + "\x00E\x03\x01\x00\x00\x00\xc9" + // 0x00450301: 0x000000C9 + "\x00E\x03\x02\x00\x00\x00\xca" + // 0x00450302: 0x000000CA + "\x00E\x03\b\x00\x00\x00\xcb" + // 0x00450308: 0x000000CB + "\x00I\x03\x00\x00\x00\x00\xcc" + // 0x00490300: 0x000000CC + "\x00I\x03\x01\x00\x00\x00\xcd" + // 0x00490301: 0x000000CD + "\x00I\x03\x02\x00\x00\x00\xce" + // 0x00490302: 0x000000CE + "\x00I\x03\b\x00\x00\x00\xcf" + // 0x00490308: 0x000000CF + "\x00N\x03\x03\x00\x00\x00\xd1" + // 0x004E0303: 0x000000D1 + "\x00O\x03\x00\x00\x00\x00\xd2" + // 0x004F0300: 0x000000D2 + "\x00O\x03\x01\x00\x00\x00\xd3" + // 0x004F0301: 0x000000D3 + "\x00O\x03\x02\x00\x00\x00\xd4" + // 0x004F0302: 0x000000D4 + "\x00O\x03\x03\x00\x00\x00\xd5" + // 0x004F0303: 0x000000D5 + "\x00O\x03\b\x00\x00\x00\xd6" + // 0x004F0308: 0x000000D6 + "\x00U\x03\x00\x00\x00\x00\xd9" + // 0x00550300: 0x000000D9 + "\x00U\x03\x01\x00\x00\x00\xda" + // 0x00550301: 0x000000DA + "\x00U\x03\x02\x00\x00\x00\xdb" + // 0x00550302: 0x000000DB + "\x00U\x03\b\x00\x00\x00\xdc" + // 0x00550308: 0x000000DC + "\x00Y\x03\x01\x00\x00\x00\xdd" + // 0x00590301: 0x000000DD + "\x00a\x03\x00\x00\x00\x00\xe0" + // 0x00610300: 0x000000E0 + "\x00a\x03\x01\x00\x00\x00\xe1" + // 0x00610301: 0x000000E1 + "\x00a\x03\x02\x00\x00\x00\xe2" + // 0x00610302: 0x000000E2 + "\x00a\x03\x03\x00\x00\x00\xe3" + // 0x00610303: 0x000000E3 + "\x00a\x03\b\x00\x00\x00\xe4" + // 0x00610308: 0x000000E4 + "\x00a\x03\n\x00\x00\x00\xe5" + // 0x0061030A: 0x000000E5 + "\x00c\x03'\x00\x00\x00\xe7" + // 0x00630327: 0x000000E7 + "\x00e\x03\x00\x00\x00\x00\xe8" + // 0x00650300: 0x000000E8 + "\x00e\x03\x01\x00\x00\x00\xe9" 
+ // 0x00650301: 0x000000E9 + "\x00e\x03\x02\x00\x00\x00\xea" + // 0x00650302: 0x000000EA + "\x00e\x03\b\x00\x00\x00\xeb" + // 0x00650308: 0x000000EB + "\x00i\x03\x00\x00\x00\x00\xec" + // 0x00690300: 0x000000EC + "\x00i\x03\x01\x00\x00\x00\xed" + // 0x00690301: 0x000000ED + "\x00i\x03\x02\x00\x00\x00\xee" + // 0x00690302: 0x000000EE + "\x00i\x03\b\x00\x00\x00\xef" + // 0x00690308: 0x000000EF + "\x00n\x03\x03\x00\x00\x00\xf1" + // 0x006E0303: 0x000000F1 + "\x00o\x03\x00\x00\x00\x00\xf2" + // 0x006F0300: 0x000000F2 + "\x00o\x03\x01\x00\x00\x00\xf3" + // 0x006F0301: 0x000000F3 + "\x00o\x03\x02\x00\x00\x00\xf4" + // 0x006F0302: 0x000000F4 + "\x00o\x03\x03\x00\x00\x00\xf5" + // 0x006F0303: 0x000000F5 + "\x00o\x03\b\x00\x00\x00\xf6" + // 0x006F0308: 0x000000F6 + "\x00u\x03\x00\x00\x00\x00\xf9" + // 0x00750300: 0x000000F9 + "\x00u\x03\x01\x00\x00\x00\xfa" + // 0x00750301: 0x000000FA + "\x00u\x03\x02\x00\x00\x00\xfb" + // 0x00750302: 0x000000FB + "\x00u\x03\b\x00\x00\x00\xfc" + // 0x00750308: 0x000000FC + "\x00y\x03\x01\x00\x00\x00\xfd" + // 0x00790301: 0x000000FD + "\x00y\x03\b\x00\x00\x00\xff" + // 0x00790308: 0x000000FF + "\x00A\x03\x04\x00\x00\x01\x00" + // 0x00410304: 0x00000100 + "\x00a\x03\x04\x00\x00\x01\x01" + // 0x00610304: 0x00000101 + "\x00A\x03\x06\x00\x00\x01\x02" + // 0x00410306: 0x00000102 + "\x00a\x03\x06\x00\x00\x01\x03" + // 0x00610306: 0x00000103 + "\x00A\x03(\x00\x00\x01\x04" + // 0x00410328: 0x00000104 + "\x00a\x03(\x00\x00\x01\x05" + // 0x00610328: 0x00000105 + "\x00C\x03\x01\x00\x00\x01\x06" + // 0x00430301: 0x00000106 + "\x00c\x03\x01\x00\x00\x01\a" + // 0x00630301: 0x00000107 + "\x00C\x03\x02\x00\x00\x01\b" + // 0x00430302: 0x00000108 + "\x00c\x03\x02\x00\x00\x01\t" + // 0x00630302: 0x00000109 + "\x00C\x03\a\x00\x00\x01\n" + // 0x00430307: 0x0000010A + "\x00c\x03\a\x00\x00\x01\v" + // 0x00630307: 0x0000010B + "\x00C\x03\f\x00\x00\x01\f" + // 0x0043030C: 0x0000010C + "\x00c\x03\f\x00\x00\x01\r" + // 0x0063030C: 0x0000010D + "\x00D\x03\f\x00\x00\x01\x0e" + // 0x0044030C: 0x0000010E + "\x00d\x03\f\x00\x00\x01\x0f" + // 0x0064030C: 0x0000010F + "\x00E\x03\x04\x00\x00\x01\x12" + // 0x00450304: 0x00000112 + "\x00e\x03\x04\x00\x00\x01\x13" + // 0x00650304: 0x00000113 + "\x00E\x03\x06\x00\x00\x01\x14" + // 0x00450306: 0x00000114 + "\x00e\x03\x06\x00\x00\x01\x15" + // 0x00650306: 0x00000115 + "\x00E\x03\a\x00\x00\x01\x16" + // 0x00450307: 0x00000116 + "\x00e\x03\a\x00\x00\x01\x17" + // 0x00650307: 0x00000117 + "\x00E\x03(\x00\x00\x01\x18" + // 0x00450328: 0x00000118 + "\x00e\x03(\x00\x00\x01\x19" + // 0x00650328: 0x00000119 + "\x00E\x03\f\x00\x00\x01\x1a" + // 0x0045030C: 0x0000011A + "\x00e\x03\f\x00\x00\x01\x1b" + // 0x0065030C: 0x0000011B + "\x00G\x03\x02\x00\x00\x01\x1c" + // 0x00470302: 0x0000011C + "\x00g\x03\x02\x00\x00\x01\x1d" + // 0x00670302: 0x0000011D + "\x00G\x03\x06\x00\x00\x01\x1e" + // 0x00470306: 0x0000011E + "\x00g\x03\x06\x00\x00\x01\x1f" + // 0x00670306: 0x0000011F + "\x00G\x03\a\x00\x00\x01 " + // 0x00470307: 0x00000120 + "\x00g\x03\a\x00\x00\x01!" 
+ // 0x00670307: 0x00000121 + "\x00G\x03'\x00\x00\x01\"" + // 0x00470327: 0x00000122 + "\x00g\x03'\x00\x00\x01#" + // 0x00670327: 0x00000123 + "\x00H\x03\x02\x00\x00\x01$" + // 0x00480302: 0x00000124 + "\x00h\x03\x02\x00\x00\x01%" + // 0x00680302: 0x00000125 + "\x00I\x03\x03\x00\x00\x01(" + // 0x00490303: 0x00000128 + "\x00i\x03\x03\x00\x00\x01)" + // 0x00690303: 0x00000129 + "\x00I\x03\x04\x00\x00\x01*" + // 0x00490304: 0x0000012A + "\x00i\x03\x04\x00\x00\x01+" + // 0x00690304: 0x0000012B + "\x00I\x03\x06\x00\x00\x01," + // 0x00490306: 0x0000012C + "\x00i\x03\x06\x00\x00\x01-" + // 0x00690306: 0x0000012D + "\x00I\x03(\x00\x00\x01." + // 0x00490328: 0x0000012E + "\x00i\x03(\x00\x00\x01/" + // 0x00690328: 0x0000012F + "\x00I\x03\a\x00\x00\x010" + // 0x00490307: 0x00000130 + "\x00J\x03\x02\x00\x00\x014" + // 0x004A0302: 0x00000134 + "\x00j\x03\x02\x00\x00\x015" + // 0x006A0302: 0x00000135 + "\x00K\x03'\x00\x00\x016" + // 0x004B0327: 0x00000136 + "\x00k\x03'\x00\x00\x017" + // 0x006B0327: 0x00000137 + "\x00L\x03\x01\x00\x00\x019" + // 0x004C0301: 0x00000139 + "\x00l\x03\x01\x00\x00\x01:" + // 0x006C0301: 0x0000013A + "\x00L\x03'\x00\x00\x01;" + // 0x004C0327: 0x0000013B + "\x00l\x03'\x00\x00\x01<" + // 0x006C0327: 0x0000013C + "\x00L\x03\f\x00\x00\x01=" + // 0x004C030C: 0x0000013D + "\x00l\x03\f\x00\x00\x01>" + // 0x006C030C: 0x0000013E + "\x00N\x03\x01\x00\x00\x01C" + // 0x004E0301: 0x00000143 + "\x00n\x03\x01\x00\x00\x01D" + // 0x006E0301: 0x00000144 + "\x00N\x03'\x00\x00\x01E" + // 0x004E0327: 0x00000145 + "\x00n\x03'\x00\x00\x01F" + // 0x006E0327: 0x00000146 + "\x00N\x03\f\x00\x00\x01G" + // 0x004E030C: 0x00000147 + "\x00n\x03\f\x00\x00\x01H" + // 0x006E030C: 0x00000148 + "\x00O\x03\x04\x00\x00\x01L" + // 0x004F0304: 0x0000014C + "\x00o\x03\x04\x00\x00\x01M" + // 0x006F0304: 0x0000014D + "\x00O\x03\x06\x00\x00\x01N" + // 0x004F0306: 0x0000014E + "\x00o\x03\x06\x00\x00\x01O" + // 0x006F0306: 0x0000014F + "\x00O\x03\v\x00\x00\x01P" + // 0x004F030B: 0x00000150 + "\x00o\x03\v\x00\x00\x01Q" + // 0x006F030B: 0x00000151 + "\x00R\x03\x01\x00\x00\x01T" + // 0x00520301: 0x00000154 + "\x00r\x03\x01\x00\x00\x01U" + // 0x00720301: 0x00000155 + "\x00R\x03'\x00\x00\x01V" + // 0x00520327: 0x00000156 + "\x00r\x03'\x00\x00\x01W" + // 0x00720327: 0x00000157 + "\x00R\x03\f\x00\x00\x01X" + // 0x0052030C: 0x00000158 + "\x00r\x03\f\x00\x00\x01Y" + // 0x0072030C: 0x00000159 + "\x00S\x03\x01\x00\x00\x01Z" + // 0x00530301: 0x0000015A + "\x00s\x03\x01\x00\x00\x01[" + // 0x00730301: 0x0000015B + "\x00S\x03\x02\x00\x00\x01\\" + // 0x00530302: 0x0000015C + "\x00s\x03\x02\x00\x00\x01]" + // 0x00730302: 0x0000015D + "\x00S\x03'\x00\x00\x01^" + // 0x00530327: 0x0000015E + "\x00s\x03'\x00\x00\x01_" + // 0x00730327: 0x0000015F + "\x00S\x03\f\x00\x00\x01`" + // 0x0053030C: 0x00000160 + "\x00s\x03\f\x00\x00\x01a" + // 0x0073030C: 0x00000161 + "\x00T\x03'\x00\x00\x01b" + // 0x00540327: 0x00000162 + "\x00t\x03'\x00\x00\x01c" + // 0x00740327: 0x00000163 + "\x00T\x03\f\x00\x00\x01d" + // 0x0054030C: 0x00000164 + "\x00t\x03\f\x00\x00\x01e" + // 0x0074030C: 0x00000165 + "\x00U\x03\x03\x00\x00\x01h" + // 0x00550303: 0x00000168 + "\x00u\x03\x03\x00\x00\x01i" + // 0x00750303: 0x00000169 + "\x00U\x03\x04\x00\x00\x01j" + // 0x00550304: 0x0000016A + "\x00u\x03\x04\x00\x00\x01k" + // 0x00750304: 0x0000016B + "\x00U\x03\x06\x00\x00\x01l" + // 0x00550306: 0x0000016C + "\x00u\x03\x06\x00\x00\x01m" + // 0x00750306: 0x0000016D + "\x00U\x03\n\x00\x00\x01n" + // 0x0055030A: 0x0000016E + "\x00u\x03\n\x00\x00\x01o" + // 0x0075030A: 0x0000016F + 
"\x00U\x03\v\x00\x00\x01p" + // 0x0055030B: 0x00000170 + "\x00u\x03\v\x00\x00\x01q" + // 0x0075030B: 0x00000171 + "\x00U\x03(\x00\x00\x01r" + // 0x00550328: 0x00000172 + "\x00u\x03(\x00\x00\x01s" + // 0x00750328: 0x00000173 + "\x00W\x03\x02\x00\x00\x01t" + // 0x00570302: 0x00000174 + "\x00w\x03\x02\x00\x00\x01u" + // 0x00770302: 0x00000175 + "\x00Y\x03\x02\x00\x00\x01v" + // 0x00590302: 0x00000176 + "\x00y\x03\x02\x00\x00\x01w" + // 0x00790302: 0x00000177 + "\x00Y\x03\b\x00\x00\x01x" + // 0x00590308: 0x00000178 + "\x00Z\x03\x01\x00\x00\x01y" + // 0x005A0301: 0x00000179 + "\x00z\x03\x01\x00\x00\x01z" + // 0x007A0301: 0x0000017A + "\x00Z\x03\a\x00\x00\x01{" + // 0x005A0307: 0x0000017B + "\x00z\x03\a\x00\x00\x01|" + // 0x007A0307: 0x0000017C + "\x00Z\x03\f\x00\x00\x01}" + // 0x005A030C: 0x0000017D + "\x00z\x03\f\x00\x00\x01~" + // 0x007A030C: 0x0000017E + "\x00O\x03\x1b\x00\x00\x01\xa0" + // 0x004F031B: 0x000001A0 + "\x00o\x03\x1b\x00\x00\x01\xa1" + // 0x006F031B: 0x000001A1 + "\x00U\x03\x1b\x00\x00\x01\xaf" + // 0x0055031B: 0x000001AF + "\x00u\x03\x1b\x00\x00\x01\xb0" + // 0x0075031B: 0x000001B0 + "\x00A\x03\f\x00\x00\x01\xcd" + // 0x0041030C: 0x000001CD + "\x00a\x03\f\x00\x00\x01\xce" + // 0x0061030C: 0x000001CE + "\x00I\x03\f\x00\x00\x01\xcf" + // 0x0049030C: 0x000001CF + "\x00i\x03\f\x00\x00\x01\xd0" + // 0x0069030C: 0x000001D0 + "\x00O\x03\f\x00\x00\x01\xd1" + // 0x004F030C: 0x000001D1 + "\x00o\x03\f\x00\x00\x01\xd2" + // 0x006F030C: 0x000001D2 + "\x00U\x03\f\x00\x00\x01\xd3" + // 0x0055030C: 0x000001D3 + "\x00u\x03\f\x00\x00\x01\xd4" + // 0x0075030C: 0x000001D4 + "\x00\xdc\x03\x04\x00\x00\x01\xd5" + // 0x00DC0304: 0x000001D5 + "\x00\xfc\x03\x04\x00\x00\x01\xd6" + // 0x00FC0304: 0x000001D6 + "\x00\xdc\x03\x01\x00\x00\x01\xd7" + // 0x00DC0301: 0x000001D7 + "\x00\xfc\x03\x01\x00\x00\x01\xd8" + // 0x00FC0301: 0x000001D8 + "\x00\xdc\x03\f\x00\x00\x01\xd9" + // 0x00DC030C: 0x000001D9 + "\x00\xfc\x03\f\x00\x00\x01\xda" + // 0x00FC030C: 0x000001DA + "\x00\xdc\x03\x00\x00\x00\x01\xdb" + // 0x00DC0300: 0x000001DB + "\x00\xfc\x03\x00\x00\x00\x01\xdc" + // 0x00FC0300: 0x000001DC + "\x00\xc4\x03\x04\x00\x00\x01\xde" + // 0x00C40304: 0x000001DE + "\x00\xe4\x03\x04\x00\x00\x01\xdf" + // 0x00E40304: 0x000001DF + "\x02&\x03\x04\x00\x00\x01\xe0" + // 0x02260304: 0x000001E0 + "\x02'\x03\x04\x00\x00\x01\xe1" + // 0x02270304: 0x000001E1 + "\x00\xc6\x03\x04\x00\x00\x01\xe2" + // 0x00C60304: 0x000001E2 + "\x00\xe6\x03\x04\x00\x00\x01\xe3" + // 0x00E60304: 0x000001E3 + "\x00G\x03\f\x00\x00\x01\xe6" + // 0x0047030C: 0x000001E6 + "\x00g\x03\f\x00\x00\x01\xe7" + // 0x0067030C: 0x000001E7 + "\x00K\x03\f\x00\x00\x01\xe8" + // 0x004B030C: 0x000001E8 + "\x00k\x03\f\x00\x00\x01\xe9" + // 0x006B030C: 0x000001E9 + "\x00O\x03(\x00\x00\x01\xea" + // 0x004F0328: 0x000001EA + "\x00o\x03(\x00\x00\x01\xeb" + // 0x006F0328: 0x000001EB + "\x01\xea\x03\x04\x00\x00\x01\xec" + // 0x01EA0304: 0x000001EC + "\x01\xeb\x03\x04\x00\x00\x01\xed" + // 0x01EB0304: 0x000001ED + "\x01\xb7\x03\f\x00\x00\x01\xee" + // 0x01B7030C: 0x000001EE + "\x02\x92\x03\f\x00\x00\x01\xef" + // 0x0292030C: 0x000001EF + "\x00j\x03\f\x00\x00\x01\xf0" + // 0x006A030C: 0x000001F0 + "\x00G\x03\x01\x00\x00\x01\xf4" + // 0x00470301: 0x000001F4 + "\x00g\x03\x01\x00\x00\x01\xf5" + // 0x00670301: 0x000001F5 + "\x00N\x03\x00\x00\x00\x01\xf8" + // 0x004E0300: 0x000001F8 + "\x00n\x03\x00\x00\x00\x01\xf9" + // 0x006E0300: 0x000001F9 + "\x00\xc5\x03\x01\x00\x00\x01\xfa" + // 0x00C50301: 0x000001FA + "\x00\xe5\x03\x01\x00\x00\x01\xfb" + // 0x00E50301: 0x000001FB + 
"\x00\xc6\x03\x01\x00\x00\x01\xfc" + // 0x00C60301: 0x000001FC + "\x00\xe6\x03\x01\x00\x00\x01\xfd" + // 0x00E60301: 0x000001FD + "\x00\xd8\x03\x01\x00\x00\x01\xfe" + // 0x00D80301: 0x000001FE + "\x00\xf8\x03\x01\x00\x00\x01\xff" + // 0x00F80301: 0x000001FF + "\x00A\x03\x0f\x00\x00\x02\x00" + // 0x0041030F: 0x00000200 + "\x00a\x03\x0f\x00\x00\x02\x01" + // 0x0061030F: 0x00000201 + "\x00A\x03\x11\x00\x00\x02\x02" + // 0x00410311: 0x00000202 + "\x00a\x03\x11\x00\x00\x02\x03" + // 0x00610311: 0x00000203 + "\x00E\x03\x0f\x00\x00\x02\x04" + // 0x0045030F: 0x00000204 + "\x00e\x03\x0f\x00\x00\x02\x05" + // 0x0065030F: 0x00000205 + "\x00E\x03\x11\x00\x00\x02\x06" + // 0x00450311: 0x00000206 + "\x00e\x03\x11\x00\x00\x02\a" + // 0x00650311: 0x00000207 + "\x00I\x03\x0f\x00\x00\x02\b" + // 0x0049030F: 0x00000208 + "\x00i\x03\x0f\x00\x00\x02\t" + // 0x0069030F: 0x00000209 + "\x00I\x03\x11\x00\x00\x02\n" + // 0x00490311: 0x0000020A + "\x00i\x03\x11\x00\x00\x02\v" + // 0x00690311: 0x0000020B + "\x00O\x03\x0f\x00\x00\x02\f" + // 0x004F030F: 0x0000020C + "\x00o\x03\x0f\x00\x00\x02\r" + // 0x006F030F: 0x0000020D + "\x00O\x03\x11\x00\x00\x02\x0e" + // 0x004F0311: 0x0000020E + "\x00o\x03\x11\x00\x00\x02\x0f" + // 0x006F0311: 0x0000020F + "\x00R\x03\x0f\x00\x00\x02\x10" + // 0x0052030F: 0x00000210 + "\x00r\x03\x0f\x00\x00\x02\x11" + // 0x0072030F: 0x00000211 + "\x00R\x03\x11\x00\x00\x02\x12" + // 0x00520311: 0x00000212 + "\x00r\x03\x11\x00\x00\x02\x13" + // 0x00720311: 0x00000213 + "\x00U\x03\x0f\x00\x00\x02\x14" + // 0x0055030F: 0x00000214 + "\x00u\x03\x0f\x00\x00\x02\x15" + // 0x0075030F: 0x00000215 + "\x00U\x03\x11\x00\x00\x02\x16" + // 0x00550311: 0x00000216 + "\x00u\x03\x11\x00\x00\x02\x17" + // 0x00750311: 0x00000217 + "\x00S\x03&\x00\x00\x02\x18" + // 0x00530326: 0x00000218 + "\x00s\x03&\x00\x00\x02\x19" + // 0x00730326: 0x00000219 + "\x00T\x03&\x00\x00\x02\x1a" + // 0x00540326: 0x0000021A + "\x00t\x03&\x00\x00\x02\x1b" + // 0x00740326: 0x0000021B + "\x00H\x03\f\x00\x00\x02\x1e" + // 0x0048030C: 0x0000021E + "\x00h\x03\f\x00\x00\x02\x1f" + // 0x0068030C: 0x0000021F + "\x00A\x03\a\x00\x00\x02&" + // 0x00410307: 0x00000226 + "\x00a\x03\a\x00\x00\x02'" + // 0x00610307: 0x00000227 + "\x00E\x03'\x00\x00\x02(" + // 0x00450327: 0x00000228 + "\x00e\x03'\x00\x00\x02)" + // 0x00650327: 0x00000229 + "\x00\xd6\x03\x04\x00\x00\x02*" + // 0x00D60304: 0x0000022A + "\x00\xf6\x03\x04\x00\x00\x02+" + // 0x00F60304: 0x0000022B + "\x00\xd5\x03\x04\x00\x00\x02," + // 0x00D50304: 0x0000022C + "\x00\xf5\x03\x04\x00\x00\x02-" + // 0x00F50304: 0x0000022D + "\x00O\x03\a\x00\x00\x02." 
+ // 0x004F0307: 0x0000022E + "\x00o\x03\a\x00\x00\x02/" + // 0x006F0307: 0x0000022F + "\x02.\x03\x04\x00\x00\x020" + // 0x022E0304: 0x00000230 + "\x02/\x03\x04\x00\x00\x021" + // 0x022F0304: 0x00000231 + "\x00Y\x03\x04\x00\x00\x022" + // 0x00590304: 0x00000232 + "\x00y\x03\x04\x00\x00\x023" + // 0x00790304: 0x00000233 + "\x00\xa8\x03\x01\x00\x00\x03\x85" + // 0x00A80301: 0x00000385 + "\x03\x91\x03\x01\x00\x00\x03\x86" + // 0x03910301: 0x00000386 + "\x03\x95\x03\x01\x00\x00\x03\x88" + // 0x03950301: 0x00000388 + "\x03\x97\x03\x01\x00\x00\x03\x89" + // 0x03970301: 0x00000389 + "\x03\x99\x03\x01\x00\x00\x03\x8a" + // 0x03990301: 0x0000038A + "\x03\x9f\x03\x01\x00\x00\x03\x8c" + // 0x039F0301: 0x0000038C + "\x03\xa5\x03\x01\x00\x00\x03\x8e" + // 0x03A50301: 0x0000038E + "\x03\xa9\x03\x01\x00\x00\x03\x8f" + // 0x03A90301: 0x0000038F + "\x03\xca\x03\x01\x00\x00\x03\x90" + // 0x03CA0301: 0x00000390 + "\x03\x99\x03\b\x00\x00\x03\xaa" + // 0x03990308: 0x000003AA + "\x03\xa5\x03\b\x00\x00\x03\xab" + // 0x03A50308: 0x000003AB + "\x03\xb1\x03\x01\x00\x00\x03\xac" + // 0x03B10301: 0x000003AC + "\x03\xb5\x03\x01\x00\x00\x03\xad" + // 0x03B50301: 0x000003AD + "\x03\xb7\x03\x01\x00\x00\x03\xae" + // 0x03B70301: 0x000003AE + "\x03\xb9\x03\x01\x00\x00\x03\xaf" + // 0x03B90301: 0x000003AF + "\x03\xcb\x03\x01\x00\x00\x03\xb0" + // 0x03CB0301: 0x000003B0 + "\x03\xb9\x03\b\x00\x00\x03\xca" + // 0x03B90308: 0x000003CA + "\x03\xc5\x03\b\x00\x00\x03\xcb" + // 0x03C50308: 0x000003CB + "\x03\xbf\x03\x01\x00\x00\x03\xcc" + // 0x03BF0301: 0x000003CC + "\x03\xc5\x03\x01\x00\x00\x03\xcd" + // 0x03C50301: 0x000003CD + "\x03\xc9\x03\x01\x00\x00\x03\xce" + // 0x03C90301: 0x000003CE + "\x03\xd2\x03\x01\x00\x00\x03\xd3" + // 0x03D20301: 0x000003D3 + "\x03\xd2\x03\b\x00\x00\x03\xd4" + // 0x03D20308: 0x000003D4 + "\x04\x15\x03\x00\x00\x00\x04\x00" + // 0x04150300: 0x00000400 + "\x04\x15\x03\b\x00\x00\x04\x01" + // 0x04150308: 0x00000401 + "\x04\x13\x03\x01\x00\x00\x04\x03" + // 0x04130301: 0x00000403 + "\x04\x06\x03\b\x00\x00\x04\a" + // 0x04060308: 0x00000407 + "\x04\x1a\x03\x01\x00\x00\x04\f" + // 0x041A0301: 0x0000040C + "\x04\x18\x03\x00\x00\x00\x04\r" + // 0x04180300: 0x0000040D + "\x04#\x03\x06\x00\x00\x04\x0e" + // 0x04230306: 0x0000040E + "\x04\x18\x03\x06\x00\x00\x04\x19" + // 0x04180306: 0x00000419 + "\x048\x03\x06\x00\x00\x049" + // 0x04380306: 0x00000439 + "\x045\x03\x00\x00\x00\x04P" + // 0x04350300: 0x00000450 + "\x045\x03\b\x00\x00\x04Q" + // 0x04350308: 0x00000451 + "\x043\x03\x01\x00\x00\x04S" + // 0x04330301: 0x00000453 + "\x04V\x03\b\x00\x00\x04W" + // 0x04560308: 0x00000457 + "\x04:\x03\x01\x00\x00\x04\\" + // 0x043A0301: 0x0000045C + "\x048\x03\x00\x00\x00\x04]" + // 0x04380300: 0x0000045D + "\x04C\x03\x06\x00\x00\x04^" + // 0x04430306: 0x0000045E + "\x04t\x03\x0f\x00\x00\x04v" + // 0x0474030F: 0x00000476 + "\x04u\x03\x0f\x00\x00\x04w" + // 0x0475030F: 0x00000477 + "\x04\x16\x03\x06\x00\x00\x04\xc1" + // 0x04160306: 0x000004C1 + "\x046\x03\x06\x00\x00\x04\xc2" + // 0x04360306: 0x000004C2 + "\x04\x10\x03\x06\x00\x00\x04\xd0" + // 0x04100306: 0x000004D0 + "\x040\x03\x06\x00\x00\x04\xd1" + // 0x04300306: 0x000004D1 + "\x04\x10\x03\b\x00\x00\x04\xd2" + // 0x04100308: 0x000004D2 + "\x040\x03\b\x00\x00\x04\xd3" + // 0x04300308: 0x000004D3 + "\x04\x15\x03\x06\x00\x00\x04\xd6" + // 0x04150306: 0x000004D6 + "\x045\x03\x06\x00\x00\x04\xd7" + // 0x04350306: 0x000004D7 + "\x04\xd8\x03\b\x00\x00\x04\xda" + // 0x04D80308: 0x000004DA + "\x04\xd9\x03\b\x00\x00\x04\xdb" + // 0x04D90308: 0x000004DB + 
"\x04\x16\x03\b\x00\x00\x04\xdc" + // 0x04160308: 0x000004DC + "\x046\x03\b\x00\x00\x04\xdd" + // 0x04360308: 0x000004DD + "\x04\x17\x03\b\x00\x00\x04\xde" + // 0x04170308: 0x000004DE + "\x047\x03\b\x00\x00\x04\xdf" + // 0x04370308: 0x000004DF + "\x04\x18\x03\x04\x00\x00\x04\xe2" + // 0x04180304: 0x000004E2 + "\x048\x03\x04\x00\x00\x04\xe3" + // 0x04380304: 0x000004E3 + "\x04\x18\x03\b\x00\x00\x04\xe4" + // 0x04180308: 0x000004E4 + "\x048\x03\b\x00\x00\x04\xe5" + // 0x04380308: 0x000004E5 + "\x04\x1e\x03\b\x00\x00\x04\xe6" + // 0x041E0308: 0x000004E6 + "\x04>\x03\b\x00\x00\x04\xe7" + // 0x043E0308: 0x000004E7 + "\x04\xe8\x03\b\x00\x00\x04\xea" + // 0x04E80308: 0x000004EA + "\x04\xe9\x03\b\x00\x00\x04\xeb" + // 0x04E90308: 0x000004EB + "\x04-\x03\b\x00\x00\x04\xec" + // 0x042D0308: 0x000004EC + "\x04M\x03\b\x00\x00\x04\xed" + // 0x044D0308: 0x000004ED + "\x04#\x03\x04\x00\x00\x04\xee" + // 0x04230304: 0x000004EE + "\x04C\x03\x04\x00\x00\x04\xef" + // 0x04430304: 0x000004EF + "\x04#\x03\b\x00\x00\x04\xf0" + // 0x04230308: 0x000004F0 + "\x04C\x03\b\x00\x00\x04\xf1" + // 0x04430308: 0x000004F1 + "\x04#\x03\v\x00\x00\x04\xf2" + // 0x0423030B: 0x000004F2 + "\x04C\x03\v\x00\x00\x04\xf3" + // 0x0443030B: 0x000004F3 + "\x04'\x03\b\x00\x00\x04\xf4" + // 0x04270308: 0x000004F4 + "\x04G\x03\b\x00\x00\x04\xf5" + // 0x04470308: 0x000004F5 + "\x04+\x03\b\x00\x00\x04\xf8" + // 0x042B0308: 0x000004F8 + "\x04K\x03\b\x00\x00\x04\xf9" + // 0x044B0308: 0x000004F9 + "\x06'\x06S\x00\x00\x06\"" + // 0x06270653: 0x00000622 + "\x06'\x06T\x00\x00\x06#" + // 0x06270654: 0x00000623 + "\x06H\x06T\x00\x00\x06$" + // 0x06480654: 0x00000624 + "\x06'\x06U\x00\x00\x06%" + // 0x06270655: 0x00000625 + "\x06J\x06T\x00\x00\x06&" + // 0x064A0654: 0x00000626 + "\x06\xd5\x06T\x00\x00\x06\xc0" + // 0x06D50654: 0x000006C0 + "\x06\xc1\x06T\x00\x00\x06\xc2" + // 0x06C10654: 0x000006C2 + "\x06\xd2\x06T\x00\x00\x06\xd3" + // 0x06D20654: 0x000006D3 + "\t(\t<\x00\x00\t)" + // 0x0928093C: 0x00000929 + "\t0\t<\x00\x00\t1" + // 0x0930093C: 0x00000931 + "\t3\t<\x00\x00\t4" + // 0x0933093C: 0x00000934 + "\t\xc7\t\xbe\x00\x00\t\xcb" + // 0x09C709BE: 0x000009CB + "\t\xc7\t\xd7\x00\x00\t\xcc" + // 0x09C709D7: 0x000009CC + "\vG\vV\x00\x00\vH" + // 0x0B470B56: 0x00000B48 + "\vG\v>\x00\x00\vK" + // 0x0B470B3E: 0x00000B4B + "\vG\vW\x00\x00\vL" + // 0x0B470B57: 0x00000B4C + "\v\x92\v\xd7\x00\x00\v\x94" + // 0x0B920BD7: 0x00000B94 + "\v\xc6\v\xbe\x00\x00\v\xca" + // 0x0BC60BBE: 0x00000BCA + "\v\xc7\v\xbe\x00\x00\v\xcb" + // 0x0BC70BBE: 0x00000BCB + "\v\xc6\v\xd7\x00\x00\v\xcc" + // 0x0BC60BD7: 0x00000BCC + "\fF\fV\x00\x00\fH" + // 0x0C460C56: 0x00000C48 + "\f\xbf\f\xd5\x00\x00\f\xc0" + // 0x0CBF0CD5: 0x00000CC0 + "\f\xc6\f\xd5\x00\x00\f\xc7" + // 0x0CC60CD5: 0x00000CC7 + "\f\xc6\f\xd6\x00\x00\f\xc8" + // 0x0CC60CD6: 0x00000CC8 + "\f\xc6\f\xc2\x00\x00\f\xca" + // 0x0CC60CC2: 0x00000CCA + "\f\xca\f\xd5\x00\x00\f\xcb" + // 0x0CCA0CD5: 0x00000CCB + "\rF\r>\x00\x00\rJ" + // 0x0D460D3E: 0x00000D4A + "\rG\r>\x00\x00\rK" + // 0x0D470D3E: 0x00000D4B + "\rF\rW\x00\x00\rL" + // 0x0D460D57: 0x00000D4C + "\r\xd9\r\xca\x00\x00\r\xda" + // 0x0DD90DCA: 0x00000DDA + "\r\xd9\r\xcf\x00\x00\r\xdc" + // 0x0DD90DCF: 0x00000DDC + "\r\xdc\r\xca\x00\x00\r\xdd" + // 0x0DDC0DCA: 0x00000DDD + "\r\xd9\r\xdf\x00\x00\r\xde" + // 0x0DD90DDF: 0x00000DDE + "\x10%\x10.\x00\x00\x10&" + // 0x1025102E: 0x00001026 + "\x1b\x05\x1b5\x00\x00\x1b\x06" + // 0x1B051B35: 0x00001B06 + "\x1b\a\x1b5\x00\x00\x1b\b" + // 0x1B071B35: 0x00001B08 + "\x1b\t\x1b5\x00\x00\x1b\n" + // 0x1B091B35: 0x00001B0A + 
"\x1b\v\x1b5\x00\x00\x1b\f" + // 0x1B0B1B35: 0x00001B0C + "\x1b\r\x1b5\x00\x00\x1b\x0e" + // 0x1B0D1B35: 0x00001B0E + "\x1b\x11\x1b5\x00\x00\x1b\x12" + // 0x1B111B35: 0x00001B12 + "\x1b:\x1b5\x00\x00\x1b;" + // 0x1B3A1B35: 0x00001B3B + "\x1b<\x1b5\x00\x00\x1b=" + // 0x1B3C1B35: 0x00001B3D + "\x1b>\x1b5\x00\x00\x1b@" + // 0x1B3E1B35: 0x00001B40 + "\x1b?\x1b5\x00\x00\x1bA" + // 0x1B3F1B35: 0x00001B41 + "\x1bB\x1b5\x00\x00\x1bC" + // 0x1B421B35: 0x00001B43 + "\x00A\x03%\x00\x00\x1e\x00" + // 0x00410325: 0x00001E00 + "\x00a\x03%\x00\x00\x1e\x01" + // 0x00610325: 0x00001E01 + "\x00B\x03\a\x00\x00\x1e\x02" + // 0x00420307: 0x00001E02 + "\x00b\x03\a\x00\x00\x1e\x03" + // 0x00620307: 0x00001E03 + "\x00B\x03#\x00\x00\x1e\x04" + // 0x00420323: 0x00001E04 + "\x00b\x03#\x00\x00\x1e\x05" + // 0x00620323: 0x00001E05 + "\x00B\x031\x00\x00\x1e\x06" + // 0x00420331: 0x00001E06 + "\x00b\x031\x00\x00\x1e\a" + // 0x00620331: 0x00001E07 + "\x00\xc7\x03\x01\x00\x00\x1e\b" + // 0x00C70301: 0x00001E08 + "\x00\xe7\x03\x01\x00\x00\x1e\t" + // 0x00E70301: 0x00001E09 + "\x00D\x03\a\x00\x00\x1e\n" + // 0x00440307: 0x00001E0A + "\x00d\x03\a\x00\x00\x1e\v" + // 0x00640307: 0x00001E0B + "\x00D\x03#\x00\x00\x1e\f" + // 0x00440323: 0x00001E0C + "\x00d\x03#\x00\x00\x1e\r" + // 0x00640323: 0x00001E0D + "\x00D\x031\x00\x00\x1e\x0e" + // 0x00440331: 0x00001E0E + "\x00d\x031\x00\x00\x1e\x0f" + // 0x00640331: 0x00001E0F + "\x00D\x03'\x00\x00\x1e\x10" + // 0x00440327: 0x00001E10 + "\x00d\x03'\x00\x00\x1e\x11" + // 0x00640327: 0x00001E11 + "\x00D\x03-\x00\x00\x1e\x12" + // 0x0044032D: 0x00001E12 + "\x00d\x03-\x00\x00\x1e\x13" + // 0x0064032D: 0x00001E13 + "\x01\x12\x03\x00\x00\x00\x1e\x14" + // 0x01120300: 0x00001E14 + "\x01\x13\x03\x00\x00\x00\x1e\x15" + // 0x01130300: 0x00001E15 + "\x01\x12\x03\x01\x00\x00\x1e\x16" + // 0x01120301: 0x00001E16 + "\x01\x13\x03\x01\x00\x00\x1e\x17" + // 0x01130301: 0x00001E17 + "\x00E\x03-\x00\x00\x1e\x18" + // 0x0045032D: 0x00001E18 + "\x00e\x03-\x00\x00\x1e\x19" + // 0x0065032D: 0x00001E19 + "\x00E\x030\x00\x00\x1e\x1a" + // 0x00450330: 0x00001E1A + "\x00e\x030\x00\x00\x1e\x1b" + // 0x00650330: 0x00001E1B + "\x02(\x03\x06\x00\x00\x1e\x1c" + // 0x02280306: 0x00001E1C + "\x02)\x03\x06\x00\x00\x1e\x1d" + // 0x02290306: 0x00001E1D + "\x00F\x03\a\x00\x00\x1e\x1e" + // 0x00460307: 0x00001E1E + "\x00f\x03\a\x00\x00\x1e\x1f" + // 0x00660307: 0x00001E1F + "\x00G\x03\x04\x00\x00\x1e " + // 0x00470304: 0x00001E20 + "\x00g\x03\x04\x00\x00\x1e!" + // 0x00670304: 0x00001E21 + "\x00H\x03\a\x00\x00\x1e\"" + // 0x00480307: 0x00001E22 + "\x00h\x03\a\x00\x00\x1e#" + // 0x00680307: 0x00001E23 + "\x00H\x03#\x00\x00\x1e$" + // 0x00480323: 0x00001E24 + "\x00h\x03#\x00\x00\x1e%" + // 0x00680323: 0x00001E25 + "\x00H\x03\b\x00\x00\x1e&" + // 0x00480308: 0x00001E26 + "\x00h\x03\b\x00\x00\x1e'" + // 0x00680308: 0x00001E27 + "\x00H\x03'\x00\x00\x1e(" + // 0x00480327: 0x00001E28 + "\x00h\x03'\x00\x00\x1e)" + // 0x00680327: 0x00001E29 + "\x00H\x03.\x00\x00\x1e*" + // 0x0048032E: 0x00001E2A + "\x00h\x03.\x00\x00\x1e+" + // 0x0068032E: 0x00001E2B + "\x00I\x030\x00\x00\x1e," + // 0x00490330: 0x00001E2C + "\x00i\x030\x00\x00\x1e-" + // 0x00690330: 0x00001E2D + "\x00\xcf\x03\x01\x00\x00\x1e." 
+ // 0x00CF0301: 0x00001E2E + "\x00\xef\x03\x01\x00\x00\x1e/" + // 0x00EF0301: 0x00001E2F + "\x00K\x03\x01\x00\x00\x1e0" + // 0x004B0301: 0x00001E30 + "\x00k\x03\x01\x00\x00\x1e1" + // 0x006B0301: 0x00001E31 + "\x00K\x03#\x00\x00\x1e2" + // 0x004B0323: 0x00001E32 + "\x00k\x03#\x00\x00\x1e3" + // 0x006B0323: 0x00001E33 + "\x00K\x031\x00\x00\x1e4" + // 0x004B0331: 0x00001E34 + "\x00k\x031\x00\x00\x1e5" + // 0x006B0331: 0x00001E35 + "\x00L\x03#\x00\x00\x1e6" + // 0x004C0323: 0x00001E36 + "\x00l\x03#\x00\x00\x1e7" + // 0x006C0323: 0x00001E37 + "\x1e6\x03\x04\x00\x00\x1e8" + // 0x1E360304: 0x00001E38 + "\x1e7\x03\x04\x00\x00\x1e9" + // 0x1E370304: 0x00001E39 + "\x00L\x031\x00\x00\x1e:" + // 0x004C0331: 0x00001E3A + "\x00l\x031\x00\x00\x1e;" + // 0x006C0331: 0x00001E3B + "\x00L\x03-\x00\x00\x1e<" + // 0x004C032D: 0x00001E3C + "\x00l\x03-\x00\x00\x1e=" + // 0x006C032D: 0x00001E3D + "\x00M\x03\x01\x00\x00\x1e>" + // 0x004D0301: 0x00001E3E + "\x00m\x03\x01\x00\x00\x1e?" + // 0x006D0301: 0x00001E3F + "\x00M\x03\a\x00\x00\x1e@" + // 0x004D0307: 0x00001E40 + "\x00m\x03\a\x00\x00\x1eA" + // 0x006D0307: 0x00001E41 + "\x00M\x03#\x00\x00\x1eB" + // 0x004D0323: 0x00001E42 + "\x00m\x03#\x00\x00\x1eC" + // 0x006D0323: 0x00001E43 + "\x00N\x03\a\x00\x00\x1eD" + // 0x004E0307: 0x00001E44 + "\x00n\x03\a\x00\x00\x1eE" + // 0x006E0307: 0x00001E45 + "\x00N\x03#\x00\x00\x1eF" + // 0x004E0323: 0x00001E46 + "\x00n\x03#\x00\x00\x1eG" + // 0x006E0323: 0x00001E47 + "\x00N\x031\x00\x00\x1eH" + // 0x004E0331: 0x00001E48 + "\x00n\x031\x00\x00\x1eI" + // 0x006E0331: 0x00001E49 + "\x00N\x03-\x00\x00\x1eJ" + // 0x004E032D: 0x00001E4A + "\x00n\x03-\x00\x00\x1eK" + // 0x006E032D: 0x00001E4B + "\x00\xd5\x03\x01\x00\x00\x1eL" + // 0x00D50301: 0x00001E4C + "\x00\xf5\x03\x01\x00\x00\x1eM" + // 0x00F50301: 0x00001E4D + "\x00\xd5\x03\b\x00\x00\x1eN" + // 0x00D50308: 0x00001E4E + "\x00\xf5\x03\b\x00\x00\x1eO" + // 0x00F50308: 0x00001E4F + "\x01L\x03\x00\x00\x00\x1eP" + // 0x014C0300: 0x00001E50 + "\x01M\x03\x00\x00\x00\x1eQ" + // 0x014D0300: 0x00001E51 + "\x01L\x03\x01\x00\x00\x1eR" + // 0x014C0301: 0x00001E52 + "\x01M\x03\x01\x00\x00\x1eS" + // 0x014D0301: 0x00001E53 + "\x00P\x03\x01\x00\x00\x1eT" + // 0x00500301: 0x00001E54 + "\x00p\x03\x01\x00\x00\x1eU" + // 0x00700301: 0x00001E55 + "\x00P\x03\a\x00\x00\x1eV" + // 0x00500307: 0x00001E56 + "\x00p\x03\a\x00\x00\x1eW" + // 0x00700307: 0x00001E57 + "\x00R\x03\a\x00\x00\x1eX" + // 0x00520307: 0x00001E58 + "\x00r\x03\a\x00\x00\x1eY" + // 0x00720307: 0x00001E59 + "\x00R\x03#\x00\x00\x1eZ" + // 0x00520323: 0x00001E5A + "\x00r\x03#\x00\x00\x1e[" + // 0x00720323: 0x00001E5B + "\x1eZ\x03\x04\x00\x00\x1e\\" + // 0x1E5A0304: 0x00001E5C + "\x1e[\x03\x04\x00\x00\x1e]" + // 0x1E5B0304: 0x00001E5D + "\x00R\x031\x00\x00\x1e^" + // 0x00520331: 0x00001E5E + "\x00r\x031\x00\x00\x1e_" + // 0x00720331: 0x00001E5F + "\x00S\x03\a\x00\x00\x1e`" + // 0x00530307: 0x00001E60 + "\x00s\x03\a\x00\x00\x1ea" + // 0x00730307: 0x00001E61 + "\x00S\x03#\x00\x00\x1eb" + // 0x00530323: 0x00001E62 + "\x00s\x03#\x00\x00\x1ec" + // 0x00730323: 0x00001E63 + "\x01Z\x03\a\x00\x00\x1ed" + // 0x015A0307: 0x00001E64 + "\x01[\x03\a\x00\x00\x1ee" + // 0x015B0307: 0x00001E65 + "\x01`\x03\a\x00\x00\x1ef" + // 0x01600307: 0x00001E66 + "\x01a\x03\a\x00\x00\x1eg" + // 0x01610307: 0x00001E67 + "\x1eb\x03\a\x00\x00\x1eh" + // 0x1E620307: 0x00001E68 + "\x1ec\x03\a\x00\x00\x1ei" + // 0x1E630307: 0x00001E69 + "\x00T\x03\a\x00\x00\x1ej" + // 0x00540307: 0x00001E6A + "\x00t\x03\a\x00\x00\x1ek" + // 0x00740307: 0x00001E6B + 
"\x00T\x03#\x00\x00\x1el" + // 0x00540323: 0x00001E6C + "\x00t\x03#\x00\x00\x1em" + // 0x00740323: 0x00001E6D + "\x00T\x031\x00\x00\x1en" + // 0x00540331: 0x00001E6E + "\x00t\x031\x00\x00\x1eo" + // 0x00740331: 0x00001E6F + "\x00T\x03-\x00\x00\x1ep" + // 0x0054032D: 0x00001E70 + "\x00t\x03-\x00\x00\x1eq" + // 0x0074032D: 0x00001E71 + "\x00U\x03$\x00\x00\x1er" + // 0x00550324: 0x00001E72 + "\x00u\x03$\x00\x00\x1es" + // 0x00750324: 0x00001E73 + "\x00U\x030\x00\x00\x1et" + // 0x00550330: 0x00001E74 + "\x00u\x030\x00\x00\x1eu" + // 0x00750330: 0x00001E75 + "\x00U\x03-\x00\x00\x1ev" + // 0x0055032D: 0x00001E76 + "\x00u\x03-\x00\x00\x1ew" + // 0x0075032D: 0x00001E77 + "\x01h\x03\x01\x00\x00\x1ex" + // 0x01680301: 0x00001E78 + "\x01i\x03\x01\x00\x00\x1ey" + // 0x01690301: 0x00001E79 + "\x01j\x03\b\x00\x00\x1ez" + // 0x016A0308: 0x00001E7A + "\x01k\x03\b\x00\x00\x1e{" + // 0x016B0308: 0x00001E7B + "\x00V\x03\x03\x00\x00\x1e|" + // 0x00560303: 0x00001E7C + "\x00v\x03\x03\x00\x00\x1e}" + // 0x00760303: 0x00001E7D + "\x00V\x03#\x00\x00\x1e~" + // 0x00560323: 0x00001E7E + "\x00v\x03#\x00\x00\x1e\x7f" + // 0x00760323: 0x00001E7F + "\x00W\x03\x00\x00\x00\x1e\x80" + // 0x00570300: 0x00001E80 + "\x00w\x03\x00\x00\x00\x1e\x81" + // 0x00770300: 0x00001E81 + "\x00W\x03\x01\x00\x00\x1e\x82" + // 0x00570301: 0x00001E82 + "\x00w\x03\x01\x00\x00\x1e\x83" + // 0x00770301: 0x00001E83 + "\x00W\x03\b\x00\x00\x1e\x84" + // 0x00570308: 0x00001E84 + "\x00w\x03\b\x00\x00\x1e\x85" + // 0x00770308: 0x00001E85 + "\x00W\x03\a\x00\x00\x1e\x86" + // 0x00570307: 0x00001E86 + "\x00w\x03\a\x00\x00\x1e\x87" + // 0x00770307: 0x00001E87 + "\x00W\x03#\x00\x00\x1e\x88" + // 0x00570323: 0x00001E88 + "\x00w\x03#\x00\x00\x1e\x89" + // 0x00770323: 0x00001E89 + "\x00X\x03\a\x00\x00\x1e\x8a" + // 0x00580307: 0x00001E8A + "\x00x\x03\a\x00\x00\x1e\x8b" + // 0x00780307: 0x00001E8B + "\x00X\x03\b\x00\x00\x1e\x8c" + // 0x00580308: 0x00001E8C + "\x00x\x03\b\x00\x00\x1e\x8d" + // 0x00780308: 0x00001E8D + "\x00Y\x03\a\x00\x00\x1e\x8e" + // 0x00590307: 0x00001E8E + "\x00y\x03\a\x00\x00\x1e\x8f" + // 0x00790307: 0x00001E8F + "\x00Z\x03\x02\x00\x00\x1e\x90" + // 0x005A0302: 0x00001E90 + "\x00z\x03\x02\x00\x00\x1e\x91" + // 0x007A0302: 0x00001E91 + "\x00Z\x03#\x00\x00\x1e\x92" + // 0x005A0323: 0x00001E92 + "\x00z\x03#\x00\x00\x1e\x93" + // 0x007A0323: 0x00001E93 + "\x00Z\x031\x00\x00\x1e\x94" + // 0x005A0331: 0x00001E94 + "\x00z\x031\x00\x00\x1e\x95" + // 0x007A0331: 0x00001E95 + "\x00h\x031\x00\x00\x1e\x96" + // 0x00680331: 0x00001E96 + "\x00t\x03\b\x00\x00\x1e\x97" + // 0x00740308: 0x00001E97 + "\x00w\x03\n\x00\x00\x1e\x98" + // 0x0077030A: 0x00001E98 + "\x00y\x03\n\x00\x00\x1e\x99" + // 0x0079030A: 0x00001E99 + "\x01\x7f\x03\a\x00\x00\x1e\x9b" + // 0x017F0307: 0x00001E9B + "\x00A\x03#\x00\x00\x1e\xa0" + // 0x00410323: 0x00001EA0 + "\x00a\x03#\x00\x00\x1e\xa1" + // 0x00610323: 0x00001EA1 + "\x00A\x03\t\x00\x00\x1e\xa2" + // 0x00410309: 0x00001EA2 + "\x00a\x03\t\x00\x00\x1e\xa3" + // 0x00610309: 0x00001EA3 + "\x00\xc2\x03\x01\x00\x00\x1e\xa4" + // 0x00C20301: 0x00001EA4 + "\x00\xe2\x03\x01\x00\x00\x1e\xa5" + // 0x00E20301: 0x00001EA5 + "\x00\xc2\x03\x00\x00\x00\x1e\xa6" + // 0x00C20300: 0x00001EA6 + "\x00\xe2\x03\x00\x00\x00\x1e\xa7" + // 0x00E20300: 0x00001EA7 + "\x00\xc2\x03\t\x00\x00\x1e\xa8" + // 0x00C20309: 0x00001EA8 + "\x00\xe2\x03\t\x00\x00\x1e\xa9" + // 0x00E20309: 0x00001EA9 + "\x00\xc2\x03\x03\x00\x00\x1e\xaa" + // 0x00C20303: 0x00001EAA + "\x00\xe2\x03\x03\x00\x00\x1e\xab" + // 0x00E20303: 0x00001EAB + "\x1e\xa0\x03\x02\x00\x00\x1e\xac" 
+ // 0x1EA00302: 0x00001EAC + "\x1e\xa1\x03\x02\x00\x00\x1e\xad" + // 0x1EA10302: 0x00001EAD + "\x01\x02\x03\x01\x00\x00\x1e\xae" + // 0x01020301: 0x00001EAE + "\x01\x03\x03\x01\x00\x00\x1e\xaf" + // 0x01030301: 0x00001EAF + "\x01\x02\x03\x00\x00\x00\x1e\xb0" + // 0x01020300: 0x00001EB0 + "\x01\x03\x03\x00\x00\x00\x1e\xb1" + // 0x01030300: 0x00001EB1 + "\x01\x02\x03\t\x00\x00\x1e\xb2" + // 0x01020309: 0x00001EB2 + "\x01\x03\x03\t\x00\x00\x1e\xb3" + // 0x01030309: 0x00001EB3 + "\x01\x02\x03\x03\x00\x00\x1e\xb4" + // 0x01020303: 0x00001EB4 + "\x01\x03\x03\x03\x00\x00\x1e\xb5" + // 0x01030303: 0x00001EB5 + "\x1e\xa0\x03\x06\x00\x00\x1e\xb6" + // 0x1EA00306: 0x00001EB6 + "\x1e\xa1\x03\x06\x00\x00\x1e\xb7" + // 0x1EA10306: 0x00001EB7 + "\x00E\x03#\x00\x00\x1e\xb8" + // 0x00450323: 0x00001EB8 + "\x00e\x03#\x00\x00\x1e\xb9" + // 0x00650323: 0x00001EB9 + "\x00E\x03\t\x00\x00\x1e\xba" + // 0x00450309: 0x00001EBA + "\x00e\x03\t\x00\x00\x1e\xbb" + // 0x00650309: 0x00001EBB + "\x00E\x03\x03\x00\x00\x1e\xbc" + // 0x00450303: 0x00001EBC + "\x00e\x03\x03\x00\x00\x1e\xbd" + // 0x00650303: 0x00001EBD + "\x00\xca\x03\x01\x00\x00\x1e\xbe" + // 0x00CA0301: 0x00001EBE + "\x00\xea\x03\x01\x00\x00\x1e\xbf" + // 0x00EA0301: 0x00001EBF + "\x00\xca\x03\x00\x00\x00\x1e\xc0" + // 0x00CA0300: 0x00001EC0 + "\x00\xea\x03\x00\x00\x00\x1e\xc1" + // 0x00EA0300: 0x00001EC1 + "\x00\xca\x03\t\x00\x00\x1e\xc2" + // 0x00CA0309: 0x00001EC2 + "\x00\xea\x03\t\x00\x00\x1e\xc3" + // 0x00EA0309: 0x00001EC3 + "\x00\xca\x03\x03\x00\x00\x1e\xc4" + // 0x00CA0303: 0x00001EC4 + "\x00\xea\x03\x03\x00\x00\x1e\xc5" + // 0x00EA0303: 0x00001EC5 + "\x1e\xb8\x03\x02\x00\x00\x1e\xc6" + // 0x1EB80302: 0x00001EC6 + "\x1e\xb9\x03\x02\x00\x00\x1e\xc7" + // 0x1EB90302: 0x00001EC7 + "\x00I\x03\t\x00\x00\x1e\xc8" + // 0x00490309: 0x00001EC8 + "\x00i\x03\t\x00\x00\x1e\xc9" + // 0x00690309: 0x00001EC9 + "\x00I\x03#\x00\x00\x1e\xca" + // 0x00490323: 0x00001ECA + "\x00i\x03#\x00\x00\x1e\xcb" + // 0x00690323: 0x00001ECB + "\x00O\x03#\x00\x00\x1e\xcc" + // 0x004F0323: 0x00001ECC + "\x00o\x03#\x00\x00\x1e\xcd" + // 0x006F0323: 0x00001ECD + "\x00O\x03\t\x00\x00\x1e\xce" + // 0x004F0309: 0x00001ECE + "\x00o\x03\t\x00\x00\x1e\xcf" + // 0x006F0309: 0x00001ECF + "\x00\xd4\x03\x01\x00\x00\x1e\xd0" + // 0x00D40301: 0x00001ED0 + "\x00\xf4\x03\x01\x00\x00\x1e\xd1" + // 0x00F40301: 0x00001ED1 + "\x00\xd4\x03\x00\x00\x00\x1e\xd2" + // 0x00D40300: 0x00001ED2 + "\x00\xf4\x03\x00\x00\x00\x1e\xd3" + // 0x00F40300: 0x00001ED3 + "\x00\xd4\x03\t\x00\x00\x1e\xd4" + // 0x00D40309: 0x00001ED4 + "\x00\xf4\x03\t\x00\x00\x1e\xd5" + // 0x00F40309: 0x00001ED5 + "\x00\xd4\x03\x03\x00\x00\x1e\xd6" + // 0x00D40303: 0x00001ED6 + "\x00\xf4\x03\x03\x00\x00\x1e\xd7" + // 0x00F40303: 0x00001ED7 + "\x1e\xcc\x03\x02\x00\x00\x1e\xd8" + // 0x1ECC0302: 0x00001ED8 + "\x1e\xcd\x03\x02\x00\x00\x1e\xd9" + // 0x1ECD0302: 0x00001ED9 + "\x01\xa0\x03\x01\x00\x00\x1e\xda" + // 0x01A00301: 0x00001EDA + "\x01\xa1\x03\x01\x00\x00\x1e\xdb" + // 0x01A10301: 0x00001EDB + "\x01\xa0\x03\x00\x00\x00\x1e\xdc" + // 0x01A00300: 0x00001EDC + "\x01\xa1\x03\x00\x00\x00\x1e\xdd" + // 0x01A10300: 0x00001EDD + "\x01\xa0\x03\t\x00\x00\x1e\xde" + // 0x01A00309: 0x00001EDE + "\x01\xa1\x03\t\x00\x00\x1e\xdf" + // 0x01A10309: 0x00001EDF + "\x01\xa0\x03\x03\x00\x00\x1e\xe0" + // 0x01A00303: 0x00001EE0 + "\x01\xa1\x03\x03\x00\x00\x1e\xe1" + // 0x01A10303: 0x00001EE1 + "\x01\xa0\x03#\x00\x00\x1e\xe2" + // 0x01A00323: 0x00001EE2 + "\x01\xa1\x03#\x00\x00\x1e\xe3" + // 0x01A10323: 0x00001EE3 + "\x00U\x03#\x00\x00\x1e\xe4" + // 
0x00550323: 0x00001EE4 + "\x00u\x03#\x00\x00\x1e\xe5" + // 0x00750323: 0x00001EE5 + "\x00U\x03\t\x00\x00\x1e\xe6" + // 0x00550309: 0x00001EE6 + "\x00u\x03\t\x00\x00\x1e\xe7" + // 0x00750309: 0x00001EE7 + "\x01\xaf\x03\x01\x00\x00\x1e\xe8" + // 0x01AF0301: 0x00001EE8 + "\x01\xb0\x03\x01\x00\x00\x1e\xe9" + // 0x01B00301: 0x00001EE9 + "\x01\xaf\x03\x00\x00\x00\x1e\xea" + // 0x01AF0300: 0x00001EEA + "\x01\xb0\x03\x00\x00\x00\x1e\xeb" + // 0x01B00300: 0x00001EEB + "\x01\xaf\x03\t\x00\x00\x1e\xec" + // 0x01AF0309: 0x00001EEC + "\x01\xb0\x03\t\x00\x00\x1e\xed" + // 0x01B00309: 0x00001EED + "\x01\xaf\x03\x03\x00\x00\x1e\xee" + // 0x01AF0303: 0x00001EEE + "\x01\xb0\x03\x03\x00\x00\x1e\xef" + // 0x01B00303: 0x00001EEF + "\x01\xaf\x03#\x00\x00\x1e\xf0" + // 0x01AF0323: 0x00001EF0 + "\x01\xb0\x03#\x00\x00\x1e\xf1" + // 0x01B00323: 0x00001EF1 + "\x00Y\x03\x00\x00\x00\x1e\xf2" + // 0x00590300: 0x00001EF2 + "\x00y\x03\x00\x00\x00\x1e\xf3" + // 0x00790300: 0x00001EF3 + "\x00Y\x03#\x00\x00\x1e\xf4" + // 0x00590323: 0x00001EF4 + "\x00y\x03#\x00\x00\x1e\xf5" + // 0x00790323: 0x00001EF5 + "\x00Y\x03\t\x00\x00\x1e\xf6" + // 0x00590309: 0x00001EF6 + "\x00y\x03\t\x00\x00\x1e\xf7" + // 0x00790309: 0x00001EF7 + "\x00Y\x03\x03\x00\x00\x1e\xf8" + // 0x00590303: 0x00001EF8 + "\x00y\x03\x03\x00\x00\x1e\xf9" + // 0x00790303: 0x00001EF9 + "\x03\xb1\x03\x13\x00\x00\x1f\x00" + // 0x03B10313: 0x00001F00 + "\x03\xb1\x03\x14\x00\x00\x1f\x01" + // 0x03B10314: 0x00001F01 + "\x1f\x00\x03\x00\x00\x00\x1f\x02" + // 0x1F000300: 0x00001F02 + "\x1f\x01\x03\x00\x00\x00\x1f\x03" + // 0x1F010300: 0x00001F03 + "\x1f\x00\x03\x01\x00\x00\x1f\x04" + // 0x1F000301: 0x00001F04 + "\x1f\x01\x03\x01\x00\x00\x1f\x05" + // 0x1F010301: 0x00001F05 + "\x1f\x00\x03B\x00\x00\x1f\x06" + // 0x1F000342: 0x00001F06 + "\x1f\x01\x03B\x00\x00\x1f\a" + // 0x1F010342: 0x00001F07 + "\x03\x91\x03\x13\x00\x00\x1f\b" + // 0x03910313: 0x00001F08 + "\x03\x91\x03\x14\x00\x00\x1f\t" + // 0x03910314: 0x00001F09 + "\x1f\b\x03\x00\x00\x00\x1f\n" + // 0x1F080300: 0x00001F0A + "\x1f\t\x03\x00\x00\x00\x1f\v" + // 0x1F090300: 0x00001F0B + "\x1f\b\x03\x01\x00\x00\x1f\f" + // 0x1F080301: 0x00001F0C + "\x1f\t\x03\x01\x00\x00\x1f\r" + // 0x1F090301: 0x00001F0D + "\x1f\b\x03B\x00\x00\x1f\x0e" + // 0x1F080342: 0x00001F0E + "\x1f\t\x03B\x00\x00\x1f\x0f" + // 0x1F090342: 0x00001F0F + "\x03\xb5\x03\x13\x00\x00\x1f\x10" + // 0x03B50313: 0x00001F10 + "\x03\xb5\x03\x14\x00\x00\x1f\x11" + // 0x03B50314: 0x00001F11 + "\x1f\x10\x03\x00\x00\x00\x1f\x12" + // 0x1F100300: 0x00001F12 + "\x1f\x11\x03\x00\x00\x00\x1f\x13" + // 0x1F110300: 0x00001F13 + "\x1f\x10\x03\x01\x00\x00\x1f\x14" + // 0x1F100301: 0x00001F14 + "\x1f\x11\x03\x01\x00\x00\x1f\x15" + // 0x1F110301: 0x00001F15 + "\x03\x95\x03\x13\x00\x00\x1f\x18" + // 0x03950313: 0x00001F18 + "\x03\x95\x03\x14\x00\x00\x1f\x19" + // 0x03950314: 0x00001F19 + "\x1f\x18\x03\x00\x00\x00\x1f\x1a" + // 0x1F180300: 0x00001F1A + "\x1f\x19\x03\x00\x00\x00\x1f\x1b" + // 0x1F190300: 0x00001F1B + "\x1f\x18\x03\x01\x00\x00\x1f\x1c" + // 0x1F180301: 0x00001F1C + "\x1f\x19\x03\x01\x00\x00\x1f\x1d" + // 0x1F190301: 0x00001F1D + "\x03\xb7\x03\x13\x00\x00\x1f " + // 0x03B70313: 0x00001F20 + "\x03\xb7\x03\x14\x00\x00\x1f!" 
+ // 0x03B70314: 0x00001F21 + "\x1f \x03\x00\x00\x00\x1f\"" + // 0x1F200300: 0x00001F22 + "\x1f!\x03\x00\x00\x00\x1f#" + // 0x1F210300: 0x00001F23 + "\x1f \x03\x01\x00\x00\x1f$" + // 0x1F200301: 0x00001F24 + "\x1f!\x03\x01\x00\x00\x1f%" + // 0x1F210301: 0x00001F25 + "\x1f \x03B\x00\x00\x1f&" + // 0x1F200342: 0x00001F26 + "\x1f!\x03B\x00\x00\x1f'" + // 0x1F210342: 0x00001F27 + "\x03\x97\x03\x13\x00\x00\x1f(" + // 0x03970313: 0x00001F28 + "\x03\x97\x03\x14\x00\x00\x1f)" + // 0x03970314: 0x00001F29 + "\x1f(\x03\x00\x00\x00\x1f*" + // 0x1F280300: 0x00001F2A + "\x1f)\x03\x00\x00\x00\x1f+" + // 0x1F290300: 0x00001F2B + "\x1f(\x03\x01\x00\x00\x1f," + // 0x1F280301: 0x00001F2C + "\x1f)\x03\x01\x00\x00\x1f-" + // 0x1F290301: 0x00001F2D + "\x1f(\x03B\x00\x00\x1f." + // 0x1F280342: 0x00001F2E + "\x1f)\x03B\x00\x00\x1f/" + // 0x1F290342: 0x00001F2F + "\x03\xb9\x03\x13\x00\x00\x1f0" + // 0x03B90313: 0x00001F30 + "\x03\xb9\x03\x14\x00\x00\x1f1" + // 0x03B90314: 0x00001F31 + "\x1f0\x03\x00\x00\x00\x1f2" + // 0x1F300300: 0x00001F32 + "\x1f1\x03\x00\x00\x00\x1f3" + // 0x1F310300: 0x00001F33 + "\x1f0\x03\x01\x00\x00\x1f4" + // 0x1F300301: 0x00001F34 + "\x1f1\x03\x01\x00\x00\x1f5" + // 0x1F310301: 0x00001F35 + "\x1f0\x03B\x00\x00\x1f6" + // 0x1F300342: 0x00001F36 + "\x1f1\x03B\x00\x00\x1f7" + // 0x1F310342: 0x00001F37 + "\x03\x99\x03\x13\x00\x00\x1f8" + // 0x03990313: 0x00001F38 + "\x03\x99\x03\x14\x00\x00\x1f9" + // 0x03990314: 0x00001F39 + "\x1f8\x03\x00\x00\x00\x1f:" + // 0x1F380300: 0x00001F3A + "\x1f9\x03\x00\x00\x00\x1f;" + // 0x1F390300: 0x00001F3B + "\x1f8\x03\x01\x00\x00\x1f<" + // 0x1F380301: 0x00001F3C + "\x1f9\x03\x01\x00\x00\x1f=" + // 0x1F390301: 0x00001F3D + "\x1f8\x03B\x00\x00\x1f>" + // 0x1F380342: 0x00001F3E + "\x1f9\x03B\x00\x00\x1f?" + // 0x1F390342: 0x00001F3F + "\x03\xbf\x03\x13\x00\x00\x1f@" + // 0x03BF0313: 0x00001F40 + "\x03\xbf\x03\x14\x00\x00\x1fA" + // 0x03BF0314: 0x00001F41 + "\x1f@\x03\x00\x00\x00\x1fB" + // 0x1F400300: 0x00001F42 + "\x1fA\x03\x00\x00\x00\x1fC" + // 0x1F410300: 0x00001F43 + "\x1f@\x03\x01\x00\x00\x1fD" + // 0x1F400301: 0x00001F44 + "\x1fA\x03\x01\x00\x00\x1fE" + // 0x1F410301: 0x00001F45 + "\x03\x9f\x03\x13\x00\x00\x1fH" + // 0x039F0313: 0x00001F48 + "\x03\x9f\x03\x14\x00\x00\x1fI" + // 0x039F0314: 0x00001F49 + "\x1fH\x03\x00\x00\x00\x1fJ" + // 0x1F480300: 0x00001F4A + "\x1fI\x03\x00\x00\x00\x1fK" + // 0x1F490300: 0x00001F4B + "\x1fH\x03\x01\x00\x00\x1fL" + // 0x1F480301: 0x00001F4C + "\x1fI\x03\x01\x00\x00\x1fM" + // 0x1F490301: 0x00001F4D + "\x03\xc5\x03\x13\x00\x00\x1fP" + // 0x03C50313: 0x00001F50 + "\x03\xc5\x03\x14\x00\x00\x1fQ" + // 0x03C50314: 0x00001F51 + "\x1fP\x03\x00\x00\x00\x1fR" + // 0x1F500300: 0x00001F52 + "\x1fQ\x03\x00\x00\x00\x1fS" + // 0x1F510300: 0x00001F53 + "\x1fP\x03\x01\x00\x00\x1fT" + // 0x1F500301: 0x00001F54 + "\x1fQ\x03\x01\x00\x00\x1fU" + // 0x1F510301: 0x00001F55 + "\x1fP\x03B\x00\x00\x1fV" + // 0x1F500342: 0x00001F56 + "\x1fQ\x03B\x00\x00\x1fW" + // 0x1F510342: 0x00001F57 + "\x03\xa5\x03\x14\x00\x00\x1fY" + // 0x03A50314: 0x00001F59 + "\x1fY\x03\x00\x00\x00\x1f[" + // 0x1F590300: 0x00001F5B + "\x1fY\x03\x01\x00\x00\x1f]" + // 0x1F590301: 0x00001F5D + "\x1fY\x03B\x00\x00\x1f_" + // 0x1F590342: 0x00001F5F + "\x03\xc9\x03\x13\x00\x00\x1f`" + // 0x03C90313: 0x00001F60 + "\x03\xc9\x03\x14\x00\x00\x1fa" + // 0x03C90314: 0x00001F61 + "\x1f`\x03\x00\x00\x00\x1fb" + // 0x1F600300: 0x00001F62 + "\x1fa\x03\x00\x00\x00\x1fc" + // 0x1F610300: 0x00001F63 + "\x1f`\x03\x01\x00\x00\x1fd" + // 0x1F600301: 0x00001F64 + "\x1fa\x03\x01\x00\x00\x1fe" + 
// 0x1F610301: 0x00001F65 + "\x1f`\x03B\x00\x00\x1ff" + // 0x1F600342: 0x00001F66 + "\x1fa\x03B\x00\x00\x1fg" + // 0x1F610342: 0x00001F67 + "\x03\xa9\x03\x13\x00\x00\x1fh" + // 0x03A90313: 0x00001F68 + "\x03\xa9\x03\x14\x00\x00\x1fi" + // 0x03A90314: 0x00001F69 + "\x1fh\x03\x00\x00\x00\x1fj" + // 0x1F680300: 0x00001F6A + "\x1fi\x03\x00\x00\x00\x1fk" + // 0x1F690300: 0x00001F6B + "\x1fh\x03\x01\x00\x00\x1fl" + // 0x1F680301: 0x00001F6C + "\x1fi\x03\x01\x00\x00\x1fm" + // 0x1F690301: 0x00001F6D + "\x1fh\x03B\x00\x00\x1fn" + // 0x1F680342: 0x00001F6E + "\x1fi\x03B\x00\x00\x1fo" + // 0x1F690342: 0x00001F6F + "\x03\xb1\x03\x00\x00\x00\x1fp" + // 0x03B10300: 0x00001F70 + "\x03\xb5\x03\x00\x00\x00\x1fr" + // 0x03B50300: 0x00001F72 + "\x03\xb7\x03\x00\x00\x00\x1ft" + // 0x03B70300: 0x00001F74 + "\x03\xb9\x03\x00\x00\x00\x1fv" + // 0x03B90300: 0x00001F76 + "\x03\xbf\x03\x00\x00\x00\x1fx" + // 0x03BF0300: 0x00001F78 + "\x03\xc5\x03\x00\x00\x00\x1fz" + // 0x03C50300: 0x00001F7A + "\x03\xc9\x03\x00\x00\x00\x1f|" + // 0x03C90300: 0x00001F7C + "\x1f\x00\x03E\x00\x00\x1f\x80" + // 0x1F000345: 0x00001F80 + "\x1f\x01\x03E\x00\x00\x1f\x81" + // 0x1F010345: 0x00001F81 + "\x1f\x02\x03E\x00\x00\x1f\x82" + // 0x1F020345: 0x00001F82 + "\x1f\x03\x03E\x00\x00\x1f\x83" + // 0x1F030345: 0x00001F83 + "\x1f\x04\x03E\x00\x00\x1f\x84" + // 0x1F040345: 0x00001F84 + "\x1f\x05\x03E\x00\x00\x1f\x85" + // 0x1F050345: 0x00001F85 + "\x1f\x06\x03E\x00\x00\x1f\x86" + // 0x1F060345: 0x00001F86 + "\x1f\a\x03E\x00\x00\x1f\x87" + // 0x1F070345: 0x00001F87 + "\x1f\b\x03E\x00\x00\x1f\x88" + // 0x1F080345: 0x00001F88 + "\x1f\t\x03E\x00\x00\x1f\x89" + // 0x1F090345: 0x00001F89 + "\x1f\n\x03E\x00\x00\x1f\x8a" + // 0x1F0A0345: 0x00001F8A + "\x1f\v\x03E\x00\x00\x1f\x8b" + // 0x1F0B0345: 0x00001F8B + "\x1f\f\x03E\x00\x00\x1f\x8c" + // 0x1F0C0345: 0x00001F8C + "\x1f\r\x03E\x00\x00\x1f\x8d" + // 0x1F0D0345: 0x00001F8D + "\x1f\x0e\x03E\x00\x00\x1f\x8e" + // 0x1F0E0345: 0x00001F8E + "\x1f\x0f\x03E\x00\x00\x1f\x8f" + // 0x1F0F0345: 0x00001F8F + "\x1f \x03E\x00\x00\x1f\x90" + // 0x1F200345: 0x00001F90 + "\x1f!\x03E\x00\x00\x1f\x91" + // 0x1F210345: 0x00001F91 + "\x1f\"\x03E\x00\x00\x1f\x92" + // 0x1F220345: 0x00001F92 + "\x1f#\x03E\x00\x00\x1f\x93" + // 0x1F230345: 0x00001F93 + "\x1f$\x03E\x00\x00\x1f\x94" + // 0x1F240345: 0x00001F94 + "\x1f%\x03E\x00\x00\x1f\x95" + // 0x1F250345: 0x00001F95 + "\x1f&\x03E\x00\x00\x1f\x96" + // 0x1F260345: 0x00001F96 + "\x1f'\x03E\x00\x00\x1f\x97" + // 0x1F270345: 0x00001F97 + "\x1f(\x03E\x00\x00\x1f\x98" + // 0x1F280345: 0x00001F98 + "\x1f)\x03E\x00\x00\x1f\x99" + // 0x1F290345: 0x00001F99 + "\x1f*\x03E\x00\x00\x1f\x9a" + // 0x1F2A0345: 0x00001F9A + "\x1f+\x03E\x00\x00\x1f\x9b" + // 0x1F2B0345: 0x00001F9B + "\x1f,\x03E\x00\x00\x1f\x9c" + // 0x1F2C0345: 0x00001F9C + "\x1f-\x03E\x00\x00\x1f\x9d" + // 0x1F2D0345: 0x00001F9D + "\x1f.\x03E\x00\x00\x1f\x9e" + // 0x1F2E0345: 0x00001F9E + "\x1f/\x03E\x00\x00\x1f\x9f" + // 0x1F2F0345: 0x00001F9F + "\x1f`\x03E\x00\x00\x1f\xa0" + // 0x1F600345: 0x00001FA0 + "\x1fa\x03E\x00\x00\x1f\xa1" + // 0x1F610345: 0x00001FA1 + "\x1fb\x03E\x00\x00\x1f\xa2" + // 0x1F620345: 0x00001FA2 + "\x1fc\x03E\x00\x00\x1f\xa3" + // 0x1F630345: 0x00001FA3 + "\x1fd\x03E\x00\x00\x1f\xa4" + // 0x1F640345: 0x00001FA4 + "\x1fe\x03E\x00\x00\x1f\xa5" + // 0x1F650345: 0x00001FA5 + "\x1ff\x03E\x00\x00\x1f\xa6" + // 0x1F660345: 0x00001FA6 + "\x1fg\x03E\x00\x00\x1f\xa7" + // 0x1F670345: 0x00001FA7 + "\x1fh\x03E\x00\x00\x1f\xa8" + // 0x1F680345: 0x00001FA8 + "\x1fi\x03E\x00\x00\x1f\xa9" + // 0x1F690345: 
0x00001FA9 + "\x1fj\x03E\x00\x00\x1f\xaa" + // 0x1F6A0345: 0x00001FAA + "\x1fk\x03E\x00\x00\x1f\xab" + // 0x1F6B0345: 0x00001FAB + "\x1fl\x03E\x00\x00\x1f\xac" + // 0x1F6C0345: 0x00001FAC + "\x1fm\x03E\x00\x00\x1f\xad" + // 0x1F6D0345: 0x00001FAD + "\x1fn\x03E\x00\x00\x1f\xae" + // 0x1F6E0345: 0x00001FAE + "\x1fo\x03E\x00\x00\x1f\xaf" + // 0x1F6F0345: 0x00001FAF + "\x03\xb1\x03\x06\x00\x00\x1f\xb0" + // 0x03B10306: 0x00001FB0 + "\x03\xb1\x03\x04\x00\x00\x1f\xb1" + // 0x03B10304: 0x00001FB1 + "\x1fp\x03E\x00\x00\x1f\xb2" + // 0x1F700345: 0x00001FB2 + "\x03\xb1\x03E\x00\x00\x1f\xb3" + // 0x03B10345: 0x00001FB3 + "\x03\xac\x03E\x00\x00\x1f\xb4" + // 0x03AC0345: 0x00001FB4 + "\x03\xb1\x03B\x00\x00\x1f\xb6" + // 0x03B10342: 0x00001FB6 + "\x1f\xb6\x03E\x00\x00\x1f\xb7" + // 0x1FB60345: 0x00001FB7 + "\x03\x91\x03\x06\x00\x00\x1f\xb8" + // 0x03910306: 0x00001FB8 + "\x03\x91\x03\x04\x00\x00\x1f\xb9" + // 0x03910304: 0x00001FB9 + "\x03\x91\x03\x00\x00\x00\x1f\xba" + // 0x03910300: 0x00001FBA + "\x03\x91\x03E\x00\x00\x1f\xbc" + // 0x03910345: 0x00001FBC + "\x00\xa8\x03B\x00\x00\x1f\xc1" + // 0x00A80342: 0x00001FC1 + "\x1ft\x03E\x00\x00\x1f\xc2" + // 0x1F740345: 0x00001FC2 + "\x03\xb7\x03E\x00\x00\x1f\xc3" + // 0x03B70345: 0x00001FC3 + "\x03\xae\x03E\x00\x00\x1f\xc4" + // 0x03AE0345: 0x00001FC4 + "\x03\xb7\x03B\x00\x00\x1f\xc6" + // 0x03B70342: 0x00001FC6 + "\x1f\xc6\x03E\x00\x00\x1f\xc7" + // 0x1FC60345: 0x00001FC7 + "\x03\x95\x03\x00\x00\x00\x1f\xc8" + // 0x03950300: 0x00001FC8 + "\x03\x97\x03\x00\x00\x00\x1f\xca" + // 0x03970300: 0x00001FCA + "\x03\x97\x03E\x00\x00\x1f\xcc" + // 0x03970345: 0x00001FCC + "\x1f\xbf\x03\x00\x00\x00\x1f\xcd" + // 0x1FBF0300: 0x00001FCD + "\x1f\xbf\x03\x01\x00\x00\x1f\xce" + // 0x1FBF0301: 0x00001FCE + "\x1f\xbf\x03B\x00\x00\x1f\xcf" + // 0x1FBF0342: 0x00001FCF + "\x03\xb9\x03\x06\x00\x00\x1f\xd0" + // 0x03B90306: 0x00001FD0 + "\x03\xb9\x03\x04\x00\x00\x1f\xd1" + // 0x03B90304: 0x00001FD1 + "\x03\xca\x03\x00\x00\x00\x1f\xd2" + // 0x03CA0300: 0x00001FD2 + "\x03\xb9\x03B\x00\x00\x1f\xd6" + // 0x03B90342: 0x00001FD6 + "\x03\xca\x03B\x00\x00\x1f\xd7" + // 0x03CA0342: 0x00001FD7 + "\x03\x99\x03\x06\x00\x00\x1f\xd8" + // 0x03990306: 0x00001FD8 + "\x03\x99\x03\x04\x00\x00\x1f\xd9" + // 0x03990304: 0x00001FD9 + "\x03\x99\x03\x00\x00\x00\x1f\xda" + // 0x03990300: 0x00001FDA + "\x1f\xfe\x03\x00\x00\x00\x1f\xdd" + // 0x1FFE0300: 0x00001FDD + "\x1f\xfe\x03\x01\x00\x00\x1f\xde" + // 0x1FFE0301: 0x00001FDE + "\x1f\xfe\x03B\x00\x00\x1f\xdf" + // 0x1FFE0342: 0x00001FDF + "\x03\xc5\x03\x06\x00\x00\x1f\xe0" + // 0x03C50306: 0x00001FE0 + "\x03\xc5\x03\x04\x00\x00\x1f\xe1" + // 0x03C50304: 0x00001FE1 + "\x03\xcb\x03\x00\x00\x00\x1f\xe2" + // 0x03CB0300: 0x00001FE2 + "\x03\xc1\x03\x13\x00\x00\x1f\xe4" + // 0x03C10313: 0x00001FE4 + "\x03\xc1\x03\x14\x00\x00\x1f\xe5" + // 0x03C10314: 0x00001FE5 + "\x03\xc5\x03B\x00\x00\x1f\xe6" + // 0x03C50342: 0x00001FE6 + "\x03\xcb\x03B\x00\x00\x1f\xe7" + // 0x03CB0342: 0x00001FE7 + "\x03\xa5\x03\x06\x00\x00\x1f\xe8" + // 0x03A50306: 0x00001FE8 + "\x03\xa5\x03\x04\x00\x00\x1f\xe9" + // 0x03A50304: 0x00001FE9 + "\x03\xa5\x03\x00\x00\x00\x1f\xea" + // 0x03A50300: 0x00001FEA + "\x03\xa1\x03\x14\x00\x00\x1f\xec" + // 0x03A10314: 0x00001FEC + "\x00\xa8\x03\x00\x00\x00\x1f\xed" + // 0x00A80300: 0x00001FED + "\x1f|\x03E\x00\x00\x1f\xf2" + // 0x1F7C0345: 0x00001FF2 + "\x03\xc9\x03E\x00\x00\x1f\xf3" + // 0x03C90345: 0x00001FF3 + "\x03\xce\x03E\x00\x00\x1f\xf4" + // 0x03CE0345: 0x00001FF4 + "\x03\xc9\x03B\x00\x00\x1f\xf6" + // 0x03C90342: 0x00001FF6 + 
"\x1f\xf6\x03E\x00\x00\x1f\xf7" + // 0x1FF60345: 0x00001FF7 + "\x03\x9f\x03\x00\x00\x00\x1f\xf8" + // 0x039F0300: 0x00001FF8 + "\x03\xa9\x03\x00\x00\x00\x1f\xfa" + // 0x03A90300: 0x00001FFA + "\x03\xa9\x03E\x00\x00\x1f\xfc" + // 0x03A90345: 0x00001FFC + "!\x90\x038\x00\x00!\x9a" + // 0x21900338: 0x0000219A + "!\x92\x038\x00\x00!\x9b" + // 0x21920338: 0x0000219B + "!\x94\x038\x00\x00!\xae" + // 0x21940338: 0x000021AE + "!\xd0\x038\x00\x00!\xcd" + // 0x21D00338: 0x000021CD + "!\xd4\x038\x00\x00!\xce" + // 0x21D40338: 0x000021CE + "!\xd2\x038\x00\x00!\xcf" + // 0x21D20338: 0x000021CF + "\"\x03\x038\x00\x00\"\x04" + // 0x22030338: 0x00002204 + "\"\b\x038\x00\x00\"\t" + // 0x22080338: 0x00002209 + "\"\v\x038\x00\x00\"\f" + // 0x220B0338: 0x0000220C + "\"#\x038\x00\x00\"$" + // 0x22230338: 0x00002224 + "\"%\x038\x00\x00\"&" + // 0x22250338: 0x00002226 + "\"<\x038\x00\x00\"A" + // 0x223C0338: 0x00002241 + "\"C\x038\x00\x00\"D" + // 0x22430338: 0x00002244 + "\"E\x038\x00\x00\"G" + // 0x22450338: 0x00002247 + "\"H\x038\x00\x00\"I" + // 0x22480338: 0x00002249 + "\x00=\x038\x00\x00\"`" + // 0x003D0338: 0x00002260 + "\"a\x038\x00\x00\"b" + // 0x22610338: 0x00002262 + "\"M\x038\x00\x00\"m" + // 0x224D0338: 0x0000226D + "\x00<\x038\x00\x00\"n" + // 0x003C0338: 0x0000226E + "\x00>\x038\x00\x00\"o" + // 0x003E0338: 0x0000226F + "\"d\x038\x00\x00\"p" + // 0x22640338: 0x00002270 + "\"e\x038\x00\x00\"q" + // 0x22650338: 0x00002271 + "\"r\x038\x00\x00\"t" + // 0x22720338: 0x00002274 + "\"s\x038\x00\x00\"u" + // 0x22730338: 0x00002275 + "\"v\x038\x00\x00\"x" + // 0x22760338: 0x00002278 + "\"w\x038\x00\x00\"y" + // 0x22770338: 0x00002279 + "\"z\x038\x00\x00\"\x80" + // 0x227A0338: 0x00002280 + "\"{\x038\x00\x00\"\x81" + // 0x227B0338: 0x00002281 + "\"\x82\x038\x00\x00\"\x84" + // 0x22820338: 0x00002284 + "\"\x83\x038\x00\x00\"\x85" + // 0x22830338: 0x00002285 + "\"\x86\x038\x00\x00\"\x88" + // 0x22860338: 0x00002288 + "\"\x87\x038\x00\x00\"\x89" + // 0x22870338: 0x00002289 + "\"\xa2\x038\x00\x00\"\xac" + // 0x22A20338: 0x000022AC + "\"\xa8\x038\x00\x00\"\xad" + // 0x22A80338: 0x000022AD + "\"\xa9\x038\x00\x00\"\xae" + // 0x22A90338: 0x000022AE + "\"\xab\x038\x00\x00\"\xaf" + // 0x22AB0338: 0x000022AF + "\"|\x038\x00\x00\"\xe0" + // 0x227C0338: 0x000022E0 + "\"}\x038\x00\x00\"\xe1" + // 0x227D0338: 0x000022E1 + "\"\x91\x038\x00\x00\"\xe2" + // 0x22910338: 0x000022E2 + "\"\x92\x038\x00\x00\"\xe3" + // 0x22920338: 0x000022E3 + "\"\xb2\x038\x00\x00\"\xea" + // 0x22B20338: 0x000022EA + "\"\xb3\x038\x00\x00\"\xeb" + // 0x22B30338: 0x000022EB + "\"\xb4\x038\x00\x00\"\xec" + // 0x22B40338: 0x000022EC + "\"\xb5\x038\x00\x00\"\xed" + // 0x22B50338: 0x000022ED + "0K0\x99\x00\x000L" + // 0x304B3099: 0x0000304C + "0M0\x99\x00\x000N" + // 0x304D3099: 0x0000304E + "0O0\x99\x00\x000P" + // 0x304F3099: 0x00003050 + "0Q0\x99\x00\x000R" + // 0x30513099: 0x00003052 + "0S0\x99\x00\x000T" + // 0x30533099: 0x00003054 + "0U0\x99\x00\x000V" + // 0x30553099: 0x00003056 + "0W0\x99\x00\x000X" + // 0x30573099: 0x00003058 + "0Y0\x99\x00\x000Z" + // 0x30593099: 0x0000305A + "0[0\x99\x00\x000\\" + // 0x305B3099: 0x0000305C + "0]0\x99\x00\x000^" + // 0x305D3099: 0x0000305E + "0_0\x99\x00\x000`" + // 0x305F3099: 0x00003060 + "0a0\x99\x00\x000b" + // 0x30613099: 0x00003062 + "0d0\x99\x00\x000e" + // 0x30643099: 0x00003065 + "0f0\x99\x00\x000g" + // 0x30663099: 0x00003067 + "0h0\x99\x00\x000i" + // 0x30683099: 0x00003069 + "0o0\x99\x00\x000p" + // 0x306F3099: 0x00003070 + "0o0\x9a\x00\x000q" + // 0x306F309A: 0x00003071 + "0r0\x99\x00\x000s" + // 
0x30723099: 0x00003073 + "0r0\x9a\x00\x000t" + // 0x3072309A: 0x00003074 + "0u0\x99\x00\x000v" + // 0x30753099: 0x00003076 + "0u0\x9a\x00\x000w" + // 0x3075309A: 0x00003077 + "0x0\x99\x00\x000y" + // 0x30783099: 0x00003079 + "0x0\x9a\x00\x000z" + // 0x3078309A: 0x0000307A + "0{0\x99\x00\x000|" + // 0x307B3099: 0x0000307C + "0{0\x9a\x00\x000}" + // 0x307B309A: 0x0000307D + "0F0\x99\x00\x000\x94" + // 0x30463099: 0x00003094 + "0\x9d0\x99\x00\x000\x9e" + // 0x309D3099: 0x0000309E + "0\xab0\x99\x00\x000\xac" + // 0x30AB3099: 0x000030AC + "0\xad0\x99\x00\x000\xae" + // 0x30AD3099: 0x000030AE + "0\xaf0\x99\x00\x000\xb0" + // 0x30AF3099: 0x000030B0 + "0\xb10\x99\x00\x000\xb2" + // 0x30B13099: 0x000030B2 + "0\xb30\x99\x00\x000\xb4" + // 0x30B33099: 0x000030B4 + "0\xb50\x99\x00\x000\xb6" + // 0x30B53099: 0x000030B6 + "0\xb70\x99\x00\x000\xb8" + // 0x30B73099: 0x000030B8 + "0\xb90\x99\x00\x000\xba" + // 0x30B93099: 0x000030BA + "0\xbb0\x99\x00\x000\xbc" + // 0x30BB3099: 0x000030BC + "0\xbd0\x99\x00\x000\xbe" + // 0x30BD3099: 0x000030BE + "0\xbf0\x99\x00\x000\xc0" + // 0x30BF3099: 0x000030C0 + "0\xc10\x99\x00\x000\xc2" + // 0x30C13099: 0x000030C2 + "0\xc40\x99\x00\x000\xc5" + // 0x30C43099: 0x000030C5 + "0\xc60\x99\x00\x000\xc7" + // 0x30C63099: 0x000030C7 + "0\xc80\x99\x00\x000\xc9" + // 0x30C83099: 0x000030C9 + "0\xcf0\x99\x00\x000\xd0" + // 0x30CF3099: 0x000030D0 + "0\xcf0\x9a\x00\x000\xd1" + // 0x30CF309A: 0x000030D1 + "0\xd20\x99\x00\x000\xd3" + // 0x30D23099: 0x000030D3 + "0\xd20\x9a\x00\x000\xd4" + // 0x30D2309A: 0x000030D4 + "0\xd50\x99\x00\x000\xd6" + // 0x30D53099: 0x000030D6 + "0\xd50\x9a\x00\x000\xd7" + // 0x30D5309A: 0x000030D7 + "0\xd80\x99\x00\x000\xd9" + // 0x30D83099: 0x000030D9 + "0\xd80\x9a\x00\x000\xda" + // 0x30D8309A: 0x000030DA + "0\xdb0\x99\x00\x000\xdc" + // 0x30DB3099: 0x000030DC + "0\xdb0\x9a\x00\x000\xdd" + // 0x30DB309A: 0x000030DD + "0\xa60\x99\x00\x000\xf4" + // 0x30A63099: 0x000030F4 + "0\xef0\x99\x00\x000\xf7" + // 0x30EF3099: 0x000030F7 + "0\xf00\x99\x00\x000\xf8" + // 0x30F03099: 0x000030F8 + "0\xf10\x99\x00\x000\xf9" + // 0x30F13099: 0x000030F9 + "0\xf20\x99\x00\x000\xfa" + // 0x30F23099: 0x000030FA + "0\xfd0\x99\x00\x000\xfe" + // 0x30FD3099: 0x000030FE + "\x10\x99\x10\xba\x00\x01\x10\x9a" + // 0x109910BA: 0x0001109A + "\x10\x9b\x10\xba\x00\x01\x10\x9c" + // 0x109B10BA: 0x0001109C + "\x10\xa5\x10\xba\x00\x01\x10\xab" + // 0x10A510BA: 0x000110AB + "\x111\x11'\x00\x01\x11." + // 0x11311127: 0x0001112E + "\x112\x11'\x00\x01\x11/" + // 0x11321127: 0x0001112F + "\x13G\x13>\x00\x01\x13K" + // 0x1347133E: 0x0001134B + "\x13G\x13W\x00\x01\x13L" + // 0x13471357: 0x0001134C + "\x14\xb9\x14\xba\x00\x01\x14\xbb" + // 0x14B914BA: 0x000114BB + "\x14\xb9\x14\xb0\x00\x01\x14\xbc" + // 0x14B914B0: 0x000114BC + "\x14\xb9\x14\xbd\x00\x01\x14\xbe" + // 0x14B914BD: 0x000114BE + "\x15\xb8\x15\xaf\x00\x01\x15\xba" + // 0x15B815AF: 0x000115BA + "\x15\xb9\x15\xaf\x00\x01\x15\xbb" + // 0x15B915AF: 0x000115BB + "\x195\x190\x00\x01\x198" + // 0x19351930: 0x00011938 + "" + // Total size of tables: 56KB (57068 bytes) diff --git a/tools/vendor/golang.org/x/text/width/tables13.0.0.go b/tools/vendor/golang.org/x/text/width/tables13.0.0.go index ab258e3848..b1fcb522cb 100644 --- a/tools/vendor/golang.org/x/text/width/tables13.0.0.go +++ b/tools/vendor/golang.org/x/text/width/tables13.0.0.go @@ -1,7 +1,7 @@ // Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. 
-//go:build go1.16 -// +build go1.16 +//go:build go1.16 && !go1.21 +// +build go1.16,!go1.21 package width diff --git a/tools/vendor/golang.org/x/text/width/tables15.0.0.go b/tools/vendor/golang.org/x/text/width/tables15.0.0.go new file mode 100644 index 0000000000..4b91e3384d --- /dev/null +++ b/tools/vendor/golang.org/x/text/width/tables15.0.0.go @@ -0,0 +1,1368 @@ +// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. + +//go:build go1.21 +// +build go1.21 + +package width + +// UnicodeVersion is the Unicode version from which the tables in this package are derived. +const UnicodeVersion = "15.0.0" + +// lookup returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *widthTrie) lookup(s []byte) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return widthValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = widthIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *widthTrie) lookupUnsafe(s []byte) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return widthValues[c0] + } + i := widthIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = widthIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = widthIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// lookupString returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *widthTrie) lookupString(s string) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return widthValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. 
+ case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = widthIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *widthTrie) lookupStringUnsafe(s string) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return widthValues[c0] + } + i := widthIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = widthIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = widthIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// widthTrie. Total size: 14912 bytes (14.56 KiB). Checksum: 4468b6cd178303d2. +type widthTrie struct{} + +func newWidthTrie(i int) *widthTrie { + return &widthTrie{} +} + +// lookupValue determines the type of block n and looks up the value for b. +func (t *widthTrie) lookupValue(n uint32, b byte) uint16 { + switch { + default: + return uint16(widthValues[n<<6+uint32(b)]) + } +} + +// widthValues: 105 blocks, 6720 entries, 13440 bytes +// The third block is the zero block. 
+var widthValues = [6720]uint16{ + // Block 0x0, offset 0x0 + 0x20: 0x6001, 0x21: 0x6002, 0x22: 0x6002, 0x23: 0x6002, + 0x24: 0x6002, 0x25: 0x6002, 0x26: 0x6002, 0x27: 0x6002, 0x28: 0x6002, 0x29: 0x6002, + 0x2a: 0x6002, 0x2b: 0x6002, 0x2c: 0x6002, 0x2d: 0x6002, 0x2e: 0x6002, 0x2f: 0x6002, + 0x30: 0x6002, 0x31: 0x6002, 0x32: 0x6002, 0x33: 0x6002, 0x34: 0x6002, 0x35: 0x6002, + 0x36: 0x6002, 0x37: 0x6002, 0x38: 0x6002, 0x39: 0x6002, 0x3a: 0x6002, 0x3b: 0x6002, + 0x3c: 0x6002, 0x3d: 0x6002, 0x3e: 0x6002, 0x3f: 0x6002, + // Block 0x1, offset 0x40 + 0x40: 0x6003, 0x41: 0x6003, 0x42: 0x6003, 0x43: 0x6003, 0x44: 0x6003, 0x45: 0x6003, + 0x46: 0x6003, 0x47: 0x6003, 0x48: 0x6003, 0x49: 0x6003, 0x4a: 0x6003, 0x4b: 0x6003, + 0x4c: 0x6003, 0x4d: 0x6003, 0x4e: 0x6003, 0x4f: 0x6003, 0x50: 0x6003, 0x51: 0x6003, + 0x52: 0x6003, 0x53: 0x6003, 0x54: 0x6003, 0x55: 0x6003, 0x56: 0x6003, 0x57: 0x6003, + 0x58: 0x6003, 0x59: 0x6003, 0x5a: 0x6003, 0x5b: 0x6003, 0x5c: 0x6003, 0x5d: 0x6003, + 0x5e: 0x6003, 0x5f: 0x6003, 0x60: 0x6004, 0x61: 0x6004, 0x62: 0x6004, 0x63: 0x6004, + 0x64: 0x6004, 0x65: 0x6004, 0x66: 0x6004, 0x67: 0x6004, 0x68: 0x6004, 0x69: 0x6004, + 0x6a: 0x6004, 0x6b: 0x6004, 0x6c: 0x6004, 0x6d: 0x6004, 0x6e: 0x6004, 0x6f: 0x6004, + 0x70: 0x6004, 0x71: 0x6004, 0x72: 0x6004, 0x73: 0x6004, 0x74: 0x6004, 0x75: 0x6004, + 0x76: 0x6004, 0x77: 0x6004, 0x78: 0x6004, 0x79: 0x6004, 0x7a: 0x6004, 0x7b: 0x6004, + 0x7c: 0x6004, 0x7d: 0x6004, 0x7e: 0x6004, + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xe1: 0x2000, 0xe2: 0x6005, 0xe3: 0x6005, + 0xe4: 0x2000, 0xe5: 0x6006, 0xe6: 0x6005, 0xe7: 0x2000, 0xe8: 0x2000, + 0xea: 0x2000, 0xec: 0x6007, 0xed: 0x2000, 0xee: 0x2000, 0xef: 0x6008, + 0xf0: 0x2000, 0xf1: 0x2000, 0xf2: 0x2000, 0xf3: 0x2000, 0xf4: 0x2000, + 0xf6: 0x2000, 0xf7: 0x2000, 0xf8: 0x2000, 0xf9: 0x2000, 0xfa: 0x2000, + 0xfc: 0x2000, 0xfd: 0x2000, 0xfe: 0x2000, 0xff: 0x2000, + // Block 0x4, offset 0x100 + 0x106: 0x2000, + 0x110: 0x2000, + 0x117: 0x2000, + 0x118: 0x2000, + 0x11e: 0x2000, 0x11f: 0x2000, 0x120: 0x2000, 0x121: 0x2000, + 0x126: 0x2000, 0x128: 0x2000, 0x129: 0x2000, + 0x12a: 0x2000, 0x12c: 0x2000, 0x12d: 0x2000, + 0x130: 0x2000, 0x132: 0x2000, 0x133: 0x2000, + 0x137: 0x2000, 0x138: 0x2000, 0x139: 0x2000, 0x13a: 0x2000, + 0x13c: 0x2000, 0x13e: 0x2000, + // Block 0x5, offset 0x140 + 0x141: 0x2000, + 0x151: 0x2000, + 0x153: 0x2000, + 0x15b: 0x2000, + 0x166: 0x2000, 0x167: 0x2000, + 0x16b: 0x2000, + 0x171: 0x2000, 0x172: 0x2000, 0x173: 0x2000, + 0x178: 0x2000, + 0x17f: 0x2000, + // Block 0x6, offset 0x180 + 0x180: 0x2000, 0x181: 0x2000, 0x182: 0x2000, 0x184: 0x2000, + 0x188: 0x2000, 0x189: 0x2000, 0x18a: 0x2000, 0x18b: 0x2000, + 0x18d: 0x2000, + 0x192: 0x2000, 0x193: 0x2000, + 0x1a6: 0x2000, 0x1a7: 0x2000, + 0x1ab: 0x2000, + // Block 0x7, offset 0x1c0 + 0x1ce: 0x2000, 0x1d0: 0x2000, + 0x1d2: 0x2000, 0x1d4: 0x2000, 0x1d6: 0x2000, + 0x1d8: 0x2000, 0x1da: 0x2000, 0x1dc: 0x2000, + // Block 0x8, offset 0x200 + 0x211: 0x2000, + 0x221: 0x2000, + // Block 0x9, offset 0x240 + 0x244: 0x2000, + 0x247: 0x2000, 0x249: 0x2000, 0x24a: 0x2000, 0x24b: 0x2000, + 0x24d: 0x2000, 0x250: 0x2000, + 0x258: 0x2000, 0x259: 0x2000, 0x25a: 0x2000, 0x25b: 0x2000, 0x25d: 0x2000, + 0x25f: 0x2000, + // Block 0xa, offset 0x280 + 0x280: 0x2000, 0x281: 0x2000, 0x282: 0x2000, 0x283: 0x2000, 0x284: 0x2000, 0x285: 0x2000, + 0x286: 0x2000, 0x287: 0x2000, 0x288: 0x2000, 0x289: 0x2000, 0x28a: 0x2000, 0x28b: 0x2000, + 0x28c: 0x2000, 0x28d: 0x2000, 0x28e: 0x2000, 0x28f: 0x2000, 0x290: 0x2000, 0x291: 0x2000, + 0x292: 0x2000, 0x293: 
0x2000, 0x294: 0x2000, 0x295: 0x2000, 0x296: 0x2000, 0x297: 0x2000, + 0x298: 0x2000, 0x299: 0x2000, 0x29a: 0x2000, 0x29b: 0x2000, 0x29c: 0x2000, 0x29d: 0x2000, + 0x29e: 0x2000, 0x29f: 0x2000, 0x2a0: 0x2000, 0x2a1: 0x2000, 0x2a2: 0x2000, 0x2a3: 0x2000, + 0x2a4: 0x2000, 0x2a5: 0x2000, 0x2a6: 0x2000, 0x2a7: 0x2000, 0x2a8: 0x2000, 0x2a9: 0x2000, + 0x2aa: 0x2000, 0x2ab: 0x2000, 0x2ac: 0x2000, 0x2ad: 0x2000, 0x2ae: 0x2000, 0x2af: 0x2000, + 0x2b0: 0x2000, 0x2b1: 0x2000, 0x2b2: 0x2000, 0x2b3: 0x2000, 0x2b4: 0x2000, 0x2b5: 0x2000, + 0x2b6: 0x2000, 0x2b7: 0x2000, 0x2b8: 0x2000, 0x2b9: 0x2000, 0x2ba: 0x2000, 0x2bb: 0x2000, + 0x2bc: 0x2000, 0x2bd: 0x2000, 0x2be: 0x2000, 0x2bf: 0x2000, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x2000, 0x2c1: 0x2000, 0x2c2: 0x2000, 0x2c3: 0x2000, 0x2c4: 0x2000, 0x2c5: 0x2000, + 0x2c6: 0x2000, 0x2c7: 0x2000, 0x2c8: 0x2000, 0x2c9: 0x2000, 0x2ca: 0x2000, 0x2cb: 0x2000, + 0x2cc: 0x2000, 0x2cd: 0x2000, 0x2ce: 0x2000, 0x2cf: 0x2000, 0x2d0: 0x2000, 0x2d1: 0x2000, + 0x2d2: 0x2000, 0x2d3: 0x2000, 0x2d4: 0x2000, 0x2d5: 0x2000, 0x2d6: 0x2000, 0x2d7: 0x2000, + 0x2d8: 0x2000, 0x2d9: 0x2000, 0x2da: 0x2000, 0x2db: 0x2000, 0x2dc: 0x2000, 0x2dd: 0x2000, + 0x2de: 0x2000, 0x2df: 0x2000, 0x2e0: 0x2000, 0x2e1: 0x2000, 0x2e2: 0x2000, 0x2e3: 0x2000, + 0x2e4: 0x2000, 0x2e5: 0x2000, 0x2e6: 0x2000, 0x2e7: 0x2000, 0x2e8: 0x2000, 0x2e9: 0x2000, + 0x2ea: 0x2000, 0x2eb: 0x2000, 0x2ec: 0x2000, 0x2ed: 0x2000, 0x2ee: 0x2000, 0x2ef: 0x2000, + // Block 0xc, offset 0x300 + 0x311: 0x2000, + 0x312: 0x2000, 0x313: 0x2000, 0x314: 0x2000, 0x315: 0x2000, 0x316: 0x2000, 0x317: 0x2000, + 0x318: 0x2000, 0x319: 0x2000, 0x31a: 0x2000, 0x31b: 0x2000, 0x31c: 0x2000, 0x31d: 0x2000, + 0x31e: 0x2000, 0x31f: 0x2000, 0x320: 0x2000, 0x321: 0x2000, 0x323: 0x2000, + 0x324: 0x2000, 0x325: 0x2000, 0x326: 0x2000, 0x327: 0x2000, 0x328: 0x2000, 0x329: 0x2000, + 0x331: 0x2000, 0x332: 0x2000, 0x333: 0x2000, 0x334: 0x2000, 0x335: 0x2000, + 0x336: 0x2000, 0x337: 0x2000, 0x338: 0x2000, 0x339: 0x2000, 0x33a: 0x2000, 0x33b: 0x2000, + 0x33c: 0x2000, 0x33d: 0x2000, 0x33e: 0x2000, 0x33f: 0x2000, + // Block 0xd, offset 0x340 + 0x340: 0x2000, 0x341: 0x2000, 0x343: 0x2000, 0x344: 0x2000, 0x345: 0x2000, + 0x346: 0x2000, 0x347: 0x2000, 0x348: 0x2000, 0x349: 0x2000, + // Block 0xe, offset 0x380 + 0x381: 0x2000, + 0x390: 0x2000, 0x391: 0x2000, + 0x392: 0x2000, 0x393: 0x2000, 0x394: 0x2000, 0x395: 0x2000, 0x396: 0x2000, 0x397: 0x2000, + 0x398: 0x2000, 0x399: 0x2000, 0x39a: 0x2000, 0x39b: 0x2000, 0x39c: 0x2000, 0x39d: 0x2000, + 0x39e: 0x2000, 0x39f: 0x2000, 0x3a0: 0x2000, 0x3a1: 0x2000, 0x3a2: 0x2000, 0x3a3: 0x2000, + 0x3a4: 0x2000, 0x3a5: 0x2000, 0x3a6: 0x2000, 0x3a7: 0x2000, 0x3a8: 0x2000, 0x3a9: 0x2000, + 0x3aa: 0x2000, 0x3ab: 0x2000, 0x3ac: 0x2000, 0x3ad: 0x2000, 0x3ae: 0x2000, 0x3af: 0x2000, + 0x3b0: 0x2000, 0x3b1: 0x2000, 0x3b2: 0x2000, 0x3b3: 0x2000, 0x3b4: 0x2000, 0x3b5: 0x2000, + 0x3b6: 0x2000, 0x3b7: 0x2000, 0x3b8: 0x2000, 0x3b9: 0x2000, 0x3ba: 0x2000, 0x3bb: 0x2000, + 0x3bc: 0x2000, 0x3bd: 0x2000, 0x3be: 0x2000, 0x3bf: 0x2000, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x2000, 0x3c1: 0x2000, 0x3c2: 0x2000, 0x3c3: 0x2000, 0x3c4: 0x2000, 0x3c5: 0x2000, + 0x3c6: 0x2000, 0x3c7: 0x2000, 0x3c8: 0x2000, 0x3c9: 0x2000, 0x3ca: 0x2000, 0x3cb: 0x2000, + 0x3cc: 0x2000, 0x3cd: 0x2000, 0x3ce: 0x2000, 0x3cf: 0x2000, 0x3d1: 0x2000, + // Block 0x10, offset 0x400 + 0x400: 0x4000, 0x401: 0x4000, 0x402: 0x4000, 0x403: 0x4000, 0x404: 0x4000, 0x405: 0x4000, + 0x406: 0x4000, 0x407: 0x4000, 0x408: 0x4000, 0x409: 0x4000, 0x40a: 0x4000, 0x40b: 0x4000, + 0x40c: 0x4000, 
0x40d: 0x4000, 0x40e: 0x4000, 0x40f: 0x4000, 0x410: 0x4000, 0x411: 0x4000, + 0x412: 0x4000, 0x413: 0x4000, 0x414: 0x4000, 0x415: 0x4000, 0x416: 0x4000, 0x417: 0x4000, + 0x418: 0x4000, 0x419: 0x4000, 0x41a: 0x4000, 0x41b: 0x4000, 0x41c: 0x4000, 0x41d: 0x4000, + 0x41e: 0x4000, 0x41f: 0x4000, 0x420: 0x4000, 0x421: 0x4000, 0x422: 0x4000, 0x423: 0x4000, + 0x424: 0x4000, 0x425: 0x4000, 0x426: 0x4000, 0x427: 0x4000, 0x428: 0x4000, 0x429: 0x4000, + 0x42a: 0x4000, 0x42b: 0x4000, 0x42c: 0x4000, 0x42d: 0x4000, 0x42e: 0x4000, 0x42f: 0x4000, + 0x430: 0x4000, 0x431: 0x4000, 0x432: 0x4000, 0x433: 0x4000, 0x434: 0x4000, 0x435: 0x4000, + 0x436: 0x4000, 0x437: 0x4000, 0x438: 0x4000, 0x439: 0x4000, 0x43a: 0x4000, 0x43b: 0x4000, + 0x43c: 0x4000, 0x43d: 0x4000, 0x43e: 0x4000, 0x43f: 0x4000, + // Block 0x11, offset 0x440 + 0x440: 0x4000, 0x441: 0x4000, 0x442: 0x4000, 0x443: 0x4000, 0x444: 0x4000, 0x445: 0x4000, + 0x446: 0x4000, 0x447: 0x4000, 0x448: 0x4000, 0x449: 0x4000, 0x44a: 0x4000, 0x44b: 0x4000, + 0x44c: 0x4000, 0x44d: 0x4000, 0x44e: 0x4000, 0x44f: 0x4000, 0x450: 0x4000, 0x451: 0x4000, + 0x452: 0x4000, 0x453: 0x4000, 0x454: 0x4000, 0x455: 0x4000, 0x456: 0x4000, 0x457: 0x4000, + 0x458: 0x4000, 0x459: 0x4000, 0x45a: 0x4000, 0x45b: 0x4000, 0x45c: 0x4000, 0x45d: 0x4000, + 0x45e: 0x4000, 0x45f: 0x4000, + // Block 0x12, offset 0x480 + 0x490: 0x2000, + 0x493: 0x2000, 0x494: 0x2000, 0x495: 0x2000, 0x496: 0x2000, + 0x498: 0x2000, 0x499: 0x2000, 0x49c: 0x2000, 0x49d: 0x2000, + 0x4a0: 0x2000, 0x4a1: 0x2000, 0x4a2: 0x2000, + 0x4a4: 0x2000, 0x4a5: 0x2000, 0x4a6: 0x2000, 0x4a7: 0x2000, + 0x4b0: 0x2000, 0x4b2: 0x2000, 0x4b3: 0x2000, 0x4b5: 0x2000, + 0x4bb: 0x2000, + 0x4be: 0x2000, + // Block 0x13, offset 0x4c0 + 0x4f4: 0x2000, + 0x4ff: 0x2000, + // Block 0x14, offset 0x500 + 0x501: 0x2000, 0x502: 0x2000, 0x503: 0x2000, 0x504: 0x2000, + 0x529: 0xa009, + 0x52c: 0x2000, + // Block 0x15, offset 0x540 + 0x543: 0x2000, 0x545: 0x2000, + 0x549: 0x2000, + 0x553: 0x2000, 0x556: 0x2000, + 0x561: 0x2000, 0x562: 0x2000, + 0x566: 0x2000, + 0x56b: 0x2000, + // Block 0x16, offset 0x580 + 0x593: 0x2000, 0x594: 0x2000, + 0x59b: 0x2000, 0x59c: 0x2000, 0x59d: 0x2000, + 0x59e: 0x2000, 0x5a0: 0x2000, 0x5a1: 0x2000, 0x5a2: 0x2000, 0x5a3: 0x2000, + 0x5a4: 0x2000, 0x5a5: 0x2000, 0x5a6: 0x2000, 0x5a7: 0x2000, 0x5a8: 0x2000, 0x5a9: 0x2000, + 0x5aa: 0x2000, 0x5ab: 0x2000, + 0x5b0: 0x2000, 0x5b1: 0x2000, 0x5b2: 0x2000, 0x5b3: 0x2000, 0x5b4: 0x2000, 0x5b5: 0x2000, + 0x5b6: 0x2000, 0x5b7: 0x2000, 0x5b8: 0x2000, 0x5b9: 0x2000, + // Block 0x17, offset 0x5c0 + 0x5c9: 0x2000, + 0x5d0: 0x200a, 0x5d1: 0x200b, + 0x5d2: 0x200a, 0x5d3: 0x200c, 0x5d4: 0x2000, 0x5d5: 0x2000, 0x5d6: 0x2000, 0x5d7: 0x2000, + 0x5d8: 0x2000, 0x5d9: 0x2000, + 0x5f8: 0x2000, 0x5f9: 0x2000, + // Block 0x18, offset 0x600 + 0x612: 0x2000, 0x614: 0x2000, + 0x627: 0x2000, + // Block 0x19, offset 0x640 + 0x640: 0x2000, 0x642: 0x2000, 0x643: 0x2000, + 0x647: 0x2000, 0x648: 0x2000, 0x64b: 0x2000, + 0x64f: 0x2000, 0x651: 0x2000, + 0x655: 0x2000, + 0x65a: 0x2000, 0x65d: 0x2000, + 0x65e: 0x2000, 0x65f: 0x2000, 0x660: 0x2000, 0x663: 0x2000, + 0x665: 0x2000, 0x667: 0x2000, 0x668: 0x2000, 0x669: 0x2000, + 0x66a: 0x2000, 0x66b: 0x2000, 0x66c: 0x2000, 0x66e: 0x2000, + 0x674: 0x2000, 0x675: 0x2000, + 0x676: 0x2000, 0x677: 0x2000, + 0x67c: 0x2000, 0x67d: 0x2000, + // Block 0x1a, offset 0x680 + 0x688: 0x2000, + 0x68c: 0x2000, + 0x692: 0x2000, + 0x6a0: 0x2000, 0x6a1: 0x2000, + 0x6a4: 0x2000, 0x6a5: 0x2000, 0x6a6: 0x2000, 0x6a7: 0x2000, + 0x6aa: 0x2000, 0x6ab: 0x2000, 0x6ae: 0x2000, 0x6af: 0x2000, + // 
Block 0x1b, offset 0x6c0 + 0x6c2: 0x2000, 0x6c3: 0x2000, + 0x6c6: 0x2000, 0x6c7: 0x2000, + 0x6d5: 0x2000, + 0x6d9: 0x2000, + 0x6e5: 0x2000, + 0x6ff: 0x2000, + // Block 0x1c, offset 0x700 + 0x712: 0x2000, + 0x71a: 0x4000, 0x71b: 0x4000, + 0x729: 0x4000, + 0x72a: 0x4000, + // Block 0x1d, offset 0x740 + 0x769: 0x4000, + 0x76a: 0x4000, 0x76b: 0x4000, 0x76c: 0x4000, + 0x770: 0x4000, 0x773: 0x4000, + // Block 0x1e, offset 0x780 + 0x7a0: 0x2000, 0x7a1: 0x2000, 0x7a2: 0x2000, 0x7a3: 0x2000, + 0x7a4: 0x2000, 0x7a5: 0x2000, 0x7a6: 0x2000, 0x7a7: 0x2000, 0x7a8: 0x2000, 0x7a9: 0x2000, + 0x7aa: 0x2000, 0x7ab: 0x2000, 0x7ac: 0x2000, 0x7ad: 0x2000, 0x7ae: 0x2000, 0x7af: 0x2000, + 0x7b0: 0x2000, 0x7b1: 0x2000, 0x7b2: 0x2000, 0x7b3: 0x2000, 0x7b4: 0x2000, 0x7b5: 0x2000, + 0x7b6: 0x2000, 0x7b7: 0x2000, 0x7b8: 0x2000, 0x7b9: 0x2000, 0x7ba: 0x2000, 0x7bb: 0x2000, + 0x7bc: 0x2000, 0x7bd: 0x2000, 0x7be: 0x2000, 0x7bf: 0x2000, + // Block 0x1f, offset 0x7c0 + 0x7c0: 0x2000, 0x7c1: 0x2000, 0x7c2: 0x2000, 0x7c3: 0x2000, 0x7c4: 0x2000, 0x7c5: 0x2000, + 0x7c6: 0x2000, 0x7c7: 0x2000, 0x7c8: 0x2000, 0x7c9: 0x2000, 0x7ca: 0x2000, 0x7cb: 0x2000, + 0x7cc: 0x2000, 0x7cd: 0x2000, 0x7ce: 0x2000, 0x7cf: 0x2000, 0x7d0: 0x2000, 0x7d1: 0x2000, + 0x7d2: 0x2000, 0x7d3: 0x2000, 0x7d4: 0x2000, 0x7d5: 0x2000, 0x7d6: 0x2000, 0x7d7: 0x2000, + 0x7d8: 0x2000, 0x7d9: 0x2000, 0x7da: 0x2000, 0x7db: 0x2000, 0x7dc: 0x2000, 0x7dd: 0x2000, + 0x7de: 0x2000, 0x7df: 0x2000, 0x7e0: 0x2000, 0x7e1: 0x2000, 0x7e2: 0x2000, 0x7e3: 0x2000, + 0x7e4: 0x2000, 0x7e5: 0x2000, 0x7e6: 0x2000, 0x7e7: 0x2000, 0x7e8: 0x2000, 0x7e9: 0x2000, + 0x7eb: 0x2000, 0x7ec: 0x2000, 0x7ed: 0x2000, 0x7ee: 0x2000, 0x7ef: 0x2000, + 0x7f0: 0x2000, 0x7f1: 0x2000, 0x7f2: 0x2000, 0x7f3: 0x2000, 0x7f4: 0x2000, 0x7f5: 0x2000, + 0x7f6: 0x2000, 0x7f7: 0x2000, 0x7f8: 0x2000, 0x7f9: 0x2000, 0x7fa: 0x2000, 0x7fb: 0x2000, + 0x7fc: 0x2000, 0x7fd: 0x2000, 0x7fe: 0x2000, 0x7ff: 0x2000, + // Block 0x20, offset 0x800 + 0x800: 0x2000, 0x801: 0x2000, 0x802: 0x200d, 0x803: 0x2000, 0x804: 0x2000, 0x805: 0x2000, + 0x806: 0x2000, 0x807: 0x2000, 0x808: 0x2000, 0x809: 0x2000, 0x80a: 0x2000, 0x80b: 0x2000, + 0x80c: 0x2000, 0x80d: 0x2000, 0x80e: 0x2000, 0x80f: 0x2000, 0x810: 0x2000, 0x811: 0x2000, + 0x812: 0x2000, 0x813: 0x2000, 0x814: 0x2000, 0x815: 0x2000, 0x816: 0x2000, 0x817: 0x2000, + 0x818: 0x2000, 0x819: 0x2000, 0x81a: 0x2000, 0x81b: 0x2000, 0x81c: 0x2000, 0x81d: 0x2000, + 0x81e: 0x2000, 0x81f: 0x2000, 0x820: 0x2000, 0x821: 0x2000, 0x822: 0x2000, 0x823: 0x2000, + 0x824: 0x2000, 0x825: 0x2000, 0x826: 0x2000, 0x827: 0x2000, 0x828: 0x2000, 0x829: 0x2000, + 0x82a: 0x2000, 0x82b: 0x2000, 0x82c: 0x2000, 0x82d: 0x2000, 0x82e: 0x2000, 0x82f: 0x2000, + 0x830: 0x2000, 0x831: 0x2000, 0x832: 0x2000, 0x833: 0x2000, 0x834: 0x2000, 0x835: 0x2000, + 0x836: 0x2000, 0x837: 0x2000, 0x838: 0x2000, 0x839: 0x2000, 0x83a: 0x2000, 0x83b: 0x2000, + 0x83c: 0x2000, 0x83d: 0x2000, 0x83e: 0x2000, 0x83f: 0x2000, + // Block 0x21, offset 0x840 + 0x840: 0x2000, 0x841: 0x2000, 0x842: 0x2000, 0x843: 0x2000, 0x844: 0x2000, 0x845: 0x2000, + 0x846: 0x2000, 0x847: 0x2000, 0x848: 0x2000, 0x849: 0x2000, 0x84a: 0x2000, 0x84b: 0x2000, + 0x850: 0x2000, 0x851: 0x2000, + 0x852: 0x2000, 0x853: 0x2000, 0x854: 0x2000, 0x855: 0x2000, 0x856: 0x2000, 0x857: 0x2000, + 0x858: 0x2000, 0x859: 0x2000, 0x85a: 0x2000, 0x85b: 0x2000, 0x85c: 0x2000, 0x85d: 0x2000, + 0x85e: 0x2000, 0x85f: 0x2000, 0x860: 0x2000, 0x861: 0x2000, 0x862: 0x2000, 0x863: 0x2000, + 0x864: 0x2000, 0x865: 0x2000, 0x866: 0x2000, 0x867: 0x2000, 0x868: 0x2000, 0x869: 0x2000, + 0x86a: 
0x2000, 0x86b: 0x2000, 0x86c: 0x2000, 0x86d: 0x2000, 0x86e: 0x2000, 0x86f: 0x2000, + 0x870: 0x2000, 0x871: 0x2000, 0x872: 0x2000, 0x873: 0x2000, + // Block 0x22, offset 0x880 + 0x880: 0x2000, 0x881: 0x2000, 0x882: 0x2000, 0x883: 0x2000, 0x884: 0x2000, 0x885: 0x2000, + 0x886: 0x2000, 0x887: 0x2000, 0x888: 0x2000, 0x889: 0x2000, 0x88a: 0x2000, 0x88b: 0x2000, + 0x88c: 0x2000, 0x88d: 0x2000, 0x88e: 0x2000, 0x88f: 0x2000, + 0x892: 0x2000, 0x893: 0x2000, 0x894: 0x2000, 0x895: 0x2000, + 0x8a0: 0x200e, 0x8a1: 0x2000, 0x8a3: 0x2000, + 0x8a4: 0x2000, 0x8a5: 0x2000, 0x8a6: 0x2000, 0x8a7: 0x2000, 0x8a8: 0x2000, 0x8a9: 0x2000, + 0x8b2: 0x2000, 0x8b3: 0x2000, + 0x8b6: 0x2000, 0x8b7: 0x2000, + 0x8bc: 0x2000, 0x8bd: 0x2000, + // Block 0x23, offset 0x8c0 + 0x8c0: 0x2000, 0x8c1: 0x2000, + 0x8c6: 0x2000, 0x8c7: 0x2000, 0x8c8: 0x2000, 0x8cb: 0x200f, + 0x8ce: 0x2000, 0x8cf: 0x2000, 0x8d0: 0x2000, 0x8d1: 0x2000, + 0x8e2: 0x2000, 0x8e3: 0x2000, + 0x8e4: 0x2000, 0x8e5: 0x2000, + 0x8ef: 0x2000, + 0x8fd: 0x4000, 0x8fe: 0x4000, + // Block 0x24, offset 0x900 + 0x905: 0x2000, + 0x906: 0x2000, 0x909: 0x2000, + 0x90e: 0x2000, 0x90f: 0x2000, + 0x914: 0x4000, 0x915: 0x4000, + 0x91c: 0x2000, + 0x91e: 0x2000, + // Block 0x25, offset 0x940 + 0x940: 0x2000, 0x942: 0x2000, + 0x948: 0x4000, 0x949: 0x4000, 0x94a: 0x4000, 0x94b: 0x4000, + 0x94c: 0x4000, 0x94d: 0x4000, 0x94e: 0x4000, 0x94f: 0x4000, 0x950: 0x4000, 0x951: 0x4000, + 0x952: 0x4000, 0x953: 0x4000, + 0x960: 0x2000, 0x961: 0x2000, 0x963: 0x2000, + 0x964: 0x2000, 0x965: 0x2000, 0x967: 0x2000, 0x968: 0x2000, 0x969: 0x2000, + 0x96a: 0x2000, 0x96c: 0x2000, 0x96d: 0x2000, 0x96f: 0x2000, + 0x97f: 0x4000, + // Block 0x26, offset 0x980 + 0x993: 0x4000, + 0x99e: 0x2000, 0x99f: 0x2000, 0x9a1: 0x4000, + 0x9aa: 0x4000, 0x9ab: 0x4000, + 0x9bd: 0x4000, 0x9be: 0x4000, 0x9bf: 0x2000, + // Block 0x27, offset 0x9c0 + 0x9c4: 0x4000, 0x9c5: 0x4000, + 0x9c6: 0x2000, 0x9c7: 0x2000, 0x9c8: 0x2000, 0x9c9: 0x2000, 0x9ca: 0x2000, 0x9cb: 0x2000, + 0x9cc: 0x2000, 0x9cd: 0x2000, 0x9ce: 0x4000, 0x9cf: 0x2000, 0x9d0: 0x2000, 0x9d1: 0x2000, + 0x9d2: 0x2000, 0x9d3: 0x2000, 0x9d4: 0x4000, 0x9d5: 0x2000, 0x9d6: 0x2000, 0x9d7: 0x2000, + 0x9d8: 0x2000, 0x9d9: 0x2000, 0x9da: 0x2000, 0x9db: 0x2000, 0x9dc: 0x2000, 0x9dd: 0x2000, + 0x9de: 0x2000, 0x9df: 0x2000, 0x9e0: 0x2000, 0x9e1: 0x2000, 0x9e3: 0x2000, + 0x9e8: 0x2000, 0x9e9: 0x2000, + 0x9ea: 0x4000, 0x9eb: 0x2000, 0x9ec: 0x2000, 0x9ed: 0x2000, 0x9ee: 0x2000, 0x9ef: 0x2000, + 0x9f0: 0x2000, 0x9f1: 0x2000, 0x9f2: 0x4000, 0x9f3: 0x4000, 0x9f4: 0x2000, 0x9f5: 0x4000, + 0x9f6: 0x2000, 0x9f7: 0x2000, 0x9f8: 0x2000, 0x9f9: 0x2000, 0x9fa: 0x4000, 0x9fb: 0x2000, + 0x9fc: 0x2000, 0x9fd: 0x4000, 0x9fe: 0x2000, 0x9ff: 0x2000, + // Block 0x28, offset 0xa00 + 0xa05: 0x4000, + 0xa0a: 0x4000, 0xa0b: 0x4000, + 0xa28: 0x4000, + 0xa3d: 0x2000, + // Block 0x29, offset 0xa40 + 0xa4c: 0x4000, 0xa4e: 0x4000, + 0xa53: 0x4000, 0xa54: 0x4000, 0xa55: 0x4000, 0xa57: 0x4000, + 0xa76: 0x2000, 0xa77: 0x2000, 0xa78: 0x2000, 0xa79: 0x2000, 0xa7a: 0x2000, 0xa7b: 0x2000, + 0xa7c: 0x2000, 0xa7d: 0x2000, 0xa7e: 0x2000, 0xa7f: 0x2000, + // Block 0x2a, offset 0xa80 + 0xa95: 0x4000, 0xa96: 0x4000, 0xa97: 0x4000, + 0xab0: 0x4000, + 0xabf: 0x4000, + // Block 0x2b, offset 0xac0 + 0xae6: 0x6000, 0xae7: 0x6000, 0xae8: 0x6000, 0xae9: 0x6000, + 0xaea: 0x6000, 0xaeb: 0x6000, 0xaec: 0x6000, 0xaed: 0x6000, + // Block 0x2c, offset 0xb00 + 0xb05: 0x6010, + 0xb06: 0x6011, + // Block 0x2d, offset 0xb40 + 0xb5b: 0x4000, 0xb5c: 0x4000, + // Block 0x2e, offset 0xb80 + 0xb90: 0x4000, + 0xb95: 0x4000, 0xb96: 
0x2000, 0xb97: 0x2000, + 0xb98: 0x2000, 0xb99: 0x2000, + // Block 0x2f, offset 0xbc0 + 0xbc0: 0x4000, 0xbc1: 0x4000, 0xbc2: 0x4000, 0xbc3: 0x4000, 0xbc4: 0x4000, 0xbc5: 0x4000, + 0xbc6: 0x4000, 0xbc7: 0x4000, 0xbc8: 0x4000, 0xbc9: 0x4000, 0xbca: 0x4000, 0xbcb: 0x4000, + 0xbcc: 0x4000, 0xbcd: 0x4000, 0xbce: 0x4000, 0xbcf: 0x4000, 0xbd0: 0x4000, 0xbd1: 0x4000, + 0xbd2: 0x4000, 0xbd3: 0x4000, 0xbd4: 0x4000, 0xbd5: 0x4000, 0xbd6: 0x4000, 0xbd7: 0x4000, + 0xbd8: 0x4000, 0xbd9: 0x4000, 0xbdb: 0x4000, 0xbdc: 0x4000, 0xbdd: 0x4000, + 0xbde: 0x4000, 0xbdf: 0x4000, 0xbe0: 0x4000, 0xbe1: 0x4000, 0xbe2: 0x4000, 0xbe3: 0x4000, + 0xbe4: 0x4000, 0xbe5: 0x4000, 0xbe6: 0x4000, 0xbe7: 0x4000, 0xbe8: 0x4000, 0xbe9: 0x4000, + 0xbea: 0x4000, 0xbeb: 0x4000, 0xbec: 0x4000, 0xbed: 0x4000, 0xbee: 0x4000, 0xbef: 0x4000, + 0xbf0: 0x4000, 0xbf1: 0x4000, 0xbf2: 0x4000, 0xbf3: 0x4000, 0xbf4: 0x4000, 0xbf5: 0x4000, + 0xbf6: 0x4000, 0xbf7: 0x4000, 0xbf8: 0x4000, 0xbf9: 0x4000, 0xbfa: 0x4000, 0xbfb: 0x4000, + 0xbfc: 0x4000, 0xbfd: 0x4000, 0xbfe: 0x4000, 0xbff: 0x4000, + // Block 0x30, offset 0xc00 + 0xc00: 0x4000, 0xc01: 0x4000, 0xc02: 0x4000, 0xc03: 0x4000, 0xc04: 0x4000, 0xc05: 0x4000, + 0xc06: 0x4000, 0xc07: 0x4000, 0xc08: 0x4000, 0xc09: 0x4000, 0xc0a: 0x4000, 0xc0b: 0x4000, + 0xc0c: 0x4000, 0xc0d: 0x4000, 0xc0e: 0x4000, 0xc0f: 0x4000, 0xc10: 0x4000, 0xc11: 0x4000, + 0xc12: 0x4000, 0xc13: 0x4000, 0xc14: 0x4000, 0xc15: 0x4000, 0xc16: 0x4000, 0xc17: 0x4000, + 0xc18: 0x4000, 0xc19: 0x4000, 0xc1a: 0x4000, 0xc1b: 0x4000, 0xc1c: 0x4000, 0xc1d: 0x4000, + 0xc1e: 0x4000, 0xc1f: 0x4000, 0xc20: 0x4000, 0xc21: 0x4000, 0xc22: 0x4000, 0xc23: 0x4000, + 0xc24: 0x4000, 0xc25: 0x4000, 0xc26: 0x4000, 0xc27: 0x4000, 0xc28: 0x4000, 0xc29: 0x4000, + 0xc2a: 0x4000, 0xc2b: 0x4000, 0xc2c: 0x4000, 0xc2d: 0x4000, 0xc2e: 0x4000, 0xc2f: 0x4000, + 0xc30: 0x4000, 0xc31: 0x4000, 0xc32: 0x4000, 0xc33: 0x4000, + // Block 0x31, offset 0xc40 + 0xc40: 0x4000, 0xc41: 0x4000, 0xc42: 0x4000, 0xc43: 0x4000, 0xc44: 0x4000, 0xc45: 0x4000, + 0xc46: 0x4000, 0xc47: 0x4000, 0xc48: 0x4000, 0xc49: 0x4000, 0xc4a: 0x4000, 0xc4b: 0x4000, + 0xc4c: 0x4000, 0xc4d: 0x4000, 0xc4e: 0x4000, 0xc4f: 0x4000, 0xc50: 0x4000, 0xc51: 0x4000, + 0xc52: 0x4000, 0xc53: 0x4000, 0xc54: 0x4000, 0xc55: 0x4000, + 0xc70: 0x4000, 0xc71: 0x4000, 0xc72: 0x4000, 0xc73: 0x4000, 0xc74: 0x4000, 0xc75: 0x4000, + 0xc76: 0x4000, 0xc77: 0x4000, 0xc78: 0x4000, 0xc79: 0x4000, 0xc7a: 0x4000, 0xc7b: 0x4000, + // Block 0x32, offset 0xc80 + 0xc80: 0x9012, 0xc81: 0x4013, 0xc82: 0x4014, 0xc83: 0x4000, 0xc84: 0x4000, 0xc85: 0x4000, + 0xc86: 0x4000, 0xc87: 0x4000, 0xc88: 0x4000, 0xc89: 0x4000, 0xc8a: 0x4000, 0xc8b: 0x4000, + 0xc8c: 0x4015, 0xc8d: 0x4015, 0xc8e: 0x4000, 0xc8f: 0x4000, 0xc90: 0x4000, 0xc91: 0x4000, + 0xc92: 0x4000, 0xc93: 0x4000, 0xc94: 0x4000, 0xc95: 0x4000, 0xc96: 0x4000, 0xc97: 0x4000, + 0xc98: 0x4000, 0xc99: 0x4000, 0xc9a: 0x4000, 0xc9b: 0x4000, 0xc9c: 0x4000, 0xc9d: 0x4000, + 0xc9e: 0x4000, 0xc9f: 0x4000, 0xca0: 0x4000, 0xca1: 0x4000, 0xca2: 0x4000, 0xca3: 0x4000, + 0xca4: 0x4000, 0xca5: 0x4000, 0xca6: 0x4000, 0xca7: 0x4000, 0xca8: 0x4000, 0xca9: 0x4000, + 0xcaa: 0x4000, 0xcab: 0x4000, 0xcac: 0x4000, 0xcad: 0x4000, 0xcae: 0x4000, 0xcaf: 0x4000, + 0xcb0: 0x4000, 0xcb1: 0x4000, 0xcb2: 0x4000, 0xcb3: 0x4000, 0xcb4: 0x4000, 0xcb5: 0x4000, + 0xcb6: 0x4000, 0xcb7: 0x4000, 0xcb8: 0x4000, 0xcb9: 0x4000, 0xcba: 0x4000, 0xcbb: 0x4000, + 0xcbc: 0x4000, 0xcbd: 0x4000, 0xcbe: 0x4000, + // Block 0x33, offset 0xcc0 + 0xcc1: 0x4000, 0xcc2: 0x4000, 0xcc3: 0x4000, 0xcc4: 0x4000, 0xcc5: 0x4000, + 0xcc6: 0x4000, 
0xcc7: 0x4000, 0xcc8: 0x4000, 0xcc9: 0x4000, 0xcca: 0x4000, 0xccb: 0x4000, + 0xccc: 0x4000, 0xccd: 0x4000, 0xcce: 0x4000, 0xccf: 0x4000, 0xcd0: 0x4000, 0xcd1: 0x4000, + 0xcd2: 0x4000, 0xcd3: 0x4000, 0xcd4: 0x4000, 0xcd5: 0x4000, 0xcd6: 0x4000, 0xcd7: 0x4000, + 0xcd8: 0x4000, 0xcd9: 0x4000, 0xcda: 0x4000, 0xcdb: 0x4000, 0xcdc: 0x4000, 0xcdd: 0x4000, + 0xcde: 0x4000, 0xcdf: 0x4000, 0xce0: 0x4000, 0xce1: 0x4000, 0xce2: 0x4000, 0xce3: 0x4000, + 0xce4: 0x4000, 0xce5: 0x4000, 0xce6: 0x4000, 0xce7: 0x4000, 0xce8: 0x4000, 0xce9: 0x4000, + 0xcea: 0x4000, 0xceb: 0x4000, 0xcec: 0x4000, 0xced: 0x4000, 0xcee: 0x4000, 0xcef: 0x4000, + 0xcf0: 0x4000, 0xcf1: 0x4000, 0xcf2: 0x4000, 0xcf3: 0x4000, 0xcf4: 0x4000, 0xcf5: 0x4000, + 0xcf6: 0x4000, 0xcf7: 0x4000, 0xcf8: 0x4000, 0xcf9: 0x4000, 0xcfa: 0x4000, 0xcfb: 0x4000, + 0xcfc: 0x4000, 0xcfd: 0x4000, 0xcfe: 0x4000, 0xcff: 0x4000, + // Block 0x34, offset 0xd00 + 0xd00: 0x4000, 0xd01: 0x4000, 0xd02: 0x4000, 0xd03: 0x4000, 0xd04: 0x4000, 0xd05: 0x4000, + 0xd06: 0x4000, 0xd07: 0x4000, 0xd08: 0x4000, 0xd09: 0x4000, 0xd0a: 0x4000, 0xd0b: 0x4000, + 0xd0c: 0x4000, 0xd0d: 0x4000, 0xd0e: 0x4000, 0xd0f: 0x4000, 0xd10: 0x4000, 0xd11: 0x4000, + 0xd12: 0x4000, 0xd13: 0x4000, 0xd14: 0x4000, 0xd15: 0x4000, 0xd16: 0x4000, + 0xd19: 0x4016, 0xd1a: 0x4017, 0xd1b: 0x4000, 0xd1c: 0x4000, 0xd1d: 0x4000, + 0xd1e: 0x4000, 0xd1f: 0x4000, 0xd20: 0x4000, 0xd21: 0x4018, 0xd22: 0x4019, 0xd23: 0x401a, + 0xd24: 0x401b, 0xd25: 0x401c, 0xd26: 0x401d, 0xd27: 0x401e, 0xd28: 0x401f, 0xd29: 0x4020, + 0xd2a: 0x4021, 0xd2b: 0x4022, 0xd2c: 0x4000, 0xd2d: 0x4010, 0xd2e: 0x4000, 0xd2f: 0x4023, + 0xd30: 0x4000, 0xd31: 0x4024, 0xd32: 0x4000, 0xd33: 0x4025, 0xd34: 0x4000, 0xd35: 0x4026, + 0xd36: 0x4000, 0xd37: 0x401a, 0xd38: 0x4000, 0xd39: 0x4027, 0xd3a: 0x4000, 0xd3b: 0x4028, + 0xd3c: 0x4000, 0xd3d: 0x4020, 0xd3e: 0x4000, 0xd3f: 0x4029, + // Block 0x35, offset 0xd40 + 0xd40: 0x4000, 0xd41: 0x402a, 0xd42: 0x4000, 0xd43: 0x402b, 0xd44: 0x402c, 0xd45: 0x4000, + 0xd46: 0x4017, 0xd47: 0x4000, 0xd48: 0x402d, 0xd49: 0x4000, 0xd4a: 0x402e, 0xd4b: 0x402f, + 0xd4c: 0x4030, 0xd4d: 0x4017, 0xd4e: 0x4016, 0xd4f: 0x4017, 0xd50: 0x4000, 0xd51: 0x4000, + 0xd52: 0x4031, 0xd53: 0x4000, 0xd54: 0x4000, 0xd55: 0x4031, 0xd56: 0x4000, 0xd57: 0x4000, + 0xd58: 0x4032, 0xd59: 0x4000, 0xd5a: 0x4000, 0xd5b: 0x4032, 0xd5c: 0x4000, 0xd5d: 0x4000, + 0xd5e: 0x4033, 0xd5f: 0x402e, 0xd60: 0x4034, 0xd61: 0x4035, 0xd62: 0x4034, 0xd63: 0x4036, + 0xd64: 0x4037, 0xd65: 0x4024, 0xd66: 0x4035, 0xd67: 0x4025, 0xd68: 0x4038, 0xd69: 0x4038, + 0xd6a: 0x4039, 0xd6b: 0x4039, 0xd6c: 0x403a, 0xd6d: 0x403a, 0xd6e: 0x4000, 0xd6f: 0x4035, + 0xd70: 0x4000, 0xd71: 0x4000, 0xd72: 0x403b, 0xd73: 0x403c, 0xd74: 0x4000, 0xd75: 0x4000, + 0xd76: 0x4000, 0xd77: 0x4000, 0xd78: 0x4000, 0xd79: 0x4000, 0xd7a: 0x4000, 0xd7b: 0x403d, + 0xd7c: 0x401c, 0xd7d: 0x4000, 0xd7e: 0x4000, 0xd7f: 0x4000, + // Block 0x36, offset 0xd80 + 0xd85: 0x4000, + 0xd86: 0x4000, 0xd87: 0x4000, 0xd88: 0x4000, 0xd89: 0x4000, 0xd8a: 0x4000, 0xd8b: 0x4000, + 0xd8c: 0x4000, 0xd8d: 0x4000, 0xd8e: 0x4000, 0xd8f: 0x4000, 0xd90: 0x4000, 0xd91: 0x4000, + 0xd92: 0x4000, 0xd93: 0x4000, 0xd94: 0x4000, 0xd95: 0x4000, 0xd96: 0x4000, 0xd97: 0x4000, + 0xd98: 0x4000, 0xd99: 0x4000, 0xd9a: 0x4000, 0xd9b: 0x4000, 0xd9c: 0x4000, 0xd9d: 0x4000, + 0xd9e: 0x4000, 0xd9f: 0x4000, 0xda0: 0x4000, 0xda1: 0x4000, 0xda2: 0x4000, 0xda3: 0x4000, + 0xda4: 0x4000, 0xda5: 0x4000, 0xda6: 0x4000, 0xda7: 0x4000, 0xda8: 0x4000, 0xda9: 0x4000, + 0xdaa: 0x4000, 0xdab: 0x4000, 0xdac: 0x4000, 0xdad: 0x4000, 0xdae: 0x4000, 0xdaf: 
0x4000, + 0xdb1: 0x403e, 0xdb2: 0x403e, 0xdb3: 0x403e, 0xdb4: 0x403e, 0xdb5: 0x403e, + 0xdb6: 0x403e, 0xdb7: 0x403e, 0xdb8: 0x403e, 0xdb9: 0x403e, 0xdba: 0x403e, 0xdbb: 0x403e, + 0xdbc: 0x403e, 0xdbd: 0x403e, 0xdbe: 0x403e, 0xdbf: 0x403e, + // Block 0x37, offset 0xdc0 + 0xdc0: 0x4037, 0xdc1: 0x4037, 0xdc2: 0x4037, 0xdc3: 0x4037, 0xdc4: 0x4037, 0xdc5: 0x4037, + 0xdc6: 0x4037, 0xdc7: 0x4037, 0xdc8: 0x4037, 0xdc9: 0x4037, 0xdca: 0x4037, 0xdcb: 0x4037, + 0xdcc: 0x4037, 0xdcd: 0x4037, 0xdce: 0x4037, 0xdcf: 0x400e, 0xdd0: 0x403f, 0xdd1: 0x4040, + 0xdd2: 0x4041, 0xdd3: 0x4040, 0xdd4: 0x403f, 0xdd5: 0x4042, 0xdd6: 0x4043, 0xdd7: 0x4044, + 0xdd8: 0x4040, 0xdd9: 0x4041, 0xdda: 0x4040, 0xddb: 0x4045, 0xddc: 0x4009, 0xddd: 0x4045, + 0xdde: 0x4046, 0xddf: 0x4045, 0xde0: 0x4047, 0xde1: 0x400b, 0xde2: 0x400a, 0xde3: 0x400c, + 0xde4: 0x4048, 0xde5: 0x4000, 0xde6: 0x4000, 0xde7: 0x4000, 0xde8: 0x4000, 0xde9: 0x4000, + 0xdea: 0x4000, 0xdeb: 0x4000, 0xdec: 0x4000, 0xded: 0x4000, 0xdee: 0x4000, 0xdef: 0x4000, + 0xdf0: 0x4000, 0xdf1: 0x4000, 0xdf2: 0x4000, 0xdf3: 0x4000, 0xdf4: 0x4000, 0xdf5: 0x4000, + 0xdf6: 0x4000, 0xdf7: 0x4000, 0xdf8: 0x4000, 0xdf9: 0x4000, 0xdfa: 0x4000, 0xdfb: 0x4000, + 0xdfc: 0x4000, 0xdfd: 0x4000, 0xdfe: 0x4000, 0xdff: 0x4000, + // Block 0x38, offset 0xe00 + 0xe00: 0x4000, 0xe01: 0x4000, 0xe02: 0x4000, 0xe03: 0x4000, 0xe04: 0x4000, 0xe05: 0x4000, + 0xe06: 0x4000, 0xe07: 0x4000, 0xe08: 0x4000, 0xe09: 0x4000, 0xe0a: 0x4000, 0xe0b: 0x4000, + 0xe0c: 0x4000, 0xe0d: 0x4000, 0xe0e: 0x4000, 0xe10: 0x4000, 0xe11: 0x4000, + 0xe12: 0x4000, 0xe13: 0x4000, 0xe14: 0x4000, 0xe15: 0x4000, 0xe16: 0x4000, 0xe17: 0x4000, + 0xe18: 0x4000, 0xe19: 0x4000, 0xe1a: 0x4000, 0xe1b: 0x4000, 0xe1c: 0x4000, 0xe1d: 0x4000, + 0xe1e: 0x4000, 0xe1f: 0x4000, 0xe20: 0x4000, 0xe21: 0x4000, 0xe22: 0x4000, 0xe23: 0x4000, + 0xe24: 0x4000, 0xe25: 0x4000, 0xe26: 0x4000, 0xe27: 0x4000, 0xe28: 0x4000, 0xe29: 0x4000, + 0xe2a: 0x4000, 0xe2b: 0x4000, 0xe2c: 0x4000, 0xe2d: 0x4000, 0xe2e: 0x4000, 0xe2f: 0x4000, + 0xe30: 0x4000, 0xe31: 0x4000, 0xe32: 0x4000, 0xe33: 0x4000, 0xe34: 0x4000, 0xe35: 0x4000, + 0xe36: 0x4000, 0xe37: 0x4000, 0xe38: 0x4000, 0xe39: 0x4000, 0xe3a: 0x4000, 0xe3b: 0x4000, + 0xe3c: 0x4000, 0xe3d: 0x4000, 0xe3e: 0x4000, 0xe3f: 0x4000, + // Block 0x39, offset 0xe40 + 0xe40: 0x4000, 0xe41: 0x4000, 0xe42: 0x4000, 0xe43: 0x4000, 0xe44: 0x4000, 0xe45: 0x4000, + 0xe46: 0x4000, 0xe47: 0x4000, 0xe48: 0x4000, 0xe49: 0x4000, 0xe4a: 0x4000, 0xe4b: 0x4000, + 0xe4c: 0x4000, 0xe4d: 0x4000, 0xe4e: 0x4000, 0xe4f: 0x4000, 0xe50: 0x4000, 0xe51: 0x4000, + 0xe52: 0x4000, 0xe53: 0x4000, 0xe54: 0x4000, 0xe55: 0x4000, 0xe56: 0x4000, 0xe57: 0x4000, + 0xe58: 0x4000, 0xe59: 0x4000, 0xe5a: 0x4000, 0xe5b: 0x4000, 0xe5c: 0x4000, 0xe5d: 0x4000, + 0xe5e: 0x4000, 0xe5f: 0x4000, 0xe60: 0x4000, 0xe61: 0x4000, 0xe62: 0x4000, 0xe63: 0x4000, + 0xe70: 0x4000, 0xe71: 0x4000, 0xe72: 0x4000, 0xe73: 0x4000, 0xe74: 0x4000, 0xe75: 0x4000, + 0xe76: 0x4000, 0xe77: 0x4000, 0xe78: 0x4000, 0xe79: 0x4000, 0xe7a: 0x4000, 0xe7b: 0x4000, + 0xe7c: 0x4000, 0xe7d: 0x4000, 0xe7e: 0x4000, 0xe7f: 0x4000, + // Block 0x3a, offset 0xe80 + 0xe80: 0x4000, 0xe81: 0x4000, 0xe82: 0x4000, 0xe83: 0x4000, 0xe84: 0x4000, 0xe85: 0x4000, + 0xe86: 0x4000, 0xe87: 0x4000, 0xe88: 0x4000, 0xe89: 0x4000, 0xe8a: 0x4000, 0xe8b: 0x4000, + 0xe8c: 0x4000, 0xe8d: 0x4000, 0xe8e: 0x4000, 0xe8f: 0x4000, 0xe90: 0x4000, 0xe91: 0x4000, + 0xe92: 0x4000, 0xe93: 0x4000, 0xe94: 0x4000, 0xe95: 0x4000, 0xe96: 0x4000, 0xe97: 0x4000, + 0xe98: 0x4000, 0xe99: 0x4000, 0xe9a: 0x4000, 0xe9b: 0x4000, 0xe9c: 0x4000, 
0xe9d: 0x4000, + 0xe9e: 0x4000, 0xea0: 0x4000, 0xea1: 0x4000, 0xea2: 0x4000, 0xea3: 0x4000, + 0xea4: 0x4000, 0xea5: 0x4000, 0xea6: 0x4000, 0xea7: 0x4000, 0xea8: 0x4000, 0xea9: 0x4000, + 0xeaa: 0x4000, 0xeab: 0x4000, 0xeac: 0x4000, 0xead: 0x4000, 0xeae: 0x4000, 0xeaf: 0x4000, + 0xeb0: 0x4000, 0xeb1: 0x4000, 0xeb2: 0x4000, 0xeb3: 0x4000, 0xeb4: 0x4000, 0xeb5: 0x4000, + 0xeb6: 0x4000, 0xeb7: 0x4000, 0xeb8: 0x4000, 0xeb9: 0x4000, 0xeba: 0x4000, 0xebb: 0x4000, + 0xebc: 0x4000, 0xebd: 0x4000, 0xebe: 0x4000, 0xebf: 0x4000, + // Block 0x3b, offset 0xec0 + 0xec0: 0x4000, 0xec1: 0x4000, 0xec2: 0x4000, 0xec3: 0x4000, 0xec4: 0x4000, 0xec5: 0x4000, + 0xec6: 0x4000, 0xec7: 0x4000, 0xec8: 0x2000, 0xec9: 0x2000, 0xeca: 0x2000, 0xecb: 0x2000, + 0xecc: 0x2000, 0xecd: 0x2000, 0xece: 0x2000, 0xecf: 0x2000, 0xed0: 0x4000, 0xed1: 0x4000, + 0xed2: 0x4000, 0xed3: 0x4000, 0xed4: 0x4000, 0xed5: 0x4000, 0xed6: 0x4000, 0xed7: 0x4000, + 0xed8: 0x4000, 0xed9: 0x4000, 0xeda: 0x4000, 0xedb: 0x4000, 0xedc: 0x4000, 0xedd: 0x4000, + 0xede: 0x4000, 0xedf: 0x4000, 0xee0: 0x4000, 0xee1: 0x4000, 0xee2: 0x4000, 0xee3: 0x4000, + 0xee4: 0x4000, 0xee5: 0x4000, 0xee6: 0x4000, 0xee7: 0x4000, 0xee8: 0x4000, 0xee9: 0x4000, + 0xeea: 0x4000, 0xeeb: 0x4000, 0xeec: 0x4000, 0xeed: 0x4000, 0xeee: 0x4000, 0xeef: 0x4000, + 0xef0: 0x4000, 0xef1: 0x4000, 0xef2: 0x4000, 0xef3: 0x4000, 0xef4: 0x4000, 0xef5: 0x4000, + 0xef6: 0x4000, 0xef7: 0x4000, 0xef8: 0x4000, 0xef9: 0x4000, 0xefa: 0x4000, 0xefb: 0x4000, + 0xefc: 0x4000, 0xefd: 0x4000, 0xefe: 0x4000, 0xeff: 0x4000, + // Block 0x3c, offset 0xf00 + 0xf00: 0x4000, 0xf01: 0x4000, 0xf02: 0x4000, 0xf03: 0x4000, 0xf04: 0x4000, 0xf05: 0x4000, + 0xf06: 0x4000, 0xf07: 0x4000, 0xf08: 0x4000, 0xf09: 0x4000, 0xf0a: 0x4000, 0xf0b: 0x4000, + 0xf0c: 0x4000, 0xf10: 0x4000, 0xf11: 0x4000, + 0xf12: 0x4000, 0xf13: 0x4000, 0xf14: 0x4000, 0xf15: 0x4000, 0xf16: 0x4000, 0xf17: 0x4000, + 0xf18: 0x4000, 0xf19: 0x4000, 0xf1a: 0x4000, 0xf1b: 0x4000, 0xf1c: 0x4000, 0xf1d: 0x4000, + 0xf1e: 0x4000, 0xf1f: 0x4000, 0xf20: 0x4000, 0xf21: 0x4000, 0xf22: 0x4000, 0xf23: 0x4000, + 0xf24: 0x4000, 0xf25: 0x4000, 0xf26: 0x4000, 0xf27: 0x4000, 0xf28: 0x4000, 0xf29: 0x4000, + 0xf2a: 0x4000, 0xf2b: 0x4000, 0xf2c: 0x4000, 0xf2d: 0x4000, 0xf2e: 0x4000, 0xf2f: 0x4000, + 0xf30: 0x4000, 0xf31: 0x4000, 0xf32: 0x4000, 0xf33: 0x4000, 0xf34: 0x4000, 0xf35: 0x4000, + 0xf36: 0x4000, 0xf37: 0x4000, 0xf38: 0x4000, 0xf39: 0x4000, 0xf3a: 0x4000, 0xf3b: 0x4000, + 0xf3c: 0x4000, 0xf3d: 0x4000, 0xf3e: 0x4000, 0xf3f: 0x4000, + // Block 0x3d, offset 0xf40 + 0xf40: 0x4000, 0xf41: 0x4000, 0xf42: 0x4000, 0xf43: 0x4000, 0xf44: 0x4000, 0xf45: 0x4000, + 0xf46: 0x4000, + // Block 0x3e, offset 0xf80 + 0xfa0: 0x4000, 0xfa1: 0x4000, 0xfa2: 0x4000, 0xfa3: 0x4000, + 0xfa4: 0x4000, 0xfa5: 0x4000, 0xfa6: 0x4000, 0xfa7: 0x4000, 0xfa8: 0x4000, 0xfa9: 0x4000, + 0xfaa: 0x4000, 0xfab: 0x4000, 0xfac: 0x4000, 0xfad: 0x4000, 0xfae: 0x4000, 0xfaf: 0x4000, + 0xfb0: 0x4000, 0xfb1: 0x4000, 0xfb2: 0x4000, 0xfb3: 0x4000, 0xfb4: 0x4000, 0xfb5: 0x4000, + 0xfb6: 0x4000, 0xfb7: 0x4000, 0xfb8: 0x4000, 0xfb9: 0x4000, 0xfba: 0x4000, 0xfbb: 0x4000, + 0xfbc: 0x4000, + // Block 0x3f, offset 0xfc0 + 0xfc0: 0x4000, 0xfc1: 0x4000, 0xfc2: 0x4000, 0xfc3: 0x4000, 0xfc4: 0x4000, 0xfc5: 0x4000, + 0xfc6: 0x4000, 0xfc7: 0x4000, 0xfc8: 0x4000, 0xfc9: 0x4000, 0xfca: 0x4000, 0xfcb: 0x4000, + 0xfcc: 0x4000, 0xfcd: 0x4000, 0xfce: 0x4000, 0xfcf: 0x4000, 0xfd0: 0x4000, 0xfd1: 0x4000, + 0xfd2: 0x4000, 0xfd3: 0x4000, 0xfd4: 0x4000, 0xfd5: 0x4000, 0xfd6: 0x4000, 0xfd7: 0x4000, + 0xfd8: 0x4000, 0xfd9: 0x4000, 0xfda: 
0x4000, 0xfdb: 0x4000, 0xfdc: 0x4000, 0xfdd: 0x4000, + 0xfde: 0x4000, 0xfdf: 0x4000, 0xfe0: 0x4000, 0xfe1: 0x4000, 0xfe2: 0x4000, 0xfe3: 0x4000, + // Block 0x40, offset 0x1000 + 0x1000: 0x2000, 0x1001: 0x2000, 0x1002: 0x2000, 0x1003: 0x2000, 0x1004: 0x2000, 0x1005: 0x2000, + 0x1006: 0x2000, 0x1007: 0x2000, 0x1008: 0x2000, 0x1009: 0x2000, 0x100a: 0x2000, 0x100b: 0x2000, + 0x100c: 0x2000, 0x100d: 0x2000, 0x100e: 0x2000, 0x100f: 0x2000, 0x1010: 0x4000, 0x1011: 0x4000, + 0x1012: 0x4000, 0x1013: 0x4000, 0x1014: 0x4000, 0x1015: 0x4000, 0x1016: 0x4000, 0x1017: 0x4000, + 0x1018: 0x4000, 0x1019: 0x4000, + 0x1030: 0x4000, 0x1031: 0x4000, 0x1032: 0x4000, 0x1033: 0x4000, 0x1034: 0x4000, 0x1035: 0x4000, + 0x1036: 0x4000, 0x1037: 0x4000, 0x1038: 0x4000, 0x1039: 0x4000, 0x103a: 0x4000, 0x103b: 0x4000, + 0x103c: 0x4000, 0x103d: 0x4000, 0x103e: 0x4000, 0x103f: 0x4000, + // Block 0x41, offset 0x1040 + 0x1040: 0x4000, 0x1041: 0x4000, 0x1042: 0x4000, 0x1043: 0x4000, 0x1044: 0x4000, 0x1045: 0x4000, + 0x1046: 0x4000, 0x1047: 0x4000, 0x1048: 0x4000, 0x1049: 0x4000, 0x104a: 0x4000, 0x104b: 0x4000, + 0x104c: 0x4000, 0x104d: 0x4000, 0x104e: 0x4000, 0x104f: 0x4000, 0x1050: 0x4000, 0x1051: 0x4000, + 0x1052: 0x4000, 0x1054: 0x4000, 0x1055: 0x4000, 0x1056: 0x4000, 0x1057: 0x4000, + 0x1058: 0x4000, 0x1059: 0x4000, 0x105a: 0x4000, 0x105b: 0x4000, 0x105c: 0x4000, 0x105d: 0x4000, + 0x105e: 0x4000, 0x105f: 0x4000, 0x1060: 0x4000, 0x1061: 0x4000, 0x1062: 0x4000, 0x1063: 0x4000, + 0x1064: 0x4000, 0x1065: 0x4000, 0x1066: 0x4000, 0x1068: 0x4000, 0x1069: 0x4000, + 0x106a: 0x4000, 0x106b: 0x4000, + // Block 0x42, offset 0x1080 + 0x1081: 0x9012, 0x1082: 0x9012, 0x1083: 0x9012, 0x1084: 0x9012, 0x1085: 0x9012, + 0x1086: 0x9012, 0x1087: 0x9012, 0x1088: 0x9012, 0x1089: 0x9012, 0x108a: 0x9012, 0x108b: 0x9012, + 0x108c: 0x9012, 0x108d: 0x9012, 0x108e: 0x9012, 0x108f: 0x9012, 0x1090: 0x9012, 0x1091: 0x9012, + 0x1092: 0x9012, 0x1093: 0x9012, 0x1094: 0x9012, 0x1095: 0x9012, 0x1096: 0x9012, 0x1097: 0x9012, + 0x1098: 0x9012, 0x1099: 0x9012, 0x109a: 0x9012, 0x109b: 0x9012, 0x109c: 0x9012, 0x109d: 0x9012, + 0x109e: 0x9012, 0x109f: 0x9012, 0x10a0: 0x9049, 0x10a1: 0x9049, 0x10a2: 0x9049, 0x10a3: 0x9049, + 0x10a4: 0x9049, 0x10a5: 0x9049, 0x10a6: 0x9049, 0x10a7: 0x9049, 0x10a8: 0x9049, 0x10a9: 0x9049, + 0x10aa: 0x9049, 0x10ab: 0x9049, 0x10ac: 0x9049, 0x10ad: 0x9049, 0x10ae: 0x9049, 0x10af: 0x9049, + 0x10b0: 0x9049, 0x10b1: 0x9049, 0x10b2: 0x9049, 0x10b3: 0x9049, 0x10b4: 0x9049, 0x10b5: 0x9049, + 0x10b6: 0x9049, 0x10b7: 0x9049, 0x10b8: 0x9049, 0x10b9: 0x9049, 0x10ba: 0x9049, 0x10bb: 0x9049, + 0x10bc: 0x9049, 0x10bd: 0x9049, 0x10be: 0x9049, 0x10bf: 0x9049, + // Block 0x43, offset 0x10c0 + 0x10c0: 0x9049, 0x10c1: 0x9049, 0x10c2: 0x9049, 0x10c3: 0x9049, 0x10c4: 0x9049, 0x10c5: 0x9049, + 0x10c6: 0x9049, 0x10c7: 0x9049, 0x10c8: 0x9049, 0x10c9: 0x9049, 0x10ca: 0x9049, 0x10cb: 0x9049, + 0x10cc: 0x9049, 0x10cd: 0x9049, 0x10ce: 0x9049, 0x10cf: 0x9049, 0x10d0: 0x9049, 0x10d1: 0x9049, + 0x10d2: 0x9049, 0x10d3: 0x9049, 0x10d4: 0x9049, 0x10d5: 0x9049, 0x10d6: 0x9049, 0x10d7: 0x9049, + 0x10d8: 0x9049, 0x10d9: 0x9049, 0x10da: 0x9049, 0x10db: 0x9049, 0x10dc: 0x9049, 0x10dd: 0x9049, + 0x10de: 0x9049, 0x10df: 0x904a, 0x10e0: 0x904b, 0x10e1: 0xb04c, 0x10e2: 0xb04d, 0x10e3: 0xb04d, + 0x10e4: 0xb04e, 0x10e5: 0xb04f, 0x10e6: 0xb050, 0x10e7: 0xb051, 0x10e8: 0xb052, 0x10e9: 0xb053, + 0x10ea: 0xb054, 0x10eb: 0xb055, 0x10ec: 0xb056, 0x10ed: 0xb057, 0x10ee: 0xb058, 0x10ef: 0xb059, + 0x10f0: 0xb05a, 0x10f1: 0xb05b, 0x10f2: 0xb05c, 0x10f3: 0xb05d, 0x10f4: 0xb05e, 0x10f5: 
0xb05f, + 0x10f6: 0xb060, 0x10f7: 0xb061, 0x10f8: 0xb062, 0x10f9: 0xb063, 0x10fa: 0xb064, 0x10fb: 0xb065, + 0x10fc: 0xb052, 0x10fd: 0xb066, 0x10fe: 0xb067, 0x10ff: 0xb055, + // Block 0x44, offset 0x1100 + 0x1100: 0xb068, 0x1101: 0xb069, 0x1102: 0xb06a, 0x1103: 0xb06b, 0x1104: 0xb05a, 0x1105: 0xb056, + 0x1106: 0xb06c, 0x1107: 0xb06d, 0x1108: 0xb06b, 0x1109: 0xb06e, 0x110a: 0xb06b, 0x110b: 0xb06f, + 0x110c: 0xb06f, 0x110d: 0xb070, 0x110e: 0xb070, 0x110f: 0xb071, 0x1110: 0xb056, 0x1111: 0xb072, + 0x1112: 0xb073, 0x1113: 0xb072, 0x1114: 0xb074, 0x1115: 0xb073, 0x1116: 0xb075, 0x1117: 0xb075, + 0x1118: 0xb076, 0x1119: 0xb076, 0x111a: 0xb077, 0x111b: 0xb077, 0x111c: 0xb073, 0x111d: 0xb078, + 0x111e: 0xb079, 0x111f: 0xb067, 0x1120: 0xb07a, 0x1121: 0xb07b, 0x1122: 0xb07b, 0x1123: 0xb07b, + 0x1124: 0xb07b, 0x1125: 0xb07b, 0x1126: 0xb07b, 0x1127: 0xb07b, 0x1128: 0xb07b, 0x1129: 0xb07b, + 0x112a: 0xb07b, 0x112b: 0xb07b, 0x112c: 0xb07b, 0x112d: 0xb07b, 0x112e: 0xb07b, 0x112f: 0xb07b, + 0x1130: 0xb07c, 0x1131: 0xb07c, 0x1132: 0xb07c, 0x1133: 0xb07c, 0x1134: 0xb07c, 0x1135: 0xb07c, + 0x1136: 0xb07c, 0x1137: 0xb07c, 0x1138: 0xb07c, 0x1139: 0xb07c, 0x113a: 0xb07c, 0x113b: 0xb07c, + 0x113c: 0xb07c, 0x113d: 0xb07c, 0x113e: 0xb07c, + // Block 0x45, offset 0x1140 + 0x1142: 0xb07d, 0x1143: 0xb07e, 0x1144: 0xb07f, 0x1145: 0xb080, + 0x1146: 0xb07f, 0x1147: 0xb07e, 0x114a: 0xb081, 0x114b: 0xb082, + 0x114c: 0xb083, 0x114d: 0xb07f, 0x114e: 0xb080, 0x114f: 0xb07f, + 0x1152: 0xb084, 0x1153: 0xb085, 0x1154: 0xb084, 0x1155: 0xb086, 0x1156: 0xb084, 0x1157: 0xb087, + 0x115a: 0xb088, 0x115b: 0xb089, 0x115c: 0xb08a, + 0x1160: 0x908b, 0x1161: 0x908b, 0x1162: 0x908c, 0x1163: 0x908d, + 0x1164: 0x908b, 0x1165: 0x908e, 0x1166: 0x908f, 0x1168: 0xb090, 0x1169: 0xb091, + 0x116a: 0xb092, 0x116b: 0xb091, 0x116c: 0xb093, 0x116d: 0xb094, 0x116e: 0xb095, + 0x117d: 0x2000, + // Block 0x46, offset 0x1180 + 0x11a0: 0x4000, 0x11a1: 0x4000, 0x11a2: 0x4000, 0x11a3: 0x4000, + 0x11a4: 0x4000, + 0x11b0: 0x4000, 0x11b1: 0x4000, + // Block 0x47, offset 0x11c0 + 0x11c0: 0x4000, 0x11c1: 0x4000, 0x11c2: 0x4000, 0x11c3: 0x4000, 0x11c4: 0x4000, 0x11c5: 0x4000, + 0x11c6: 0x4000, 0x11c7: 0x4000, 0x11c8: 0x4000, 0x11c9: 0x4000, 0x11ca: 0x4000, 0x11cb: 0x4000, + 0x11cc: 0x4000, 0x11cd: 0x4000, 0x11ce: 0x4000, 0x11cf: 0x4000, 0x11d0: 0x4000, 0x11d1: 0x4000, + 0x11d2: 0x4000, 0x11d3: 0x4000, 0x11d4: 0x4000, 0x11d5: 0x4000, 0x11d6: 0x4000, 0x11d7: 0x4000, + 0x11d8: 0x4000, 0x11d9: 0x4000, 0x11da: 0x4000, 0x11db: 0x4000, 0x11dc: 0x4000, 0x11dd: 0x4000, + 0x11de: 0x4000, 0x11df: 0x4000, 0x11e0: 0x4000, 0x11e1: 0x4000, 0x11e2: 0x4000, 0x11e3: 0x4000, + 0x11e4: 0x4000, 0x11e5: 0x4000, 0x11e6: 0x4000, 0x11e7: 0x4000, 0x11e8: 0x4000, 0x11e9: 0x4000, + 0x11ea: 0x4000, 0x11eb: 0x4000, 0x11ec: 0x4000, 0x11ed: 0x4000, 0x11ee: 0x4000, 0x11ef: 0x4000, + 0x11f0: 0x4000, 0x11f1: 0x4000, 0x11f2: 0x4000, 0x11f3: 0x4000, 0x11f4: 0x4000, 0x11f5: 0x4000, + 0x11f6: 0x4000, 0x11f7: 0x4000, + // Block 0x48, offset 0x1200 + 0x1200: 0x4000, 0x1201: 0x4000, 0x1202: 0x4000, 0x1203: 0x4000, 0x1204: 0x4000, 0x1205: 0x4000, + 0x1206: 0x4000, 0x1207: 0x4000, 0x1208: 0x4000, 0x1209: 0x4000, 0x120a: 0x4000, 0x120b: 0x4000, + 0x120c: 0x4000, 0x120d: 0x4000, 0x120e: 0x4000, 0x120f: 0x4000, 0x1210: 0x4000, 0x1211: 0x4000, + 0x1212: 0x4000, 0x1213: 0x4000, 0x1214: 0x4000, 0x1215: 0x4000, + // Block 0x49, offset 0x1240 + 0x1240: 0x4000, 0x1241: 0x4000, 0x1242: 0x4000, 0x1243: 0x4000, 0x1244: 0x4000, 0x1245: 0x4000, + 0x1246: 0x4000, 0x1247: 0x4000, 0x1248: 0x4000, + // Block 0x4a, offset 0x1280 
+ 0x12b0: 0x4000, 0x12b1: 0x4000, 0x12b2: 0x4000, 0x12b3: 0x4000, 0x12b5: 0x4000, + 0x12b6: 0x4000, 0x12b7: 0x4000, 0x12b8: 0x4000, 0x12b9: 0x4000, 0x12ba: 0x4000, 0x12bb: 0x4000, + 0x12bd: 0x4000, 0x12be: 0x4000, + // Block 0x4b, offset 0x12c0 + 0x12c0: 0x4000, 0x12c1: 0x4000, 0x12c2: 0x4000, 0x12c3: 0x4000, 0x12c4: 0x4000, 0x12c5: 0x4000, + 0x12c6: 0x4000, 0x12c7: 0x4000, 0x12c8: 0x4000, 0x12c9: 0x4000, 0x12ca: 0x4000, 0x12cb: 0x4000, + 0x12cc: 0x4000, 0x12cd: 0x4000, 0x12ce: 0x4000, 0x12cf: 0x4000, 0x12d0: 0x4000, 0x12d1: 0x4000, + 0x12d2: 0x4000, 0x12d3: 0x4000, 0x12d4: 0x4000, 0x12d5: 0x4000, 0x12d6: 0x4000, 0x12d7: 0x4000, + 0x12d8: 0x4000, 0x12d9: 0x4000, 0x12da: 0x4000, 0x12db: 0x4000, 0x12dc: 0x4000, 0x12dd: 0x4000, + 0x12de: 0x4000, 0x12df: 0x4000, 0x12e0: 0x4000, 0x12e1: 0x4000, 0x12e2: 0x4000, + 0x12f2: 0x4000, + // Block 0x4c, offset 0x1300 + 0x1310: 0x4000, 0x1311: 0x4000, + 0x1312: 0x4000, 0x1315: 0x4000, + 0x1324: 0x4000, 0x1325: 0x4000, 0x1326: 0x4000, 0x1327: 0x4000, + 0x1330: 0x4000, 0x1331: 0x4000, 0x1332: 0x4000, 0x1333: 0x4000, 0x1334: 0x4000, 0x1335: 0x4000, + 0x1336: 0x4000, 0x1337: 0x4000, 0x1338: 0x4000, 0x1339: 0x4000, 0x133a: 0x4000, 0x133b: 0x4000, + 0x133c: 0x4000, 0x133d: 0x4000, 0x133e: 0x4000, 0x133f: 0x4000, + // Block 0x4d, offset 0x1340 + 0x1340: 0x4000, 0x1341: 0x4000, 0x1342: 0x4000, 0x1343: 0x4000, 0x1344: 0x4000, 0x1345: 0x4000, + 0x1346: 0x4000, 0x1347: 0x4000, 0x1348: 0x4000, 0x1349: 0x4000, 0x134a: 0x4000, 0x134b: 0x4000, + 0x134c: 0x4000, 0x134d: 0x4000, 0x134e: 0x4000, 0x134f: 0x4000, 0x1350: 0x4000, 0x1351: 0x4000, + 0x1352: 0x4000, 0x1353: 0x4000, 0x1354: 0x4000, 0x1355: 0x4000, 0x1356: 0x4000, 0x1357: 0x4000, + 0x1358: 0x4000, 0x1359: 0x4000, 0x135a: 0x4000, 0x135b: 0x4000, 0x135c: 0x4000, 0x135d: 0x4000, + 0x135e: 0x4000, 0x135f: 0x4000, 0x1360: 0x4000, 0x1361: 0x4000, 0x1362: 0x4000, 0x1363: 0x4000, + 0x1364: 0x4000, 0x1365: 0x4000, 0x1366: 0x4000, 0x1367: 0x4000, 0x1368: 0x4000, 0x1369: 0x4000, + 0x136a: 0x4000, 0x136b: 0x4000, 0x136c: 0x4000, 0x136d: 0x4000, 0x136e: 0x4000, 0x136f: 0x4000, + 0x1370: 0x4000, 0x1371: 0x4000, 0x1372: 0x4000, 0x1373: 0x4000, 0x1374: 0x4000, 0x1375: 0x4000, + 0x1376: 0x4000, 0x1377: 0x4000, 0x1378: 0x4000, 0x1379: 0x4000, 0x137a: 0x4000, 0x137b: 0x4000, + // Block 0x4e, offset 0x1380 + 0x1384: 0x4000, + // Block 0x4f, offset 0x13c0 + 0x13cf: 0x4000, + // Block 0x50, offset 0x1400 + 0x1400: 0x2000, 0x1401: 0x2000, 0x1402: 0x2000, 0x1403: 0x2000, 0x1404: 0x2000, 0x1405: 0x2000, + 0x1406: 0x2000, 0x1407: 0x2000, 0x1408: 0x2000, 0x1409: 0x2000, 0x140a: 0x2000, + 0x1410: 0x2000, 0x1411: 0x2000, + 0x1412: 0x2000, 0x1413: 0x2000, 0x1414: 0x2000, 0x1415: 0x2000, 0x1416: 0x2000, 0x1417: 0x2000, + 0x1418: 0x2000, 0x1419: 0x2000, 0x141a: 0x2000, 0x141b: 0x2000, 0x141c: 0x2000, 0x141d: 0x2000, + 0x141e: 0x2000, 0x141f: 0x2000, 0x1420: 0x2000, 0x1421: 0x2000, 0x1422: 0x2000, 0x1423: 0x2000, + 0x1424: 0x2000, 0x1425: 0x2000, 0x1426: 0x2000, 0x1427: 0x2000, 0x1428: 0x2000, 0x1429: 0x2000, + 0x142a: 0x2000, 0x142b: 0x2000, 0x142c: 0x2000, 0x142d: 0x2000, + 0x1430: 0x2000, 0x1431: 0x2000, 0x1432: 0x2000, 0x1433: 0x2000, 0x1434: 0x2000, 0x1435: 0x2000, + 0x1436: 0x2000, 0x1437: 0x2000, 0x1438: 0x2000, 0x1439: 0x2000, 0x143a: 0x2000, 0x143b: 0x2000, + 0x143c: 0x2000, 0x143d: 0x2000, 0x143e: 0x2000, 0x143f: 0x2000, + // Block 0x51, offset 0x1440 + 0x1440: 0x2000, 0x1441: 0x2000, 0x1442: 0x2000, 0x1443: 0x2000, 0x1444: 0x2000, 0x1445: 0x2000, + 0x1446: 0x2000, 0x1447: 0x2000, 0x1448: 0x2000, 0x1449: 0x2000, 0x144a: 0x2000, 0x144b: 
0x2000, + 0x144c: 0x2000, 0x144d: 0x2000, 0x144e: 0x2000, 0x144f: 0x2000, 0x1450: 0x2000, 0x1451: 0x2000, + 0x1452: 0x2000, 0x1453: 0x2000, 0x1454: 0x2000, 0x1455: 0x2000, 0x1456: 0x2000, 0x1457: 0x2000, + 0x1458: 0x2000, 0x1459: 0x2000, 0x145a: 0x2000, 0x145b: 0x2000, 0x145c: 0x2000, 0x145d: 0x2000, + 0x145e: 0x2000, 0x145f: 0x2000, 0x1460: 0x2000, 0x1461: 0x2000, 0x1462: 0x2000, 0x1463: 0x2000, + 0x1464: 0x2000, 0x1465: 0x2000, 0x1466: 0x2000, 0x1467: 0x2000, 0x1468: 0x2000, 0x1469: 0x2000, + 0x1470: 0x2000, 0x1471: 0x2000, 0x1472: 0x2000, 0x1473: 0x2000, 0x1474: 0x2000, 0x1475: 0x2000, + 0x1476: 0x2000, 0x1477: 0x2000, 0x1478: 0x2000, 0x1479: 0x2000, 0x147a: 0x2000, 0x147b: 0x2000, + 0x147c: 0x2000, 0x147d: 0x2000, 0x147e: 0x2000, 0x147f: 0x2000, + // Block 0x52, offset 0x1480 + 0x1480: 0x2000, 0x1481: 0x2000, 0x1482: 0x2000, 0x1483: 0x2000, 0x1484: 0x2000, 0x1485: 0x2000, + 0x1486: 0x2000, 0x1487: 0x2000, 0x1488: 0x2000, 0x1489: 0x2000, 0x148a: 0x2000, 0x148b: 0x2000, + 0x148c: 0x2000, 0x148d: 0x2000, 0x148e: 0x4000, 0x148f: 0x2000, 0x1490: 0x2000, 0x1491: 0x4000, + 0x1492: 0x4000, 0x1493: 0x4000, 0x1494: 0x4000, 0x1495: 0x4000, 0x1496: 0x4000, 0x1497: 0x4000, + 0x1498: 0x4000, 0x1499: 0x4000, 0x149a: 0x4000, 0x149b: 0x2000, 0x149c: 0x2000, 0x149d: 0x2000, + 0x149e: 0x2000, 0x149f: 0x2000, 0x14a0: 0x2000, 0x14a1: 0x2000, 0x14a2: 0x2000, 0x14a3: 0x2000, + 0x14a4: 0x2000, 0x14a5: 0x2000, 0x14a6: 0x2000, 0x14a7: 0x2000, 0x14a8: 0x2000, 0x14a9: 0x2000, + 0x14aa: 0x2000, 0x14ab: 0x2000, 0x14ac: 0x2000, + // Block 0x53, offset 0x14c0 + 0x14c0: 0x4000, 0x14c1: 0x4000, 0x14c2: 0x4000, + 0x14d0: 0x4000, 0x14d1: 0x4000, + 0x14d2: 0x4000, 0x14d3: 0x4000, 0x14d4: 0x4000, 0x14d5: 0x4000, 0x14d6: 0x4000, 0x14d7: 0x4000, + 0x14d8: 0x4000, 0x14d9: 0x4000, 0x14da: 0x4000, 0x14db: 0x4000, 0x14dc: 0x4000, 0x14dd: 0x4000, + 0x14de: 0x4000, 0x14df: 0x4000, 0x14e0: 0x4000, 0x14e1: 0x4000, 0x14e2: 0x4000, 0x14e3: 0x4000, + 0x14e4: 0x4000, 0x14e5: 0x4000, 0x14e6: 0x4000, 0x14e7: 0x4000, 0x14e8: 0x4000, 0x14e9: 0x4000, + 0x14ea: 0x4000, 0x14eb: 0x4000, 0x14ec: 0x4000, 0x14ed: 0x4000, 0x14ee: 0x4000, 0x14ef: 0x4000, + 0x14f0: 0x4000, 0x14f1: 0x4000, 0x14f2: 0x4000, 0x14f3: 0x4000, 0x14f4: 0x4000, 0x14f5: 0x4000, + 0x14f6: 0x4000, 0x14f7: 0x4000, 0x14f8: 0x4000, 0x14f9: 0x4000, 0x14fa: 0x4000, 0x14fb: 0x4000, + // Block 0x54, offset 0x1500 + 0x1500: 0x4000, 0x1501: 0x4000, 0x1502: 0x4000, 0x1503: 0x4000, 0x1504: 0x4000, 0x1505: 0x4000, + 0x1506: 0x4000, 0x1507: 0x4000, 0x1508: 0x4000, + 0x1510: 0x4000, 0x1511: 0x4000, + 0x1520: 0x4000, 0x1521: 0x4000, 0x1522: 0x4000, 0x1523: 0x4000, + 0x1524: 0x4000, 0x1525: 0x4000, + // Block 0x55, offset 0x1540 + 0x1540: 0x4000, 0x1541: 0x4000, 0x1542: 0x4000, 0x1543: 0x4000, 0x1544: 0x4000, 0x1545: 0x4000, + 0x1546: 0x4000, 0x1547: 0x4000, 0x1548: 0x4000, 0x1549: 0x4000, 0x154a: 0x4000, 0x154b: 0x4000, + 0x154c: 0x4000, 0x154d: 0x4000, 0x154e: 0x4000, 0x154f: 0x4000, 0x1550: 0x4000, 0x1551: 0x4000, + 0x1552: 0x4000, 0x1553: 0x4000, 0x1554: 0x4000, 0x1555: 0x4000, 0x1556: 0x4000, 0x1557: 0x4000, + 0x1558: 0x4000, 0x1559: 0x4000, 0x155a: 0x4000, 0x155b: 0x4000, 0x155c: 0x4000, 0x155d: 0x4000, + 0x155e: 0x4000, 0x155f: 0x4000, 0x1560: 0x4000, + 0x156d: 0x4000, 0x156e: 0x4000, 0x156f: 0x4000, + 0x1570: 0x4000, 0x1571: 0x4000, 0x1572: 0x4000, 0x1573: 0x4000, 0x1574: 0x4000, 0x1575: 0x4000, + 0x1577: 0x4000, 0x1578: 0x4000, 0x1579: 0x4000, 0x157a: 0x4000, 0x157b: 0x4000, + 0x157c: 0x4000, 0x157d: 0x4000, 0x157e: 0x4000, 0x157f: 0x4000, + // Block 0x56, offset 0x1580 + 0x1580: 
0x4000, 0x1581: 0x4000, 0x1582: 0x4000, 0x1583: 0x4000, 0x1584: 0x4000, 0x1585: 0x4000, + 0x1586: 0x4000, 0x1587: 0x4000, 0x1588: 0x4000, 0x1589: 0x4000, 0x158a: 0x4000, 0x158b: 0x4000, + 0x158c: 0x4000, 0x158d: 0x4000, 0x158e: 0x4000, 0x158f: 0x4000, 0x1590: 0x4000, 0x1591: 0x4000, + 0x1592: 0x4000, 0x1593: 0x4000, 0x1594: 0x4000, 0x1595: 0x4000, 0x1596: 0x4000, 0x1597: 0x4000, + 0x1598: 0x4000, 0x1599: 0x4000, 0x159a: 0x4000, 0x159b: 0x4000, 0x159c: 0x4000, 0x159d: 0x4000, + 0x159e: 0x4000, 0x159f: 0x4000, 0x15a0: 0x4000, 0x15a1: 0x4000, 0x15a2: 0x4000, 0x15a3: 0x4000, + 0x15a4: 0x4000, 0x15a5: 0x4000, 0x15a6: 0x4000, 0x15a7: 0x4000, 0x15a8: 0x4000, 0x15a9: 0x4000, + 0x15aa: 0x4000, 0x15ab: 0x4000, 0x15ac: 0x4000, 0x15ad: 0x4000, 0x15ae: 0x4000, 0x15af: 0x4000, + 0x15b0: 0x4000, 0x15b1: 0x4000, 0x15b2: 0x4000, 0x15b3: 0x4000, 0x15b4: 0x4000, 0x15b5: 0x4000, + 0x15b6: 0x4000, 0x15b7: 0x4000, 0x15b8: 0x4000, 0x15b9: 0x4000, 0x15ba: 0x4000, 0x15bb: 0x4000, + 0x15bc: 0x4000, 0x15be: 0x4000, 0x15bf: 0x4000, + // Block 0x57, offset 0x15c0 + 0x15c0: 0x4000, 0x15c1: 0x4000, 0x15c2: 0x4000, 0x15c3: 0x4000, 0x15c4: 0x4000, 0x15c5: 0x4000, + 0x15c6: 0x4000, 0x15c7: 0x4000, 0x15c8: 0x4000, 0x15c9: 0x4000, 0x15ca: 0x4000, 0x15cb: 0x4000, + 0x15cc: 0x4000, 0x15cd: 0x4000, 0x15ce: 0x4000, 0x15cf: 0x4000, 0x15d0: 0x4000, 0x15d1: 0x4000, + 0x15d2: 0x4000, 0x15d3: 0x4000, + 0x15e0: 0x4000, 0x15e1: 0x4000, 0x15e2: 0x4000, 0x15e3: 0x4000, + 0x15e4: 0x4000, 0x15e5: 0x4000, 0x15e6: 0x4000, 0x15e7: 0x4000, 0x15e8: 0x4000, 0x15e9: 0x4000, + 0x15ea: 0x4000, 0x15eb: 0x4000, 0x15ec: 0x4000, 0x15ed: 0x4000, 0x15ee: 0x4000, 0x15ef: 0x4000, + 0x15f0: 0x4000, 0x15f1: 0x4000, 0x15f2: 0x4000, 0x15f3: 0x4000, 0x15f4: 0x4000, 0x15f5: 0x4000, + 0x15f6: 0x4000, 0x15f7: 0x4000, 0x15f8: 0x4000, 0x15f9: 0x4000, 0x15fa: 0x4000, 0x15fb: 0x4000, + 0x15fc: 0x4000, 0x15fd: 0x4000, 0x15fe: 0x4000, 0x15ff: 0x4000, + // Block 0x58, offset 0x1600 + 0x1600: 0x4000, 0x1601: 0x4000, 0x1602: 0x4000, 0x1603: 0x4000, 0x1604: 0x4000, 0x1605: 0x4000, + 0x1606: 0x4000, 0x1607: 0x4000, 0x1608: 0x4000, 0x1609: 0x4000, 0x160a: 0x4000, + 0x160f: 0x4000, 0x1610: 0x4000, 0x1611: 0x4000, + 0x1612: 0x4000, 0x1613: 0x4000, + 0x1620: 0x4000, 0x1621: 0x4000, 0x1622: 0x4000, 0x1623: 0x4000, + 0x1624: 0x4000, 0x1625: 0x4000, 0x1626: 0x4000, 0x1627: 0x4000, 0x1628: 0x4000, 0x1629: 0x4000, + 0x162a: 0x4000, 0x162b: 0x4000, 0x162c: 0x4000, 0x162d: 0x4000, 0x162e: 0x4000, 0x162f: 0x4000, + 0x1630: 0x4000, 0x1634: 0x4000, + 0x1638: 0x4000, 0x1639: 0x4000, 0x163a: 0x4000, 0x163b: 0x4000, + 0x163c: 0x4000, 0x163d: 0x4000, 0x163e: 0x4000, 0x163f: 0x4000, + // Block 0x59, offset 0x1640 + 0x1640: 0x4000, 0x1641: 0x4000, 0x1642: 0x4000, 0x1643: 0x4000, 0x1644: 0x4000, 0x1645: 0x4000, + 0x1646: 0x4000, 0x1647: 0x4000, 0x1648: 0x4000, 0x1649: 0x4000, 0x164a: 0x4000, 0x164b: 0x4000, + 0x164c: 0x4000, 0x164d: 0x4000, 0x164e: 0x4000, 0x164f: 0x4000, 0x1650: 0x4000, 0x1651: 0x4000, + 0x1652: 0x4000, 0x1653: 0x4000, 0x1654: 0x4000, 0x1655: 0x4000, 0x1656: 0x4000, 0x1657: 0x4000, + 0x1658: 0x4000, 0x1659: 0x4000, 0x165a: 0x4000, 0x165b: 0x4000, 0x165c: 0x4000, 0x165d: 0x4000, + 0x165e: 0x4000, 0x165f: 0x4000, 0x1660: 0x4000, 0x1661: 0x4000, 0x1662: 0x4000, 0x1663: 0x4000, + 0x1664: 0x4000, 0x1665: 0x4000, 0x1666: 0x4000, 0x1667: 0x4000, 0x1668: 0x4000, 0x1669: 0x4000, + 0x166a: 0x4000, 0x166b: 0x4000, 0x166c: 0x4000, 0x166d: 0x4000, 0x166e: 0x4000, 0x166f: 0x4000, + 0x1670: 0x4000, 0x1671: 0x4000, 0x1672: 0x4000, 0x1673: 0x4000, 0x1674: 0x4000, 0x1675: 0x4000, + 0x1676: 
0x4000, 0x1677: 0x4000, 0x1678: 0x4000, 0x1679: 0x4000, 0x167a: 0x4000, 0x167b: 0x4000, + 0x167c: 0x4000, 0x167d: 0x4000, 0x167e: 0x4000, + // Block 0x5a, offset 0x1680 + 0x1680: 0x4000, 0x1682: 0x4000, 0x1683: 0x4000, 0x1684: 0x4000, 0x1685: 0x4000, + 0x1686: 0x4000, 0x1687: 0x4000, 0x1688: 0x4000, 0x1689: 0x4000, 0x168a: 0x4000, 0x168b: 0x4000, + 0x168c: 0x4000, 0x168d: 0x4000, 0x168e: 0x4000, 0x168f: 0x4000, 0x1690: 0x4000, 0x1691: 0x4000, + 0x1692: 0x4000, 0x1693: 0x4000, 0x1694: 0x4000, 0x1695: 0x4000, 0x1696: 0x4000, 0x1697: 0x4000, + 0x1698: 0x4000, 0x1699: 0x4000, 0x169a: 0x4000, 0x169b: 0x4000, 0x169c: 0x4000, 0x169d: 0x4000, + 0x169e: 0x4000, 0x169f: 0x4000, 0x16a0: 0x4000, 0x16a1: 0x4000, 0x16a2: 0x4000, 0x16a3: 0x4000, + 0x16a4: 0x4000, 0x16a5: 0x4000, 0x16a6: 0x4000, 0x16a7: 0x4000, 0x16a8: 0x4000, 0x16a9: 0x4000, + 0x16aa: 0x4000, 0x16ab: 0x4000, 0x16ac: 0x4000, 0x16ad: 0x4000, 0x16ae: 0x4000, 0x16af: 0x4000, + 0x16b0: 0x4000, 0x16b1: 0x4000, 0x16b2: 0x4000, 0x16b3: 0x4000, 0x16b4: 0x4000, 0x16b5: 0x4000, + 0x16b6: 0x4000, 0x16b7: 0x4000, 0x16b8: 0x4000, 0x16b9: 0x4000, 0x16ba: 0x4000, 0x16bb: 0x4000, + 0x16bc: 0x4000, 0x16bd: 0x4000, 0x16be: 0x4000, 0x16bf: 0x4000, + // Block 0x5b, offset 0x16c0 + 0x16c0: 0x4000, 0x16c1: 0x4000, 0x16c2: 0x4000, 0x16c3: 0x4000, 0x16c4: 0x4000, 0x16c5: 0x4000, + 0x16c6: 0x4000, 0x16c7: 0x4000, 0x16c8: 0x4000, 0x16c9: 0x4000, 0x16ca: 0x4000, 0x16cb: 0x4000, + 0x16cc: 0x4000, 0x16cd: 0x4000, 0x16ce: 0x4000, 0x16cf: 0x4000, 0x16d0: 0x4000, 0x16d1: 0x4000, + 0x16d2: 0x4000, 0x16d3: 0x4000, 0x16d4: 0x4000, 0x16d5: 0x4000, 0x16d6: 0x4000, 0x16d7: 0x4000, + 0x16d8: 0x4000, 0x16d9: 0x4000, 0x16da: 0x4000, 0x16db: 0x4000, 0x16dc: 0x4000, 0x16dd: 0x4000, + 0x16de: 0x4000, 0x16df: 0x4000, 0x16e0: 0x4000, 0x16e1: 0x4000, 0x16e2: 0x4000, 0x16e3: 0x4000, + 0x16e4: 0x4000, 0x16e5: 0x4000, 0x16e6: 0x4000, 0x16e7: 0x4000, 0x16e8: 0x4000, 0x16e9: 0x4000, + 0x16ea: 0x4000, 0x16eb: 0x4000, 0x16ec: 0x4000, 0x16ed: 0x4000, 0x16ee: 0x4000, 0x16ef: 0x4000, + 0x16f0: 0x4000, 0x16f1: 0x4000, 0x16f2: 0x4000, 0x16f3: 0x4000, 0x16f4: 0x4000, 0x16f5: 0x4000, + 0x16f6: 0x4000, 0x16f7: 0x4000, 0x16f8: 0x4000, 0x16f9: 0x4000, 0x16fa: 0x4000, 0x16fb: 0x4000, + 0x16fc: 0x4000, 0x16ff: 0x4000, + // Block 0x5c, offset 0x1700 + 0x1700: 0x4000, 0x1701: 0x4000, 0x1702: 0x4000, 0x1703: 0x4000, 0x1704: 0x4000, 0x1705: 0x4000, + 0x1706: 0x4000, 0x1707: 0x4000, 0x1708: 0x4000, 0x1709: 0x4000, 0x170a: 0x4000, 0x170b: 0x4000, + 0x170c: 0x4000, 0x170d: 0x4000, 0x170e: 0x4000, 0x170f: 0x4000, 0x1710: 0x4000, 0x1711: 0x4000, + 0x1712: 0x4000, 0x1713: 0x4000, 0x1714: 0x4000, 0x1715: 0x4000, 0x1716: 0x4000, 0x1717: 0x4000, + 0x1718: 0x4000, 0x1719: 0x4000, 0x171a: 0x4000, 0x171b: 0x4000, 0x171c: 0x4000, 0x171d: 0x4000, + 0x171e: 0x4000, 0x171f: 0x4000, 0x1720: 0x4000, 0x1721: 0x4000, 0x1722: 0x4000, 0x1723: 0x4000, + 0x1724: 0x4000, 0x1725: 0x4000, 0x1726: 0x4000, 0x1727: 0x4000, 0x1728: 0x4000, 0x1729: 0x4000, + 0x172a: 0x4000, 0x172b: 0x4000, 0x172c: 0x4000, 0x172d: 0x4000, 0x172e: 0x4000, 0x172f: 0x4000, + 0x1730: 0x4000, 0x1731: 0x4000, 0x1732: 0x4000, 0x1733: 0x4000, 0x1734: 0x4000, 0x1735: 0x4000, + 0x1736: 0x4000, 0x1737: 0x4000, 0x1738: 0x4000, 0x1739: 0x4000, 0x173a: 0x4000, 0x173b: 0x4000, + 0x173c: 0x4000, 0x173d: 0x4000, + // Block 0x5d, offset 0x1740 + 0x174b: 0x4000, + 0x174c: 0x4000, 0x174d: 0x4000, 0x174e: 0x4000, 0x1750: 0x4000, 0x1751: 0x4000, + 0x1752: 0x4000, 0x1753: 0x4000, 0x1754: 0x4000, 0x1755: 0x4000, 0x1756: 0x4000, 0x1757: 0x4000, + 0x1758: 0x4000, 0x1759: 0x4000, 
0x175a: 0x4000, 0x175b: 0x4000, 0x175c: 0x4000, 0x175d: 0x4000, + 0x175e: 0x4000, 0x175f: 0x4000, 0x1760: 0x4000, 0x1761: 0x4000, 0x1762: 0x4000, 0x1763: 0x4000, + 0x1764: 0x4000, 0x1765: 0x4000, 0x1766: 0x4000, 0x1767: 0x4000, + 0x177a: 0x4000, + // Block 0x5e, offset 0x1780 + 0x1795: 0x4000, 0x1796: 0x4000, + 0x17a4: 0x4000, + // Block 0x5f, offset 0x17c0 + 0x17fb: 0x4000, + 0x17fc: 0x4000, 0x17fd: 0x4000, 0x17fe: 0x4000, 0x17ff: 0x4000, + // Block 0x60, offset 0x1800 + 0x1800: 0x4000, 0x1801: 0x4000, 0x1802: 0x4000, 0x1803: 0x4000, 0x1804: 0x4000, 0x1805: 0x4000, + 0x1806: 0x4000, 0x1807: 0x4000, 0x1808: 0x4000, 0x1809: 0x4000, 0x180a: 0x4000, 0x180b: 0x4000, + 0x180c: 0x4000, 0x180d: 0x4000, 0x180e: 0x4000, 0x180f: 0x4000, + // Block 0x61, offset 0x1840 + 0x1840: 0x4000, 0x1841: 0x4000, 0x1842: 0x4000, 0x1843: 0x4000, 0x1844: 0x4000, 0x1845: 0x4000, + 0x184c: 0x4000, 0x1850: 0x4000, 0x1851: 0x4000, + 0x1852: 0x4000, 0x1855: 0x4000, 0x1856: 0x4000, 0x1857: 0x4000, + 0x185c: 0x4000, 0x185d: 0x4000, + 0x185e: 0x4000, 0x185f: 0x4000, + 0x186b: 0x4000, 0x186c: 0x4000, + 0x1874: 0x4000, 0x1875: 0x4000, + 0x1876: 0x4000, 0x1877: 0x4000, 0x1878: 0x4000, 0x1879: 0x4000, 0x187a: 0x4000, 0x187b: 0x4000, + 0x187c: 0x4000, + // Block 0x62, offset 0x1880 + 0x18a0: 0x4000, 0x18a1: 0x4000, 0x18a2: 0x4000, 0x18a3: 0x4000, + 0x18a4: 0x4000, 0x18a5: 0x4000, 0x18a6: 0x4000, 0x18a7: 0x4000, 0x18a8: 0x4000, 0x18a9: 0x4000, + 0x18aa: 0x4000, 0x18ab: 0x4000, + 0x18b0: 0x4000, + // Block 0x63, offset 0x18c0 + 0x18cc: 0x4000, 0x18cd: 0x4000, 0x18ce: 0x4000, 0x18cf: 0x4000, 0x18d0: 0x4000, 0x18d1: 0x4000, + 0x18d2: 0x4000, 0x18d3: 0x4000, 0x18d4: 0x4000, 0x18d5: 0x4000, 0x18d6: 0x4000, 0x18d7: 0x4000, + 0x18d8: 0x4000, 0x18d9: 0x4000, 0x18da: 0x4000, 0x18db: 0x4000, 0x18dc: 0x4000, 0x18dd: 0x4000, + 0x18de: 0x4000, 0x18df: 0x4000, 0x18e0: 0x4000, 0x18e1: 0x4000, 0x18e2: 0x4000, 0x18e3: 0x4000, + 0x18e4: 0x4000, 0x18e5: 0x4000, 0x18e6: 0x4000, 0x18e7: 0x4000, 0x18e8: 0x4000, 0x18e9: 0x4000, + 0x18ea: 0x4000, 0x18eb: 0x4000, 0x18ec: 0x4000, 0x18ed: 0x4000, 0x18ee: 0x4000, 0x18ef: 0x4000, + 0x18f0: 0x4000, 0x18f1: 0x4000, 0x18f2: 0x4000, 0x18f3: 0x4000, 0x18f4: 0x4000, 0x18f5: 0x4000, + 0x18f6: 0x4000, 0x18f7: 0x4000, 0x18f8: 0x4000, 0x18f9: 0x4000, 0x18fa: 0x4000, + 0x18fc: 0x4000, 0x18fd: 0x4000, 0x18fe: 0x4000, 0x18ff: 0x4000, + // Block 0x64, offset 0x1900 + 0x1900: 0x4000, 0x1901: 0x4000, 0x1902: 0x4000, 0x1903: 0x4000, 0x1904: 0x4000, 0x1905: 0x4000, + 0x1907: 0x4000, 0x1908: 0x4000, 0x1909: 0x4000, 0x190a: 0x4000, 0x190b: 0x4000, + 0x190c: 0x4000, 0x190d: 0x4000, 0x190e: 0x4000, 0x190f: 0x4000, 0x1910: 0x4000, 0x1911: 0x4000, + 0x1912: 0x4000, 0x1913: 0x4000, 0x1914: 0x4000, 0x1915: 0x4000, 0x1916: 0x4000, 0x1917: 0x4000, + 0x1918: 0x4000, 0x1919: 0x4000, 0x191a: 0x4000, 0x191b: 0x4000, 0x191c: 0x4000, 0x191d: 0x4000, + 0x191e: 0x4000, 0x191f: 0x4000, 0x1920: 0x4000, 0x1921: 0x4000, 0x1922: 0x4000, 0x1923: 0x4000, + 0x1924: 0x4000, 0x1925: 0x4000, 0x1926: 0x4000, 0x1927: 0x4000, 0x1928: 0x4000, 0x1929: 0x4000, + 0x192a: 0x4000, 0x192b: 0x4000, 0x192c: 0x4000, 0x192d: 0x4000, 0x192e: 0x4000, 0x192f: 0x4000, + 0x1930: 0x4000, 0x1931: 0x4000, 0x1932: 0x4000, 0x1933: 0x4000, 0x1934: 0x4000, 0x1935: 0x4000, + 0x1936: 0x4000, 0x1937: 0x4000, 0x1938: 0x4000, 0x1939: 0x4000, 0x193a: 0x4000, 0x193b: 0x4000, + 0x193c: 0x4000, 0x193d: 0x4000, 0x193e: 0x4000, 0x193f: 0x4000, + // Block 0x65, offset 0x1940 + 0x1970: 0x4000, 0x1971: 0x4000, 0x1972: 0x4000, 0x1973: 0x4000, 0x1974: 0x4000, 0x1975: 0x4000, + 0x1976: 0x4000, 
0x1977: 0x4000, 0x1978: 0x4000, 0x1979: 0x4000, 0x197a: 0x4000, 0x197b: 0x4000, + 0x197c: 0x4000, + // Block 0x66, offset 0x1980 + 0x1980: 0x4000, 0x1981: 0x4000, 0x1982: 0x4000, 0x1983: 0x4000, 0x1984: 0x4000, 0x1985: 0x4000, + 0x1986: 0x4000, 0x1987: 0x4000, 0x1988: 0x4000, + 0x1990: 0x4000, 0x1991: 0x4000, + 0x1992: 0x4000, 0x1993: 0x4000, 0x1994: 0x4000, 0x1995: 0x4000, 0x1996: 0x4000, 0x1997: 0x4000, + 0x1998: 0x4000, 0x1999: 0x4000, 0x199a: 0x4000, 0x199b: 0x4000, 0x199c: 0x4000, 0x199d: 0x4000, + 0x199e: 0x4000, 0x199f: 0x4000, 0x19a0: 0x4000, 0x19a1: 0x4000, 0x19a2: 0x4000, 0x19a3: 0x4000, + 0x19a4: 0x4000, 0x19a5: 0x4000, 0x19a6: 0x4000, 0x19a7: 0x4000, 0x19a8: 0x4000, 0x19a9: 0x4000, + 0x19aa: 0x4000, 0x19ab: 0x4000, 0x19ac: 0x4000, 0x19ad: 0x4000, 0x19ae: 0x4000, 0x19af: 0x4000, + 0x19b0: 0x4000, 0x19b1: 0x4000, 0x19b2: 0x4000, 0x19b3: 0x4000, 0x19b4: 0x4000, 0x19b5: 0x4000, + 0x19b6: 0x4000, 0x19b7: 0x4000, 0x19b8: 0x4000, 0x19b9: 0x4000, 0x19ba: 0x4000, 0x19bb: 0x4000, + 0x19bc: 0x4000, 0x19bd: 0x4000, 0x19bf: 0x4000, + // Block 0x67, offset 0x19c0 + 0x19c0: 0x4000, 0x19c1: 0x4000, 0x19c2: 0x4000, 0x19c3: 0x4000, 0x19c4: 0x4000, 0x19c5: 0x4000, + 0x19ce: 0x4000, 0x19cf: 0x4000, 0x19d0: 0x4000, 0x19d1: 0x4000, + 0x19d2: 0x4000, 0x19d3: 0x4000, 0x19d4: 0x4000, 0x19d5: 0x4000, 0x19d6: 0x4000, 0x19d7: 0x4000, + 0x19d8: 0x4000, 0x19d9: 0x4000, 0x19da: 0x4000, 0x19db: 0x4000, + 0x19e0: 0x4000, 0x19e1: 0x4000, 0x19e2: 0x4000, 0x19e3: 0x4000, + 0x19e4: 0x4000, 0x19e5: 0x4000, 0x19e6: 0x4000, 0x19e7: 0x4000, 0x19e8: 0x4000, + 0x19f0: 0x4000, 0x19f1: 0x4000, 0x19f2: 0x4000, 0x19f3: 0x4000, 0x19f4: 0x4000, 0x19f5: 0x4000, + 0x19f6: 0x4000, 0x19f7: 0x4000, 0x19f8: 0x4000, + // Block 0x68, offset 0x1a00 + 0x1a00: 0x2000, 0x1a01: 0x2000, 0x1a02: 0x2000, 0x1a03: 0x2000, 0x1a04: 0x2000, 0x1a05: 0x2000, + 0x1a06: 0x2000, 0x1a07: 0x2000, 0x1a08: 0x2000, 0x1a09: 0x2000, 0x1a0a: 0x2000, 0x1a0b: 0x2000, + 0x1a0c: 0x2000, 0x1a0d: 0x2000, 0x1a0e: 0x2000, 0x1a0f: 0x2000, 0x1a10: 0x2000, 0x1a11: 0x2000, + 0x1a12: 0x2000, 0x1a13: 0x2000, 0x1a14: 0x2000, 0x1a15: 0x2000, 0x1a16: 0x2000, 0x1a17: 0x2000, + 0x1a18: 0x2000, 0x1a19: 0x2000, 0x1a1a: 0x2000, 0x1a1b: 0x2000, 0x1a1c: 0x2000, 0x1a1d: 0x2000, + 0x1a1e: 0x2000, 0x1a1f: 0x2000, 0x1a20: 0x2000, 0x1a21: 0x2000, 0x1a22: 0x2000, 0x1a23: 0x2000, + 0x1a24: 0x2000, 0x1a25: 0x2000, 0x1a26: 0x2000, 0x1a27: 0x2000, 0x1a28: 0x2000, 0x1a29: 0x2000, + 0x1a2a: 0x2000, 0x1a2b: 0x2000, 0x1a2c: 0x2000, 0x1a2d: 0x2000, 0x1a2e: 0x2000, 0x1a2f: 0x2000, + 0x1a30: 0x2000, 0x1a31: 0x2000, 0x1a32: 0x2000, 0x1a33: 0x2000, 0x1a34: 0x2000, 0x1a35: 0x2000, + 0x1a36: 0x2000, 0x1a37: 0x2000, 0x1a38: 0x2000, 0x1a39: 0x2000, 0x1a3a: 0x2000, 0x1a3b: 0x2000, + 0x1a3c: 0x2000, 0x1a3d: 0x2000, +} + +// widthIndex: 23 blocks, 1472 entries, 1472 bytes +// Block 0 is the zero block. 
+var widthIndex = [1472]uint8{ + // Block 0x0, offset 0x0 + // Block 0x1, offset 0x40 + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc2: 0x01, 0xc3: 0x02, 0xc4: 0x03, 0xc5: 0x04, 0xc7: 0x05, + 0xc9: 0x06, 0xcb: 0x07, 0xcc: 0x08, 0xcd: 0x09, 0xce: 0x0a, 0xcf: 0x0b, + 0xd0: 0x0c, 0xd1: 0x0d, + 0xe1: 0x02, 0xe2: 0x03, 0xe3: 0x04, 0xe4: 0x05, 0xe5: 0x06, 0xe6: 0x06, 0xe7: 0x06, + 0xe8: 0x06, 0xe9: 0x06, 0xea: 0x07, 0xeb: 0x06, 0xec: 0x06, 0xed: 0x08, 0xee: 0x09, 0xef: 0x0a, + 0xf0: 0x10, 0xf3: 0x13, 0xf4: 0x14, + // Block 0x4, offset 0x100 + 0x104: 0x0e, 0x105: 0x0f, + // Block 0x5, offset 0x140 + 0x140: 0x10, 0x141: 0x11, 0x142: 0x12, 0x144: 0x13, 0x145: 0x14, 0x146: 0x15, 0x147: 0x16, + 0x148: 0x17, 0x149: 0x18, 0x14a: 0x19, 0x14c: 0x1a, 0x14f: 0x1b, + 0x151: 0x1c, 0x152: 0x08, 0x153: 0x1d, 0x154: 0x1e, 0x155: 0x1f, 0x156: 0x20, 0x157: 0x21, + 0x158: 0x22, 0x159: 0x23, 0x15a: 0x24, 0x15b: 0x25, 0x15c: 0x26, 0x15d: 0x27, 0x15e: 0x28, 0x15f: 0x29, + 0x166: 0x2a, + 0x16c: 0x2b, 0x16d: 0x2c, + 0x17a: 0x2d, 0x17b: 0x2e, 0x17c: 0x0e, 0x17d: 0x0e, 0x17e: 0x0e, 0x17f: 0x2f, + // Block 0x6, offset 0x180 + 0x180: 0x30, 0x181: 0x31, 0x182: 0x32, 0x183: 0x33, 0x184: 0x34, 0x185: 0x35, 0x186: 0x36, 0x187: 0x37, + 0x188: 0x38, 0x189: 0x39, 0x18a: 0x0e, 0x18b: 0x0e, 0x18c: 0x0e, 0x18d: 0x0e, 0x18e: 0x0e, 0x18f: 0x0e, + 0x190: 0x0e, 0x191: 0x0e, 0x192: 0x0e, 0x193: 0x0e, 0x194: 0x0e, 0x195: 0x0e, 0x196: 0x0e, 0x197: 0x0e, + 0x198: 0x0e, 0x199: 0x0e, 0x19a: 0x0e, 0x19b: 0x0e, 0x19c: 0x0e, 0x19d: 0x0e, 0x19e: 0x0e, 0x19f: 0x0e, + 0x1a0: 0x0e, 0x1a1: 0x0e, 0x1a2: 0x0e, 0x1a3: 0x0e, 0x1a4: 0x0e, 0x1a5: 0x0e, 0x1a6: 0x0e, 0x1a7: 0x0e, + 0x1a8: 0x0e, 0x1a9: 0x0e, 0x1aa: 0x0e, 0x1ab: 0x0e, 0x1ac: 0x0e, 0x1ad: 0x0e, 0x1ae: 0x0e, 0x1af: 0x0e, + 0x1b0: 0x0e, 0x1b1: 0x0e, 0x1b2: 0x0e, 0x1b3: 0x0e, 0x1b4: 0x0e, 0x1b5: 0x0e, 0x1b6: 0x0e, 0x1b7: 0x0e, + 0x1b8: 0x0e, 0x1b9: 0x0e, 0x1ba: 0x0e, 0x1bb: 0x0e, 0x1bc: 0x0e, 0x1bd: 0x0e, 0x1be: 0x0e, 0x1bf: 0x0e, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x0e, 0x1c1: 0x0e, 0x1c2: 0x0e, 0x1c3: 0x0e, 0x1c4: 0x0e, 0x1c5: 0x0e, 0x1c6: 0x0e, 0x1c7: 0x0e, + 0x1c8: 0x0e, 0x1c9: 0x0e, 0x1ca: 0x0e, 0x1cb: 0x0e, 0x1cc: 0x0e, 0x1cd: 0x0e, 0x1ce: 0x0e, 0x1cf: 0x0e, + 0x1d0: 0x0e, 0x1d1: 0x0e, 0x1d2: 0x0e, 0x1d3: 0x0e, 0x1d4: 0x0e, 0x1d5: 0x0e, 0x1d6: 0x0e, 0x1d7: 0x0e, + 0x1d8: 0x0e, 0x1d9: 0x0e, 0x1da: 0x0e, 0x1db: 0x0e, 0x1dc: 0x0e, 0x1dd: 0x0e, 0x1de: 0x0e, 0x1df: 0x0e, + 0x1e0: 0x0e, 0x1e1: 0x0e, 0x1e2: 0x0e, 0x1e3: 0x0e, 0x1e4: 0x0e, 0x1e5: 0x0e, 0x1e6: 0x0e, 0x1e7: 0x0e, + 0x1e8: 0x0e, 0x1e9: 0x0e, 0x1ea: 0x0e, 0x1eb: 0x0e, 0x1ec: 0x0e, 0x1ed: 0x0e, 0x1ee: 0x0e, 0x1ef: 0x0e, + 0x1f0: 0x0e, 0x1f1: 0x0e, 0x1f2: 0x0e, 0x1f3: 0x0e, 0x1f4: 0x0e, 0x1f5: 0x0e, 0x1f6: 0x0e, + 0x1f8: 0x0e, 0x1f9: 0x0e, 0x1fa: 0x0e, 0x1fb: 0x0e, 0x1fc: 0x0e, 0x1fd: 0x0e, 0x1fe: 0x0e, 0x1ff: 0x0e, + // Block 0x8, offset 0x200 + 0x200: 0x0e, 0x201: 0x0e, 0x202: 0x0e, 0x203: 0x0e, 0x204: 0x0e, 0x205: 0x0e, 0x206: 0x0e, 0x207: 0x0e, + 0x208: 0x0e, 0x209: 0x0e, 0x20a: 0x0e, 0x20b: 0x0e, 0x20c: 0x0e, 0x20d: 0x0e, 0x20e: 0x0e, 0x20f: 0x0e, + 0x210: 0x0e, 0x211: 0x0e, 0x212: 0x0e, 0x213: 0x0e, 0x214: 0x0e, 0x215: 0x0e, 0x216: 0x0e, 0x217: 0x0e, + 0x218: 0x0e, 0x219: 0x0e, 0x21a: 0x0e, 0x21b: 0x0e, 0x21c: 0x0e, 0x21d: 0x0e, 0x21e: 0x0e, 0x21f: 0x0e, + 0x220: 0x0e, 0x221: 0x0e, 0x222: 0x0e, 0x223: 0x0e, 0x224: 0x0e, 0x225: 0x0e, 0x226: 0x0e, 0x227: 0x0e, + 0x228: 0x0e, 0x229: 0x0e, 0x22a: 0x0e, 0x22b: 0x0e, 0x22c: 0x0e, 0x22d: 0x0e, 0x22e: 0x0e, 0x22f: 0x0e, + 0x230: 0x0e, 0x231: 0x0e, 0x232: 0x0e, 0x233: 0x0e, 
0x234: 0x0e, 0x235: 0x0e, 0x236: 0x0e, 0x237: 0x0e, + 0x238: 0x0e, 0x239: 0x0e, 0x23a: 0x0e, 0x23b: 0x0e, 0x23c: 0x0e, 0x23d: 0x0e, 0x23e: 0x0e, 0x23f: 0x0e, + // Block 0x9, offset 0x240 + 0x240: 0x0e, 0x241: 0x0e, 0x242: 0x0e, 0x243: 0x0e, 0x244: 0x0e, 0x245: 0x0e, 0x246: 0x0e, 0x247: 0x0e, + 0x248: 0x0e, 0x249: 0x0e, 0x24a: 0x0e, 0x24b: 0x0e, 0x24c: 0x0e, 0x24d: 0x0e, 0x24e: 0x0e, 0x24f: 0x0e, + 0x250: 0x0e, 0x251: 0x0e, 0x252: 0x3a, 0x253: 0x3b, + 0x265: 0x3c, + 0x270: 0x0e, 0x271: 0x0e, 0x272: 0x0e, 0x273: 0x0e, 0x274: 0x0e, 0x275: 0x0e, 0x276: 0x0e, 0x277: 0x0e, + 0x278: 0x0e, 0x279: 0x0e, 0x27a: 0x0e, 0x27b: 0x0e, 0x27c: 0x0e, 0x27d: 0x0e, 0x27e: 0x0e, 0x27f: 0x0e, + // Block 0xa, offset 0x280 + 0x280: 0x0e, 0x281: 0x0e, 0x282: 0x0e, 0x283: 0x0e, 0x284: 0x0e, 0x285: 0x0e, 0x286: 0x0e, 0x287: 0x0e, + 0x288: 0x0e, 0x289: 0x0e, 0x28a: 0x0e, 0x28b: 0x0e, 0x28c: 0x0e, 0x28d: 0x0e, 0x28e: 0x0e, 0x28f: 0x0e, + 0x290: 0x0e, 0x291: 0x0e, 0x292: 0x0e, 0x293: 0x0e, 0x294: 0x0e, 0x295: 0x0e, 0x296: 0x0e, 0x297: 0x0e, + 0x298: 0x0e, 0x299: 0x0e, 0x29a: 0x0e, 0x29b: 0x0e, 0x29c: 0x0e, 0x29d: 0x0e, 0x29e: 0x3d, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x08, 0x2c1: 0x08, 0x2c2: 0x08, 0x2c3: 0x08, 0x2c4: 0x08, 0x2c5: 0x08, 0x2c6: 0x08, 0x2c7: 0x08, + 0x2c8: 0x08, 0x2c9: 0x08, 0x2ca: 0x08, 0x2cb: 0x08, 0x2cc: 0x08, 0x2cd: 0x08, 0x2ce: 0x08, 0x2cf: 0x08, + 0x2d0: 0x08, 0x2d1: 0x08, 0x2d2: 0x08, 0x2d3: 0x08, 0x2d4: 0x08, 0x2d5: 0x08, 0x2d6: 0x08, 0x2d7: 0x08, + 0x2d8: 0x08, 0x2d9: 0x08, 0x2da: 0x08, 0x2db: 0x08, 0x2dc: 0x08, 0x2dd: 0x08, 0x2de: 0x08, 0x2df: 0x08, + 0x2e0: 0x08, 0x2e1: 0x08, 0x2e2: 0x08, 0x2e3: 0x08, 0x2e4: 0x08, 0x2e5: 0x08, 0x2e6: 0x08, 0x2e7: 0x08, + 0x2e8: 0x08, 0x2e9: 0x08, 0x2ea: 0x08, 0x2eb: 0x08, 0x2ec: 0x08, 0x2ed: 0x08, 0x2ee: 0x08, 0x2ef: 0x08, + 0x2f0: 0x08, 0x2f1: 0x08, 0x2f2: 0x08, 0x2f3: 0x08, 0x2f4: 0x08, 0x2f5: 0x08, 0x2f6: 0x08, 0x2f7: 0x08, + 0x2f8: 0x08, 0x2f9: 0x08, 0x2fa: 0x08, 0x2fb: 0x08, 0x2fc: 0x08, 0x2fd: 0x08, 0x2fe: 0x08, 0x2ff: 0x08, + // Block 0xc, offset 0x300 + 0x300: 0x08, 0x301: 0x08, 0x302: 0x08, 0x303: 0x08, 0x304: 0x08, 0x305: 0x08, 0x306: 0x08, 0x307: 0x08, + 0x308: 0x08, 0x309: 0x08, 0x30a: 0x08, 0x30b: 0x08, 0x30c: 0x08, 0x30d: 0x08, 0x30e: 0x08, 0x30f: 0x08, + 0x310: 0x08, 0x311: 0x08, 0x312: 0x08, 0x313: 0x08, 0x314: 0x08, 0x315: 0x08, 0x316: 0x08, 0x317: 0x08, + 0x318: 0x08, 0x319: 0x08, 0x31a: 0x08, 0x31b: 0x08, 0x31c: 0x08, 0x31d: 0x08, 0x31e: 0x08, 0x31f: 0x08, + 0x320: 0x08, 0x321: 0x08, 0x322: 0x08, 0x323: 0x08, 0x324: 0x0e, 0x325: 0x0e, 0x326: 0x0e, 0x327: 0x0e, + 0x328: 0x0e, 0x329: 0x0e, 0x32a: 0x0e, 0x32b: 0x0e, + 0x338: 0x3e, 0x339: 0x3f, 0x33c: 0x40, 0x33d: 0x41, 0x33e: 0x42, 0x33f: 0x43, + // Block 0xd, offset 0x340 + 0x37f: 0x44, + // Block 0xe, offset 0x380 + 0x380: 0x0e, 0x381: 0x0e, 0x382: 0x0e, 0x383: 0x0e, 0x384: 0x0e, 0x385: 0x0e, 0x386: 0x0e, 0x387: 0x0e, + 0x388: 0x0e, 0x389: 0x0e, 0x38a: 0x0e, 0x38b: 0x0e, 0x38c: 0x0e, 0x38d: 0x0e, 0x38e: 0x0e, 0x38f: 0x0e, + 0x390: 0x0e, 0x391: 0x0e, 0x392: 0x0e, 0x393: 0x0e, 0x394: 0x0e, 0x395: 0x0e, 0x396: 0x0e, 0x397: 0x0e, + 0x398: 0x0e, 0x399: 0x0e, 0x39a: 0x0e, 0x39b: 0x0e, 0x39c: 0x0e, 0x39d: 0x0e, 0x39e: 0x0e, 0x39f: 0x45, + 0x3a0: 0x0e, 0x3a1: 0x0e, 0x3a2: 0x0e, 0x3a3: 0x0e, 0x3a4: 0x0e, 0x3a5: 0x0e, 0x3a6: 0x0e, 0x3a7: 0x0e, + 0x3a8: 0x0e, 0x3a9: 0x0e, 0x3aa: 0x0e, 0x3ab: 0x0e, 0x3ac: 0x0e, 0x3ad: 0x0e, 0x3ae: 0x0e, 0x3af: 0x0e, + 0x3b0: 0x0e, 0x3b1: 0x0e, 0x3b2: 0x0e, 0x3b3: 0x46, 0x3b4: 0x47, + // Block 0xf, offset 0x3c0 + 0x3ff: 0x48, + // Block 0x10, offset 0x400 + 0x400: 0x0e, 
0x401: 0x0e, 0x402: 0x0e, 0x403: 0x0e, 0x404: 0x49, 0x405: 0x4a, 0x406: 0x0e, 0x407: 0x0e, + 0x408: 0x0e, 0x409: 0x0e, 0x40a: 0x0e, 0x40b: 0x4b, + // Block 0x11, offset 0x440 + 0x440: 0x4c, 0x443: 0x4d, 0x444: 0x4e, 0x445: 0x4f, 0x446: 0x50, + 0x448: 0x51, 0x449: 0x52, 0x44c: 0x53, 0x44d: 0x54, 0x44e: 0x55, 0x44f: 0x56, + 0x450: 0x57, 0x451: 0x58, 0x452: 0x0e, 0x453: 0x59, 0x454: 0x5a, 0x455: 0x5b, 0x456: 0x5c, 0x457: 0x5d, + 0x458: 0x0e, 0x459: 0x5e, 0x45a: 0x0e, 0x45b: 0x5f, 0x45f: 0x60, + 0x464: 0x61, 0x465: 0x62, 0x466: 0x0e, 0x467: 0x0e, + 0x469: 0x63, 0x46a: 0x64, 0x46b: 0x65, + // Block 0x12, offset 0x480 + 0x496: 0x0b, 0x497: 0x06, + 0x498: 0x0c, 0x49a: 0x0d, 0x49b: 0x0e, 0x49f: 0x0f, + 0x4a0: 0x06, 0x4a1: 0x06, 0x4a2: 0x06, 0x4a3: 0x06, 0x4a4: 0x06, 0x4a5: 0x06, 0x4a6: 0x06, 0x4a7: 0x06, + 0x4a8: 0x06, 0x4a9: 0x06, 0x4aa: 0x06, 0x4ab: 0x06, 0x4ac: 0x06, 0x4ad: 0x06, 0x4ae: 0x06, 0x4af: 0x06, + 0x4b0: 0x06, 0x4b1: 0x06, 0x4b2: 0x06, 0x4b3: 0x06, 0x4b4: 0x06, 0x4b5: 0x06, 0x4b6: 0x06, 0x4b7: 0x06, + 0x4b8: 0x06, 0x4b9: 0x06, 0x4ba: 0x06, 0x4bb: 0x06, 0x4bc: 0x06, 0x4bd: 0x06, 0x4be: 0x06, 0x4bf: 0x06, + // Block 0x13, offset 0x4c0 + 0x4c4: 0x08, 0x4c5: 0x08, 0x4c6: 0x08, 0x4c7: 0x09, + // Block 0x14, offset 0x500 + 0x500: 0x08, 0x501: 0x08, 0x502: 0x08, 0x503: 0x08, 0x504: 0x08, 0x505: 0x08, 0x506: 0x08, 0x507: 0x08, + 0x508: 0x08, 0x509: 0x08, 0x50a: 0x08, 0x50b: 0x08, 0x50c: 0x08, 0x50d: 0x08, 0x50e: 0x08, 0x50f: 0x08, + 0x510: 0x08, 0x511: 0x08, 0x512: 0x08, 0x513: 0x08, 0x514: 0x08, 0x515: 0x08, 0x516: 0x08, 0x517: 0x08, + 0x518: 0x08, 0x519: 0x08, 0x51a: 0x08, 0x51b: 0x08, 0x51c: 0x08, 0x51d: 0x08, 0x51e: 0x08, 0x51f: 0x08, + 0x520: 0x08, 0x521: 0x08, 0x522: 0x08, 0x523: 0x08, 0x524: 0x08, 0x525: 0x08, 0x526: 0x08, 0x527: 0x08, + 0x528: 0x08, 0x529: 0x08, 0x52a: 0x08, 0x52b: 0x08, 0x52c: 0x08, 0x52d: 0x08, 0x52e: 0x08, 0x52f: 0x08, + 0x530: 0x08, 0x531: 0x08, 0x532: 0x08, 0x533: 0x08, 0x534: 0x08, 0x535: 0x08, 0x536: 0x08, 0x537: 0x08, + 0x538: 0x08, 0x539: 0x08, 0x53a: 0x08, 0x53b: 0x08, 0x53c: 0x08, 0x53d: 0x08, 0x53e: 0x08, 0x53f: 0x66, + // Block 0x15, offset 0x540 + 0x560: 0x11, + 0x570: 0x09, 0x571: 0x09, 0x572: 0x09, 0x573: 0x09, 0x574: 0x09, 0x575: 0x09, 0x576: 0x09, 0x577: 0x09, + 0x578: 0x09, 0x579: 0x09, 0x57a: 0x09, 0x57b: 0x09, 0x57c: 0x09, 0x57d: 0x09, 0x57e: 0x09, 0x57f: 0x12, + // Block 0x16, offset 0x580 + 0x580: 0x09, 0x581: 0x09, 0x582: 0x09, 0x583: 0x09, 0x584: 0x09, 0x585: 0x09, 0x586: 0x09, 0x587: 0x09, + 0x588: 0x09, 0x589: 0x09, 0x58a: 0x09, 0x58b: 0x09, 0x58c: 0x09, 0x58d: 0x09, 0x58e: 0x09, 0x58f: 0x12, +} + +// inverseData contains 4-byte entries of the following format: +// +// <0 padding> +// +// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the +// UTF-8 encoding of the original rune. Mappings often have the following +// pattern: +// +// A -> A (U+FF21 -> U+0041) +// B -> B (U+FF22 -> U+0042) +// ... +// +// By xor-ing the last byte the same entry can be shared by many mappings. This +// reduces the total number of distinct entries by about two thirds. +// The resulting entry for the aforementioned mappings is +// +// { 0x01, 0xE0, 0x00, 0x00 } +// +// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get +// +// E0 ^ A1 = 41. +// +// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get +// +// E0 ^ A2 = 42. +// +// Note that because of the xor-ing, the byte sequence stored in the entry is +// not valid UTF-8. 
+var inverseData = [150][4]byte{ + {0x00, 0x00, 0x00, 0x00}, + {0x03, 0xe3, 0x80, 0xa0}, + {0x03, 0xef, 0xbc, 0xa0}, + {0x03, 0xef, 0xbc, 0xe0}, + {0x03, 0xef, 0xbd, 0xe0}, + {0x03, 0xef, 0xbf, 0x02}, + {0x03, 0xef, 0xbf, 0x00}, + {0x03, 0xef, 0xbf, 0x0e}, + {0x03, 0xef, 0xbf, 0x0c}, + {0x03, 0xef, 0xbf, 0x0f}, + {0x03, 0xef, 0xbf, 0x39}, + {0x03, 0xef, 0xbf, 0x3b}, + {0x03, 0xef, 0xbf, 0x3f}, + {0x03, 0xef, 0xbf, 0x2a}, + {0x03, 0xef, 0xbf, 0x0d}, + {0x03, 0xef, 0xbf, 0x25}, + {0x03, 0xef, 0xbd, 0x1a}, + {0x03, 0xef, 0xbd, 0x26}, + {0x01, 0xa0, 0x00, 0x00}, + {0x03, 0xef, 0xbd, 0x25}, + {0x03, 0xef, 0xbd, 0x23}, + {0x03, 0xef, 0xbd, 0x2e}, + {0x03, 0xef, 0xbe, 0x07}, + {0x03, 0xef, 0xbe, 0x05}, + {0x03, 0xef, 0xbd, 0x06}, + {0x03, 0xef, 0xbd, 0x13}, + {0x03, 0xef, 0xbd, 0x0b}, + {0x03, 0xef, 0xbd, 0x16}, + {0x03, 0xef, 0xbd, 0x0c}, + {0x03, 0xef, 0xbd, 0x15}, + {0x03, 0xef, 0xbd, 0x0d}, + {0x03, 0xef, 0xbd, 0x1c}, + {0x03, 0xef, 0xbd, 0x02}, + {0x03, 0xef, 0xbd, 0x1f}, + {0x03, 0xef, 0xbd, 0x1d}, + {0x03, 0xef, 0xbd, 0x17}, + {0x03, 0xef, 0xbd, 0x08}, + {0x03, 0xef, 0xbd, 0x09}, + {0x03, 0xef, 0xbd, 0x0e}, + {0x03, 0xef, 0xbd, 0x04}, + {0x03, 0xef, 0xbd, 0x05}, + {0x03, 0xef, 0xbe, 0x3f}, + {0x03, 0xef, 0xbe, 0x00}, + {0x03, 0xef, 0xbd, 0x2c}, + {0x03, 0xef, 0xbe, 0x06}, + {0x03, 0xef, 0xbe, 0x0c}, + {0x03, 0xef, 0xbe, 0x0f}, + {0x03, 0xef, 0xbe, 0x0d}, + {0x03, 0xef, 0xbe, 0x0b}, + {0x03, 0xef, 0xbe, 0x19}, + {0x03, 0xef, 0xbe, 0x15}, + {0x03, 0xef, 0xbe, 0x11}, + {0x03, 0xef, 0xbe, 0x31}, + {0x03, 0xef, 0xbe, 0x33}, + {0x03, 0xef, 0xbd, 0x0f}, + {0x03, 0xef, 0xbe, 0x30}, + {0x03, 0xef, 0xbe, 0x3e}, + {0x03, 0xef, 0xbe, 0x32}, + {0x03, 0xef, 0xbe, 0x36}, + {0x03, 0xef, 0xbd, 0x14}, + {0x03, 0xef, 0xbe, 0x2e}, + {0x03, 0xef, 0xbd, 0x1e}, + {0x03, 0xef, 0xbe, 0x10}, + {0x03, 0xef, 0xbf, 0x13}, + {0x03, 0xef, 0xbf, 0x15}, + {0x03, 0xef, 0xbf, 0x17}, + {0x03, 0xef, 0xbf, 0x1f}, + {0x03, 0xef, 0xbf, 0x1d}, + {0x03, 0xef, 0xbf, 0x1b}, + {0x03, 0xef, 0xbf, 0x09}, + {0x03, 0xef, 0xbf, 0x0b}, + {0x03, 0xef, 0xbf, 0x37}, + {0x03, 0xef, 0xbe, 0x04}, + {0x01, 0xe0, 0x00, 0x00}, + {0x03, 0xe2, 0xa6, 0x1a}, + {0x03, 0xe2, 0xa6, 0x26}, + {0x03, 0xe3, 0x80, 0x23}, + {0x03, 0xe3, 0x80, 0x2e}, + {0x03, 0xe3, 0x80, 0x25}, + {0x03, 0xe3, 0x83, 0x1e}, + {0x03, 0xe3, 0x83, 0x14}, + {0x03, 0xe3, 0x82, 0x06}, + {0x03, 0xe3, 0x82, 0x0b}, + {0x03, 0xe3, 0x82, 0x0c}, + {0x03, 0xe3, 0x82, 0x0d}, + {0x03, 0xe3, 0x82, 0x02}, + {0x03, 0xe3, 0x83, 0x0f}, + {0x03, 0xe3, 0x83, 0x08}, + {0x03, 0xe3, 0x83, 0x09}, + {0x03, 0xe3, 0x83, 0x2c}, + {0x03, 0xe3, 0x83, 0x0c}, + {0x03, 0xe3, 0x82, 0x13}, + {0x03, 0xe3, 0x82, 0x16}, + {0x03, 0xe3, 0x82, 0x15}, + {0x03, 0xe3, 0x82, 0x1c}, + {0x03, 0xe3, 0x82, 0x1f}, + {0x03, 0xe3, 0x82, 0x1d}, + {0x03, 0xe3, 0x82, 0x1a}, + {0x03, 0xe3, 0x82, 0x17}, + {0x03, 0xe3, 0x82, 0x08}, + {0x03, 0xe3, 0x82, 0x09}, + {0x03, 0xe3, 0x82, 0x0e}, + {0x03, 0xe3, 0x82, 0x04}, + {0x03, 0xe3, 0x82, 0x05}, + {0x03, 0xe3, 0x82, 0x3f}, + {0x03, 0xe3, 0x83, 0x00}, + {0x03, 0xe3, 0x83, 0x06}, + {0x03, 0xe3, 0x83, 0x05}, + {0x03, 0xe3, 0x83, 0x0d}, + {0x03, 0xe3, 0x83, 0x0b}, + {0x03, 0xe3, 0x83, 0x07}, + {0x03, 0xe3, 0x83, 0x19}, + {0x03, 0xe3, 0x83, 0x15}, + {0x03, 0xe3, 0x83, 0x11}, + {0x03, 0xe3, 0x83, 0x31}, + {0x03, 0xe3, 0x83, 0x33}, + {0x03, 0xe3, 0x83, 0x30}, + {0x03, 0xe3, 0x83, 0x3e}, + {0x03, 0xe3, 0x83, 0x32}, + {0x03, 0xe3, 0x83, 0x36}, + {0x03, 0xe3, 0x83, 0x2e}, + {0x03, 0xe3, 0x82, 0x07}, + {0x03, 0xe3, 0x85, 0x04}, + {0x03, 0xe3, 0x84, 0x10}, + {0x03, 0xe3, 0x85, 0x30}, + {0x03, 0xe3, 0x85, 
0x0d}, + {0x03, 0xe3, 0x85, 0x13}, + {0x03, 0xe3, 0x85, 0x15}, + {0x03, 0xe3, 0x85, 0x17}, + {0x03, 0xe3, 0x85, 0x1f}, + {0x03, 0xe3, 0x85, 0x1d}, + {0x03, 0xe3, 0x85, 0x1b}, + {0x03, 0xe3, 0x85, 0x09}, + {0x03, 0xe3, 0x85, 0x0f}, + {0x03, 0xe3, 0x85, 0x0b}, + {0x03, 0xe3, 0x85, 0x37}, + {0x03, 0xe3, 0x85, 0x3b}, + {0x03, 0xe3, 0x85, 0x39}, + {0x03, 0xe3, 0x85, 0x3f}, + {0x02, 0xc2, 0x02, 0x00}, + {0x02, 0xc2, 0x0e, 0x00}, + {0x02, 0xc2, 0x0c, 0x00}, + {0x02, 0xc2, 0x00, 0x00}, + {0x03, 0xe2, 0x82, 0x0f}, + {0x03, 0xe2, 0x94, 0x2a}, + {0x03, 0xe2, 0x86, 0x39}, + {0x03, 0xe2, 0x86, 0x3b}, + {0x03, 0xe2, 0x86, 0x3f}, + {0x03, 0xe2, 0x96, 0x0d}, + {0x03, 0xe2, 0x97, 0x25}, +} + +// Total table size 15512 bytes (15KiB) diff --git a/tools/vendor/golang.org/x/tools/go/analysis/analysis.go b/tools/vendor/golang.org/x/tools/go/analysis/analysis.go index 44ada22a03..e51e58b3d2 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/analysis.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/analysis.go @@ -24,6 +24,10 @@ type Analyzer struct { // (no capital or period, max ~60 letters). Doc string + // URL holds an optional link to a web page with additional + // documentation for this analyzer. + URL string + // Flags defines any flags accepted by the analyzer. // The manner in which these flags are exposed to the user // depends on the driver which runs the analyzer. @@ -145,11 +149,7 @@ type Pass struct { // WARNING: This is an experimental API and may change in the future. AllObjectFacts func() []ObjectFact - // typeErrors contains types.Errors that are associated with the pkg. - typeErrors []types.Error - /* Further fields may be added in future. */ - // For example, suggested or applied refactorings. } // PackageFact is a package together with an associated fact. diff --git a/tools/vendor/golang.org/x/tools/go/analysis/diagnostic.go b/tools/vendor/golang.org/x/tools/go/analysis/diagnostic.go index 5cdcf46d2a..7646ad0d49 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/diagnostic.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/diagnostic.go @@ -20,6 +20,17 @@ type Diagnostic struct { Category string // optional Message string + // URL is the optional location of a web page that provides + // additional documentation for this diagnostic. + // + // If URL is empty but a Category is specified, then the + // Analysis driver should treat the URL as "#"+Category. + // + // The URL may be relative. If so, the base URL is that of the + // Analyzer that produced the diagnostic; + // see https://pkg.go.dev/net/url#URL.ResolveReference. + URL string + // SuggestedFixes contains suggested fixes for a diagnostic which can be used to perform // edits to a file that address the diagnostic. // TODO(matloob): Should multiple SuggestedFixes be allowed for a diagnostic? 
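For context, the hunks above add a URL field to both analysis.Analyzer and analysis.Diagnostic, and most of the remaining hunks in this diff simply populate it for the vendored passes. Below is a minimal sketch of how a downstream checker might consume the new fields against the updated API; it is not part of this patch, and the analyzer name, module path, and message are hypothetical.

package example

import (
	"golang.org/x/tools/go/analysis"
	"golang.org/x/tools/go/analysis/passes/inspect"
)

// Analyzer is a hypothetical checker that sets the new Analyzer.URL field.
var Analyzer = &analysis.Analyzer{
	Name:     "example",
	Doc:      "report example conditions (illustrative only)",
	URL:      "https://pkg.go.dev/example.com/analyzers/example", // field added in this x/tools bump
	Requires: []*analysis.Analyzer{inspect.Analyzer},
	Run: func(pass *analysis.Pass) (interface{}, error) {
		if len(pass.Files) > 0 {
			// Diagnostics may also carry a URL. Per the doc comment added to
			// Diagnostic above, leaving URL empty while setting Category lets
			// the driver derive "#example" relative to the Analyzer's URL.
			pass.Report(analysis.Diagnostic{
				Pos:      pass.Files[0].Pos(),
				Category: "example",
				Message:  "illustrative diagnostic",
			})
		}
		return nil, nil
	},
}

Drivers that understand the new field can then link each finding to the analyzer's documentation page, as described in the Analyzer.URL and Diagnostic.URL comments above.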
diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go index 7288559fc0..e24dac9865 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go @@ -27,6 +27,7 @@ const Doc = "report mismatches between assembly files and Go declarations" var Analyzer = &analysis.Analyzer{ Name: "asmdecl", Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/asmdecl", Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go index 89146b7334..10489bea17 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go @@ -2,13 +2,13 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package assign defines an Analyzer that detects useless assignments. package assign // TODO(adonovan): check also for assignments to struct fields inside // methods that are on T instead of *T. import ( + _ "embed" "fmt" "go/ast" "go/token" @@ -21,15 +21,13 @@ import ( "golang.org/x/tools/go/ast/inspector" ) -const Doc = `check for useless assignments - -This checker reports assignments of the form x = x or a[i] = a[i]. -These are almost always useless, and even when they aren't they are -usually a mistake.` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "assign", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "assign"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/assign", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/assign/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/assign/doc.go new file mode 100644 index 0000000000..a4b1b64c51 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/assign/doc.go @@ -0,0 +1,14 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package assign defines an Analyzer that detects useless assignments. +// +// # Analyzer assign +// +// assign: check for useless assignments +// +// This checker reports assignments of the form x = x or a[i] = a[i]. +// These are almost always useless, and even when they aren't they are +// usually a mistake. +package assign diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go index 9261db7e4e..b40e081ec2 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go @@ -2,11 +2,10 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package atomic defines an Analyzer that checks for common mistakes -// using the sync/atomic package. 
package atomic import ( + _ "embed" "go/ast" "go/token" "go/types" @@ -17,23 +16,23 @@ import ( "golang.org/x/tools/go/ast/inspector" ) -const Doc = `check for common mistakes using the sync/atomic package - -The atomic checker looks for assignment statements of the form: - - x = atomic.AddUint64(&x, 1) - -which are not atomic.` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "atomic", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "atomic"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/atomic", Requires: []*analysis.Analyzer{inspect.Analyzer}, RunDespiteErrors: true, Run: run, } func run(pass *analysis.Pass) (interface{}, error) { + if !analysisutil.Imports(pass.Pkg, "sync/atomic") { + return nil, nil // doesn't directly import sync/atomic + } + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/atomic/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/atomic/doc.go new file mode 100644 index 0000000000..5aafe25d32 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/atomic/doc.go @@ -0,0 +1,17 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package atomic defines an Analyzer that checks for common mistakes +// using the sync/atomic package. +// +// # Analyzer atomic +// +// atomic: check for common mistakes using the sync/atomic package +// +// The atomic checker looks for assignment statements of the form: +// +// x = atomic.AddUint64(&x, 1) +// +// which are not atomic. +package atomic diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/atomicalign/atomicalign.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/atomicalign/atomicalign.go index e2e1a4f67c..fc2ce3052d 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/atomicalign/atomicalign.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/atomicalign/atomicalign.go @@ -25,6 +25,7 @@ const Doc = "check for non-64-bits-aligned arguments to sync/atomic functions" var Analyzer = &analysis.Analyzer{ Name: "atomicalign", Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/atomicalign", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go index 0d8b0bf4f1..4219f087b9 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go @@ -22,6 +22,7 @@ const Doc = "check for common mistakes involving boolean operators" var Analyzer = &analysis.Analyzer{ Name: "bools", Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/bools", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/buildssa/buildssa.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/buildssa/buildssa.go index 02b7b18b3f..ad42f39a8a 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/buildssa/buildssa.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/buildssa/buildssa.go @@ -22,6 +22,7 @@ import ( var Analyzer = &analysis.Analyzer{ Name: "buildssa", Doc: "build SSA-form IR for later passes", + URL: 
"https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/buildssa", Run: run, ResultType: reflect.TypeOf(new(SSA)), } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go index 775e507a34..a2a4a89b3a 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go @@ -25,6 +25,7 @@ const Doc = "check //go:build and // +build directives" var Analyzer = &analysis.Analyzer{ Name: "buildtag", Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/buildtag", Run: runBuildTag, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go index b61ee5c3dc..afff0d82d8 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go @@ -35,6 +35,7 @@ or slice to C, either directly, or via a pointer, array, or struct.` var Analyzer = &analysis.Analyzer{ Name: "cgocall", Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/cgocall", RunDespiteErrors: true, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go index 64e184d343..20fb70806a 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go @@ -37,6 +37,7 @@ should be replaced by: var Analyzer = &analysis.Analyzer{ Name: "composites", Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/composites", Requires: []*analysis.Analyzer{inspect.Analyzer}, RunDespiteErrors: true, Run: run, diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go index 8cc93e94dc..b3ca8ada40 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go @@ -29,6 +29,7 @@ values should be referred to through a pointer.` var Analyzer = &analysis.Analyzer{ Name: "copylocks", Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/copylocks", Requires: []*analysis.Analyzer{inspect.Analyzer}, RunDespiteErrors: true, Run: run, diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go index 73746d6f04..d21adeee90 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go @@ -24,6 +24,7 @@ import ( var Analyzer = &analysis.Analyzer{ Name: "ctrlflow", Doc: "build a control-flow graph", + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/ctrlflow", Run: run, ResultType: reflect.TypeOf(new(CFGs)), FactTypes: []analysis.Fact{new(noReturn)}, diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go index 9ea137386b..3a1818764a 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go +++ 
b/tools/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go @@ -12,6 +12,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" ) @@ -28,11 +29,16 @@ errors is discouraged.` var Analyzer = &analysis.Analyzer{ Name: "deepequalerrors", Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/deepequalerrors", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } func run(pass *analysis.Pass) (interface{}, error) { + if !analysisutil.Imports(pass.Pkg, "reflect") { + return nil, nil // doesn't directly import reflect + } + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go index 96adad3ee8..2fcbdfafb6 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go @@ -13,6 +13,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" ) @@ -25,6 +26,7 @@ of the second argument is not a pointer to a type implementing error.` var Analyzer = &analysis.Analyzer{ Name: "errorsas", Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/errorsas", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } @@ -37,6 +39,10 @@ func run(pass *analysis.Pass) (interface{}, error) { return nil, nil } + if !analysisutil.Imports(pass.Pkg, "errors") { + return nil, nil // doesn't directly import errors + } + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go index aff663046a..012e2ecd0c 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go @@ -51,6 +51,7 @@ known as "false sharing" that slows down both goroutines. 
var Analyzer = &analysis.Analyzer{ Name: "fieldalignment", Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/fieldalignment", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/findcall/findcall.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/findcall/findcall.go index 27b1b8400f..2671573d1f 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/findcall/findcall.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/findcall/findcall.go @@ -26,6 +26,7 @@ of a particular name.` var Analyzer = &analysis.Analyzer{ Name: "findcall", Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/findcall", Run: run, RunDespiteErrors: true, FactTypes: []analysis.Fact{new(foundFact)}, diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/framepointer/framepointer.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/framepointer/framepointer.go index 741492e477..0b3ded47ea 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/framepointer/framepointer.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/framepointer/framepointer.go @@ -20,6 +20,7 @@ const Doc = "report assembly that clobbers the frame pointer before saving it" var Analyzer = &analysis.Analyzer{ Name: "framepointer", Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/framepointer", Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go index 3b9168c6c3..61c3b764f7 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go @@ -35,6 +35,7 @@ diagnostic for such mistakes.` var Analyzer = &analysis.Analyzer{ Name: "httpresponse", Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/httpresponse", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/doc.go new file mode 100644 index 0000000000..3d2b1a3dcb --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/doc.go @@ -0,0 +1,24 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package ifaceassert defines an Analyzer that flags +// impossible interface-interface type assertions. +// +// # Analyzer ifaceassert +// +// ifaceassert: detect impossible interface-to-interface type assertions +// +// This checker flags type assertions v.(T) and corresponding type-switch cases +// in which the static type V of v is an interface that cannot possibly implement +// the target interface T. This occurs when V and T contain methods with the same +// name but different signatures. Example: +// +// var v interface { +// Read() +// } +// _ = v.(io.Reader) +// +// The Read method in v has a different signature than the Read method in +// io.Reader, so this assertion cannot succeed. 
+package ifaceassert diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go index 30130f63ea..cd4a477626 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go @@ -2,38 +2,26 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package ifaceassert defines an Analyzer that flags -// impossible interface-interface type assertions. package ifaceassert import ( + _ "embed" "go/ast" "go/types" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" ) -const Doc = `detect impossible interface-to-interface type assertions - -This checker flags type assertions v.(T) and corresponding type-switch cases -in which the static type V of v is an interface that cannot possibly implement -the target interface T. This occurs when V and T contain methods with the same -name but different signatures. Example: - - var v interface { - Read() - } - _ = v.(io.Reader) - -The Read method in v has a different signature than the Read method in -io.Reader, so this assertion cannot succeed. -` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "ifaceassert", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "ifaceassert"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/ifaceassert", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/parameterized.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/parameterized.go index b35f62dc73..b84577fcf8 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/parameterized.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/parameterized.go @@ -67,7 +67,7 @@ func (w *tpWalker) isParameterized(typ types.Type) (res bool) { // of a generic function type (or an interface method) that is // part of the type we're testing. We don't care about these type // parameters. - // Similarly, the receiver of a method may declare (rather then + // Similarly, the receiver of a method may declare (rather than // use) type parameters, we don't care about those either. // Thus, we only need to look at the input and result parameters. 
return w.isParameterized(t.Params()) || w.isParameterized(t.Results()) diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go index 165c70cbd3..3b121cb0ce 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go @@ -38,6 +38,7 @@ import ( var Analyzer = &analysis.Analyzer{ Name: "inspect", Doc: "optimize AST traversal for later passes", + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/inspect", Run: run, RunDespiteErrors: true, ResultType: reflect.TypeOf(new(inspector.Inspector)), diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/extractdoc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/extractdoc.go new file mode 100644 index 0000000000..0e175ca06f --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/extractdoc.go @@ -0,0 +1,113 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package analysisutil + +import ( + "fmt" + "go/parser" + "go/token" + "strings" +) + +// MustExtractDoc is like [ExtractDoc] but it panics on error. +// +// To use, define a doc.go file such as: +// +// // Package halting defines an analyzer of program termination. +// // +// // # Analyzer halting +// // +// // halting: reports whether execution will halt. +// // +// // The halting analyzer reports a diagnostic for functions +// // that run forever. To suppress the diagnostics, try inserting +// // a 'break' statement into each loop. +// package halting +// +// import _ "embed" +// +// //go:embed doc.go +// var doc string +// +// And declare your analyzer as: +// +// var Analyzer = &analysis.Analyzer{ +// Name: "halting", +// Doc: analysisutil.MustExtractDoc(doc, "halting"), +// ... +// } +func MustExtractDoc(content, name string) string { + doc, err := ExtractDoc(content, name) + if err != nil { + panic(err) + } + return doc +} + +// ExtractDoc extracts a section of a package doc comment from the +// provided contents of an analyzer package's doc.go file. +// +// A section is a portion of the comment between one heading and +// the next, using this form: +// +// # Analyzer NAME +// +// NAME: SUMMARY +// +// Full description... +// +// where NAME matches the name argument, and SUMMARY is a brief +// verb-phrase that describes the analyzer. The following lines, up +// until the next heading or the end of the comment, contain the full +// description. ExtractDoc returns the portion following the colon, +// which is the form expected by Analyzer.Doc. +// +// Example: +// +// # Analyzer printf +// +// printf: checks consistency of calls to printf +// +// The printf analyzer checks consistency of calls to printf. +// Here is the complete description... +// +// This notation allows a single doc comment to provide documentation +// for multiple analyzers, each in its own section. +// The HTML anchors generated for each heading are predictable. +// +// It returns an error if the content was not a valid Go source file +// containing a package doc comment with a heading of the required +// form. 
+// +// This machinery enables the package documentation (typically +// accessible via the web at https://pkg.go.dev/) and the command +// documentation (typically printed to a terminal) to be derived from +// the same source and formatted appropriately. +func ExtractDoc(content, name string) (string, error) { + if content == "" { + return "", fmt.Errorf("empty Go source file") + } + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, "", content, parser.ParseComments|parser.PackageClauseOnly) + if err != nil { + return "", fmt.Errorf("not a Go source file") + } + if f.Doc == nil { + return "", fmt.Errorf("Go source file has no package doc comment") + } + for _, section := range strings.Split(f.Doc.Text(), "\n# ") { + if body := strings.TrimPrefix(section, "Analyzer "+name); body != section && + body != "" && + body[0] == '\r' || body[0] == '\n' { + body = strings.TrimSpace(body) + rest := strings.TrimPrefix(body, name+":") + if rest == body { + return "", fmt.Errorf("'Analyzer %s' heading not followed by '%s: summary...' line", name, name) + } + return strings.TrimSpace(rest), nil + } + } + return "", fmt.Errorf("package doc comment contains no 'Analyzer %s' heading", name) +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go index ac37e4784e..6d8039fe2b 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go @@ -118,3 +118,12 @@ func Imports(pkg *types.Package, path string) bool { } return false } + +// IsNamed reports whether t is exactly a named type in a package with a given path. +func IsNamed(t types.Type, path, name string) bool { + if n, ok := t.(*types.Named); ok { + obj := n.Obj() + return obj.Pkg().Path() == path && obj.Name() == name + } + return false +} diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/doc.go new file mode 100644 index 0000000000..dc544df1bf --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/doc.go @@ -0,0 +1,68 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package loopclosure defines an Analyzer that checks for references to +// enclosing loop variables from within nested functions. +// +// # Analyzer loopclosure +// +// loopclosure: check references to loop variables from within nested functions +// +// This analyzer reports places where a function literal references the +// iteration variable of an enclosing loop, and the loop calls the function +// in such a way (e.g. with go or defer) that it may outlive the loop +// iteration and possibly observe the wrong value of the variable. +// +// In this example, all the deferred functions run after the loop has +// completed, so all observe the final value of v. +// +// for _, v := range list { +// defer func() { +// use(v) // incorrect +// }() +// } +// +// One fix is to create a new variable for each iteration of the loop: +// +// for _, v := range list { +// v := v // new var per iteration +// defer func() { +// use(v) // ok +// }() +// } +// +// The next example uses a go statement and has a similar problem. 
+// In addition, it has a data race because the loop updates v +// concurrent with the goroutines accessing it. +// +// for _, v := range elem { +// go func() { +// use(v) // incorrect, and a data race +// }() +// } +// +// A fix is the same as before. The checker also reports problems +// in goroutines started by golang.org/x/sync/errgroup.Group. +// A hard-to-spot variant of this form is common in parallel tests: +// +// func Test(t *testing.T) { +// for _, test := range tests { +// t.Run(test.name, func(t *testing.T) { +// t.Parallel() +// use(test) // incorrect, and a data race +// }) +// } +// } +// +// The t.Parallel() call causes the rest of the function to execute +// concurrent with the loop. +// +// The analyzer reports references only in the last statement, +// as it is not deep enough to understand the effects of subsequent +// statements that might render the reference benign. +// ("Last statement" is defined recursively in compound +// statements such as if, switch, and select.) +// +// See: https://golang.org/doc/go_faq.html#closures_and_goroutines +package loopclosure diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go index ae5b4151db..5620c35faa 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go @@ -2,82 +2,27 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package loopclosure defines an Analyzer that checks for references to -// enclosing loop variables from within nested functions. package loopclosure import ( + _ "embed" "go/ast" "go/types" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" ) -const Doc = `check references to loop variables from within nested functions - -This analyzer reports places where a function literal references the -iteration variable of an enclosing loop, and the loop calls the function -in such a way (e.g. with go or defer) that it may outlive the loop -iteration and possibly observe the wrong value of the variable. - -In this example, all the deferred functions run after the loop has -completed, so all observe the final value of v. - - for _, v := range list { - defer func() { - use(v) // incorrect - }() - } - -One fix is to create a new variable for each iteration of the loop: - - for _, v := range list { - v := v // new var per iteration - defer func() { - use(v) // ok - }() - } - -The next example uses a go statement and has a similar problem. -In addition, it has a data race because the loop updates v -concurrent with the goroutines accessing it. - - for _, v := range elem { - go func() { - use(v) // incorrect, and a data race - }() - } - -A fix is the same as before. The checker also reports problems -in goroutines started by golang.org/x/sync/errgroup.Group. -A hard-to-spot variant of this form is common in parallel tests: - - func Test(t *testing.T) { - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - t.Parallel() - use(test) // incorrect, and a data race - }) - } - } - -The t.Parallel() call causes the rest of the function to execute -concurrent with the loop. 
- -The analyzer reports references only in the last statement, -as it is not deep enough to understand the effects of subsequent -statements that might render the reference benign. -("Last statement" is defined recursively in compound -statements such as if, switch, and select.) - -See: https://golang.org/doc/go_faq.html#closures_and_goroutines` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "loopclosure", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "loopclosure"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/loopclosure", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/doc.go new file mode 100644 index 0000000000..28bf6c7e26 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/doc.go @@ -0,0 +1,16 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package lostcancel defines an Analyzer that checks for failure to +// call a context cancellation function. +// +// # Analyzer lostcancel +// +// lostcancel: check cancel func returned by context.WithCancel is called +// +// The cancellation function returned by context.WithCancel, WithTimeout, +// and WithDeadline must be called or the new context will remain live +// until its parent context is cancelled. +// (The background context is never cancelled.) +package lostcancel diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go index de6f840f68..2bccb67502 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go @@ -2,11 +2,10 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package lostcancel defines an Analyzer that checks for failure to -// call a context cancellation function. package lostcancel import ( + _ "embed" "fmt" "go/ast" "go/types" @@ -14,20 +13,18 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/ctrlflow" "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/cfg" ) -const Doc = `check cancel func returned by context.WithCancel is called - -The cancellation function returned by context.WithCancel, WithTimeout, -and WithDeadline must be called or the new context will remain live -until its parent context is cancelled. -(The background context is never cancelled.)` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "lostcancel", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "lostcancel"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/lostcancel", Run: run, Requires: []*analysis.Analyzer{ inspect.Analyzer, @@ -51,7 +48,7 @@ var contextPackage = "context" // checkLostCancel analyzes a single named or literal function. func run(pass *analysis.Pass) (interface{}, error) { // Fast path: bypass check if file doesn't use context.WithCancel. 
- if !hasImport(pass.Pkg, contextPackage) { + if !analysisutil.Imports(pass.Pkg, contextPackage) { return nil, nil } @@ -182,15 +179,6 @@ func runFunc(pass *analysis.Pass, node ast.Node) { func isCall(n ast.Node) bool { _, ok := n.(*ast.CallExpr); return ok } -func hasImport(pkg *types.Package, path string) bool { - for _, imp := range pkg.Imports() { - if imp.Path() == path { - return true - } - } - return false -} - // isContextWithCancel reports whether n is one of the qualified identifiers // context.With{Cancel,Timeout,Deadline}. func isContextWithCancel(info *types.Info, n ast.Node) bool { diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/doc.go new file mode 100644 index 0000000000..07f79332b2 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/doc.go @@ -0,0 +1,13 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package nilfunc defines an Analyzer that checks for useless +// comparisons against nil. +// +// # Analyzer nilfunc +// +// nilfunc: check for useless comparisons between functions and nil +// +// A useless comparison is one like f == nil as opposed to f() == nil. +package nilfunc diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go index e4c66df6d6..6df134399a 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go @@ -7,23 +7,25 @@ package nilfunc import ( + _ "embed" "go/ast" "go/token" "go/types" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/internal/typeparams" ) -const Doc = `check for useless comparisons between functions and nil - -A useless comparison is one like f == nil as opposed to f() == nil.` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "nilfunc", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "nilfunc"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/nilfunc", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/nilness/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/nilness/doc.go new file mode 100644 index 0000000000..212263741d --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/nilness/doc.go @@ -0,0 +1,45 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package nilness inspects the control-flow graph of an SSA function +// and reports errors such as nil pointer dereferences and degenerate +// nil pointer comparisons. +// +// # Analyzer nilness +// +// nilness: check for redundant or impossible nil comparisons +// +// The nilness checker inspects the control-flow graph of each function in +// a package and reports nil pointer dereferences, degenerate nil +// pointers, and panics with nil values. A degenerate comparison is of the form +// x==nil or x!=nil where x is statically known to be nil or non-nil. These are +// often a mistake, especially in control flow related to errors. 
Panics with nil +// values are checked because they are not detectable by +// +// if r := recover(); r != nil { +// +// This check reports conditions such as: +// +// if f == nil { // impossible condition (f is a function) +// } +// +// and: +// +// p := &v +// ... +// if p != nil { // tautological condition +// } +// +// and: +// +// if p == nil { +// print(*p) // nil dereference +// } +// +// and: +// +// if p == nil { +// panic(p) +// } +package nilness diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go index 6849c33cce..d1ca0748e8 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go @@ -2,61 +2,28 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package nilness inspects the control-flow graph of an SSA function -// and reports errors such as nil pointer dereferences and degenerate -// nil pointer comparisons. package nilness import ( + _ "embed" "fmt" "go/token" "go/types" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/buildssa" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ssa" "golang.org/x/tools/internal/typeparams" ) -const Doc = `check for redundant or impossible nil comparisons - -The nilness checker inspects the control-flow graph of each function in -a package and reports nil pointer dereferences, degenerate nil -pointers, and panics with nil values. A degenerate comparison is of the form -x==nil or x!=nil where x is statically known to be nil or non-nil. These are -often a mistake, especially in control flow related to errors. Panics with nil -values are checked because they are not detectable by - - if r := recover(); r != nil { - -This check reports conditions such as: - - if f == nil { // impossible condition (f is a function) - } - -and: - - p := &v - ... - if p != nil { // tautological condition - } - -and: - - if p == nil { - print(*p) // nil dereference - } - -and: - - if p == nil { - panic(p) - } -` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "nilness", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "nilness"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/nilness", Run: run, Requires: []*analysis.Analyzer{buildssa.Analyzer}, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go index f4f5616e56..4bf33d45f5 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go @@ -38,6 +38,7 @@ import ( var Analyzer = &analysis.Analyzer{ Name: "pkgfact", Doc: "gather name/value pairs from constant declarations", + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/pkgfact", Run: run, FactTypes: []analysis.Fact{new(pairsFact)}, ResultType: reflect.TypeOf(map[string]string{}), diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/printf/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/printf/doc.go new file mode 100644 index 0000000000..1ee16126ad --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/printf/doc.go @@ -0,0 +1,47 @@ +// Copyright 2023 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package printf defines an Analyzer that checks consistency +// of Printf format strings and arguments. +// +// # Analyzer printf +// +// printf: check consistency of Printf format strings and arguments +// +// The check applies to calls of the formatting functions such as +// [fmt.Printf] and [fmt.Sprintf], as well as any detected wrappers of +// those functions. +// +// In this example, the %d format operator requires an integer operand: +// +// fmt.Printf("%d", "hello") // fmt.Printf format %d has arg "hello" of wrong type string +// +// See the documentation of the fmt package for the complete set of +// format operators and their operand types. +// +// To enable printf checking on a function that is not found by this +// analyzer's heuristics (for example, because control is obscured by +// dynamic method calls), insert a bogus call: +// +// func MyPrintf(format string, args ...any) { +// if false { +// _ = fmt.Sprintf(format, args...) // enable printf checker +// } +// ... +// } +// +// The -funcs flag specifies a comma-separated list of names of additional +// known formatting functions or methods. If the name contains a period, +// it must denote a specific function using one of the following forms: +// +// dir/pkg.Function +// dir/pkg.Type.Method +// (*dir/pkg.Type).Method +// +// Otherwise the name is interpreted as a case-insensitive unqualified +// identifier such as "errorf". Either way, if a listed name ends in f, the +// function is assumed to be Printf-like, taking a format string before the +// argument list. Otherwise it is assumed to be Print-like, taking a list +// of arguments with no format string. +package printf diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go index daaf709a44..b2b8c67c75 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go @@ -2,12 +2,11 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package printf defines an Analyzer that checks consistency -// of Printf format strings and arguments. package printf import ( "bytes" + _ "embed" "fmt" "go/ast" "go/constant" @@ -32,43 +31,19 @@ func init() { Analyzer.Flags.Var(isPrint, "funcs", "comma-separated list of print function names to check") } +//go:embed doc.go +var doc string + var Analyzer = &analysis.Analyzer{ Name: "printf", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "printf"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/printf", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, ResultType: reflect.TypeOf((*Result)(nil)), FactTypes: []analysis.Fact{new(isWrapper)}, } -const Doc = `check consistency of Printf format strings and arguments - -The check applies to known functions (for example, those in package fmt) -as well as any detected wrappers of known functions. - -A function that wants to avail itself of printf checking but is not -found by this analyzer's heuristics (for example, due to use of -dynamic calls) can insert a bogus call: - - if false { - _ = fmt.Sprintf(format, args...) // enable printf checking - } - -The -funcs flag specifies a comma-separated list of names of additional -known formatting functions or methods. 
If the name contains a period, -it must denote a specific function using one of the following forms: - - dir/pkg.Function - dir/pkg.Type.Method - (*dir/pkg.Type).Method - -Otherwise the name is interpreted as a case-insensitive unqualified -identifier such as "errorf". Either way, if a listed name ends in f, the -function is assumed to be Printf-like, taking a format string before the -argument list. Otherwise it is assumed to be Print-like, taking a list -of arguments with no format string. -` - // Kind is a kind of fmt function behavior. type Kind int @@ -303,7 +278,7 @@ func checkPrintfFwd(pass *analysis.Pass, w *printfWrapper, call *ast.CallExpr, k // print/printf function can take, adding an ellipsis // would break the program. For example: // - // func foo(arg1 string, arg2 ...interface{} { + // func foo(arg1 string, arg2 ...interface{}) { // fmt.Printf("%s %v", arg1, arg2) // } return @@ -340,9 +315,10 @@ func checkPrintfFwd(pass *analysis.Pass, w *printfWrapper, call *ast.CallExpr, k // example, fmt.Printf forwards to fmt.Fprintf. We avoid relying on the // driver applying analyzers to standard packages because "go vet" does // not do so with gccgo, and nor do some other build systems. -// TODO(adonovan): eliminate the redundant facts once this restriction -// is lifted. var isPrint = stringSet{ + "fmt.Appendf": true, + "fmt.Append": true, + "fmt.Appendln": true, "fmt.Errorf": true, "fmt.Fprint": true, "fmt.Fprintf": true, @@ -1080,7 +1056,7 @@ func checkPrint(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) { if strings.Contains(s, "%") { m := printFormatRE.FindStringSubmatch(s) if m != nil { - pass.ReportRangef(call, "%s call has possible formatting directive %s", fn.FullName(), m[0]) + pass.ReportRangef(call, "%s call has possible Printf formatting directive %s", fn.FullName(), m[0]) } } } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/doc.go new file mode 100644 index 0000000000..32f342b97f --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/doc.go @@ -0,0 +1,27 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package reflectvaluecompare defines an Analyzer that checks for accidentally +// using == or reflect.DeepEqual to compare reflect.Value values. +// See issues 43993 and 18871. +// +// # Analyzer reflectvaluecompare +// +// reflectvaluecompare: check for comparing reflect.Value values with == or reflect.DeepEqual +// +// The reflectvaluecompare checker looks for expressions of the form: +// +// v1 == v2 +// v1 != v2 +// reflect.DeepEqual(v1, v2) +// +// where v1 or v2 are reflect.Values. Comparing reflect.Values directly +// is almost certainly not correct, as it compares the reflect package's +// internal representation, not the underlying value. 
+// Likely what is intended is: +// +// v1.Interface() == v2.Interface() +// v1.Interface() != v2.Interface() +// reflect.DeepEqual(v1.Interface(), v2.Interface()) +package reflectvaluecompare diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go index ef21f0e7da..27677139e1 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go @@ -2,43 +2,28 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package reflectvaluecompare defines an Analyzer that checks for accidentally -// using == or reflect.DeepEqual to compare reflect.Value values. -// See issues 43993 and 18871. package reflectvaluecompare import ( + _ "embed" "go/ast" "go/token" "go/types" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" ) -const Doc = `check for comparing reflect.Value values with == or reflect.DeepEqual - -The reflectvaluecompare checker looks for expressions of the form: - - v1 == v2 - v1 != v2 - reflect.DeepEqual(v1, v2) - -where v1 or v2 are reflect.Values. Comparing reflect.Values directly -is almost certainly not correct, as it compares the reflect package's -internal representation, not the underlying value. -Likely what is intended is: - - v1.Interface() == v2.Interface() - v1.Interface() != v2.Interface() - reflect.DeepEqual(v1.Interface(), v2.Interface()) -` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "reflectvaluecompare", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "reflectvaluecompare"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/reflectvaluecompare", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/shadow/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/shadow/doc.go new file mode 100644 index 0000000000..781fd2eb81 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/shadow/doc.go @@ -0,0 +1,33 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package shadow defines an Analyzer that checks for shadowed variables. +// +// # Analyzer shadow +// +// shadow: check for possible unintended shadowing of variables +// +// This analyzer check for shadowed variables. +// A shadowed variable is a variable declared in an inner scope +// with the same name and type as a variable in an outer scope, +// and where the outer variable is mentioned after the inner one +// is declared. +// +// (This definition can be refined; the module generates too many +// false positives and is not yet enabled by default.) 
+// +// For example: +// +// func BadRead(f *os.File, buf []byte) error { +// var err error +// for { +// n, err := f.Read(buf) // shadows the function variable 'err' +// if err != nil { +// break // causes return of wrong value +// } +// foo(buf) +// } +// return err +// } +package shadow diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/shadow/shadow.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/shadow/shadow.go index a19cecd14b..30258c991f 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/shadow/shadow.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/shadow/shadow.go @@ -2,50 +2,29 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package shadow defines an Analyzer that checks for shadowed variables. package shadow import ( + _ "embed" "go/ast" "go/token" "go/types" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" ) // NOTE: Experimental. Not part of the vet suite. -const Doc = `check for possible unintended shadowing of variables - -This analyzer check for shadowed variables. -A shadowed variable is a variable declared in an inner scope -with the same name and type as a variable in an outer scope, -and where the outer variable is mentioned after the inner one -is declared. - -(This definition can be refined; the module generates too many -false positives and is not yet enabled by default.) - -For example: - - func BadRead(f *os.File, buf []byte) error { - var err error - for { - n, err := f.Read(buf) // shadows the function variable 'err' - if err != nil { - break // causes return of wrong value - } - foo(buf) - } - return err - } -` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "shadow", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "shadow"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/shadow", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go index e968f27b40..bafb9112e1 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go @@ -29,6 +29,7 @@ const Doc = "check for shifts that equal or exceed the width of the integer" var Analyzer = &analysis.Analyzer{ Name: "shift", Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/shift", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/doc.go new file mode 100644 index 0000000000..583fed0147 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/doc.go @@ -0,0 +1,17 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package sigchanyzer defines an Analyzer that detects +// misuse of unbuffered signal as argument to signal.Notify. 
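[Editor's note, not part of the patch] A minimal sketch of the pattern the sigchanyzer analyzer above reports, next to the buffered form that the os/signal documentation asks for:

    package main

    import (
        "os"
        "os/signal"
    )

    func main() {
        bad := make(chan os.Signal) // unbuffered: a signal arriving while the receiver is busy is dropped
        signal.Notify(bad, os.Interrupt)

        good := make(chan os.Signal, 1) // buffered with capacity 1, as os/signal recommends
        signal.Notify(good, os.Interrupt)

        <-good // wait for Ctrl-C
        _ = bad
    }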
+// +// # Analyzer sigchanyzer +// +// sigchanyzer: check for unbuffered channel of os.Signal +// +// This checker reports call expression of the form +// +// signal.Notify(c <-chan os.Signal, sig ...os.Signal), +// +// where c is an unbuffered channel, which can be at risk of missing the signal. +package sigchanyzer diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go index c490a84ea7..5f121f720d 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go @@ -8,6 +8,7 @@ package sigchanyzer import ( "bytes" + _ "embed" "go/ast" "go/format" "go/token" @@ -15,23 +16,27 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" ) -const Doc = `check for unbuffered channel of os.Signal - -This checker reports call expression of the form signal.Notify(c <-chan os.Signal, sig ...os.Signal), -where c is an unbuffered channel, which can be at risk of missing the signal.` +//go:embed doc.go +var doc string // Analyzer describes sigchanyzer analysis function detector. var Analyzer = &analysis.Analyzer{ Name: "sigchanyzer", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "sigchanyzer"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/sigchanyzer", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } func run(pass *analysis.Pass) (interface{}, error) { + if !analysisutil.Imports(pass.Pkg, "os/signal") { + return nil, nil // doesn't directly import signal + } + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go index f85837d66b..1fe206b0fc 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go @@ -15,6 +15,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" ) @@ -27,11 +28,16 @@ the interface{} value passed to sort.Slice is actually a slice.` var Analyzer = &analysis.Analyzer{ Name: "sortslice", Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/sortslice", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } func run(pass *analysis.Pass) (interface{}, error) { + if !analysisutil.Imports(pass.Pkg, "sort") { + return nil, nil // doesn't directly import sort + } + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/doc.go new file mode 100644 index 0000000000..9ed88698dd --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/doc.go @@ -0,0 +1,30 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// Package stdmethods defines an Analyzer that checks for misspellings +// in the signatures of methods similar to well-known interfaces. +// +// # Analyzer stdmethods +// +// stdmethods: check signature of methods of well-known interfaces +// +// Sometimes a type may be intended to satisfy an interface but may fail to +// do so because of a mistake in its method signature. +// For example, the result of this WriteTo method should be (int64, error), +// not error, to satisfy io.WriterTo: +// +// type myWriterTo struct{...} +// func (myWriterTo) WriteTo(w io.Writer) error { ... } +// +// This check ensures that each method whose name matches one of several +// well-known interface methods from the standard library has the correct +// signature for that interface. +// +// Checked method names include: +// +// Format GobEncode GobDecode MarshalJSON MarshalXML +// Peek ReadByte ReadFrom ReadRune Scan Seek +// UnmarshalJSON UnreadByte UnreadRune WriteByte +// WriteTo +package stdmethods diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go index 41f455d100..28f51b1ec9 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go @@ -2,44 +2,27 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package stdmethods defines an Analyzer that checks for misspellings -// in the signatures of methods similar to well-known interfaces. package stdmethods import ( + _ "embed" "go/ast" "go/types" "strings" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" ) -const Doc = `check signature of methods of well-known interfaces - -Sometimes a type may be intended to satisfy an interface but may fail to -do so because of a mistake in its method signature. -For example, the result of this WriteTo method should be (int64, error), -not error, to satisfy io.WriterTo: - - type myWriterTo struct{...} - func (myWriterTo) WriteTo(w io.Writer) error { ... } - -This check ensures that each method whose name matches one of several -well-known interface methods from the standard library has the correct -signature for that interface. - -Checked method names include: - Format GobEncode GobDecode MarshalJSON MarshalXML - Peek ReadByte ReadFrom ReadRune Scan Seek - UnmarshalJSON UnreadByte UnreadRune WriteByte - WriteTo -` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "stdmethods", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "stdmethods"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/stdmethods", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/doc.go new file mode 100644 index 0000000000..205cd64011 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/doc.go @@ -0,0 +1,21 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package stringintconv defines an Analyzer that flags type conversions +// from integers to strings. 
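[Editor's note, not part of the patch] A quick sketch of the conversion the stringintconv analyzer above flags, and the two usual replacements:

    package main

    import (
        "fmt"
        "strconv"
    )

    func main() {
        n := 1234
        fmt.Println(strconv.Itoa(n)) // "1234": the decimal representation most callers want
        fmt.Println(string(rune(n))) // "Ӓ" (U+04D2): explicitly asking for the code point
        // fmt.Println(string(n))    // the flagged form: same result as string(rune(n)), but easy to misread
    }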
+// +// # Analyzer stringintconv +// +// stringintconv: check for string(int) conversions +// +// This checker flags conversions of the form string(x) where x is an integer +// (but not byte or rune) type. Such conversions are discouraged because they +// return the UTF-8 representation of the Unicode code point x, and not a decimal +// string representation of x as one might expect. Furthermore, if x denotes an +// invalid code point, the conversion cannot be statically rejected. +// +// For conversions that intend on using the code point, consider replacing them +// with string(rune(x)). Otherwise, strconv.Itoa and its equivalents return the +// string representation of the value in the desired base. +package stringintconv diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go index e41de809de..bb04dae626 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go @@ -2,11 +2,10 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package stringintconv defines an Analyzer that flags type conversions -// from integers to strings. package stringintconv import ( + _ "embed" "fmt" "go/ast" "go/types" @@ -14,26 +13,18 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/internal/typeparams" ) -const Doc = `check for string(int) conversions - -This checker flags conversions of the form string(x) where x is an integer -(but not byte or rune) type. Such conversions are discouraged because they -return the UTF-8 representation of the Unicode code point x, and not a decimal -string representation of x as one might expect. Furthermore, if x denotes an -invalid code point, the conversion cannot be statically rejected. - -For conversions that intend on using the code point, consider replacing them -with string(rune(x)). Otherwise, strconv.Itoa and its equivalents return the -string representation of the value in the desired base. 
-` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "stringintconv", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "stringintconv"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/stringintconv", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go index f0b15051c5..a0beb46bd1 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go @@ -28,6 +28,7 @@ Also report certain struct tags (json, xml) used with unexported fields.` var Analyzer = &analysis.Analyzer{ Name: "structtag", Doc: Doc, + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/structtag", Requires: []*analysis.Analyzer{inspect.Analyzer}, RunDespiteErrors: true, Run: run, diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/doc.go new file mode 100644 index 0000000000..a68adb12b4 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/doc.go @@ -0,0 +1,22 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package testinggoroutine defines an Analyzerfor detecting calls to +// Fatal from a test goroutine. +// +// # Analyzer testinggoroutine +// +// testinggoroutine: report calls to (*testing.T).Fatal from goroutines started by a test. +// +// Functions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and +// Skip{,f,Now} methods of *testing.T, must be called from the test goroutine itself. +// This checker detects calls to these functions that occur within a goroutine +// started by the test. For example: +// +// func TestFoo(t *testing.T) { +// go func() { +// t.Fatal("oops") // error: (*T).Fatal called from non-test goroutine +// }() +// } +package testinggoroutine diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go index 7ea8f77e33..907b71503e 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go @@ -5,6 +5,7 @@ package testinggoroutine import ( + _ "embed" "go/ast" "golang.org/x/tools/go/analysis" @@ -14,23 +15,13 @@ import ( "golang.org/x/tools/internal/typeparams" ) -const Doc = `report calls to (*testing.T).Fatal from goroutines started by a test. - -Functions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and -Skip{,f,Now} methods of *testing.T, must be called from the test goroutine itself. -This checker detects calls to these functions that occur within a goroutine -started by the test. 
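[Editor's note, not part of the patch] The testinggoroutine analyzer above reports Fatal-like calls made from goroutines started by a test. One common restructuring (a sketch; doWork is a hypothetical stand-in for the real work) is to send the failure back to the test goroutine:

    package example

    import "testing"

    func doWork() error { return nil } // hypothetical helper

    func TestWork(t *testing.T) {
        errc := make(chan error, 1)
        go func() {
            errc <- doWork() // report the error instead of calling t.Fatal here
        }()
        if err := <-errc; err != nil {
            t.Fatal(err) // Fatal now runs on the test goroutine, so the analyzer is satisfied
        }
    }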
For example: - -func TestFoo(t *testing.T) { - go func() { - t.Fatal("oops") // error: (*T).Fatal called from non-test goroutine - }() -} -` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "testinggoroutine", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "testinggoroutine"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/testinggoroutine", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/tests/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/tests/doc.go new file mode 100644 index 0000000000..3ae27db9c1 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/tests/doc.go @@ -0,0 +1,18 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package tests defines an Analyzer that checks for common mistaken +// usages of tests and examples. +// +// # Analyzer tests +// +// tests: check for common mistaken usages of tests and examples +// +// The tests checker walks Test, Benchmark, Fuzzing and Example functions checking +// malformed names, wrong signatures and examples documenting non-existent +// identifiers. +// +// Please see the documentation for package testing in golang.org/pkg/testing +// for the conventions that are enforced for Tests, Benchmarks, and Examples. +package tests diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go index 935aad00c9..9589a46a5a 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go @@ -2,11 +2,10 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package tests defines an Analyzer that checks for common mistaken -// usages of tests and examples. package tests import ( + _ "embed" "fmt" "go/ast" "go/token" @@ -17,22 +16,17 @@ import ( "unicode/utf8" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/internal/analysisinternal" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/internal/typeparams" ) -const Doc = `check for common mistaken usages of tests and examples - -The tests checker walks Test, Benchmark and Example functions checking -malformed names, wrong signatures and examples documenting non-existent -identifiers. - -Please see the documentation for package testing in golang.org/pkg/testing -for the conventions that are enforced for Tests, Benchmarks, and Examples.` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "tests", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "tests"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/tests", Run: run, } @@ -73,9 +67,7 @@ func run(pass *analysis.Pass) (interface{}, error) { checkTest(pass, fn, "Test") case strings.HasPrefix(fn.Name.Name, "Benchmark"): checkTest(pass, fn, "Benchmark") - } - // run fuzz tests diagnostics only for 1.18 i.e. when analysisinternal.DiagnoseFuzzTests is turned on. 
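[Editor's note, not part of the patch] For reference, the kinds of mistakes the tests analyzer above reports; with this patch, Fuzz functions are checked unconditionally rather than only when DiagnoseFuzzTests is set (sketch):

    package example

    import "testing"

    func Testparse(t *testing.T) {} // malformed name: the letter after "Test" must not be lowercase
    func TestParse(b *testing.B) {} // wrong signature: a Test function takes *testing.T
    func FuzzParse(f *testing.F) {  // now checked like Test and Benchmark functions
        f.Fuzz(func(t *testing.T, s string) { _ = s })
    }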
- if strings.HasPrefix(fn.Name.Name, "Fuzz") && analysisinternal.DiagnoseFuzzTests { + case strings.HasPrefix(fn.Name.Name, "Fuzz"): checkTest(pass, fn, "Fuzz") checkFuzz(pass, fn) } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/timeformat/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/timeformat/doc.go new file mode 100644 index 0000000000..5c665b298b --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/timeformat/doc.go @@ -0,0 +1,15 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package timeformat defines an Analyzer that checks for the use +// of time.Format or time.Parse calls with a bad format. +// +// # Analyzer timeformat +// +// timeformat: check for calls of (time.Time).Format or time.Parse with 2006-02-01 +// +// The timeformat checker looks for time formats with the 2006-02-01 (yyyy-dd-mm) +// format. Internationally, "yyyy-dd-mm" does not occur in common calendar date +// standards, and so it is more likely that 2006-01-02 (yyyy-mm-dd) was intended. +package timeformat diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/timeformat/timeformat.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/timeformat/timeformat.go index acb198f95c..c45b9fa54b 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/timeformat/timeformat.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/timeformat/timeformat.go @@ -7,6 +7,7 @@ package timeformat import ( + _ "embed" "go/ast" "go/constant" "go/token" @@ -15,6 +16,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" ) @@ -22,21 +24,23 @@ import ( const badFormat = "2006-02-01" const goodFormat = "2006-01-02" -const Doc = `check for calls of (time.Time).Format or time.Parse with 2006-02-01 - -The timeformat checker looks for time formats with the 2006-02-01 (yyyy-dd-mm) -format. Internationally, "yyyy-dd-mm" does not occur in common calendar date -standards, and so it is more likely that 2006-01-02 (yyyy-mm-dd) was intended. -` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "timeformat", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "timeformat"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/timeformat", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } func run(pass *analysis.Pass) (interface{}, error) { + // Note: (time.Time).Format is a method and can be a typeutil.Callee + // without directly importing "time". So we cannot just skip this package + // when !analysisutil.Imports(pass.Pkg, "time"). + // TODO(taking): Consider using a prepass to collect typeutil.Callees. + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/doc.go new file mode 100644 index 0000000000..5781bbd32d --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/doc.go @@ -0,0 +1,14 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
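[Editor's note, not part of the patch] Returning to the timeformat analyzer vendored above, a sketch of the mistake it targets:

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Layout "2006-02-01" is yyyy-dd-mm, so day and month are silently swapped on parse.
        bad, _ := time.Parse("2006-02-01", "2024-07-09")
        // Layout "2006-01-02" is the ISO-style yyyy-mm-dd that was almost certainly intended.
        good, _ := time.Parse("2006-01-02", "2024-07-09")
        fmt.Println(bad, good)
    }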
+ +// The unmarshal package defines an Analyzer that checks for passing +// non-pointer or non-interface types to unmarshal and decode functions. +// +// # Analyzer unmarshal +// +// unmarshal: report passing non-pointer or non-interface values to unmarshal +// +// The unmarshal analysis reports calls to functions such as json.Unmarshal +// in which the argument type is not a pointer or an interface. +package unmarshal diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/unmarshal.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/unmarshal.go index 5129048a07..7043baa899 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/unmarshal.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/unmarshal.go @@ -2,29 +2,28 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// The unmarshal package defines an Analyzer that checks for passing -// non-pointer or non-interface types to unmarshal and decode functions. package unmarshal import ( + _ "embed" "go/ast" "go/types" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" "golang.org/x/tools/internal/typeparams" ) -const Doc = `report passing non-pointer or non-interface values to unmarshal - -The unmarshal analysis reports calls to functions such as json.Unmarshal -in which the argument type is not a pointer or an interface.` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "unmarshal", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "unmarshal"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unmarshal", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } @@ -37,6 +36,12 @@ func run(pass *analysis.Pass) (interface{}, error) { return nil, nil } + // Note: (*"encoding/json".Decoder).Decode, (* "encoding/gob".Decoder).Decode + // and (* "encoding/xml".Decoder).Decode are methods and can be a typeutil.Callee + // without directly importing their packages. So we cannot just skip this package + // when !analysisutil.Imports(pass.Pkg, "encoding/..."). + // TODO(taking): Consider using a prepass to collect typeutil.Callees. + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ @@ -51,6 +56,7 @@ func run(pass *analysis.Pass) (interface{}, error) { // Classify the callee (without allocating memory). argidx := -1 + recv := fn.Type().(*types.Signature).Recv() if fn.Name() == "Unmarshal" && recv == nil { // "encoding/json".Unmarshal diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/unreachable/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/unreachable/doc.go new file mode 100644 index 0000000000..d17d0d9444 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/unreachable/doc.go @@ -0,0 +1,14 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package unreachable defines an Analyzer that checks for unreachable code. +// +// # Analyzer unreachable +// +// unreachable: check for unreachable code +// +// The unreachable analyzer finds statements that execution can never reach +// because they are preceded by an return statement, a call to panic, an +// infinite loop, or similar constructs. 
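[Editor's note, not part of the patch] For the unmarshal analyzer added above, a sketch of the call it reports:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    func main() {
        var cfg struct{ Name string }
        data := []byte(`{"Name":"tools"}`)

        fmt.Println(json.Unmarshal(data, cfg))  // flagged: cfg is passed by value, so nothing is populated
        fmt.Println(json.Unmarshal(data, &cfg)) // correct: pass a pointer
        fmt.Println(cfg.Name)
    }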
+package unreachable diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/unreachable/unreachable.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/unreachable/unreachable.go index 90896dd1bb..b810db7ee9 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/unreachable/unreachable.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/unreachable/unreachable.go @@ -2,30 +2,29 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package unreachable defines an Analyzer that checks for unreachable code. package unreachable // TODO(adonovan): use the new cfg package, which is more precise. import ( + _ "embed" "go/ast" "go/token" "log" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" ) -const Doc = `check for unreachable code - -The unreachable analyzer finds statements that execution can never reach -because they are preceded by an return statement, a call to panic, an -infinite loop, or similar constructs.` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "unreachable", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "unreachable"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unreachable", Requires: []*analysis.Analyzer{inspect.Analyzer}, RunDespiteErrors: true, Run: run, diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/doc.go new file mode 100644 index 0000000000..de10804cb1 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/doc.go @@ -0,0 +1,17 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package unsafeptr defines an Analyzer that checks for invalid +// conversions of uintptr to unsafe.Pointer. +// +// # Analyzer unsafeptr +// +// unsafeptr: check for invalid conversions of uintptr to unsafe.Pointer +// +// The unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer +// to convert integers to pointers. A conversion from uintptr to +// unsafe.Pointer is invalid if it implies that there is a uintptr-typed +// word in memory that holds a pointer value, because that word will be +// invisible to stack copying and to the garbage collector. +package unsafeptr diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go index ed86e5ebf0..e43ac20782 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go @@ -7,6 +7,7 @@ package unsafeptr import ( + _ "embed" "go/ast" "go/token" "go/types" @@ -17,17 +18,13 @@ import ( "golang.org/x/tools/go/ast/inspector" ) -const Doc = `check for invalid conversions of uintptr to unsafe.Pointer - -The unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer -to convert integers to pointers. 
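[Editor's note, not part of the patch] A sketch of the uintptr round-trip the unsafeptr analyzer warns about, next to the single-expression arithmetic the unsafe.Pointer rules allow:

    package main

    import (
        "fmt"
        "unsafe"
    )

    func main() {
        x := [2]int{10, 20}

        // Flagged: the uintptr lives in a variable, so the GC and stack copier cannot see the pointer.
        u := uintptr(unsafe.Pointer(&x[0]))
        p := (*int)(unsafe.Pointer(u))
        _ = p

        // Allowed: the arithmetic and the conversion back happen in a single expression.
        q := (*int)(unsafe.Pointer(uintptr(unsafe.Pointer(&x[0])) + unsafe.Sizeof(x[0])))
        fmt.Println(*q) // 20
    }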
A conversion from uintptr to -unsafe.Pointer is invalid if it implies that there is a uintptr-typed -word in memory that holds a pointer value, because that word will be -invisible to stack copying and to the garbage collector.` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "unsafeptr", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "unsafeptr"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unsafeptr", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/doc.go new file mode 100644 index 0000000000..a1bf4cf940 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/doc.go @@ -0,0 +1,19 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package unusedresult defines an analyzer that checks for unused +// results of calls to certain pure functions. +// +// # Analyzer unusedresult +// +// unusedresult: check for unused results of calls to some functions +// +// Some functions like fmt.Errorf return a result and have no side +// effects, so it is always a mistake to discard the result. Other +// functions may return an error that must not be ignored, or a cleanup +// operation that must be called. This analyzer reports calls to +// functions like these when the result of the call is ignored. +// +// The set of functions may be controlled using flags. +package unusedresult diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go index 06747ba72b..cb487a2177 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go @@ -3,10 +3,18 @@ // license that can be found in the LICENSE file. // Package unusedresult defines an analyzer that checks for unused -// results of calls to certain pure functions. +// results of calls to certain functions. package unusedresult +// It is tempting to make this analysis inductive: for each function +// that tail-calls one of the functions that we check, check those +// functions too. However, just because you must use the result of +// fmt.Sprintf doesn't mean you need to use the result of every +// function that returns a formatted string: it may have other results +// and effects. + import ( + _ "embed" "go/ast" "go/token" "go/types" @@ -17,24 +25,16 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/go/types/typeutil" ) -// TODO(adonovan): make this analysis modular: export a mustUseResult -// fact for each function that tail-calls one of the functions that we -// check, and check those functions too. - -const Doc = `check for unused results of calls to some functions - -Some functions like fmt.Errorf return a result and have no side effects, -so it is always a mistake to discard the result. This analyzer reports -calls to certain functions in which the result of the call is ignored. 
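[Editor's note, not part of the patch] And the unusedresult analyzer above, in its simplest form (sketch):

    package main

    import (
        "errors"
        "fmt"
    )

    func main() {
        err := errors.New("base failure")

        fmt.Errorf("wrapping: %w", err)       // flagged: the new error is built and immediately dropped
        err = fmt.Errorf("wrapping: %w", err) // the result must be assigned (or returned) to be useful
        fmt.Println(err)
    }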
- -The set of functions may be controlled using flags.` +//go:embed doc.go +var doc string var Analyzer = &analysis.Analyzer{ Name: "unusedresult", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "unusedresult"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedresult", Requires: []*analysis.Analyzer{inspect.Analyzer}, Run: run, } @@ -43,8 +43,21 @@ var Analyzer = &analysis.Analyzer{ var funcs, stringMethods stringSetFlag func init() { - // TODO(adonovan): provide a comment syntax to allow users to - // add their functions to this set using facts. + // TODO(adonovan): provide a comment or declaration syntax to + // allow users to add their functions to this set using facts. + // For example: + // + // func ignoringTheErrorWouldBeVeryBad() error { + // type mustUseResult struct{} // enables vet unusedresult check + // ... + // } + // + // ignoringTheErrorWouldBeVeryBad() // oops + // + + // List standard library functions here. + // The context.With{Cancel,Deadline,Timeout} entries are + // effectively redundant wrt the lostcancel analyzer. funcs.Set("errors.New,fmt.Errorf,fmt.Sprintf,fmt.Sprint,sort.Reverse,context.WithValue,context.WithCancel,context.WithDeadline,context.WithTimeout") Analyzer.Flags.Var(&funcs, "funcs", "comma-separated list of functions whose results must be used") @@ -57,6 +70,14 @@ func init() { func run(pass *analysis.Pass) (interface{}, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + // Split functions into (pkg, name) pairs to save allocation later. + pkgFuncs := make(map[[2]string]bool, len(funcs)) + for s := range funcs { + if i := strings.LastIndexByte(s, '.'); i > 0 { + pkgFuncs[[2]string{s[:i], s[i+1:]}] = true + } + } + nodeFilter := []ast.Node{ (*ast.ExprStmt)(nil), } @@ -65,41 +86,26 @@ func run(pass *analysis.Pass) (interface{}, error) { if !ok { return // not a call statement } - fun := analysisutil.Unparen(call.Fun) - - if pass.TypesInfo.Types[fun].IsType() { - return // a conversion, not a call - } - x, _, _, _ := typeparams.UnpackIndexExpr(fun) - if x != nil { - fun = x // If this is generic function or method call, skip the instantiation arguments - } - - selector, ok := fun.(*ast.SelectorExpr) + // Call to function or method? + fn, ok := typeutil.Callee(pass.TypesInfo, call).(*types.Func) if !ok { - return // neither a method call nor a qualified ident + return // e.g. var or builtin } - sel, ok := pass.TypesInfo.Selections[selector] - if ok && sel.Kind() == types.MethodVal { + if sig := fn.Type().(*types.Signature); sig.Recv() != nil { // method (e.g. foo.String()) - obj := sel.Obj().(*types.Func) - sig := sel.Type().(*types.Signature) if types.Identical(sig, sigNoArgsStringResult) { - if stringMethods[obj.Name()] { + if stringMethods[fn.Name()] { pass.Reportf(call.Lparen, "result of (%s).%s call not used", - sig.Recv().Type(), obj.Name()) + sig.Recv().Type(), fn.Name()) } } - } else if !ok { - // package-qualified function (e.g. fmt.Errorf) - obj := pass.TypesInfo.Uses[selector.Sel] - if obj, ok := obj.(*types.Func); ok { - qname := obj.Pkg().Path() + "." + obj.Name() - if funcs[qname] { - pass.Reportf(call.Lparen, "result of %v call not used", qname) - } + } else { + // package-level function (e.g. 
fmt.Errorf) + if pkgFuncs[[2]string{fn.Pkg().Path(), fn.Name()}] { + pass.Reportf(call.Lparen, "result of %s.%s call not used", + fn.Pkg().Path(), fn.Name()) } } }) diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/doc.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/doc.go new file mode 100644 index 0000000000..de10dc8c8e --- /dev/null +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/doc.go @@ -0,0 +1,34 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package unusedwrite checks for unused writes to the elements of a struct or array object. +// +// # Analyzer unusedwrite +// +// unusedwrite: checks for unused writes +// +// The analyzer reports instances of writes to struct fields and +// arrays that are never read. Specifically, when a struct object +// or an array is copied, its elements are copied implicitly by +// the compiler, and any element write to this copy does nothing +// with the original object. +// +// For example: +// +// type T struct { x int } +// +// func f(input []T) { +// for i, v := range input { // v is a copy +// v.x = i // unused write to field x +// } +// } +// +// Another example is about non-pointer receiver: +// +// type T struct { x int } +// +// func (t T) f() { // t is a copy +// t.x = i // unused write to field x +// } +package unusedwrite diff --git a/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go b/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go index 9cc45e0a36..f5d0f116ca 100644 --- a/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go +++ b/tools/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go @@ -2,49 +2,28 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package unusedwrite checks for unused writes to the elements of a struct or array object. package unusedwrite import ( + _ "embed" "fmt" "go/types" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/buildssa" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ssa" ) -// Doc is a documentation string. -const Doc = `checks for unused writes - -The analyzer reports instances of writes to struct fields and -arrays that are never read. Specifically, when a struct object -or an array is copied, its elements are copied implicitly by -the compiler, and any element write to this copy does nothing -with the original object. - -For example: - - type T struct { x int } - func f(input []T) { - for i, v := range input { // v is a copy - v.x = i // unused write to field x - } - } - -Another example is about non-pointer receiver: - - type T struct { x int } - func (t T) f() { // t is a copy - t.x = i // unused write to field x - } -` +//go:embed doc.go +var doc string // Analyzer reports instances of writes to struct fields and arrays // that are never read. 
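[Editor's note, not part of the patch] The unusedwrite doc above shows the range-copy and value-receiver pitfalls; the usual fixes are to write through the index or a pointer receiver (sketch):

    package example

    type T struct{ x int }

    func fill(input []T) {
        for i := range input {
            input[i].x = i // writes the element in place instead of a discarded copy
        }
    }

    func (t *T) set(v int) { // pointer receiver, so the caller observes the write
        t.x = v
    }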
var Analyzer = &analysis.Analyzer{ Name: "unusedwrite", - Doc: Doc, + Doc: analysisutil.MustExtractDoc(doc, "unusedwrite"), + URL: "https://pkg.go.dev/golang.org/x/tools/go/analysis/passes/unusedwrite", Requires: []*analysis.Analyzer{buildssa.Analyzer}, Run: run, } diff --git a/tools/vendor/golang.org/x/tools/go/ast/inspector/inspector.go b/tools/vendor/golang.org/x/tools/go/ast/inspector/inspector.go index 3fbfebf369..1fc1de0bd1 100644 --- a/tools/vendor/golang.org/x/tools/go/ast/inspector/inspector.go +++ b/tools/vendor/golang.org/x/tools/go/ast/inspector/inspector.go @@ -64,8 +64,9 @@ type event struct { // depth-first order. It calls f(n) for each node n before it visits // n's children. // +// The complete traversal sequence is determined by ast.Inspect. // The types argument, if non-empty, enables type-based filtering of -// events. The function f if is called only for nodes whose type +// events. The function f is called only for nodes whose type // matches an element of the types slice. func (in *Inspector) Preorder(types []ast.Node, f func(ast.Node)) { // Because it avoids postorder calls to f, and the pruning @@ -97,6 +98,7 @@ func (in *Inspector) Preorder(types []ast.Node, f func(ast.Node)) { // of the non-nil children of the node, followed by a call of // f(n, false). // +// The complete traversal sequence is determined by ast.Inspect. // The types argument, if non-empty, enables type-based filtering of // events. The function f if is called only for nodes whose type // matches an element of the types slice. diff --git a/tools/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go b/tools/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go index 165ede0f8f..03543bd4bb 100644 --- a/tools/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go +++ b/tools/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go @@ -128,15 +128,14 @@ func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, // (from "version"). Select appropriate importer. if len(data) > 0 { switch data[0] { - case 'i': - _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path) - return pkg, err + case 'v', 'c', 'd': // binary, till go1.10 + return nil, fmt.Errorf("binary (%c) import format is no longer supported", data[0]) - case 'v', 'c', 'd': - _, pkg, err := gcimporter.BImportData(fset, imports, data, path) + case 'i': // indexed, till go1.19 + _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path) return pkg, err - case 'u': + case 'u': // unified, from go1.20 _, pkg, err := gcimporter.UImportData(fset, imports, data[1:], path) return pkg, err diff --git a/tools/vendor/golang.org/x/tools/go/packages/golist.go b/tools/vendor/golang.org/x/tools/go/packages/golist.go index 6bb7168d2e..58230038a7 100644 --- a/tools/vendor/golang.org/x/tools/go/packages/golist.go +++ b/tools/vendor/golang.org/x/tools/go/packages/golist.go @@ -625,7 +625,12 @@ func (state *golistState) createDriverResponse(words ...string) (*driverResponse } if pkg.PkgPath == "unsafe" { - pkg.GoFiles = nil // ignore fake unsafe.go file + pkg.CompiledGoFiles = nil // ignore fake unsafe.go file (#59929) + } else if len(pkg.CompiledGoFiles) == 0 { + // Work around for pre-go.1.11 versions of go list. + // TODO(matloob): they should be handled by the fallback. + // Can we delete this? + pkg.CompiledGoFiles = pkg.GoFiles } // Assume go list emits only absolute paths for Dir. 
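[Editor's note, not part of the patch] Several of the passes touched in this patch are built on the inspector.Preorder API whose documentation is adjusted above; a minimal run function using the type-filtered traversal looks roughly like this (sketch; the owning Analyzer would list inspect.Analyzer in Requires):

    package example

    import (
        "go/ast"

        "golang.org/x/tools/go/analysis"
        "golang.org/x/tools/go/analysis/passes/inspect"
        "golang.org/x/tools/go/ast/inspector"
    )

    func run(pass *analysis.Pass) (interface{}, error) {
        ins := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)

        // The callback fires only for *ast.CallExpr nodes, in the order ast.Inspect would visit them.
        nodeFilter := []ast.Node{(*ast.CallExpr)(nil)}
        ins.Preorder(nodeFilter, func(n ast.Node) {
            call := n.(*ast.CallExpr)
            pass.Reportf(call.Lparen, "found a call with %d argument(s)", len(call.Args))
        })
        return nil, nil
    }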
@@ -663,16 +668,12 @@ func (state *golistState) createDriverResponse(words ...string) (*driverResponse response.Roots = append(response.Roots, pkg.ID) } - // Work around for pre-go.1.11 versions of go list. - // TODO(matloob): they should be handled by the fallback. - // Can we delete this? - if len(pkg.CompiledGoFiles) == 0 { - pkg.CompiledGoFiles = pkg.GoFiles - } - // Temporary work-around for golang/go#39986. Parse filenames out of // error messages. This happens if there are unrecoverable syntax // errors in the source, so we can't match on a specific error message. + // + // TODO(rfindley): remove this heuristic, in favor of considering + // InvalidGoFiles from the list driver. if err := p.Error; err != nil && state.shouldAddFilenameFromError(p) { addFilenameFromPos := func(pos string) bool { split := strings.Split(pos, ":") @@ -891,6 +892,15 @@ func golistargs(cfg *Config, words []string, goVersion int) []string { // probably because you'd just get the TestMain. fmt.Sprintf("-find=%t", !cfg.Tests && cfg.Mode&findFlags == 0 && !usesExportData(cfg)), } + + // golang/go#60456: with go1.21 and later, go list serves pgo variants, which + // can be costly to compute and may result in redundant processing for the + // caller. Disable these variants. If someone wants to add e.g. a NeedPGO + // mode flag, that should be a separate proposal. + if goVersion >= 21 { + fullargs = append(fullargs, "-pgo=off") + } + fullargs = append(fullargs, cfg.BuildFlags...) fullargs = append(fullargs, "--") fullargs = append(fullargs, words...) diff --git a/tools/vendor/golang.org/x/tools/go/packages/packages.go b/tools/vendor/golang.org/x/tools/go/packages/packages.go index 0f1505b808..632be722a2 100644 --- a/tools/vendor/golang.org/x/tools/go/packages/packages.go +++ b/tools/vendor/golang.org/x/tools/go/packages/packages.go @@ -308,6 +308,9 @@ type Package struct { TypeErrors []types.Error // GoFiles lists the absolute file paths of the package's Go source files. + // It may include files that should not be compiled, for example because + // they contain non-matching build tags, are documentary pseudo-files such as + // unsafe/unsafe.go or builtin/builtin.go, or are subject to cgo preprocessing. GoFiles []string // CompiledGoFiles lists the absolute file paths of the package's source diff --git a/tools/vendor/golang.org/x/tools/go/ssa/builder.go b/tools/vendor/golang.org/x/tools/go/ssa/builder.go index be8d36a6ee..11b6423191 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/builder.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/builder.go @@ -363,7 +363,7 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ } case "new": - alloc := emitNew(fn, deref(typ), pos) + alloc := emitNew(fn, mustDeref(typ), pos) alloc.Comment = "new" return alloc @@ -373,10 +373,8 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ // We must still evaluate the value, though. (If it // was side-effect free, the whole call would have // been constant-folded.) - // - // Type parameters are always non-constant so use Underlying. 
- t := deref(fn.typeOf(args[0])).Underlying() - if at, ok := t.(*types.Array); ok { + t, _ := deref(fn.typeOf(args[0])) + if at, ok := typeparams.CoreType(t).(*types.Array); ok { b.expr(fn, args[0]) // for effects only return intConst(at.Len()) } @@ -431,12 +429,12 @@ func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { return &address{addr: v, pos: e.Pos(), expr: e} case *ast.CompositeLit: - t := deref(fn.typeOf(e)) + typ, _ := deref(fn.typeOf(e)) var v *Alloc if escaping { - v = emitNew(fn, t, e.Lbrace) + v = emitNew(fn, typ, e.Lbrace) } else { - v = fn.addLocal(t, e.Lbrace) + v = fn.addLocal(typ, e.Lbrace) } v.Comment = "complit" var sb storebuf @@ -459,7 +457,7 @@ func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { wantAddr := true v := b.receiver(fn, e.X, wantAddr, escaping, sel) index := sel.index[len(sel.index)-1] - fld := typeparams.CoreType(deref(v.Type())).(*types.Struct).Field(index) + fld := fieldOf(mustDeref(v.Type()), index) // v is an addr. // Due to the two phases of resolving AssignStmt, a panic from x.f = p() // when x is nil is required to come after the side-effects of @@ -508,7 +506,7 @@ func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { v.setType(et) return fn.emit(v) } - return &lazyAddress{addr: emit, t: deref(et), pos: e.Lbrack, expr: e} + return &lazyAddress{addr: emit, t: mustDeref(et), pos: e.Lbrack, expr: e} case *ast.StarExpr: return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e} @@ -554,7 +552,7 @@ func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb * // so if the type of the location is a pointer, // an &-operation is implied. if _, ok := loc.(blank); !ok { // avoid calling blank.typ() - if isPointer(loc.typ()) { + if _, ok := deref(loc.typ()); ok { ptr := b.addr(fn, e, true).address(fn) // copy address if sb != nil { @@ -584,7 +582,7 @@ func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb * // Subtle: emit debug ref for aggregate types only; // slice and map are handled by store ops in compLit. - switch loc.typ().Underlying().(type) { + switch typeparams.CoreType(loc.typ()).(type) { case *types.Struct, *types.Array: emitDebugRef(fn, e, addr, true) } @@ -831,7 +829,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { // The result is a "bound". obj := sel.obj.(*types.Func) rt := fn.typ(recvType(obj)) - wantAddr := isPointer(rt) + _, wantAddr := deref(rt) escaping := true v := b.receiver(fn, e.X, wantAddr, escaping, sel) @@ -959,7 +957,7 @@ func (b *builder) stmtList(fn *Function, list []ast.Stmt) { // escaping is defined as per builder.addr(). func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *selection) Value { var v Value - if wantAddr && !sel.indirect && !isPointer(fn.typeOf(e)) { + if _, eptr := deref(fn.typeOf(e)); wantAddr && !sel.indirect && !eptr { v = b.addr(fn, e, escaping).address(fn) } else { v = b.expr(fn, e) @@ -968,7 +966,7 @@ func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, se last := len(sel.index) - 1 // The position of implicit selection is the position of the inducing receiver expression. 
v = emitImplicitSelections(fn, v, sel.index[:last], e.Pos()) - if !wantAddr && isPointer(v.Type()) { + if _, vptr := deref(v.Type()); !wantAddr && vptr { v = emitLoad(fn, v) } return v @@ -987,7 +985,7 @@ func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { obj := sel.obj.(*types.Func) recv := recvType(obj) - wantAddr := isPointer(recv) + _, wantAddr := deref(recv) escaping := true v := b.receiver(fn, selector.X, wantAddr, escaping, sel) if types.IsInterface(recv) { @@ -1253,37 +1251,13 @@ func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 { // literal has type *T behaves like &T{}. // In that case, addr must hold a T, not a *T. func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) { - typ := deref(fn.typeOf(e)) // type with name [may be type param] - t := deref(typeparams.CoreType(typ)).Underlying() // core type for comp lit case - // Computing typ and t is subtle as these handle pointer types. - // For example, &T{...} is valid even for maps and slices. - // Also typ should refer to T (not *T) while t should be the core type of T. - // - // To show the ordering to take into account, consider the composite literal - // expressions `&T{f: 1}` and `{f: 1}` within the expression `[]S{{f: 1}}` here: - // type N struct{f int} - // func _[T N, S *N]() { - // _ = &T{f: 1} - // _ = []S{{f: 1}} - // } - // For `&T{f: 1}`, we compute `typ` and `t` as: - // typeOf(&T{f: 1}) == *T - // deref(*T) == T (typ) - // CoreType(T) == N - // deref(N) == N - // N.Underlying() == struct{f int} (t) - // For `{f: 1}` in `[]S{{f: 1}}`, we compute `typ` and `t` as: - // typeOf({f: 1}) == S - // deref(S) == S (typ) - // CoreType(S) == *N - // deref(*N) == N - // N.Underlying() == struct{f int} (t) - switch t := t.(type) { + typ, _ := deref(fn.typeOf(e)) // type with name [may be type param] + switch t := typeparams.CoreType(typ).(type) { case *types.Struct: if !isZero && len(e.Elts) != t.NumFields() { // memclear - sb.store(&address{addr, e.Lbrace, nil}, - zeroValue(fn, deref(addr.Type()))) + zt, _ := deref(addr.Type()) + sb.store(&address{addr, e.Lbrace, nil}, zeroConst(zt)) isZero = true } for i, e := range e.Elts { @@ -1327,8 +1301,8 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero if !isZero && int64(len(e.Elts)) != at.Len() { // memclear - sb.store(&address{array, e.Lbrace, nil}, - zeroValue(fn, deref(array.Type()))) + zt, _ := deref(array.Type()) + sb.store(&address{array, e.Lbrace, nil}, zeroConst(zt)) } } @@ -1381,8 +1355,13 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero // map[*struct{}]bool{{}: true} // An &-operation may be implied: // map[*struct{}]bool{&struct{}{}: true} + wantAddr := false + if _, ok := unparen(e.Key).(*ast.CompositeLit); ok { + _, wantAddr = deref(t.Key()) + } + var key Value - if _, ok := unparen(e.Key).(*ast.CompositeLit); ok && isPointer(t.Key()) { + if wantAddr { // A CompositeLit never evaluates to a pointer, // so if the type of the location is a pointer, // an &-operation is implied. @@ -1409,7 +1388,7 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m) default: - panic("unexpected CompositeLit type: " + t.String()) + panic("unexpected CompositeLit type: " + typ.String()) } } @@ -1875,15 +1854,14 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.P // Determine number of iterations. 
var length Value - if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok { + dt, _ := deref(x.Type()) + if arr, ok := typeparams.CoreType(dt).(*types.Array); ok { // For array or *array, the number of iterations is // known statically thanks to the type. We avoid a // data dependence upon x, permitting later dead-code // elimination if x is pure, static unrolling, etc. // Ranging over a nil *array may have >0 iterations. // We still generate code for x, in case it has effects. - // - // TypeParams do not have constant length. Use underlying instead of core type. length = intConst(arr.Len()) } else { // length = len(x). diff --git a/tools/vendor/golang.org/x/tools/go/ssa/doc.go b/tools/vendor/golang.org/x/tools/go/ssa/doc.go index afda476b36..a687de45e2 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/doc.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/doc.go @@ -66,7 +66,6 @@ // *FieldAddr ✔ ✔ // *FreeVar ✔ // *Function ✔ ✔ (func) -// *GenericConvert ✔ ✔ // *Global ✔ ✔ (var) // *Go ✔ // *If ✔ @@ -80,6 +79,7 @@ // *MakeMap ✔ ✔ // *MakeSlice ✔ ✔ // *MapUpdate ✔ +// *MultiConvert ✔ ✔ // *NamedConst ✔ (const) // *Next ✔ ✔ // *Panic ✔ diff --git a/tools/vendor/golang.org/x/tools/go/ssa/emit.go b/tools/vendor/golang.org/x/tools/go/ssa/emit.go index 1731c79750..fe2f6f0f6d 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/emit.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/emit.go @@ -11,8 +11,6 @@ import ( "go/ast" "go/token" "go/types" - - "golang.org/x/tools/internal/typeparams" ) // emitNew emits to f a new (heap Alloc) instruction allocating an @@ -29,7 +27,7 @@ func emitNew(f *Function, typ types.Type, pos token.Pos) *Alloc { // new temporary, and returns the value so defined. func emitLoad(f *Function, addr Value) *UnOp { v := &UnOp{Op: token.MUL, X: addr} - v.setType(deref(typeparams.CoreType(addr.Type()))) + v.setType(mustDeref(addr.Type())) f.emit(v) return v } @@ -372,9 +370,10 @@ func emitTypeCoercion(f *Function, v Value, typ types.Type) Value { // emitStore emits to f an instruction to store value val at location // addr, applying implicit conversions as required by assignability rules. func emitStore(f *Function, addr, val Value, pos token.Pos) *Store { + typ := mustDeref(addr.Type()) s := &Store{ Addr: addr, - Val: emitConv(f, val, deref(addr.Type())), + Val: emitConv(f, val, typ), pos: pos, } f.emit(s) @@ -477,9 +476,8 @@ func emitTailCall(f *Function, call *Call) { // value of a field. func emitImplicitSelections(f *Function, v Value, indices []int, pos token.Pos) Value { for _, index := range indices { - fld := typeparams.CoreType(deref(v.Type())).(*types.Struct).Field(index) - - if isPointer(v.Type()) { + if st, vptr := deref(v.Type()); vptr { + fld := fieldOf(st, index) instr := &FieldAddr{ X: v, Field: index, @@ -488,10 +486,11 @@ func emitImplicitSelections(f *Function, v Value, indices []int, pos token.Pos) instr.setType(types.NewPointer(fld.Type())) v = f.emit(instr) // Load the field's value iff indirectly embedded. - if isPointer(fld.Type()) { + if _, fldptr := deref(fld.Type()); fldptr { v = emitLoad(f, v) } } else { + fld := fieldOf(v.Type(), index) instr := &Field{ X: v, Field: index, @@ -511,8 +510,8 @@ func emitImplicitSelections(f *Function, v Value, indices []int, pos token.Pos) // field's value. // Ident id is used for position and debug info. 
func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value { - fld := typeparams.CoreType(deref(v.Type())).(*types.Struct).Field(index) - if isPointer(v.Type()) { + if st, vptr := deref(v.Type()); vptr { + fld := fieldOf(st, index) instr := &FieldAddr{ X: v, Field: index, @@ -525,6 +524,7 @@ func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast. v = emitLoad(f, v) } } else { + fld := fieldOf(v.Type(), index) instr := &Field{ X: v, Field: index, @@ -537,15 +537,46 @@ func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast. return v } -// zeroValue emits to f code to produce a zero value of type t, -// and returns it. -func zeroValue(f *Function, t types.Type) Value { - switch t.Underlying().(type) { - case *types.Struct, *types.Array: - return emitLoad(f, f.addLocal(t, token.NoPos)) - default: - return zeroConst(t) - } +// emitSliceToArray emits to f code to convert a slice value to an array value. +// +// Precondition: all types in type set of typ are arrays and convertible to all +// types in the type set of val.Type(). +func emitSliceToArray(f *Function, val Value, typ types.Type) Value { + // Emit the following: + // if val == nil && len(typ) == 0 { + // ptr = &[0]T{} + // } else { + // ptr = SliceToArrayPointer(val) + // } + // v = *ptr + + ptype := types.NewPointer(typ) + p := &SliceToArrayPointer{X: val} + p.setType(ptype) + ptr := f.emit(p) + + nilb := f.newBasicBlock("slicetoarray.nil") + nonnilb := f.newBasicBlock("slicetoarray.nonnil") + done := f.newBasicBlock("slicetoarray.done") + + cond := emitCompare(f, token.EQL, ptr, zeroConst(ptype), token.NoPos) + emitIf(f, cond, nilb, nonnilb) + f.currentBlock = nilb + + zero := f.addLocal(typ, token.NoPos) + emitJump(f, done) + f.currentBlock = nonnilb + + emitJump(f, done) + f.currentBlock = done + + phi := &Phi{Edges: []Value{zero, ptr}, Comment: "slicetoarray"} + phi.pos = val.Pos() + phi.setType(typ) + x := f.emit(phi) + unOp := &UnOp{Op: token.MUL, X: x} + unOp.setType(typ) + return f.emit(unOp) } // createRecoverBlock emits to f a block of code to return after a @@ -577,7 +608,7 @@ func createRecoverBlock(f *Function) { T := R.At(i).Type() // Return zero value of each result type. - results = append(results, zeroValue(f, T)) + results = append(results, zeroConst(T)) } } f.emit(&Return{Results: results}) diff --git a/tools/vendor/golang.org/x/tools/go/ssa/func.go b/tools/vendor/golang.org/x/tools/go/ssa/func.go index 57f5f718f7..38c3e31baf 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/func.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/func.go @@ -382,7 +382,9 @@ func (pkg *Package) SetDebugMode(debug bool) { // debugInfo reports whether debug info is wanted for this function. func (f *Function) debugInfo() bool { - return f.Pkg != nil && f.Pkg.debug + // debug info for instantiations follows the debug info of their origin. + p := f.declaredPackage() + return p != nil && p.debug } // addNamedLocal creates a local variable, adds it to function f and @@ -514,15 +516,15 @@ func (f *Function) relMethod(from *types.Package, recv types.Type) string { } // writeSignature writes to buf the signature sig in declaration syntax. 
-func writeSignature(buf *bytes.Buffer, from *types.Package, name string, sig *types.Signature, params []*Parameter) { +func writeSignature(buf *bytes.Buffer, from *types.Package, name string, sig *types.Signature) { buf.WriteString("func ") if recv := sig.Recv(); recv != nil { buf.WriteString("(") - if n := params[0].Name(); n != "" { - buf.WriteString(n) + if name := recv.Name(); name != "" { + buf.WriteString(name) buf.WriteString(" ") } - types.WriteType(buf, params[0].Type(), types.RelativeTo(from)) + types.WriteType(buf, recv.Type(), types.RelativeTo(from)) buf.WriteString(") ") } buf.WriteString(name) @@ -594,10 +596,10 @@ func WriteFunction(buf *bytes.Buffer, f *Function) { if len(f.Locals) > 0 { buf.WriteString("# Locals:\n") for i, l := range f.Locals { - fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(deref(l.Type()), from)) + fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(mustDeref(l.Type()), from)) } } - writeSignature(buf, from, f.Name(), f.Signature, f.Params) + writeSignature(buf, from, f.Name(), f.Signature) buf.WriteString(":\n") if f.Blocks == nil { diff --git a/tools/vendor/golang.org/x/tools/go/ssa/instantiate.go b/tools/vendor/golang.org/x/tools/go/ssa/instantiate.go index f6b2533f24..38249dea26 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/instantiate.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/instantiate.go @@ -148,7 +148,8 @@ func (insts *instanceSet) lookupOrCreate(targs []types.Type, parameterized *tpWa if prog.mode&InstantiateGenerics != 0 && concrete { synthetic = fmt.Sprintf("instance of %s", fn.Name()) - subst = makeSubster(prog.ctxt, fn.typeparams, targs, false) + scope := typeparams.OriginMethod(obj).Scope() + subst = makeSubster(prog.ctxt, scope, fn.typeparams, targs, false) } else { synthetic = fmt.Sprintf("instantiation wrapper of %s", fn.Name()) } diff --git a/tools/vendor/golang.org/x/tools/go/ssa/lift.go b/tools/vendor/golang.org/x/tools/go/ssa/lift.go index 945536bbbf..dbd8790c6f 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/lift.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/lift.go @@ -41,11 +41,8 @@ package ssa import ( "fmt" "go/token" - "go/types" "math/big" "os" - - "golang.org/x/tools/internal/typeparams" ) // If true, show diagnostic information at each step of lifting. @@ -383,12 +380,6 @@ type newPhiMap map[*BasicBlock][]newPhi // // fresh is a source of fresh ids for phi nodes. func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool { - // TODO(taking): zero constants of aggregated types can now be lifted. - switch deref(alloc.Type()).Underlying().(type) { - case *types.Array, *types.Struct, *typeparams.TypeParam: - return false - } - // Don't lift named return values in functions that defer // calls that may recover from panic. 
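
The instantiate.go hunk now derives the substitution scope from typeparams.OriginMethod(obj).Scope(), i.e. the lexical scope of the generic (origin) method rather than of the instantiation. A rough sketch of what "origin method" and its scope mean, using the go/types equivalent (*types.Func).Origin available since Go 1.19; the package source and names below are made up for illustration:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p

type Box[T any] struct{ v T }

func (b Box[T]) Get() T { return b.v }

var B Box[int]
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := (&types.Config{}).Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}

	// Look up Get on the instantiation Box[int].
	b := pkg.Scope().Lookup("B").(*types.Var)
	obj, _, _ := types.LookupFieldOrMethod(b.Type(), true, pkg, "Get")
	get := obj.(*types.Func)

	fmt.Println(get)                          // e.g. func (p.Box[int]).Get() T
	fmt.Println(get.Origin())                 // e.g. func (p.Box[T]).Get() T
	fmt.Println(get.Origin().Scope() != nil)  // true: the scope the subster is given
}
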
if fn := alloc.Parent(); fn.Recover != nil { @@ -469,7 +460,7 @@ func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool *fresh++ phi.pos = alloc.Pos() - phi.setType(deref(alloc.Type())) + phi.setType(mustDeref(alloc.Type())) phi.block = v if debugLifting { fmt.Fprintf(os.Stderr, "\tplace %s = %s at block %s\n", phi.Name(), phi, v) @@ -514,7 +505,7 @@ func replaceAll(x, y Value) { func renamed(renaming []Value, alloc *Alloc) Value { v := renaming[alloc.index] if v == nil { - v = zeroConst(deref(alloc.Type())) + v = zeroConst(mustDeref(alloc.Type())) renaming[alloc.index] = v } return v diff --git a/tools/vendor/golang.org/x/tools/go/ssa/lvalue.go b/tools/vendor/golang.org/x/tools/go/ssa/lvalue.go index 51122b8e85..186cfcae70 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/lvalue.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/lvalue.go @@ -25,7 +25,7 @@ type lvalue interface { // An address is an lvalue represented by a true pointer. type address struct { - addr Value + addr Value // must have a pointer core type. pos token.Pos // source position expr ast.Expr // source syntax of the value (not address) [debug mode] } @@ -52,7 +52,7 @@ func (a *address) address(fn *Function) Value { } func (a *address) typ() types.Type { - return deref(a.addr.Type()) + return mustDeref(a.addr.Type()) } // An element is an lvalue represented by m[k], the location of an diff --git a/tools/vendor/golang.org/x/tools/go/ssa/methods.go b/tools/vendor/golang.org/x/tools/go/ssa/methods.go index 4185618cdd..2944983713 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/methods.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/methods.go @@ -101,8 +101,11 @@ func (prog *Program) addMethod(mset *methodSet, sel *types.Selection, cr *creato sel := toSelection(sel) obj := sel.obj.(*types.Func) + _, ptrObj := deptr(recvType(obj)) + _, ptrRecv := deptr(sel.recv) + needsPromotion := len(sel.index) > 1 - needsIndirection := !isPointer(recvType(obj)) && isPointer(sel.recv) + needsIndirection := !ptrObj && ptrRecv if needsPromotion || needsIndirection { fn = makeWrapper(prog, sel, cr) } else { diff --git a/tools/vendor/golang.org/x/tools/go/ssa/parameterized.go b/tools/vendor/golang.org/x/tools/go/ssa/parameterized.go index b11413c818..b90ee0e86b 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/parameterized.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/parameterized.go @@ -17,7 +17,7 @@ type tpWalker struct { seen map[types.Type]bool } -// isParameterized returns true when typ contains any type parameters. +// isParameterized returns true when typ reaches any type parameter. func (w *tpWalker) isParameterized(typ types.Type) (res bool) { // NOTE: Adapted from go/types/infer.go. Try to keep in sync. @@ -63,7 +63,7 @@ func (w *tpWalker) isParameterized(typ types.Type) (res bool) { // of a generic function type (or an interface method) that is // part of the type we're testing. We don't care about these type // parameters. - // Similarly, the receiver of a method may declare (rather then + // Similarly, the receiver of a method may declare (rather than // use) type parameters, we don't care about those either. // Thus, we only need to look at the input and result parameters. 
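
The lift.go comment above keeps named return values unlifted in functions with a Recover block: a deferred function can still assign to a named result after the body has panicked, so the result's storage must remain addressable at that point. A short illustration of that behaviour at the language level; the function and names are made up:

package main

import "fmt"

// A deferred recover writes to the named result err after the panic has
// unwound the body, which is why such results must stay addressable.
func div(a, b int) (q int, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("recovered: %v", r)
		}
	}()
	q = a / b
	return q, nil
}

func main() {
	fmt.Println(div(6, 3)) // 2 <nil>
	fmt.Println(div(1, 0)) // 0 recovered: runtime error: integer divide by zero
}
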
return w.isParameterized(t.Params()) || w.isParameterized(t.Results()) @@ -101,6 +101,7 @@ func (w *tpWalker) isParameterized(typ types.Type) (res bool) { return true } } + return w.isParameterized(t.Underlying()) // recurse for types local to parameterized functions case *typeparams.TypeParam: return true diff --git a/tools/vendor/golang.org/x/tools/go/ssa/print.go b/tools/vendor/golang.org/x/tools/go/ssa/print.go index 8b783196e4..7f34a7b58b 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/print.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/print.go @@ -95,7 +95,7 @@ func (v *Alloc) String() string { op = "new" } from := v.Parent().relPkg() - return fmt.Sprintf("%s %s (%s)", op, relType(deref(v.Type()), from), v.Comment) + return fmt.Sprintf("%s %s (%s)", op, relType(mustDeref(v.Type()), from), v.Comment) } func (v *Phi) String() string { @@ -259,21 +259,19 @@ func (v *MakeChan) String() string { } func (v *FieldAddr) String() string { - st := typeparams.CoreType(deref(v.X.Type())).(*types.Struct) // Be robust against a bad index. name := "?" - if 0 <= v.Field && v.Field < st.NumFields() { - name = st.Field(v.Field).Name() + if fld := fieldOf(mustDeref(v.X.Type()), v.Field); fld != nil { + name = fld.Name() } return fmt.Sprintf("&%s.%s [#%d]", relName(v.X, v), name, v.Field) } func (v *Field) String() string { - st := typeparams.CoreType(v.X.Type()).(*types.Struct) // Be robust against a bad index. name := "?" - if 0 <= v.Field && v.Field < st.NumFields() { - name = st.Field(v.Field).Name() + if fld := fieldOf(v.X.Type(), v.Field); fld != nil { + name = fld.Name() } return fmt.Sprintf("%s.%s [#%d]", relName(v.X, v), name, v.Field) } @@ -452,7 +450,7 @@ func WritePackage(buf *bytes.Buffer, p *Package) { case *Global: fmt.Fprintf(buf, " var %-*s %s\n", - maxname, name, relType(mem.Type().(*types.Pointer).Elem(), from)) + maxname, name, relType(mustDeref(mem.Type()), from)) } } diff --git a/tools/vendor/golang.org/x/tools/go/ssa/sanity.go b/tools/vendor/golang.org/x/tools/go/ssa/sanity.go index 88ad374ded..886be05325 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/sanity.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/sanity.go @@ -8,6 +8,7 @@ package ssa // Currently it checks CFG invariants but little at the instruction level. import ( + "bytes" "fmt" "go/types" "io" @@ -412,8 +413,10 @@ func (s *sanity) checkFunction(fn *Function) bool { s.errorf("nil Prog") } + var buf bytes.Buffer _ = fn.String() // must not crash _ = fn.RelString(fn.relPkg()) // must not crash + WriteFunction(&buf, fn) // must not crash // All functions have a package, except delegates (which are // shared across packages, or duplicated as weak symbols in a diff --git a/tools/vendor/golang.org/x/tools/go/ssa/source.go b/tools/vendor/golang.org/x/tools/go/ssa/source.go index b9a08363ec..9c900e3aab 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/source.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/source.go @@ -121,7 +121,9 @@ func findNamedFunc(pkg *Package, pos token.Pos) *Function { // Don't call Program.Method: avoid creating wrappers. obj := mset.At(i).Obj().(*types.Func) if obj.Pos() == pos { - return pkg.objects[obj].(*Function) + // obj from MethodSet may not be the origin type. 
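
The sanity.go hunk above additionally requires WriteFunction not to crash, exercising the more robust print.go String methods (fieldOf, mustDeref). A rough sketch of driving the same printing path through the public API; loading the standard "fmt" package and picking Println are arbitrary choices for the example:

package main

import (
	"bytes"
	"fmt"

	"golang.org/x/tools/go/packages"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

func main() {
	cfg := &packages.Config{Mode: packages.LoadAllSyntax}
	pkgs, err := packages.Load(cfg, "fmt")
	if err != nil {
		panic(err)
	}
	if packages.PrintErrors(pkgs) > 0 {
		return
	}

	prog, ssapkgs := ssautil.AllPackages(pkgs, ssa.InstantiateGenerics)
	prog.Build()

	// WriteFunction is the same entry point the updated sanity check calls
	// for every function it visits.
	var buf bytes.Buffer
	ssa.WriteFunction(&buf, ssapkgs[0].Func("Println"))
	fmt.Print(buf.String())
}
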
+ m := typeparams.OriginMethod(obj) + return pkg.objects[m].(*Function) } } } diff --git a/tools/vendor/golang.org/x/tools/go/ssa/ssa.go b/tools/vendor/golang.org/x/tools/go/ssa/ssa.go index 5904b817b3..bd42f2e0a9 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/ssa.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/ssa.go @@ -36,7 +36,7 @@ type Program struct { bounds map[boundsKey]*Function // bounds for curried x.Method closures thunks map[selectionKey]*Function // thunks for T.Method expressions instances map[*Function]*instanceSet // instances of generic functions - parameterized tpWalker // determines whether a type is parameterized. + parameterized tpWalker // determines whether a type reaches a type parameter. } // A Package is a single analyzed Go package containing Members for @@ -258,8 +258,8 @@ type Node interface { // or method. // // If Blocks is nil, this indicates an external function for which no -// Go source code is available. In this case, FreeVars and Locals -// are nil too. Clients performing whole-program analysis must +// Go source code is available. In this case, FreeVars, Locals, and +// Params are nil too. Clients performing whole-program analysis must // handle external functions specially. // // Blocks contains the function's control-flow graph (CFG). @@ -865,7 +865,7 @@ type Slice struct { type FieldAddr struct { register X Value // *struct - Field int // field is typeparams.CoreType(X.Type().Underlying().(*types.Pointer).Elem()).(*types.Struct).Field(Field) + Field int // index into CoreType(CoreType(X.Type()).(*types.Pointer).Elem()).(*types.Struct).Fields } // The Field instruction yields the Field of struct X. @@ -884,7 +884,7 @@ type FieldAddr struct { type Field struct { register X Value // struct - Field int // index into typeparams.CoreType(X.Type()).(*types.Struct).Fields + Field int // index into CoreType(X.Type()).(*types.Struct).Fields } // The IndexAddr instruction yields the address of the element at @@ -1535,12 +1535,25 @@ func (fn *Function) TypeParams() *typeparams.TypeParamList { // from fn.Origin(). func (fn *Function) TypeArgs() []types.Type { return fn.typeargs } -// Origin is the function fn is an instantiation of. Returns nil if fn is not -// an instantiation. +// Origin returns the generic function from which fn was instantiated, +// or nil if fn is not an instantiation. func (fn *Function) Origin() *Function { if fn.parent != nil && len(fn.typeargs) > 0 { - // Nested functions are BUILT at a different time than there instances. - return fn.parent.Origin().AnonFuncs[fn.anonIdx] + // Nested functions are BUILT at a different time than their instances. + // Build declared package if not yet BUILT. This is not an expected use + // case, but is simple and robust. + fn.declaredPackage().Build() + } + return origin(fn) +} + +// origin is the function that fn is an instantiation of. Returns nil if fn is +// not an instantiation. +// +// Precondition: fn and the origin function are done building. 
+func origin(fn *Function) *Function { + if fn.parent != nil && len(fn.typeargs) > 0 { + return origin(fn.parent).AnonFuncs[fn.anonIdx] } return fn.topLevelOrigin } diff --git a/tools/vendor/golang.org/x/tools/go/ssa/subst.go b/tools/vendor/golang.org/x/tools/go/ssa/subst.go index d7f8ae4a70..89c41a8d4c 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/subst.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/subst.go @@ -5,7 +5,6 @@ package ssa import ( - "fmt" "go/types" "golang.org/x/tools/internal/typeparams" @@ -19,41 +18,42 @@ import ( // // Not concurrency-safe. type subster struct { - // TODO(zpavlinovic): replacements can contain type params - // when generating instances inside of a generic function body. replacements map[*typeparams.TypeParam]types.Type // values should contain no type params cache map[types.Type]types.Type // cache of subst results - ctxt *typeparams.Context - debug bool // perform extra debugging checks + ctxt *typeparams.Context // cache for instantiation + scope *types.Scope // *types.Named declared within this scope can be substituted (optional) + debug bool // perform extra debugging checks // TODO(taking): consider adding Pos + // TODO(zpavlinovic): replacements can contain type params + // when generating instances inside of a generic function body. } // Returns a subster that replaces tparams[i] with targs[i]. Uses ctxt as a cache. // targs should not contain any types in tparams. -func makeSubster(ctxt *typeparams.Context, tparams *typeparams.TypeParamList, targs []types.Type, debug bool) *subster { +// scope is the (optional) lexical block of the generic function for which we are substituting. +func makeSubster(ctxt *typeparams.Context, scope *types.Scope, tparams *typeparams.TypeParamList, targs []types.Type, debug bool) *subster { assert(tparams.Len() == len(targs), "makeSubster argument count must match") subst := &subster{ replacements: make(map[*typeparams.TypeParam]types.Type, tparams.Len()), cache: make(map[types.Type]types.Type), ctxt: ctxt, + scope: scope, debug: debug, } for i := 0; i < tparams.Len(); i++ { subst.replacements[tparams.At(i)] = targs[i] } if subst.debug { - if err := subst.wellFormed(); err != nil { - panic(err) - } + subst.wellFormed() } return subst } -// wellFormed returns an error if subst was not properly initialized. -func (subst *subster) wellFormed() error { - if subst == nil || len(subst.replacements) == 0 { - return nil +// wellFormed asserts that subst was properly initialized. +func (subst *subster) wellFormed() { + if subst == nil { + return } // Check that all of the type params do not appear in the arguments. s := make(map[types.Type]bool, len(subst.replacements)) @@ -62,10 +62,9 @@ func (subst *subster) wellFormed() error { } for _, r := range subst.replacements { if reaches(r, s) { - return fmt.Errorf("\n‰r %s s %v replacements %v\n", r, s, subst.replacements) + panic(subst) } } - return nil } // typ returns the type of t with the type parameter tparams[i] substituted @@ -250,7 +249,7 @@ func (subst *subster) interface_(iface *types.Interface) *types.Interface { } // methods for the interface. Initially nil if there is no known change needed. - // Signatures for the method where recv is nil. NewInterfaceType fills in the recievers. + // Signatures for the method where recv is nil. NewInterfaceType fills in the receivers. 
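
The subster replaces type parameters with type arguments and memoizes results through a shared context. The same substitution is available in go/types via Instantiate and types.Context, which is roughly what the ctxt field wraps; the sketch below shows the effect, with a made-up Pair type for illustration:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p

type Pair[K comparable, V any] struct {
	Key K
	Val V
}
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := (&types.Config{}).Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}

	// types.Instantiate performs parameter-for-argument substitution,
	// caching results in a shared types.Context.
	pair := pkg.Scope().Lookup("Pair").Type()
	inst, err := types.Instantiate(types.NewContext(), pair,
		[]types.Type{types.Typ[types.String], types.Typ[types.Int]}, true)
	if err != nil {
		panic(err)
	}
	fmt.Println(inst)              // e.g. p.Pair[string, int]
	fmt.Println(inst.Underlying()) // e.g. struct{Key string; Val int}
}
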
var methods []*types.Func initMethods := func(n int) { // copy first n explicit methods methods = make([]*types.Func, iface.NumExplicitMethods()) @@ -263,7 +262,7 @@ func (subst *subster) interface_(iface *types.Interface) *types.Interface { for i := 0; i < iface.NumExplicitMethods(); i++ { f := iface.ExplicitMethod(i) // On interfaces, we need to cycle break on anonymous interface types - // being in a cycle with their signatures being in cycles with their recievers + // being in a cycle with their signatures being in cycles with their receivers // that do not go through a Named. norecv := changeRecv(f.Type().(*types.Signature), nil) sig := subst.typ(norecv) @@ -306,29 +305,56 @@ func (subst *subster) interface_(iface *types.Interface) *types.Interface { } func (subst *subster) named(t *types.Named) types.Type { - // A name type may be: - // (1) ordinary (no type parameters, no type arguments), - // (2) generic (type parameters but no type arguments), or - // (3) instantiated (type parameters and type arguments). + // A named type may be: + // (1) ordinary named type (non-local scope, no type parameters, no type arguments), + // (2) locally scoped type, + // (3) generic (type parameters but no type arguments), or + // (4) instantiated (type parameters and type arguments). tparams := typeparams.ForNamed(t) if tparams.Len() == 0 { - // case (1) ordinary + if subst.scope != nil && !subst.scope.Contains(t.Obj().Pos()) { + // Outside the current function scope? + return t // case (1) ordinary + } - // Note: If Go allows for local type declarations in generic - // functions we may need to descend into underlying as well. - return t + // case (2) locally scoped type. + // Create a new named type to represent this instantiation. + // We assume that local types of distinct instantiations of a + // generic function are distinct, even if they don't refer to + // type parameters, but the spec is unclear; see golang/go#58573. + // + // Subtle: We short circuit substitution and use a newly created type in + // subst, i.e. cache[t]=n, to pre-emptively replace t with n in recursive + // types during traversal. This both breaks infinite cycles and allows for + // constructing types with the replacement applied in subst.typ(under). + // + // Example: + // func foo[T any]() { + // type linkedlist struct { + // next *linkedlist + // val T + // } + // } + // + // When the field `next *linkedlist` is visited during subst.typ(under), + // we want the substituted type for the field `next` to be `*n`. + n := types.NewNamed(t.Obj(), nil, nil) + subst.cache[t] = n + subst.cache[n] = n + n.SetUnderlying(subst.typ(t.Underlying())) + return n } targs := typeparams.NamedTypeArgs(t) // insts are arguments to instantiate using. insts := make([]types.Type, tparams.Len()) - // case (2) generic ==> targs.Len() == 0 + // case (3) generic ==> targs.Len() == 0 // Instantiating a generic with no type arguments should be unreachable. // Please report a bug if you encounter this. assert(targs.Len() != 0, "substition into a generic Named type is currently unsupported") - // case (3) instantiated. + // case (4) instantiated. // Substitute into the type arguments and instantiate the replacements/ // Example: // type N[A any] func() A @@ -378,19 +404,26 @@ func (subst *subster) signature(t *types.Signature) types.Type { } // reaches returns true if a type t reaches any type t' s.t. c[t'] == true. -// Updates c to cache results. +// It updates c to cache results. 
+// +// reaches is currently only part of the wellFormed debug logic, and +// in practice c is initially only type parameters. It is not currently +// relied on in production. func reaches(t types.Type, c map[types.Type]bool) (res bool) { if c, ok := c[t]; ok { return c } - c[t] = false // prevent cycles + + // c is populated with temporary false entries as types are visited. + // This avoids repeat visits and break cycles. + c[t] = false defer func() { c[t] = res }() switch t := t.(type) { case *typeparams.TypeParam, *types.Basic: - // no-op => c == false + return false case *types.Array: return reaches(t.Elem(), c) case *types.Slice: diff --git a/tools/vendor/golang.org/x/tools/go/ssa/util.go b/tools/vendor/golang.org/x/tools/go/ssa/util.go index db53aebee4..7735dd8e98 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/util.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/util.go @@ -43,12 +43,6 @@ func isBlankIdent(e ast.Expr) bool { //// Type utilities. Some of these belong in go/types. -// isPointer returns true for types whose underlying type is a pointer. -func isPointer(typ types.Type) bool { - _, ok := typ.Underlying().(*types.Pointer) - return ok -} - // isNonTypeParamInterface reports whether t is an interface type but not a type parameter. func isNonTypeParamInterface(t types.Type) bool { return !typeparams.IsTypeParam(t) && types.IsInterface(t) @@ -100,12 +94,33 @@ func isBasicConvTypes(tset termList) bool { return all && basics >= 1 && tset.Len()-basics <= 1 } -// deref returns a pointer's element type; otherwise it returns typ. -func deref(typ types.Type) types.Type { +// deptr returns a pointer's element type and true; otherwise it returns (typ, false). +// This function is oblivious to core types and is not suitable for generics. +// +// TODO: Deprecate this function once all usages have been audited. +func deptr(typ types.Type) (types.Type, bool) { if p, ok := typ.Underlying().(*types.Pointer); ok { - return p.Elem() + return p.Elem(), true + } + return typ, false +} + +// deref returns the element type of a type with a pointer core type and true; +// otherwise it returns (typ, false). +func deref(typ types.Type) (types.Type, bool) { + if p, ok := typeparams.CoreType(typ).(*types.Pointer); ok { + return p.Elem(), true + } + return typ, false +} + +// mustDeref returns the element type of a type with a pointer core type. +// Panics on failure. +func mustDeref(typ types.Type) types.Type { + if et, ok := deref(typ); ok { + return et } - return typ + panic("cannot dereference type " + typ.String()) } // recvType returns the receiver type of method obj. @@ -113,6 +128,17 @@ func recvType(obj *types.Func) types.Type { return obj.Type().(*types.Signature).Recv().Type() } +// fieldOf returns the index'th field of the (core type of) a struct type; +// otherwise returns nil. +func fieldOf(typ types.Type, index int) *types.Var { + if st, ok := typeparams.CoreType(typ).(*types.Struct); ok { + if 0 <= index && index < st.NumFields() { + return st.Field(index) + } + } + return nil +} + // isUntyped returns true for types that are untyped. func isUntyped(typ types.Type) bool { b, ok := typ.(*types.Basic) @@ -172,16 +198,14 @@ func nonbasicTypes(ts []types.Type) []types.Type { return filtered } -// receiverTypeArgs returns the type arguments to a function's reciever. -// Returns an empty list if obj does not have a reciever or its reciever does not have type arguments. +// receiverTypeArgs returns the type arguments to a function's receiver. 
+// Returns an empty list if obj does not have a receiver or its receiver does not have type arguments. func receiverTypeArgs(obj *types.Func) []types.Type { rtype := recvType(obj) if rtype == nil { return nil } - if isPointer(rtype) { - rtype = rtype.(*types.Pointer).Elem() - } + rtype, _ = deptr(rtype) named, ok := rtype.(*types.Named) if !ok { return nil diff --git a/tools/vendor/golang.org/x/tools/go/ssa/wrappers.go b/tools/vendor/golang.org/x/tools/go/ssa/wrappers.go index 228daf6158..123ea6858a 100644 --- a/tools/vendor/golang.org/x/tools/go/ssa/wrappers.go +++ b/tools/vendor/golang.org/x/tools/go/ssa/wrappers.go @@ -82,12 +82,14 @@ func makeWrapper(prog *Program, sel *selection, cr *creator) *Function { indices := sel.index var v Value = fn.Locals[0] // spilled receiver - if isPointer(sel.recv) { + srdt, ptrRecv := deptr(sel.recv) + if ptrRecv { v = emitLoad(fn, v) // For simple indirection wrappers, perform an informative nil-check: // "value method (T).f called using nil *T pointer" - if len(indices) == 1 && !isPointer(recvType(obj)) { + _, ptrObj := deptr(recvType(obj)) + if len(indices) == 1 && !ptrObj { var c Call c.Call.Value = &Builtin{ name: "ssa:wrapnilchk", @@ -97,7 +99,7 @@ func makeWrapper(prog *Program, sel *selection, cr *creator) *Function { } c.Call.Args = []Value{ v, - stringConst(deref(sel.recv).String()), + stringConst(srdt.String()), stringConst(sel.obj.Name()), } c.setType(v.Type()) @@ -121,7 +123,7 @@ func makeWrapper(prog *Program, sel *selection, cr *creator) *Function { var c Call if r := recvType(obj); !types.IsInterface(r) { // concrete method - if !isPointer(r) { + if _, ptrObj := deptr(r); !ptrObj { v = emitLoad(fn, v) } callee := prog.originFunc(obj) diff --git a/tools/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go b/tools/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go index c160acb686..549aa9e54c 100644 --- a/tools/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go +++ b/tools/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go @@ -111,7 +111,21 @@ const ( opObj = 'O' // .Obj() (Named, TypeParam) ) -// The For function returns the path to an object relative to its package, +// For is equivalent to new(Encoder).For(obj). +// +// It may be more efficient to reuse a single Encoder across several calls. +func For(obj types.Object) (Path, error) { + return new(Encoder).For(obj) +} + +// An Encoder amortizes the cost of encoding the paths of multiple objects. +// The zero value of an Encoder is ready to use. +type Encoder struct { + scopeMemo map[*types.Scope][]types.Object // memoization of scopeObjects + namedMethodsMemo map[*types.Named][]*types.Func // memoization of namedMethods() +} + +// For returns the path to an object relative to its package, // or an error if the object is not accessible from the package's Scope. // // The For function guarantees to return a path only for the following objects: @@ -143,7 +157,7 @@ const ( // .Type().Field(0) (field Var X) // // where p is the package (*types.Package) to which X belongs. -func For(obj types.Object) (Path, error) { +func (enc *Encoder) For(obj types.Object) (Path, error) { pkg := obj.Pkg() // This table lists the cases of interest. @@ -225,7 +239,7 @@ func For(obj types.Object) (Path, error) { return "", fmt.Errorf("func is not a method: %v", obj) } - if path, ok := concreteMethod(obj); ok { + if path, ok := enc.concreteMethod(obj); ok { // Fast path for concrete methods that avoids looping over scope. 
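
The makeWrapper hunk above emits an "ssa:wrapnilchk" call so that invoking a value method through a nil pointer fails with an informative message instead of a bare nil dereference. At the language level the failure mode it improves on looks like this; type T and method N are made up for the example:

package main

import "fmt"

type T struct{ n int }

// Value receiver: calling it through a nil *T must first dereference the
// pointer, which is the case the wrapper's nil check reports more helpfully.
func (t T) N() int { return t.n }

func main() {
	defer func() { fmt.Println("recovered:", recover()) }()
	var p *T
	fmt.Println(p.N()) // panics before Println runs: nil pointer dereference
}
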
return path, nil } @@ -241,15 +255,14 @@ func For(obj types.Object) (Path, error) { // the best paths because non-types may // refer to types, but not the reverse. empty := make([]byte, 0, 48) // initial space - names := scope.Names() - for _, name := range names { - o := scope.Lookup(name) + objs := enc.scopeObjects(scope) + for _, o := range objs { tname, ok := o.(*types.TypeName) if !ok { continue // handle non-types in second pass } - path := append(empty, name...) + path := append(empty, o.Name()...) path = append(path, opType) T := o.Type() @@ -275,9 +288,8 @@ func For(obj types.Object) (Path, error) { // Then inspect everything else: // non-types, and declared methods of defined types. - for _, name := range names { - o := scope.Lookup(name) - path := append(empty, name...) + for _, o := range objs { + path := append(empty, o.Name()...) if _, ok := o.(*types.TypeName); !ok { if o.Exported() { // exported non-type (const, var, func) @@ -294,9 +306,7 @@ func For(obj types.Object) (Path, error) { // Note that method index here is always with respect // to canonical ordering of methods, regardless of how // they appear in the underlying type. - canonical := canonicalize(T) - for i := 0; i < len(canonical); i++ { - m := canonical[i] + for i, m := range enc.namedMethods(T) { path2 := appendOpArg(path, opMethod, i) if m == obj { return Path(path2), nil // found declared method @@ -324,7 +334,7 @@ func appendOpArg(path []byte, op byte, arg int) []byte { // This function is just an optimization that avoids the general scope walking // approach. You are expected to fall back to the general approach if this // function fails. -func concreteMethod(meth *types.Func) (Path, bool) { +func (enc *Encoder) concreteMethod(meth *types.Func) (Path, bool) { // Concrete methods can only be declared on package-scoped named types. For // that reason we can skip the expensive walk over the package scope: the // path will always be package -> named type -> method. We can trivially get @@ -397,15 +407,20 @@ func concreteMethod(meth *types.Func) (Path, bool) { path := make([]byte, 0, len(name)+8) path = append(path, name...) path = append(path, opType) - canonical := canonicalize(named) - for i, m := range canonical { + for i, m := range enc.namedMethods(named) { if m == meth { path = appendOpArg(path, opMethod, i) return Path(path), true } } - panic(fmt.Sprintf("couldn't find method %s on type %s", meth, named)) + // Due to golang/go#59944, go/types fails to associate the receiver with + // certain methods on cgo types. + // + // TODO(rfindley): replace this panic once golang/go#59944 is fixed in all Go + // versions gopls supports. + return "", false + // panic(fmt.Sprintf("couldn't find method %s on type %s; methods: %#v", meth, named, enc.namedMethods(named))) } // find finds obj within type T, returning the path to it, or nil if not found. 
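
The new objectpath.Encoder only memoizes scope and method lookups; the encoded paths and the decoding side are unchanged, so a For/Object round trip still yields the original object. A small sketch, with the standard "fmt" package and the Println object chosen arbitrarily:

package main

import (
	"fmt"
	"go/importer"

	"golang.org/x/tools/go/types/objectpath"
)

func main() {
	pkg, err := importer.Default().Import("fmt")
	if err != nil {
		panic(err)
	}
	obj := pkg.Scope().Lookup("Println")

	// One Encoder amortizes the per-package work across many For calls.
	var enc objectpath.Encoder
	path, err := enc.For(obj)
	if err != nil {
		panic(err)
	}
	fmt.Println(path) // e.g. "Println"

	back, err := objectpath.Object(pkg, path)
	if err != nil {
		panic(err)
	}
	fmt.Println(back == obj) // true
}
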
@@ -663,15 +678,23 @@ func Object(pkg *types.Package, p Path) (types.Object, error) { t = nil case opMethod: - hasMethods, ok := t.(hasMethods) // Interface or Named - if !ok { + switch t := t.(type) { + case *types.Interface: + if index >= t.NumMethods() { + return nil, fmt.Errorf("method index %d out of range [0-%d)", index, t.NumMethods()) + } + obj = t.Method(index) // Id-ordered + + case *types.Named: + methods := namedMethods(t) // (unmemoized) + if index >= len(methods) { + return nil, fmt.Errorf("method index %d out of range [0-%d)", index, len(methods)) + } + obj = methods[index] // Id-ordered + + default: return nil, fmt.Errorf("cannot apply %q to %s (got %T, want interface or named)", code, t, t) } - canonical := canonicalize(hasMethods) - if n := len(canonical); index >= n { - return nil, fmt.Errorf("method index %d out of range [0-%d)", index, n) - } - obj = canonical[index] t = nil case opObj: @@ -694,27 +717,49 @@ func Object(pkg *types.Package, p Path) (types.Object, error) { return obj, nil // success } -// hasMethods is an abstraction of *types.{Interface,Named}. This is pulled up -// because it is used by methodOrdering, which is in turn used by both encoding -// and decoding. -type hasMethods interface { - Method(int) *types.Func - NumMethods() int +// namedMethods returns the methods of a Named type in ascending Id order. +func namedMethods(named *types.Named) []*types.Func { + methods := make([]*types.Func, named.NumMethods()) + for i := range methods { + methods[i] = named.Method(i) + } + sort.Slice(methods, func(i, j int) bool { + return methods[i].Id() < methods[j].Id() + }) + return methods } -// canonicalize returns a canonical order for the methods in a hasMethod. -func canonicalize(hm hasMethods) []*types.Func { - count := hm.NumMethods() - if count <= 0 { - return nil +// namedMethods is a memoization of the namedMethods function. Callers must not modify the result. +func (enc *Encoder) namedMethods(named *types.Named) []*types.Func { + m := enc.namedMethodsMemo + if m == nil { + m = make(map[*types.Named][]*types.Func) + enc.namedMethodsMemo = m } - canon := make([]*types.Func, count) - for i := 0; i < count; i++ { - canon[i] = hm.Method(i) + methods, ok := m[named] + if !ok { + methods = namedMethods(named) // allocates and sorts + m[named] = methods } - less := func(i, j int) bool { - return canon[i].Id() < canon[j].Id() + return methods +} + +// scopeObjects is a memoization of scope objects. +// Callers must not modify the result. +func (enc *Encoder) scopeObjects(scope *types.Scope) []types.Object { + m := enc.scopeMemo + if m == nil { + m = make(map[*types.Scope][]types.Object) + enc.scopeMemo = m + } + objs, ok := m[scope] + if !ok { + names := scope.Names() // allocates and sorts + objs = make([]types.Object, len(names)) + for i, name := range names { + objs[i] = scope.Lookup(name) + } + m[scope] = objs } - sort.Slice(canon, less) - return canon + return objs } diff --git a/tools/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go b/tools/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go deleted file mode 100644 index d15f0eb7ab..0000000000 --- a/tools/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go +++ /dev/null @@ -1,390 +0,0 @@ -// Copyright 2020 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// Package analysisinternal provides gopls' internal analyses with a -// number of helper functions that operate on typed syntax trees. -package analysisinternal - -import ( - "bytes" - "fmt" - "go/ast" - "go/token" - "go/types" - "strconv" -) - -// DiagnoseFuzzTests controls whether the 'tests' analyzer diagnoses fuzz tests -// in Go 1.18+. -var DiagnoseFuzzTests bool = false - -func TypeErrorEndPos(fset *token.FileSet, src []byte, start token.Pos) token.Pos { - // Get the end position for the type error. - offset, end := fset.PositionFor(start, false).Offset, start - if offset >= len(src) { - return end - } - if width := bytes.IndexAny(src[offset:], " \n,():;[]+-*"); width > 0 { - end = start + token.Pos(width) - } - return end -} - -func ZeroValue(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { - under := typ - if n, ok := typ.(*types.Named); ok { - under = n.Underlying() - } - switch u := under.(type) { - case *types.Basic: - switch { - case u.Info()&types.IsNumeric != 0: - return &ast.BasicLit{Kind: token.INT, Value: "0"} - case u.Info()&types.IsBoolean != 0: - return &ast.Ident{Name: "false"} - case u.Info()&types.IsString != 0: - return &ast.BasicLit{Kind: token.STRING, Value: `""`} - default: - panic("unknown basic type") - } - case *types.Chan, *types.Interface, *types.Map, *types.Pointer, *types.Signature, *types.Slice, *types.Array: - return ast.NewIdent("nil") - case *types.Struct: - texpr := TypeExpr(f, pkg, typ) // typ because we want the name here. - if texpr == nil { - return nil - } - return &ast.CompositeLit{ - Type: texpr, - } - } - return nil -} - -// IsZeroValue checks whether the given expression is a 'zero value' (as determined by output of -// analysisinternal.ZeroValue) -func IsZeroValue(expr ast.Expr) bool { - switch e := expr.(type) { - case *ast.BasicLit: - return e.Value == "0" || e.Value == `""` - case *ast.Ident: - return e.Name == "nil" || e.Name == "false" - default: - return false - } -} - -// TypeExpr returns syntax for the specified type. References to -// named types from packages other than pkg are qualified by an appropriate -// package name, as defined by the import environment of file. 
-func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { - switch t := typ.(type) { - case *types.Basic: - switch t.Kind() { - case types.UnsafePointer: - return &ast.SelectorExpr{X: ast.NewIdent("unsafe"), Sel: ast.NewIdent("Pointer")} - default: - return ast.NewIdent(t.Name()) - } - case *types.Pointer: - x := TypeExpr(f, pkg, t.Elem()) - if x == nil { - return nil - } - return &ast.UnaryExpr{ - Op: token.MUL, - X: x, - } - case *types.Array: - elt := TypeExpr(f, pkg, t.Elem()) - if elt == nil { - return nil - } - return &ast.ArrayType{ - Len: &ast.BasicLit{ - Kind: token.INT, - Value: fmt.Sprintf("%d", t.Len()), - }, - Elt: elt, - } - case *types.Slice: - elt := TypeExpr(f, pkg, t.Elem()) - if elt == nil { - return nil - } - return &ast.ArrayType{ - Elt: elt, - } - case *types.Map: - key := TypeExpr(f, pkg, t.Key()) - value := TypeExpr(f, pkg, t.Elem()) - if key == nil || value == nil { - return nil - } - return &ast.MapType{ - Key: key, - Value: value, - } - case *types.Chan: - dir := ast.ChanDir(t.Dir()) - if t.Dir() == types.SendRecv { - dir = ast.SEND | ast.RECV - } - value := TypeExpr(f, pkg, t.Elem()) - if value == nil { - return nil - } - return &ast.ChanType{ - Dir: dir, - Value: value, - } - case *types.Signature: - var params []*ast.Field - for i := 0; i < t.Params().Len(); i++ { - p := TypeExpr(f, pkg, t.Params().At(i).Type()) - if p == nil { - return nil - } - params = append(params, &ast.Field{ - Type: p, - Names: []*ast.Ident{ - { - Name: t.Params().At(i).Name(), - }, - }, - }) - } - var returns []*ast.Field - for i := 0; i < t.Results().Len(); i++ { - r := TypeExpr(f, pkg, t.Results().At(i).Type()) - if r == nil { - return nil - } - returns = append(returns, &ast.Field{ - Type: r, - }) - } - return &ast.FuncType{ - Params: &ast.FieldList{ - List: params, - }, - Results: &ast.FieldList{ - List: returns, - }, - } - case *types.Named: - if t.Obj().Pkg() == nil { - return ast.NewIdent(t.Obj().Name()) - } - if t.Obj().Pkg() == pkg { - return ast.NewIdent(t.Obj().Name()) - } - pkgName := t.Obj().Pkg().Name() - - // If the file already imports the package under another name, use that. - for _, cand := range f.Imports { - if path, _ := strconv.Unquote(cand.Path.Value); path == t.Obj().Pkg().Path() { - if cand.Name != nil && cand.Name.Name != "" { - pkgName = cand.Name.Name - } - } - } - if pkgName == "." { - return ast.NewIdent(t.Obj().Name()) - } - return &ast.SelectorExpr{ - X: ast.NewIdent(pkgName), - Sel: ast.NewIdent(t.Obj().Name()), - } - case *types.Struct: - return ast.NewIdent(t.String()) - case *types.Interface: - return ast.NewIdent(t.String()) - default: - return nil - } -} - -// StmtToInsertVarBefore returns the ast.Stmt before which we can safely insert a new variable. -// Some examples: -// -// Basic Example: -// z := 1 -// y := z + x -// If x is undeclared, then this function would return `y := z + x`, so that we -// can insert `x := ` on the line before `y := z + x`. -// -// If stmt example: -// if z == 1 { -// } else if z == y {} -// If y is undeclared, then this function would return `if z == 1 {`, because we cannot -// insert a statement between an if and an else if statement. As a result, we need to find -// the top of the if chain to insert `y := ` before. 
-func StmtToInsertVarBefore(path []ast.Node) ast.Stmt { - enclosingIndex := -1 - for i, p := range path { - if _, ok := p.(ast.Stmt); ok { - enclosingIndex = i - break - } - } - if enclosingIndex == -1 { - return nil - } - enclosingStmt := path[enclosingIndex] - switch enclosingStmt.(type) { - case *ast.IfStmt: - // The enclosingStmt is inside of the if declaration, - // We need to check if we are in an else-if stmt and - // get the base if statement. - return baseIfStmt(path, enclosingIndex) - case *ast.CaseClause: - // Get the enclosing switch stmt if the enclosingStmt is - // inside of the case statement. - for i := enclosingIndex + 1; i < len(path); i++ { - if node, ok := path[i].(*ast.SwitchStmt); ok { - return node - } else if node, ok := path[i].(*ast.TypeSwitchStmt); ok { - return node - } - } - } - if len(path) <= enclosingIndex+1 { - return enclosingStmt.(ast.Stmt) - } - // Check if the enclosing statement is inside another node. - switch expr := path[enclosingIndex+1].(type) { - case *ast.IfStmt: - // Get the base if statement. - return baseIfStmt(path, enclosingIndex+1) - case *ast.ForStmt: - if expr.Init == enclosingStmt || expr.Post == enclosingStmt { - return expr - } - } - return enclosingStmt.(ast.Stmt) -} - -// baseIfStmt walks up the if/else-if chain until we get to -// the top of the current if chain. -func baseIfStmt(path []ast.Node, index int) ast.Stmt { - stmt := path[index] - for i := index + 1; i < len(path); i++ { - if node, ok := path[i].(*ast.IfStmt); ok && node.Else == stmt { - stmt = node - continue - } - break - } - return stmt.(ast.Stmt) -} - -// WalkASTWithParent walks the AST rooted at n. The semantics are -// similar to ast.Inspect except it does not call f(nil). -func WalkASTWithParent(n ast.Node, f func(n ast.Node, parent ast.Node) bool) { - var ancestors []ast.Node - ast.Inspect(n, func(n ast.Node) (recurse bool) { - if n == nil { - ancestors = ancestors[:len(ancestors)-1] - return false - } - - var parent ast.Node - if len(ancestors) > 0 { - parent = ancestors[len(ancestors)-1] - } - ancestors = append(ancestors, n) - return f(n, parent) - }) -} - -// MatchingIdents finds the names of all identifiers in 'node' that match any of the given types. -// 'pos' represents the position at which the identifiers may be inserted. 'pos' must be within -// the scope of each of identifier we select. Otherwise, we will insert a variable at 'pos' that -// is unrecognized. -func MatchingIdents(typs []types.Type, node ast.Node, pos token.Pos, info *types.Info, pkg *types.Package) map[types.Type][]string { - - // Initialize matches to contain the variable types we are searching for. - matches := make(map[types.Type][]string) - for _, typ := range typs { - if typ == nil { - continue // TODO(adonovan): is this reachable? - } - matches[typ] = nil // create entry - } - - seen := map[types.Object]struct{}{} - ast.Inspect(node, func(n ast.Node) bool { - if n == nil { - return false - } - // Prevent circular definitions. If 'pos' is within an assignment statement, do not - // allow any identifiers in that assignment statement to be selected. 
Otherwise, - // we could do the following, where 'x' satisfies the type of 'f0': - // - // x := fakeStruct{f0: x} - // - if assign, ok := n.(*ast.AssignStmt); ok && pos > assign.Pos() && pos <= assign.End() { - return false - } - if n.End() > pos { - return n.Pos() <= pos - } - ident, ok := n.(*ast.Ident) - if !ok || ident.Name == "_" { - return true - } - obj := info.Defs[ident] - if obj == nil || obj.Type() == nil { - return true - } - if _, ok := obj.(*types.TypeName); ok { - return true - } - // Prevent duplicates in matches' values. - if _, ok = seen[obj]; ok { - return true - } - seen[obj] = struct{}{} - // Find the scope for the given position. Then, check whether the object - // exists within the scope. - innerScope := pkg.Scope().Innermost(pos) - if innerScope == nil { - return true - } - _, foundObj := innerScope.LookupParent(ident.Name, pos) - if foundObj != obj { - return true - } - // The object must match one of the types that we are searching for. - // TODO(adonovan): opt: use typeutil.Map? - if names, ok := matches[obj.Type()]; ok { - matches[obj.Type()] = append(names, ident.Name) - } else { - // If the object type does not exactly match - // any of the target types, greedily find the first - // target type that the object type can satisfy. - for typ := range matches { - if equivalentTypes(obj.Type(), typ) { - matches[typ] = append(matches[typ], ident.Name) - } - } - } - return true - }) - return matches -} - -func equivalentTypes(want, got types.Type) bool { - if types.Identical(want, got) { - return true - } - // Code segment to help check for untyped equality from (golang/go#32146). - if rhs, ok := want.(*types.Basic); ok && rhs.Info()&types.IsUntyped > 0 { - if lhs, ok := got.Underlying().(*types.Basic); ok { - return rhs.Info()&types.IsConstType == lhs.Info()&types.IsConstType - } - } - return types.AssignableTo(want, got) -} diff --git a/tools/vendor/golang.org/x/tools/internal/event/tag/tag.go b/tools/vendor/golang.org/x/tools/internal/event/tag/tag.go new file mode 100644 index 0000000000..581b26c204 --- /dev/null +++ b/tools/vendor/golang.org/x/tools/internal/event/tag/tag.go @@ -0,0 +1,59 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package tag provides the labels used for telemetry throughout gopls. 
+package tag + +import ( + "golang.org/x/tools/internal/event/keys" +) + +var ( + // create the label keys we use + Method = keys.NewString("method", "") + StatusCode = keys.NewString("status.code", "") + StatusMessage = keys.NewString("status.message", "") + RPCID = keys.NewString("id", "") + RPCDirection = keys.NewString("direction", "") + File = keys.NewString("file", "") + Directory = keys.New("directory", "") + URI = keys.New("URI", "") + Package = keys.NewString("package", "") // sorted comma-separated list of Package IDs + PackagePath = keys.NewString("package_path", "") + Query = keys.New("query", "") + Snapshot = keys.NewUInt64("snapshot", "") + Operation = keys.NewString("operation", "") + + Position = keys.New("position", "") + Category = keys.NewString("category", "") + PackageCount = keys.NewInt("packages", "") + Files = keys.New("files", "") + Port = keys.NewInt("port", "") + Type = keys.New("type", "") + HoverKind = keys.NewString("hoverkind", "") + + NewServer = keys.NewString("new_server", "A new server was added") + EndServer = keys.NewString("end_server", "A server was shut down") + + ServerID = keys.NewString("server", "The server ID an event is related to") + Logfile = keys.NewString("logfile", "") + DebugAddress = keys.NewString("debug_address", "") + GoplsPath = keys.NewString("gopls_path", "") + ClientID = keys.NewString("client_id", "") + + Level = keys.NewInt("level", "The logging level") +) + +var ( + // create the stats we measure + Started = keys.NewInt64("started", "Count of started RPCs.") + ReceivedBytes = keys.NewInt64("received_bytes", "Bytes received.") //, unit.Bytes) + SentBytes = keys.NewInt64("sent_bytes", "Bytes sent.") //, unit.Bytes) + Latency = keys.NewFloat64("latency_ms", "Elapsed time in milliseconds") //, unit.Milliseconds) +) + +const ( + Inbound = "in" + Outbound = "out" +) diff --git a/tools/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go b/tools/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go index 798fe599be..c40c7e9310 100644 --- a/tools/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go +++ b/tools/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package fastwalk provides a faster version of filepath.Walk for file system +// Package fastwalk provides a faster version of [filepath.Walk] for file system // scanning tools. package fastwalk @@ -23,31 +23,31 @@ var ErrTraverseLink = errors.New("fastwalk: traverse symlink, assuming target is // Child directories will still be traversed. var ErrSkipFiles = errors.New("fastwalk: skip remaining files in directory") -// Walk is a faster implementation of filepath.Walk. +// Walk is a faster implementation of [filepath.Walk]. // -// filepath.Walk's design necessarily calls os.Lstat on each file, +// [filepath.Walk]'s design necessarily calls [os.Lstat] on each file, // even if the caller needs less info. // Many tools need only the type of each file. // On some platforms, this information is provided directly by the readdir // system call, avoiding the need to stat each file individually. // fastwalk_unix.go contains a fork of the syscall routines. // -// See golang.org/issue/16399 +// See golang.org/issue/16399. // // Walk walks the file tree rooted at root, calling walkFn for // each file or directory in the tree, including root. // -// If fastWalk returns filepath.SkipDir, the directory is skipped. 
+// If Walk returns [filepath.SkipDir], the directory is skipped. // -// Unlike filepath.Walk: +// Unlike [filepath.Walk]: // - file stat calls must be done by the user. // The only provided metadata is the file type, which does not include // any permission bits. // - multiple goroutines stat the filesystem concurrently. The provided // walkFn must be safe for concurrent use. -// - fastWalk can follow symlinks if walkFn returns the TraverseLink +// - Walk can follow symlinks if walkFn returns the TraverseLink // sentinel error. It is the walkFn's responsibility to prevent -// fastWalk from going into symlink cycles. +// Walk from going into symlink cycles. func Walk(root string, walkFn func(path string, typ os.FileMode) error) error { // TODO(bradfitz): make numWorkers configurable? We used a // minimum of 4 to give the kernel more info about multiple diff --git a/tools/vendor/golang.org/x/tools/internal/gcimporter/bexport.go b/tools/vendor/golang.org/x/tools/internal/gcimporter/bexport.go deleted file mode 100644 index 30582ed6d3..0000000000 --- a/tools/vendor/golang.org/x/tools/internal/gcimporter/bexport.go +++ /dev/null @@ -1,852 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Binary package export. -// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go; -// see that file for specification of the format. - -package gcimporter - -import ( - "bytes" - "encoding/binary" - "fmt" - "go/constant" - "go/token" - "go/types" - "math" - "math/big" - "sort" - "strings" -) - -// If debugFormat is set, each integer and string value is preceded by a marker -// and position information in the encoding. This mechanism permits an importer -// to recognize immediately when it is out of sync. The importer recognizes this -// mode automatically (i.e., it can import export data produced with debugging -// support even if debugFormat is not set at the time of import). This mode will -// lead to massively larger export data (by a factor of 2 to 3) and should only -// be enabled during development and debugging. -// -// NOTE: This flag is the first flag to enable if importing dies because of -// (suspected) format errors, and whenever a change is made to the format. -const debugFormat = false // default: false - -// Current export format version. Increase with each format change. -// -// Note: The latest binary (non-indexed) export format is at version 6. -// This exporter is still at level 4, but it doesn't matter since -// the binary importer can handle older versions just fine. -// -// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE -// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMENTED HERE -// 4: type name objects support type aliases, uses aliasTag -// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used) -// 2: removed unused bool in ODCL export (compiler only) -// 1: header format change (more regular), export package for _ struct fields -// 0: Go1.7 encoding -const exportVersion = 4 - -// trackAllTypes enables cycle tracking for all types, not just named -// types. The existing compiler invariants assume that unnamed types -// that are not completely set up are not used, or else there are spurious -// errors. -// If disabled, only named types are tracked, possibly leading to slightly -// less efficient encoding in rare cases. 
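
fastwalk is an internal package, so it cannot be imported directly; the standard library's filepath.WalkDir offers the same central idea the docs above describe (the callback gets the entry type without a separate os.Lstat per file, and SkipDir skips a directory). A sketch using that standard substitute rather than fastwalk itself; the ".git" filter is just an example:

package main

import (
	"fmt"
	"io/fs"
	"path/filepath"
)

func main() {
	err := filepath.WalkDir(".", func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if d.IsDir() && d.Name() == ".git" {
			return filepath.SkipDir // same skip convention as described above
		}
		fmt.Println(d.Type(), path) // type known without an extra stat
		return nil
	})
	if err != nil {
		panic(err)
	}
}
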
It also prevents the export of -// some corner-case type declarations (but those are not handled correctly -// with with the textual export format either). -// TODO(gri) enable and remove once issues caused by it are fixed -const trackAllTypes = false - -type exporter struct { - fset *token.FileSet - out bytes.Buffer - - // object -> index maps, indexed in order of serialization - strIndex map[string]int - pkgIndex map[*types.Package]int - typIndex map[types.Type]int - - // position encoding - posInfoFormat bool - prevFile string - prevLine int - - // debugging support - written int // bytes written - indent int // for trace -} - -// internalError represents an error generated inside this package. -type internalError string - -func (e internalError) Error() string { return "gcimporter: " + string(e) } - -func internalErrorf(format string, args ...interface{}) error { - return internalError(fmt.Sprintf(format, args...)) -} - -// BExportData returns binary export data for pkg. -// If no file set is provided, position info will be missing. -func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) { - if !debug { - defer func() { - if e := recover(); e != nil { - if ierr, ok := e.(internalError); ok { - err = ierr - return - } - // Not an internal error; panic again. - panic(e) - } - }() - } - - p := exporter{ - fset: fset, - strIndex: map[string]int{"": 0}, // empty string is mapped to 0 - pkgIndex: make(map[*types.Package]int), - typIndex: make(map[types.Type]int), - posInfoFormat: true, // TODO(gri) might become a flag, eventually - } - - // write version info - // The version string must start with "version %d" where %d is the version - // number. Additional debugging information may follow after a blank; that - // text is ignored by the importer. 
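
With the legacy binary exporter (bexport.go) deleted, the supported way to produce and consume export data through x/tools is the indexed format exposed by the public go/gcexportdata package. A rough round-trip sketch under that assumption; the "demo" package source is made up:

package main

import (
	"bytes"
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/gcexportdata"
)

const src = `package demo

const Answer = 42

func Hello() string { return "hi" }
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := (&types.Config{}).Check("demo", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}

	// Write export data in the supported (indexed) format...
	var buf bytes.Buffer
	if err := gcexportdata.Write(&buf, fset, pkg); err != nil {
		panic(err)
	}

	// ...and read it back into a fresh *types.Package.
	imports := make(map[string]*types.Package)
	pkg2, err := gcexportdata.Read(&buf, token.NewFileSet(), imports, "demo")
	if err != nil {
		panic(err)
	}
	fmt.Println(pkg2.Scope().Lookup("Answer")) // e.g. const demo.Answer untyped int
	fmt.Println(pkg2.Scope().Lookup("Hello"))  // e.g. func demo.Hello() string
}
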
- p.rawStringln(fmt.Sprintf("version %d", exportVersion)) - var debug string - if debugFormat { - debug = "debug" - } - p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly - p.bool(trackAllTypes) - p.bool(p.posInfoFormat) - - // --- generic export data --- - - // populate type map with predeclared "known" types - for index, typ := range predeclared() { - p.typIndex[typ] = index - } - if len(p.typIndex) != len(predeclared()) { - return nil, internalError("duplicate entries in type map?") - } - - // write package data - p.pkg(pkg, true) - if trace { - p.tracef("\n") - } - - // write objects - objcount := 0 - scope := pkg.Scope() - for _, name := range scope.Names() { - if !token.IsExported(name) { - continue - } - if trace { - p.tracef("\n") - } - p.obj(scope.Lookup(name)) - objcount++ - } - - // indicate end of list - if trace { - p.tracef("\n") - } - p.tag(endTag) - - // for self-verification only (redundant) - p.int(objcount) - - if trace { - p.tracef("\n") - } - - // --- end of export data --- - - return p.out.Bytes(), nil -} - -func (p *exporter) pkg(pkg *types.Package, emptypath bool) { - if pkg == nil { - panic(internalError("unexpected nil pkg")) - } - - // if we saw the package before, write its index (>= 0) - if i, ok := p.pkgIndex[pkg]; ok { - p.index('P', i) - return - } - - // otherwise, remember the package, write the package tag (< 0) and package data - if trace { - p.tracef("P%d = { ", len(p.pkgIndex)) - defer p.tracef("} ") - } - p.pkgIndex[pkg] = len(p.pkgIndex) - - p.tag(packageTag) - p.string(pkg.Name()) - if emptypath { - p.string("") - } else { - p.string(pkg.Path()) - } -} - -func (p *exporter) obj(obj types.Object) { - switch obj := obj.(type) { - case *types.Const: - p.tag(constTag) - p.pos(obj) - p.qualifiedName(obj) - p.typ(obj.Type()) - p.value(obj.Val()) - - case *types.TypeName: - if obj.IsAlias() { - p.tag(aliasTag) - p.pos(obj) - p.qualifiedName(obj) - } else { - p.tag(typeTag) - } - p.typ(obj.Type()) - - case *types.Var: - p.tag(varTag) - p.pos(obj) - p.qualifiedName(obj) - p.typ(obj.Type()) - - case *types.Func: - p.tag(funcTag) - p.pos(obj) - p.qualifiedName(obj) - sig := obj.Type().(*types.Signature) - p.paramList(sig.Params(), sig.Variadic()) - p.paramList(sig.Results(), false) - - default: - panic(internalErrorf("unexpected object %v (%T)", obj, obj)) - } -} - -func (p *exporter) pos(obj types.Object) { - if !p.posInfoFormat { - return - } - - file, line := p.fileLine(obj) - if file == p.prevFile { - // common case: write line delta - // delta == 0 means different file or no line change - delta := line - p.prevLine - p.int(delta) - if delta == 0 { - p.int(-1) // -1 means no file change - } - } else { - // different file - p.int(0) - // Encode filename as length of common prefix with previous - // filename, followed by (possibly empty) suffix. Filenames - // frequently share path prefixes, so this can save a lot - // of space and make export data size less dependent on file - // path length. The suffix is unlikely to be empty because - // file names tend to end in ".go". 
- n := commonPrefixLen(p.prevFile, file) - p.int(n) // n >= 0 - p.string(file[n:]) // write suffix only - p.prevFile = file - p.int(line) - } - p.prevLine = line -} - -func (p *exporter) fileLine(obj types.Object) (file string, line int) { - if p.fset != nil { - pos := p.fset.Position(obj.Pos()) - file = pos.Filename - line = pos.Line - } - return -} - -func commonPrefixLen(a, b string) int { - if len(a) > len(b) { - a, b = b, a - } - // len(a) <= len(b) - i := 0 - for i < len(a) && a[i] == b[i] { - i++ - } - return i -} - -func (p *exporter) qualifiedName(obj types.Object) { - p.string(obj.Name()) - p.pkg(obj.Pkg(), false) -} - -func (p *exporter) typ(t types.Type) { - if t == nil { - panic(internalError("nil type")) - } - - // Possible optimization: Anonymous pointer types *T where - // T is a named type are common. We could canonicalize all - // such types *T to a single type PT = *T. This would lead - // to at most one *T entry in typIndex, and all future *T's - // would be encoded as the respective index directly. Would - // save 1 byte (pointerTag) per *T and reduce the typIndex - // size (at the cost of a canonicalization map). We can do - // this later, without encoding format change. - - // if we saw the type before, write its index (>= 0) - if i, ok := p.typIndex[t]; ok { - p.index('T', i) - return - } - - // otherwise, remember the type, write the type tag (< 0) and type data - if trackAllTypes { - if trace { - p.tracef("T%d = {>\n", len(p.typIndex)) - defer p.tracef("<\n} ") - } - p.typIndex[t] = len(p.typIndex) - } - - switch t := t.(type) { - case *types.Named: - if !trackAllTypes { - // if we don't track all types, track named types now - p.typIndex[t] = len(p.typIndex) - } - - p.tag(namedTag) - p.pos(t.Obj()) - p.qualifiedName(t.Obj()) - p.typ(t.Underlying()) - if !types.IsInterface(t) { - p.assocMethods(t) - } - - case *types.Array: - p.tag(arrayTag) - p.int64(t.Len()) - p.typ(t.Elem()) - - case *types.Slice: - p.tag(sliceTag) - p.typ(t.Elem()) - - case *dddSlice: - p.tag(dddTag) - p.typ(t.elem) - - case *types.Struct: - p.tag(structTag) - p.fieldList(t) - - case *types.Pointer: - p.tag(pointerTag) - p.typ(t.Elem()) - - case *types.Signature: - p.tag(signatureTag) - p.paramList(t.Params(), t.Variadic()) - p.paramList(t.Results(), false) - - case *types.Interface: - p.tag(interfaceTag) - p.iface(t) - - case *types.Map: - p.tag(mapTag) - p.typ(t.Key()) - p.typ(t.Elem()) - - case *types.Chan: - p.tag(chanTag) - p.int(int(3 - t.Dir())) // hack - p.typ(t.Elem()) - - default: - panic(internalErrorf("unexpected type %T: %s", t, t)) - } -} - -func (p *exporter) assocMethods(named *types.Named) { - // Sort methods (for determinism). 
- var methods []*types.Func - for i := 0; i < named.NumMethods(); i++ { - methods = append(methods, named.Method(i)) - } - sort.Sort(methodsByName(methods)) - - p.int(len(methods)) - - if trace && methods != nil { - p.tracef("associated methods {>\n") - } - - for i, m := range methods { - if trace && i > 0 { - p.tracef("\n") - } - - p.pos(m) - name := m.Name() - p.string(name) - if !exported(name) { - p.pkg(m.Pkg(), false) - } - - sig := m.Type().(*types.Signature) - p.paramList(types.NewTuple(sig.Recv()), false) - p.paramList(sig.Params(), sig.Variadic()) - p.paramList(sig.Results(), false) - p.int(0) // dummy value for go:nointerface pragma - ignored by importer - } - - if trace && methods != nil { - p.tracef("<\n} ") - } -} - -type methodsByName []*types.Func - -func (x methodsByName) Len() int { return len(x) } -func (x methodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } -func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() } - -func (p *exporter) fieldList(t *types.Struct) { - if trace && t.NumFields() > 0 { - p.tracef("fields {>\n") - defer p.tracef("<\n} ") - } - - p.int(t.NumFields()) - for i := 0; i < t.NumFields(); i++ { - if trace && i > 0 { - p.tracef("\n") - } - p.field(t.Field(i)) - p.string(t.Tag(i)) - } -} - -func (p *exporter) field(f *types.Var) { - if !f.IsField() { - panic(internalError("field expected")) - } - - p.pos(f) - p.fieldName(f) - p.typ(f.Type()) -} - -func (p *exporter) iface(t *types.Interface) { - // TODO(gri): enable importer to load embedded interfaces, - // then emit Embeddeds and ExplicitMethods separately here. - p.int(0) - - n := t.NumMethods() - if trace && n > 0 { - p.tracef("methods {>\n") - defer p.tracef("<\n} ") - } - p.int(n) - for i := 0; i < n; i++ { - if trace && i > 0 { - p.tracef("\n") - } - p.method(t.Method(i)) - } -} - -func (p *exporter) method(m *types.Func) { - sig := m.Type().(*types.Signature) - if sig.Recv() == nil { - panic(internalError("method expected")) - } - - p.pos(m) - p.string(m.Name()) - if m.Name() != "_" && !token.IsExported(m.Name()) { - p.pkg(m.Pkg(), false) - } - - // interface method; no need to encode receiver. - p.paramList(sig.Params(), sig.Variadic()) - p.paramList(sig.Results(), false) -} - -func (p *exporter) fieldName(f *types.Var) { - name := f.Name() - - if f.Anonymous() { - // anonymous field - we distinguish between 3 cases: - // 1) field name matches base type name and is exported - // 2) field name matches base type name and is not exported - // 3) field name doesn't match base type name (alias name) - bname := basetypeName(f.Type()) - if name == bname { - if token.IsExported(name) { - name = "" // 1) we don't need to know the field name or package - } else { - name = "?" // 2) use unexported name "?" 
to force package export - } - } else { - // 3) indicate alias and export name as is - // (this requires an extra "@" but this is a rare case) - p.string("@") - } - } - - p.string(name) - if name != "" && !token.IsExported(name) { - p.pkg(f.Pkg(), false) - } -} - -func basetypeName(typ types.Type) string { - switch typ := deref(typ).(type) { - case *types.Basic: - return typ.Name() - case *types.Named: - return typ.Obj().Name() - default: - return "" // unnamed type - } -} - -func (p *exporter) paramList(params *types.Tuple, variadic bool) { - // use negative length to indicate unnamed parameters - // (look at the first parameter only since either all - // names are present or all are absent) - n := params.Len() - if n > 0 && params.At(0).Name() == "" { - n = -n - } - p.int(n) - for i := 0; i < params.Len(); i++ { - q := params.At(i) - t := q.Type() - if variadic && i == params.Len()-1 { - t = &dddSlice{t.(*types.Slice).Elem()} - } - p.typ(t) - if n > 0 { - name := q.Name() - p.string(name) - if name != "_" { - p.pkg(q.Pkg(), false) - } - } - p.string("") // no compiler-specific info - } -} - -func (p *exporter) value(x constant.Value) { - if trace { - p.tracef("= ") - } - - switch x.Kind() { - case constant.Bool: - tag := falseTag - if constant.BoolVal(x) { - tag = trueTag - } - p.tag(tag) - - case constant.Int: - if v, exact := constant.Int64Val(x); exact { - // common case: x fits into an int64 - use compact encoding - p.tag(int64Tag) - p.int64(v) - return - } - // uncommon case: large x - use float encoding - // (powers of 2 will be encoded efficiently with exponent) - p.tag(floatTag) - p.float(constant.ToFloat(x)) - - case constant.Float: - p.tag(floatTag) - p.float(x) - - case constant.Complex: - p.tag(complexTag) - p.float(constant.Real(x)) - p.float(constant.Imag(x)) - - case constant.String: - p.tag(stringTag) - p.string(constant.StringVal(x)) - - case constant.Unknown: - // package contains type errors - p.tag(unknownTag) - - default: - panic(internalErrorf("unexpected value %v (%T)", x, x)) - } -} - -func (p *exporter) float(x constant.Value) { - if x.Kind() != constant.Float { - panic(internalErrorf("unexpected constant %v, want float", x)) - } - // extract sign (there is no -0) - sign := constant.Sign(x) - if sign == 0 { - // x == 0 - p.int(0) - return - } - // x != 0 - - var f big.Float - if v, exact := constant.Float64Val(x); exact { - // float64 - f.SetFloat64(v) - } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { - // TODO(gri): add big.Rat accessor to constant.Value. - r := valueToRat(num) - f.SetRat(r.Quo(r, valueToRat(denom))) - } else { - // Value too large to represent as a fraction => inaccessible. - // TODO(gri): add big.Float accessor to constant.Value. - f.SetFloat64(math.MaxFloat64) // FIXME - } - - // extract exponent such that 0.5 <= m < 1.0 - var m big.Float - exp := f.MantExp(&m) - - // extract mantissa as *big.Int - // - set exponent large enough so mant satisfies mant.IsInt() - // - get *big.Int from mant - m.SetMantExp(&m, int(m.MinPrec())) - mant, acc := m.Int(nil) - if acc != big.Exact { - panic(internalError("internal error")) - } - - p.int(sign) - p.int(exp) - p.string(string(mant.Bytes())) -} - -func valueToRat(x constant.Value) *big.Rat { - // Convert little-endian to big-endian. - // I can't believe this is necessary. 
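// [Editor's aside — illustrative, not part of this patch.] go/constant.Bytes
// yields the absolute value in little-endian order, while big.Int.SetBytes
// expects big-endian, hence the reversal below. A self-contained round trip:
package main

import (
	"fmt"
	"go/constant"
	"math/big"
)

func main() {
	x := constant.MakeInt64(258) // 0x0102
	b := constant.Bytes(x)       // little-endian: [0x02 0x01]
	for i, j := 0, len(b)-1; i < j; i, j = i+1, j-1 {
		b[i], b[j] = b[j], b[i] // now big-endian: [0x01 0x02]
	}
	fmt.Println(new(big.Int).SetBytes(b)) // 258
}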
- bytes := constant.Bytes(x) - for i := 0; i < len(bytes)/2; i++ { - bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i] - } - return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes)) -} - -func (p *exporter) bool(b bool) bool { - if trace { - p.tracef("[") - defer p.tracef("= %v] ", b) - } - - x := 0 - if b { - x = 1 - } - p.int(x) - return b -} - -// ---------------------------------------------------------------------------- -// Low-level encoders - -func (p *exporter) index(marker byte, index int) { - if index < 0 { - panic(internalError("invalid index < 0")) - } - if debugFormat { - p.marker('t') - } - if trace { - p.tracef("%c%d ", marker, index) - } - p.rawInt64(int64(index)) -} - -func (p *exporter) tag(tag int) { - if tag >= 0 { - panic(internalError("invalid tag >= 0")) - } - if debugFormat { - p.marker('t') - } - if trace { - p.tracef("%s ", tagString[-tag]) - } - p.rawInt64(int64(tag)) -} - -func (p *exporter) int(x int) { - p.int64(int64(x)) -} - -func (p *exporter) int64(x int64) { - if debugFormat { - p.marker('i') - } - if trace { - p.tracef("%d ", x) - } - p.rawInt64(x) -} - -func (p *exporter) string(s string) { - if debugFormat { - p.marker('s') - } - if trace { - p.tracef("%q ", s) - } - // if we saw the string before, write its index (>= 0) - // (the empty string is mapped to 0) - if i, ok := p.strIndex[s]; ok { - p.rawInt64(int64(i)) - return - } - // otherwise, remember string and write its negative length and bytes - p.strIndex[s] = len(p.strIndex) - p.rawInt64(-int64(len(s))) - for i := 0; i < len(s); i++ { - p.rawByte(s[i]) - } -} - -// marker emits a marker byte and position information which makes -// it easy for a reader to detect if it is "out of sync". Used for -// debugFormat format only. -func (p *exporter) marker(m byte) { - p.rawByte(m) - // Enable this for help tracking down the location - // of an incorrect marker when running in debugFormat. - if false && trace { - p.tracef("#%d ", p.written) - } - p.rawInt64(int64(p.written)) -} - -// rawInt64 should only be used by low-level encoders. -func (p *exporter) rawInt64(x int64) { - var tmp [binary.MaxVarintLen64]byte - n := binary.PutVarint(tmp[:], x) - for i := 0; i < n; i++ { - p.rawByte(tmp[i]) - } -} - -// rawStringln should only be used to emit the initial version string. -func (p *exporter) rawStringln(s string) { - for i := 0; i < len(s); i++ { - p.rawByte(s[i]) - } - p.rawByte('\n') -} - -// rawByte is the bottleneck interface to write to p.out. -// rawByte escapes b as follows (any encoding does that -// hides '$'): -// -// '$' => '|' 'S' -// '|' => '|' '|' -// -// Necessary so other tools can find the end of the -// export data by searching for "$$". -// rawByte should only be used by low-level encoders. -func (p *exporter) rawByte(b byte) { - switch b { - case '$': - // write '$' as '|' 'S' - b = 'S' - fallthrough - case '|': - // write '|' as '|' '|' - p.out.WriteByte('|') - p.written++ - } - p.out.WriteByte(b) - p.written++ -} - -// tracef is like fmt.Printf but it rewrites the format string -// to take care of indentation. -func (p *exporter) tracef(format string, args ...interface{}) { - if strings.ContainsAny(format, "<>\n") { - var buf bytes.Buffer - for i := 0; i < len(format); i++ { - // no need to deal with runes - ch := format[i] - switch ch { - case '>': - p.indent++ - continue - case '<': - p.indent-- - continue - } - buf.WriteByte(ch) - if ch == '\n' { - for j := p.indent; j > 0; j-- { - buf.WriteString(". 
") - } - } - } - format = buf.String() - } - fmt.Printf(format, args...) -} - -// Debugging support. -// (tagString is only used when tracing is enabled) -var tagString = [...]string{ - // Packages - -packageTag: "package", - - // Types - -namedTag: "named type", - -arrayTag: "array", - -sliceTag: "slice", - -dddTag: "ddd", - -structTag: "struct", - -pointerTag: "pointer", - -signatureTag: "signature", - -interfaceTag: "interface", - -mapTag: "map", - -chanTag: "chan", - - // Values - -falseTag: "false", - -trueTag: "true", - -int64Tag: "int64", - -floatTag: "float", - -fractionTag: "fraction", - -complexTag: "complex", - -stringTag: "string", - -unknownTag: "unknown", - - // Type aliases - -aliasTag: "alias", -} diff --git a/tools/vendor/golang.org/x/tools/internal/gcimporter/bimport.go b/tools/vendor/golang.org/x/tools/internal/gcimporter/bimport.go index b85de01470..d98b0db2a9 100644 --- a/tools/vendor/golang.org/x/tools/internal/gcimporter/bimport.go +++ b/tools/vendor/golang.org/x/tools/internal/gcimporter/bimport.go @@ -2,340 +2,24 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// This file is a copy of $GOROOT/src/go/internal/gcimporter/bimport.go. +// This file contains the remaining vestiges of +// $GOROOT/src/go/internal/gcimporter/bimport.go. package gcimporter import ( - "encoding/binary" "fmt" - "go/constant" "go/token" "go/types" - "sort" - "strconv" - "strings" "sync" - "unicode" - "unicode/utf8" ) -type importer struct { - imports map[string]*types.Package - data []byte - importpath string - buf []byte // for reading strings - version int // export format version - - // object lists - strList []string // in order of appearance - pathList []string // in order of appearance - pkgList []*types.Package // in order of appearance - typList []types.Type // in order of appearance - interfaceList []*types.Interface // for delayed completion only - trackAllTypes bool - - // position encoding - posInfoFormat bool - prevFile string - prevLine int - fake fakeFileSet - - // debugging support - debugFormat bool - read int // bytes read -} - -// BImportData imports a package from the serialized package data -// and returns the number of bytes consumed and a reference to the package. -// If the export data version is not recognized or the format is otherwise -// compromised, an error is returned. -func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) { - // catch panics and return them as errors - const currentVersion = 6 - version := -1 // unknown version - defer func() { - if e := recover(); e != nil { - // Return a (possibly nil or incomplete) package unchanged (see #16088). - if version > currentVersion { - err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e) - } else { - err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e) - } - } - }() - - p := importer{ - imports: imports, - data: data, - importpath: path, - version: version, - strList: []string{""}, // empty string is mapped to 0 - pathList: []string{""}, // empty string is mapped to 0 - fake: fakeFileSet{ - fset: fset, - files: make(map[string]*fileInfo), - }, - } - defer p.fake.setLines() // set lines for files in fset - - // read version info - var versionstr string - if b := p.rawByte(); b == 'c' || b == 'd' { - // Go1.7 encoding; first byte encodes low-level - // encoding format (compact vs debug). 
- // For backward-compatibility only (avoid problems with - // old installed packages). Newly compiled packages use - // the extensible format string. - // TODO(gri) Remove this support eventually; after Go1.8. - if b == 'd' { - p.debugFormat = true - } - p.trackAllTypes = p.rawByte() == 'a' - p.posInfoFormat = p.int() != 0 - versionstr = p.string() - if versionstr == "v1" { - version = 0 - } - } else { - // Go1.8 extensible encoding - // read version string and extract version number (ignore anything after the version number) - versionstr = p.rawStringln(b) - if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" { - if v, err := strconv.Atoi(s[1]); err == nil && v > 0 { - version = v - } - } - } - p.version = version - - // read version specific flags - extend as necessary - switch p.version { - // case currentVersion: - // ... - // fallthrough - case currentVersion, 5, 4, 3, 2, 1: - p.debugFormat = p.rawStringln(p.rawByte()) == "debug" - p.trackAllTypes = p.int() != 0 - p.posInfoFormat = p.int() != 0 - case 0: - // Go1.7 encoding format - nothing to do here - default: - errorf("unknown bexport format version %d (%q)", p.version, versionstr) - } - - // --- generic export data --- - - // populate typList with predeclared "known" types - p.typList = append(p.typList, predeclared()...) - - // read package data - pkg = p.pkg() - - // read objects of phase 1 only (see cmd/compile/internal/gc/bexport.go) - objcount := 0 - for { - tag := p.tagOrIndex() - if tag == endTag { - break - } - p.obj(tag) - objcount++ - } - - // self-verification - if count := p.int(); count != objcount { - errorf("got %d objects; want %d", objcount, count) - } - - // ignore compiler-specific import data - - // complete interfaces - // TODO(gri) re-investigate if we still need to do this in a delayed fashion - for _, typ := range p.interfaceList { - typ.Complete() - } - - // record all referenced packages as imports - list := append(([]*types.Package)(nil), p.pkgList[1:]...) 
- sort.Sort(byPath(list)) - pkg.SetImports(list) - - // package was imported completely and without errors - pkg.MarkComplete() - - return p.read, pkg, nil -} - func errorf(format string, args ...interface{}) { panic(fmt.Sprintf(format, args...)) } -func (p *importer) pkg() *types.Package { - // if the package was seen before, i is its index (>= 0) - i := p.tagOrIndex() - if i >= 0 { - return p.pkgList[i] - } - - // otherwise, i is the package tag (< 0) - if i != packageTag { - errorf("unexpected package tag %d version %d", i, p.version) - } - - // read package data - name := p.string() - var path string - if p.version >= 5 { - path = p.path() - } else { - path = p.string() - } - if p.version >= 6 { - p.int() // package height; unused by go/types - } - - // we should never see an empty package name - if name == "" { - errorf("empty package name in import") - } - - // an empty path denotes the package we are currently importing; - // it must be the first package we see - if (path == "") != (len(p.pkgList) == 0) { - errorf("package path %q for pkg index %d", path, len(p.pkgList)) - } - - // if the package was imported before, use that one; otherwise create a new one - if path == "" { - path = p.importpath - } - pkg := p.imports[path] - if pkg == nil { - pkg = types.NewPackage(path, name) - p.imports[path] = pkg - } else if pkg.Name() != name { - errorf("conflicting names %s and %s for package %q", pkg.Name(), name, path) - } - p.pkgList = append(p.pkgList, pkg) - - return pkg -} - -// objTag returns the tag value for each object kind. -func objTag(obj types.Object) int { - switch obj.(type) { - case *types.Const: - return constTag - case *types.TypeName: - return typeTag - case *types.Var: - return varTag - case *types.Func: - return funcTag - default: - errorf("unexpected object: %v (%T)", obj, obj) // panics - panic("unreachable") - } -} - -func sameObj(a, b types.Object) bool { - // Because unnamed types are not canonicalized, we cannot simply compare types for - // (pointer) identity. - // Ideally we'd check equality of constant values as well, but this is good enough. - return objTag(a) == objTag(b) && types.Identical(a.Type(), b.Type()) -} - -func (p *importer) declare(obj types.Object) { - pkg := obj.Pkg() - if alt := pkg.Scope().Insert(obj); alt != nil { - // This can only trigger if we import a (non-type) object a second time. - // Excluding type aliases, this cannot happen because 1) we only import a package - // once; and b) we ignore compiler-specific export data which may contain - // functions whose inlined function bodies refer to other functions that - // were already imported. - // However, type aliases require reexporting the original type, so we need - // to allow it (see also the comment in cmd/compile/internal/gc/bimport.go, - // method importer.obj, switch case importing functions). - // TODO(gri) review/update this comment once the gc compiler handles type aliases. 
- if !sameObj(obj, alt) { - errorf("inconsistent import:\n\t%v\npreviously imported as:\n\t%v\n", obj, alt) - } - } -} - -func (p *importer) obj(tag int) { - switch tag { - case constTag: - pos := p.pos() - pkg, name := p.qualifiedName() - typ := p.typ(nil, nil) - val := p.value() - p.declare(types.NewConst(pos, pkg, name, typ, val)) - - case aliasTag: - // TODO(gri) verify type alias hookup is correct - pos := p.pos() - pkg, name := p.qualifiedName() - typ := p.typ(nil, nil) - p.declare(types.NewTypeName(pos, pkg, name, typ)) - - case typeTag: - p.typ(nil, nil) - - case varTag: - pos := p.pos() - pkg, name := p.qualifiedName() - typ := p.typ(nil, nil) - p.declare(types.NewVar(pos, pkg, name, typ)) - - case funcTag: - pos := p.pos() - pkg, name := p.qualifiedName() - params, isddd := p.paramList() - result, _ := p.paramList() - sig := types.NewSignature(nil, params, result, isddd) - p.declare(types.NewFunc(pos, pkg, name, sig)) - - default: - errorf("unexpected object tag %d", tag) - } -} - const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go -func (p *importer) pos() token.Pos { - if !p.posInfoFormat { - return token.NoPos - } - - file := p.prevFile - line := p.prevLine - delta := p.int() - line += delta - if p.version >= 5 { - if delta == deltaNewFile { - if n := p.int(); n >= 0 { - // file changed - file = p.path() - line = n - } - } - } else { - if delta == 0 { - if n := p.int(); n >= 0 { - // file changed - file = p.prevFile[:n] + p.string() - line = p.int() - } - } - } - p.prevFile = file - p.prevLine = line - - return p.fake.pos(file, line, 0) -} - // Synthesize a token.Pos type fakeFileSet struct { fset *token.FileSet @@ -389,205 +73,6 @@ var ( fakeLinesOnce sync.Once ) -func (p *importer) qualifiedName() (pkg *types.Package, name string) { - name = p.string() - pkg = p.pkg() - return -} - -func (p *importer) record(t types.Type) { - p.typList = append(p.typList, t) -} - -// A dddSlice is a types.Type representing ...T parameters. -// It only appears for parameter types and does not escape -// the importer. -type dddSlice struct { - elem types.Type -} - -func (t *dddSlice) Underlying() types.Type { return t } -func (t *dddSlice) String() string { return "..." + t.elem.String() } - -// parent is the package which declared the type; parent == nil means -// the package currently imported. The parent package is needed for -// exported struct fields and interface methods which don't contain -// explicit package information in the export data. -// -// A non-nil tname is used as the "owner" of the result type; i.e., -// the result type is the underlying type of tname. tname is used -// to give interface methods a named receiver type where possible. 
-func (p *importer) typ(parent *types.Package, tname *types.Named) types.Type { - // if the type was seen before, i is its index (>= 0) - i := p.tagOrIndex() - if i >= 0 { - return p.typList[i] - } - - // otherwise, i is the type tag (< 0) - switch i { - case namedTag: - // read type object - pos := p.pos() - parent, name := p.qualifiedName() - scope := parent.Scope() - obj := scope.Lookup(name) - - // if the object doesn't exist yet, create and insert it - if obj == nil { - obj = types.NewTypeName(pos, parent, name, nil) - scope.Insert(obj) - } - - if _, ok := obj.(*types.TypeName); !ok { - errorf("pkg = %s, name = %s => %s", parent, name, obj) - } - - // associate new named type with obj if it doesn't exist yet - t0 := types.NewNamed(obj.(*types.TypeName), nil, nil) - - // but record the existing type, if any - tname := obj.Type().(*types.Named) // tname is either t0 or the existing type - p.record(tname) - - // read underlying type - t0.SetUnderlying(p.typ(parent, t0)) - - // interfaces don't have associated methods - if types.IsInterface(t0) { - return tname - } - - // read associated methods - for i := p.int(); i > 0; i-- { - // TODO(gri) replace this with something closer to fieldName - pos := p.pos() - name := p.string() - if !exported(name) { - p.pkg() - } - - recv, _ := p.paramList() // TODO(gri) do we need a full param list for the receiver? - params, isddd := p.paramList() - result, _ := p.paramList() - p.int() // go:nointerface pragma - discarded - - sig := types.NewSignature(recv.At(0), params, result, isddd) - t0.AddMethod(types.NewFunc(pos, parent, name, sig)) - } - - return tname - - case arrayTag: - t := new(types.Array) - if p.trackAllTypes { - p.record(t) - } - - n := p.int64() - *t = *types.NewArray(p.typ(parent, nil), n) - return t - - case sliceTag: - t := new(types.Slice) - if p.trackAllTypes { - p.record(t) - } - - *t = *types.NewSlice(p.typ(parent, nil)) - return t - - case dddTag: - t := new(dddSlice) - if p.trackAllTypes { - p.record(t) - } - - t.elem = p.typ(parent, nil) - return t - - case structTag: - t := new(types.Struct) - if p.trackAllTypes { - p.record(t) - } - - *t = *types.NewStruct(p.fieldList(parent)) - return t - - case pointerTag: - t := new(types.Pointer) - if p.trackAllTypes { - p.record(t) - } - - *t = *types.NewPointer(p.typ(parent, nil)) - return t - - case signatureTag: - t := new(types.Signature) - if p.trackAllTypes { - p.record(t) - } - - params, isddd := p.paramList() - result, _ := p.paramList() - *t = *types.NewSignature(nil, params, result, isddd) - return t - - case interfaceTag: - // Create a dummy entry in the type list. This is safe because we - // cannot expect the interface type to appear in a cycle, as any - // such cycle must contain a named type which would have been - // first defined earlier. - // TODO(gri) Is this still true now that we have type aliases? - // See issue #23225. 
- n := len(p.typList) - if p.trackAllTypes { - p.record(nil) - } - - var embeddeds []types.Type - for n := p.int(); n > 0; n-- { - p.pos() - embeddeds = append(embeddeds, p.typ(parent, nil)) - } - - t := newInterface(p.methodList(parent, tname), embeddeds) - p.interfaceList = append(p.interfaceList, t) - if p.trackAllTypes { - p.typList[n] = t - } - return t - - case mapTag: - t := new(types.Map) - if p.trackAllTypes { - p.record(t) - } - - key := p.typ(parent, nil) - val := p.typ(parent, nil) - *t = *types.NewMap(key, val) - return t - - case chanTag: - t := new(types.Chan) - if p.trackAllTypes { - p.record(t) - } - - dir := chanDir(p.int()) - val := p.typ(parent, nil) - *t = *types.NewChan(dir, val) - return t - - default: - errorf("unexpected type tag %d", i) // panics - panic("unreachable") - } -} - func chanDir(d int) types.ChanDir { // tag values must match the constants in cmd/compile/internal/gc/go.go switch d { @@ -603,394 +88,6 @@ func chanDir(d int) types.ChanDir { } } -func (p *importer) fieldList(parent *types.Package) (fields []*types.Var, tags []string) { - if n := p.int(); n > 0 { - fields = make([]*types.Var, n) - tags = make([]string, n) - for i := range fields { - fields[i], tags[i] = p.field(parent) - } - } - return -} - -func (p *importer) field(parent *types.Package) (*types.Var, string) { - pos := p.pos() - pkg, name, alias := p.fieldName(parent) - typ := p.typ(parent, nil) - tag := p.string() - - anonymous := false - if name == "" { - // anonymous field - typ must be T or *T and T must be a type name - switch typ := deref(typ).(type) { - case *types.Basic: // basic types are named types - pkg = nil // // objects defined in Universe scope have no package - name = typ.Name() - case *types.Named: - name = typ.Obj().Name() - default: - errorf("named base type expected") - } - anonymous = true - } else if alias { - // anonymous field: we have an explicit name because it's an alias - anonymous = true - } - - return types.NewField(pos, pkg, name, typ, anonymous), tag -} - -func (p *importer) methodList(parent *types.Package, baseType *types.Named) (methods []*types.Func) { - if n := p.int(); n > 0 { - methods = make([]*types.Func, n) - for i := range methods { - methods[i] = p.method(parent, baseType) - } - } - return -} - -func (p *importer) method(parent *types.Package, baseType *types.Named) *types.Func { - pos := p.pos() - pkg, name, _ := p.fieldName(parent) - // If we don't have a baseType, use a nil receiver. - // A receiver using the actual interface type (which - // we don't know yet) will be filled in when we call - // types.Interface.Complete. 
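// [Editor's aside — illustrative sketch, not part of this patch.] As the
// comment above says, interface methods are built with a nil receiver and
// go/types fills in the interface type itself. A minimal standalone example
// (the package path and method name are made up):
package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	pkg := types.NewPackage("example.com/p", "p")   // hypothetical path
	sig := types.NewSignature(nil, nil, nil, false) // nil receiver, no params/results
	m := types.NewFunc(token.NoPos, pkg, "M", sig)
	iface := types.NewInterfaceType([]*types.Func{m}, nil)
	iface.Complete()   // safe to use once completed
	fmt.Println(iface) // e.g. interface{M()}
}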
- var recv *types.Var - if baseType != nil { - recv = types.NewVar(token.NoPos, parent, "", baseType) - } - params, isddd := p.paramList() - result, _ := p.paramList() - sig := types.NewSignature(recv, params, result, isddd) - return types.NewFunc(pos, pkg, name, sig) -} - -func (p *importer) fieldName(parent *types.Package) (pkg *types.Package, name string, alias bool) { - name = p.string() - pkg = parent - if pkg == nil { - // use the imported package instead - pkg = p.pkgList[0] - } - if p.version == 0 && name == "_" { - // version 0 didn't export a package for _ fields - return - } - switch name { - case "": - // 1) field name matches base type name and is exported: nothing to do - case "?": - // 2) field name matches base type name and is not exported: need package - name = "" - pkg = p.pkg() - case "@": - // 3) field name doesn't match type name (alias) - name = p.string() - alias = true - fallthrough - default: - if !exported(name) { - pkg = p.pkg() - } - } - return -} - -func (p *importer) paramList() (*types.Tuple, bool) { - n := p.int() - if n == 0 { - return nil, false - } - // negative length indicates unnamed parameters - named := true - if n < 0 { - n = -n - named = false - } - // n > 0 - params := make([]*types.Var, n) - isddd := false - for i := range params { - params[i], isddd = p.param(named) - } - return types.NewTuple(params...), isddd -} - -func (p *importer) param(named bool) (*types.Var, bool) { - t := p.typ(nil, nil) - td, isddd := t.(*dddSlice) - if isddd { - t = types.NewSlice(td.elem) - } - - var pkg *types.Package - var name string - if named { - name = p.string() - if name == "" { - errorf("expected named parameter") - } - if name != "_" { - pkg = p.pkg() - } - if i := strings.Index(name, "·"); i > 0 { - name = name[:i] // cut off gc-specific parameter numbering - } - } - - // read and discard compiler-specific info - p.string() - - return types.NewVar(token.NoPos, pkg, name, t), isddd -} - -func exported(name string) bool { - ch, _ := utf8.DecodeRuneInString(name) - return unicode.IsUpper(ch) -} - -func (p *importer) value() constant.Value { - switch tag := p.tagOrIndex(); tag { - case falseTag: - return constant.MakeBool(false) - case trueTag: - return constant.MakeBool(true) - case int64Tag: - return constant.MakeInt64(p.int64()) - case floatTag: - return p.float() - case complexTag: - re := p.float() - im := p.float() - return constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) - case stringTag: - return constant.MakeString(p.string()) - case unknownTag: - return constant.MakeUnknown() - default: - errorf("unexpected value tag %d", tag) // panics - panic("unreachable") - } -} - -func (p *importer) float() constant.Value { - sign := p.int() - if sign == 0 { - return constant.MakeInt64(0) - } - - exp := p.int() - mant := []byte(p.string()) // big endian - - // remove leading 0's if any - for len(mant) > 0 && mant[0] == 0 { - mant = mant[1:] - } - - // convert to little endian - // TODO(gri) go/constant should have a more direct conversion function - // (e.g., once it supports a big.Float based implementation) - for i, j := 0, len(mant)-1; i < j; i, j = i+1, j-1 { - mant[i], mant[j] = mant[j], mant[i] - } - - // adjust exponent (constant.MakeFromBytes creates an integer value, - // but mant represents the mantissa bits such that 0.5 <= mant < 1.0) - exp -= len(mant) << 3 - if len(mant) > 0 { - for msd := mant[len(mant)-1]; msd&0x80 == 0; msd <<= 1 { - exp++ - } - } - - x := constant.MakeFromBytes(mant) - switch { - case exp < 0: - d := 
constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp)) - x = constant.BinaryOp(x, token.QUO, d) - case exp > 0: - x = constant.Shift(x, token.SHL, uint(exp)) - } - - if sign < 0 { - x = constant.UnaryOp(token.SUB, x, 0) - } - return x -} - -// ---------------------------------------------------------------------------- -// Low-level decoders - -func (p *importer) tagOrIndex() int { - if p.debugFormat { - p.marker('t') - } - - return int(p.rawInt64()) -} - -func (p *importer) int() int { - x := p.int64() - if int64(int(x)) != x { - errorf("exported integer too large") - } - return int(x) -} - -func (p *importer) int64() int64 { - if p.debugFormat { - p.marker('i') - } - - return p.rawInt64() -} - -func (p *importer) path() string { - if p.debugFormat { - p.marker('p') - } - // if the path was seen before, i is its index (>= 0) - // (the empty string is at index 0) - i := p.rawInt64() - if i >= 0 { - return p.pathList[i] - } - // otherwise, i is the negative path length (< 0) - a := make([]string, -i) - for n := range a { - a[n] = p.string() - } - s := strings.Join(a, "/") - p.pathList = append(p.pathList, s) - return s -} - -func (p *importer) string() string { - if p.debugFormat { - p.marker('s') - } - // if the string was seen before, i is its index (>= 0) - // (the empty string is at index 0) - i := p.rawInt64() - if i >= 0 { - return p.strList[i] - } - // otherwise, i is the negative string length (< 0) - if n := int(-i); n <= cap(p.buf) { - p.buf = p.buf[:n] - } else { - p.buf = make([]byte, n) - } - for i := range p.buf { - p.buf[i] = p.rawByte() - } - s := string(p.buf) - p.strList = append(p.strList, s) - return s -} - -func (p *importer) marker(want byte) { - if got := p.rawByte(); got != want { - errorf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read) - } - - pos := p.read - if n := int(p.rawInt64()); n != pos { - errorf("incorrect position: got %d; want %d", n, pos) - } -} - -// rawInt64 should only be used by low-level decoders. -func (p *importer) rawInt64() int64 { - i, err := binary.ReadVarint(p) - if err != nil { - errorf("read error: %v", err) - } - return i -} - -// rawStringln should only be used to read the initial version string. -func (p *importer) rawStringln(b byte) string { - p.buf = p.buf[:0] - for b != '\n' { - p.buf = append(p.buf, b) - b = p.rawByte() - } - return string(p.buf) -} - -// needed for binary.ReadVarint in rawInt64 -func (p *importer) ReadByte() (byte, error) { - return p.rawByte(), nil -} - -// byte is the bottleneck interface for reading p.data. -// It unescapes '|' 'S' to '$' and '|' '|' to '|'. -// rawByte should only be used by low-level decoders. -func (p *importer) rawByte() byte { - b := p.data[0] - r := 1 - if b == '|' { - b = p.data[1] - r = 2 - switch b { - case 'S': - b = '$' - case '|': - // nothing to do - default: - errorf("unexpected escape sequence in export data") - } - } - p.data = p.data[r:] - p.read += r - return b - -} - -// ---------------------------------------------------------------------------- -// Export format - -// Tags. Must be < 0. 
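// [Editor's aside — hypothetical helper, not part of this patch.] Throughout
// this format a non-negative value is an index into a list of previously seen
// items, while a negative value is one of the tags declared in the const
// block below and introduces new data. The dispatch pattern, in isolation:
func lookupOrDecode(i int, seen *[]string, decode func() string) string {
	if i >= 0 {
		return (*seen)[i] // back-reference to an item seen before
	}
	// i is a (negative) tag: decode the new item and remember it
	s := decode()
	*seen = append(*seen, s)
	return s
}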
-const ( - // Objects - packageTag = -(iota + 1) - constTag - typeTag - varTag - funcTag - endTag - - // Types - namedTag - arrayTag - sliceTag - dddTag - structTag - pointerTag - signatureTag - interfaceTag - mapTag - chanTag - - // Values - falseTag - trueTag - int64Tag - floatTag - fractionTag // not used by gc - complexTag - stringTag - nilTag // only used by gc (appears in exported inlined function bodies) - unknownTag // not used by gc (only appears in packages with errors) - - // Type aliases - aliasTag -) - var predeclOnce sync.Once var predecl []types.Type // initialized lazily diff --git a/tools/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go b/tools/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go index 0372fb3a64..b1223713b9 100644 --- a/tools/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go +++ b/tools/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go @@ -7,6 +7,18 @@ // Package gcimporter provides various functions for reading // gc-generated object files that can be used to implement the // Importer interface defined by the Go 1.5 standard library package. +// +// The encoding is deterministic: if the encoder is applied twice to +// the same types.Package data structure, both encodings are equal. +// This property may be important to avoid spurious changes in +// applications such as build systems. +// +// However, the encoder is not necessarily idempotent. Importing an +// exported package may yield a types.Package that, while it +// represents the same set of Go types as the original, may differ in +// the details of its internal representation. Because of these +// differences, re-encoding the imported package may yield a +// different, but equally valid, encoding of the package. package gcimporter // import "golang.org/x/tools/internal/gcimporter" import ( @@ -218,20 +230,17 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func // Or, define a new standard go/types/gcexportdata package. fset := token.NewFileSet() - // The indexed export format starts with an 'i'; the older - // binary export format starts with a 'c', 'd', or 'v' - // (from "version"). Select appropriate importer. + // Select appropriate importer. if len(data) > 0 { switch data[0] { - case 'i': - _, pkg, err := IImportData(fset, packages, data[1:], id) - return pkg, err + case 'v', 'c', 'd': // binary, till go1.10 + return nil, fmt.Errorf("binary (%c) import format is no longer supported", data[0]) - case 'v', 'c', 'd': - _, pkg, err := BImportData(fset, packages, data, id) + case 'i': // indexed, till go1.19 + _, pkg, err := IImportData(fset, packages, data[1:], id) return pkg, err - case 'u': + case 'u': // unified, from go1.20 _, pkg, err := UImportData(fset, packages, data[1:size], id) return pkg, err diff --git a/tools/vendor/golang.org/x/tools/internal/gcimporter/iexport.go b/tools/vendor/golang.org/x/tools/internal/gcimporter/iexport.go index ba53cdcdd1..eed1702186 100644 --- a/tools/vendor/golang.org/x/tools/internal/gcimporter/iexport.go +++ b/tools/vendor/golang.org/x/tools/internal/gcimporter/iexport.go @@ -44,23 +44,23 @@ func IExportShallow(fset *token.FileSet, pkg *types.Package) ([]byte, error) { return out.Bytes(), err } -// IImportShallow decodes "shallow" types.Package data encoded by IExportShallow -// in the same executable. This function cannot import data from +// IImportShallow decodes "shallow" types.Package data encoded by +// IExportShallow in the same executable. 
This function cannot import data from // cmd/compile or gcexportdata.Write. -func IImportShallow(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string, insert InsertType) (*types.Package, error) { +// +// The importer calls getPackages to obtain package symbols for all +// packages mentioned in the export data, including the one being +// decoded. +func IImportShallow(fset *token.FileSet, getPackages GetPackagesFunc, data []byte, path string) (*types.Package, error) { const bundle = false - pkgs, err := iimportCommon(fset, imports, data, bundle, path, insert) + const shallow = true + pkgs, err := iimportCommon(fset, getPackages, data, bundle, path, shallow) if err != nil { return nil, err } return pkgs[0], nil } -// InsertType is the type of a function that creates a types.TypeName -// object for a named type and inserts it into the scope of the -// specified Package. -type InsertType = func(pkg *types.Package, name string) - // Current bundled export format version. Increase with each format change. // 0: initial implementation const bundleVersion = 0 @@ -673,6 +673,9 @@ func (w *exportWriter) qualifiedType(obj *types.TypeName) { w.pkg(obj.Pkg()) } +// TODO(rfindley): what does 'pkg' even mean here? It would be better to pass +// it in explicitly into signatures and structs that may use it for +// constructing fields. func (w *exportWriter) typ(t types.Type, pkg *types.Package) { w.data.uint64(w.p.typOff(t, pkg)) } @@ -773,7 +776,21 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { if n > 0 { w.setPkg(t.Field(0).Pkg(), true) // qualifying package for field objects } else { - w.setPkg(pkg, true) + // TODO(rfindley): improve this very hacky logic. + // + // The importer expects a package to be set for all struct types, even + // those with no fields. A better encoding might be to set NumFields + // before pkg. setPkg panics with a nil package, which may be possible + // to reach with invalid packages (and perhaps valid packages, too?), so + // (arbitrarily) set the localpkg if available. + switch { + case pkg != nil: + w.setPkg(pkg, true) + case w.p.shallow: + w.setPkg(w.p.localpkg, true) + default: + panic(internalErrorf("no package to set for empty struct")) + } } w.uint64(uint64(n)) for i := 0; i < n; i++ { @@ -913,6 +930,17 @@ func (w *exportWriter) value(typ types.Type, v constant.Value) { w.int64(int64(v.Kind())) } + if v.Kind() == constant.Unknown { + // golang/go#60605: treat unknown constant values as if they have invalid type + // + // This loses some fidelity over the package type-checked from source, but that + // is acceptable. + // + // TODO(rfindley): we should switch on the recorded constant kind rather + // than the constant type + return + } + switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType { case types.IsBoolean: w.bool(constant.BoolVal(v)) @@ -969,6 +997,16 @@ func constantToFloat(x constant.Value) *big.Float { return &f } +func valueToRat(x constant.Value) *big.Rat { + // Convert little-endian to big-endian. + // I can't believe this is necessary. + bytes := constant.Bytes(x) + for i := 0; i < len(bytes)/2; i++ { + bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i] + } + return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes)) +} + // mpint exports a multi-precision integer. 
// // For unsigned types, small values are written out as a single @@ -1178,3 +1216,19 @@ func (q *objQueue) popHead() types.Object { q.head++ return obj } + +// internalError represents an error generated inside this package. +type internalError string + +func (e internalError) Error() string { return "gcimporter: " + string(e) } + +// TODO(adonovan): make this call panic, so that it's symmetric with errorf. +// Otherwise it's easy to forget to do anything with the error. +// +// TODO(adonovan): also, consider switching the names "errorf" and +// "internalErrorf" as the former is used for bugs, whose cause is +// internal inconsistency, whereas the latter is used for ordinary +// situations like bad input, whose cause is external. +func internalErrorf(format string, args ...interface{}) error { + return internalError(fmt.Sprintf(format, args...)) +} diff --git a/tools/vendor/golang.org/x/tools/internal/gcimporter/iimport.go b/tools/vendor/golang.org/x/tools/internal/gcimporter/iimport.go index 448f903e86..fb6554f926 100644 --- a/tools/vendor/golang.org/x/tools/internal/gcimporter/iimport.go +++ b/tools/vendor/golang.org/x/tools/internal/gcimporter/iimport.go @@ -85,7 +85,7 @@ const ( // If the export data version is not recognized or the format is otherwise // compromised, an error is returned. func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (int, *types.Package, error) { - pkgs, err := iimportCommon(fset, imports, data, false, path, nil) + pkgs, err := iimportCommon(fset, GetPackagesFromMap(imports), data, false, path, false) if err != nil { return 0, nil, err } @@ -94,10 +94,49 @@ func IImportData(fset *token.FileSet, imports map[string]*types.Package, data [] // IImportBundle imports a set of packages from the serialized package bundle. func IImportBundle(fset *token.FileSet, imports map[string]*types.Package, data []byte) ([]*types.Package, error) { - return iimportCommon(fset, imports, data, true, "", nil) + return iimportCommon(fset, GetPackagesFromMap(imports), data, true, "", false) } -func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data []byte, bundle bool, path string, insert InsertType) (pkgs []*types.Package, err error) { +// A GetPackagesFunc function obtains the non-nil symbols for a set of +// packages, creating and recursively importing them as needed. An +// implementation should store each package symbol is in the Pkg +// field of the items array. +// +// Any error causes importing to fail. This can be used to quickly read +// the import manifest of an export data file without fully decoding it. +type GetPackagesFunc = func(items []GetPackagesItem) error + +// A GetPackagesItem is a request from the importer for the package +// symbol of the specified name and path. +type GetPackagesItem struct { + Name, Path string + Pkg *types.Package // to be filled in by GetPackagesFunc call + + // private importer state + pathOffset uint64 + nameIndex map[string]uint64 +} + +// GetPackagesFromMap returns a GetPackagesFunc that retrieves +// packages from the given map of package path to package. +// +// The returned function may mutate m: each requested package that is not +// found is created with types.NewPackage and inserted into m. 
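// [Editor's aside — illustrative only; gcimporter is internal to x/tools, so
// external callers reach this via go/packages or gcexportdata.] Exercising
// the adapter declared just below:
func exampleGetPackagesFromMap() {
	m := map[string]*types.Package{}
	getPackages := GetPackagesFromMap(m)

	items := []GetPackagesItem{{Name: "fmt", Path: "fmt"}}
	if err := getPackages(items); err == nil {
		_ = items[0].Pkg // == m["fmt"], created on demand as described above
	}
}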
+func GetPackagesFromMap(m map[string]*types.Package) GetPackagesFunc { + return func(items []GetPackagesItem) error { + for i, item := range items { + pkg, ok := m[item.Path] + if !ok { + pkg = types.NewPackage(item.Path, item.Name) + m[item.Path] = pkg + } + items[i].Pkg = pkg + } + return nil + } +} + +func iimportCommon(fset *token.FileSet, getPackages GetPackagesFunc, data []byte, bundle bool, path string, shallow bool) (pkgs []*types.Package, err error) { const currentVersion = iexportVersionCurrent version := int64(-1) if !debug { @@ -108,7 +147,7 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data } else if version > currentVersion { err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e) } else { - err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e) + err = fmt.Errorf("internal error while importing %q (%v); please report an issue", path, e) } } }() @@ -117,11 +156,8 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data r := &intReader{bytes.NewReader(data), path} if bundle { - bundleVersion := r.uint64() - switch bundleVersion { - case bundleVersion: - default: - errorf("unknown bundle format version %d", bundleVersion) + if v := r.uint64(); v != bundleVersion { + errorf("unknown bundle format version %d", v) } } @@ -139,7 +175,7 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data sLen := int64(r.uint64()) var fLen int64 var fileOffset []uint64 - if insert != nil { + if shallow { // Shallow mode uses a different position encoding. fLen = int64(r.uint64()) fileOffset = make([]uint64, r.uint64()) @@ -156,9 +192,9 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data r.Seek(sLen+fLen+dLen, io.SeekCurrent) p := iimporter{ - version: int(version), - ipath: path, - insert: insert, + version: int(version), + ipath: path, + usePosv2: shallow, // precise offsets are encoded only in shallow mode stringData: stringData, stringCache: make(map[uint64]string), @@ -185,8 +221,9 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data p.typCache[uint64(i)] = pt } - pkgList := make([]*types.Package, r.uint64()) - for i := range pkgList { + // Gather the relevant packages from the manifest. + items := make([]GetPackagesItem, r.uint64()) + for i := range items { pkgPathOff := r.uint64() pkgPath := p.stringAt(pkgPathOff) pkgName := p.stringAt(r.uint64()) @@ -195,30 +232,42 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data if pkgPath == "" { pkgPath = path } - pkg := imports[pkgPath] - if pkg == nil { - pkg = types.NewPackage(pkgPath, pkgName) - imports[pkgPath] = pkg - } else if pkg.Name() != pkgName { - errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path) - } - if i == 0 && !bundle { - p.localpkg = pkg - } - - p.pkgCache[pkgPathOff] = pkg + items[i].Name = pkgName + items[i].Path = pkgPath + items[i].pathOffset = pkgPathOff // Read index for package. nameIndex := make(map[string]uint64) nSyms := r.uint64() - // In shallow mode we don't expect an index for other packages. - assert(nSyms == 0 || p.localpkg == pkg || p.insert == nil) + // In shallow mode, only the current package (i=0) has an index. 
+ assert(!(shallow && i > 0 && nSyms != 0)) for ; nSyms > 0; nSyms-- { name := p.stringAt(r.uint64()) nameIndex[name] = r.uint64() } - p.pkgIndex[pkg] = nameIndex + items[i].nameIndex = nameIndex + } + + // Request packages all at once from the client, + // enabling a parallel implementation. + if err := getPackages(items); err != nil { + return nil, err // don't wrap this error + } + + // Check the results and complete the index. + pkgList := make([]*types.Package, len(items)) + for i, item := range items { + pkg := item.Pkg + if pkg == nil { + errorf("internal error: getPackages returned nil package for %q", item.Path) + } else if pkg.Path() != item.Path { + errorf("internal error: getPackages returned wrong path %q, want %q", pkg.Path(), item.Path) + } else if pkg.Name() != item.Name { + errorf("internal error: getPackages returned wrong name %s for package %q, want %s", pkg.Name(), item.Path, item.Name) + } + p.pkgCache[item.pathOffset] = pkg + p.pkgIndex[pkg] = item.nameIndex pkgList[i] = pkg } @@ -289,8 +338,7 @@ type iimporter struct { version int ipath string - localpkg *types.Package - insert func(pkg *types.Package, name string) // "shallow" mode only + usePosv2 bool stringData []byte stringCache map[uint64]string @@ -338,13 +386,9 @@ func (p *iimporter) doDecl(pkg *types.Package, name string) { off, ok := p.pkgIndex[pkg][name] if !ok { - // In "shallow" mode, call back to the application to - // find the object and insert it into the package scope. - if p.insert != nil { - assert(pkg != p.localpkg) - p.insert(pkg, name) // "can't fail" - return - } + // In deep mode, the index should be complete. In shallow + // mode, we should have already recursively loaded necessary + // dependencies so the above Lookup succeeds. errorf("%v.%v not in index", pkg, name) } @@ -711,7 +755,7 @@ func (r *importReader) qualifiedIdent() (*types.Package, string) { } func (r *importReader) pos() token.Pos { - if r.p.insert != nil { // shallow mode + if r.p.usePosv2 { return r.posv2() } if r.p.version >= iexportVersionPosCol { diff --git a/tools/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go b/tools/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go index b285a11ce2..b977435f62 100644 --- a/tools/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go +++ b/tools/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go @@ -10,8 +10,10 @@ package gcimporter import ( + "fmt" "go/token" "go/types" + "sort" "strings" "golang.org/x/tools/internal/pkgbits" @@ -62,6 +64,14 @@ type typeInfo struct { } func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) { + if !debug { + defer func() { + if x := recover(); x != nil { + err = fmt.Errorf("internal error in importing %q (%v); please report an issue", path, x) + } + }() + } + s := string(data) s = s[:strings.LastIndex(s, "\n$$\n")] input := pkgbits.NewPkgDecoder(path, s) @@ -121,6 +131,16 @@ func readUnifiedPackage(fset *token.FileSet, ctxt *types.Context, imports map[st iface.Complete() } + // Imports() of pkg are all of the transitive packages that were loaded. 
+ var imps []*types.Package + for _, imp := range pr.pkgs { + if imp != nil && imp != pkg { + imps = append(imps, imp) + } + } + sort.Sort(byPath(imps)) + pkg.SetImports(imps) + pkg.MarkComplete() return pkg } @@ -260,39 +280,9 @@ func (r *reader) doPkg() *types.Package { pkg := types.NewPackage(path, name) r.p.imports[path] = pkg - imports := make([]*types.Package, r.Len()) - for i := range imports { - imports[i] = r.pkg() - } - pkg.SetImports(flattenImports(imports)) - return pkg } -// flattenImports returns the transitive closure of all imported -// packages rooted from pkgs. -func flattenImports(pkgs []*types.Package) []*types.Package { - var res []*types.Package - seen := make(map[*types.Package]struct{}) - for _, pkg := range pkgs { - if _, ok := seen[pkg]; ok { - continue - } - seen[pkg] = struct{}{} - res = append(res, pkg) - - // pkg.Imports() is already flattened. - for _, pkg := range pkg.Imports() { - if _, ok := seen[pkg]; ok { - continue - } - seen[pkg] = struct{}{} - res = append(res, pkg) - } - } - return res -} - // @@@ Types func (r *reader) typ() types.Type { diff --git a/tools/vendor/golang.org/x/tools/internal/gocommand/invoke.go b/tools/vendor/golang.org/x/tools/internal/gocommand/invoke.go index d50551693f..8d9fc98d8f 100644 --- a/tools/vendor/golang.org/x/tools/internal/gocommand/invoke.go +++ b/tools/vendor/golang.org/x/tools/internal/gocommand/invoke.go @@ -8,10 +8,12 @@ package gocommand import ( "bytes" "context" + "errors" "fmt" "io" "log" "os" + "reflect" "regexp" "runtime" "strconv" @@ -22,6 +24,9 @@ import ( exec "golang.org/x/sys/execabs" "golang.org/x/tools/internal/event" + "golang.org/x/tools/internal/event/keys" + "golang.org/x/tools/internal/event/label" + "golang.org/x/tools/internal/event/tag" ) // An Runner will run go command invocations and serialize @@ -51,9 +56,19 @@ func (runner *Runner) initialize() { // 1.14: go: updating go.mod: existing contents have changed since last read var modConcurrencyError = regexp.MustCompile(`go:.*go.mod.*contents have changed`) +// verb is an event label for the go command verb. +var verb = keys.NewString("verb", "go command verb") + +func invLabels(inv Invocation) []label.Label { + return []label.Label{verb.Of(inv.Verb), tag.Directory.Of(inv.WorkingDir)} +} + // Run is a convenience wrapper around RunRaw. // It returns only stdout and a "friendly" error. func (runner *Runner) Run(ctx context.Context, inv Invocation) (*bytes.Buffer, error) { + ctx, done := event.Start(ctx, "gocommand.Runner.Run", invLabels(inv)...) + defer done() + stdout, _, friendly, _ := runner.RunRaw(ctx, inv) return stdout, friendly } @@ -61,6 +76,9 @@ func (runner *Runner) Run(ctx context.Context, inv Invocation) (*bytes.Buffer, e // RunPiped runs the invocation serially, always waiting for any concurrent // invocations to complete first. func (runner *Runner) RunPiped(ctx context.Context, inv Invocation, stdout, stderr io.Writer) error { + ctx, done := event.Start(ctx, "gocommand.Runner.RunPiped", invLabels(inv)...) + defer done() + _, err := runner.runPiped(ctx, inv, stdout, stderr) return err } @@ -68,6 +86,8 @@ func (runner *Runner) RunPiped(ctx context.Context, inv Invocation, stdout, stde // RunRaw runs the invocation, serializing requests only if they fight over // go.mod changes. func (runner *Runner) RunRaw(ctx context.Context, inv Invocation) (*bytes.Buffer, *bytes.Buffer, error, error) { + ctx, done := event.Start(ctx, "gocommand.Runner.RunRaw", invLabels(inv)...) + defer done() // Make sure the runner is always initialized. 
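// [Editor's aside — illustrative sketch, not part of this patch.] The next
// hunk sets exec.Cmd.WaitDelay through reflection so this vendored code still
// compiles on toolchains older than go1.20, where the field does not exist.
// The pattern on its own:
package main

import (
	"os/exec"
	"reflect"
	"time"
)

func main() {
	cmd := exec.Command("go", "version")
	// Only set the field if this toolchain's exec.Cmd actually has it.
	if f := reflect.ValueOf(cmd).Elem().FieldByName("WaitDelay"); f.IsValid() {
		f.Set(reflect.ValueOf(30 * time.Second))
	}
	_ = cmd.Run()
}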
runner.initialize() @@ -215,6 +235,18 @@ func (i *Invocation) run(ctx context.Context, stdout, stderr io.Writer) error { cmd := exec.Command("go", goArgs...) cmd.Stdout = stdout cmd.Stderr = stderr + + // cmd.WaitDelay was added only in go1.20 (see #50436). + if waitDelay := reflect.ValueOf(cmd).Elem().FieldByName("WaitDelay"); waitDelay.IsValid() { + // https://go.dev/issue/59541: don't wait forever copying stderr + // after the command has exited. + // After CL 484741 we copy stdout manually, so we we'll stop reading that as + // soon as ctx is done. However, we also don't want to wait around forever + // for stderr. Give a much-longer-than-reasonable delay and then assume that + // something has wedged in the kernel or runtime. + waitDelay.Set(reflect.ValueOf(30 * time.Second)) + } + // On darwin the cwd gets resolved to the real path, which breaks anything that // expects the working directory to keep the original path, including the // go command when dealing with modules. @@ -229,6 +261,7 @@ func (i *Invocation) run(ctx context.Context, stdout, stderr io.Writer) error { cmd.Env = append(cmd.Env, "PWD="+i.WorkingDir) cmd.Dir = i.WorkingDir } + defer func(start time.Time) { log("%s for %v", time.Since(start), cmdDebugStr(cmd)) }(time.Now()) return runCmdContext(ctx, cmd) @@ -242,10 +275,85 @@ var DebugHangingGoCommands = false // runCmdContext is like exec.CommandContext except it sends os.Interrupt // before os.Kill. -func runCmdContext(ctx context.Context, cmd *exec.Cmd) error { - if err := cmd.Start(); err != nil { +func runCmdContext(ctx context.Context, cmd *exec.Cmd) (err error) { + // If cmd.Stdout is not an *os.File, the exec package will create a pipe and + // copy it to the Writer in a goroutine until the process has finished and + // either the pipe reaches EOF or command's WaitDelay expires. + // + // However, the output from 'go list' can be quite large, and we don't want to + // keep reading (and allocating buffers) if we've already decided we don't + // care about the output. We don't want to wait for the process to finish, and + // we don't wait to wait for the WaitDelay to expire either. + // + // Instead, if cmd.Stdout requires a copying goroutine we explicitly replace + // it with a pipe (which is an *os.File), which we can close in order to stop + // copying output as soon as we realize we don't care about it. + var stdoutW *os.File + if cmd.Stdout != nil { + if _, ok := cmd.Stdout.(*os.File); !ok { + var stdoutR *os.File + stdoutR, stdoutW, err = os.Pipe() + if err != nil { + return err + } + prevStdout := cmd.Stdout + cmd.Stdout = stdoutW + + stdoutErr := make(chan error, 1) + go func() { + _, err := io.Copy(prevStdout, stdoutR) + if err != nil { + err = fmt.Errorf("copying stdout: %w", err) + } + stdoutErr <- err + }() + defer func() { + // We started a goroutine to copy a stdout pipe. + // Wait for it to finish, or terminate it if need be. + var err2 error + select { + case err2 = <-stdoutErr: + stdoutR.Close() + case <-ctx.Done(): + stdoutR.Close() + // Per https://pkg.go.dev/os#File.Close, the call to stdoutR.Close + // should cause the Read call in io.Copy to unblock and return + // immediately, but we still need to receive from stdoutErr to confirm + // that that has happened. 
+ <-stdoutErr + err2 = ctx.Err() + } + if err == nil { + err = err2 + } + }() + + // Per https://pkg.go.dev/os/exec#Cmd, “If Stdout and Stderr are the + // same writer, and have a type that can be compared with ==, at most + // one goroutine at a time will call Write.” + // + // Since we're starting a goroutine that writes to cmd.Stdout, we must + // also update cmd.Stderr so that that still holds. + func() { + defer func() { recover() }() + if cmd.Stderr == prevStdout { + cmd.Stderr = cmd.Stdout + } + }() + } + } + + err = cmd.Start() + if stdoutW != nil { + // The child process has inherited the pipe file, + // so close the copy held in this process. + stdoutW.Close() + stdoutW = nil + } + if err != nil { return err } + resChan := make(chan error, 1) go func() { resChan <- cmd.Wait() @@ -253,11 +361,14 @@ func runCmdContext(ctx context.Context, cmd *exec.Cmd) error { // If we're interested in debugging hanging Go commands, stop waiting after a // minute and panic with interesting information. - if DebugHangingGoCommands { + debug := DebugHangingGoCommands + if debug { + timer := time.NewTimer(1 * time.Minute) + defer timer.Stop() select { case err := <-resChan: return err - case <-time.After(1 * time.Minute): + case <-timer.C: HandleHangingGoCommand(cmd.Process) case <-ctx.Done(): } @@ -270,30 +381,25 @@ func runCmdContext(ctx context.Context, cmd *exec.Cmd) error { } // Cancelled. Interrupt and see if it ends voluntarily. - cmd.Process.Signal(os.Interrupt) - select { - case err := <-resChan: - return err - case <-time.After(time.Second): + if err := cmd.Process.Signal(os.Interrupt); err == nil { + // (We used to wait only 1s but this proved + // fragile on loaded builder machines.) + timer := time.NewTimer(5 * time.Second) + defer timer.Stop() + select { + case err := <-resChan: + return err + case <-timer.C: + } } // Didn't shut down in response to interrupt. Kill it hard. // TODO(rfindley): per advice from bcmills@, it may be better to send SIGQUIT // on certain platforms, such as unix. - if err := cmd.Process.Kill(); err != nil && DebugHangingGoCommands { - // Don't panic here as this reliably fails on windows with EINVAL. + if err := cmd.Process.Kill(); err != nil && !errors.Is(err, os.ErrProcessDone) && debug { log.Printf("error killing the Go command: %v", err) } - // See above: don't wait indefinitely if we're debugging hanging Go commands. - if DebugHangingGoCommands { - select { - case err := <-resChan: - return err - case <-time.After(10 * time.Second): // a shorter wait as resChan should return quickly following Kill - HandleHangingGoCommand(cmd.Process) - } - } return <-resChan } diff --git a/tools/vendor/golang.org/x/tools/internal/gocommand/version.go b/tools/vendor/golang.org/x/tools/internal/gocommand/version.go index 307a76d474..446c5846a6 100644 --- a/tools/vendor/golang.org/x/tools/internal/gocommand/version.go +++ b/tools/vendor/golang.org/x/tools/internal/gocommand/version.go @@ -23,21 +23,11 @@ import ( func GoVersion(ctx context.Context, inv Invocation, r *Runner) (int, error) { inv.Verb = "list" inv.Args = []string{"-e", "-f", `{{context.ReleaseTags}}`, `--`, `unsafe`} - inv.Env = append(append([]string{}, inv.Env...), "GO111MODULE=off") - // Unset any unneeded flags, and remove them from BuildFlags, if they're - // present. - inv.ModFile = "" + inv.BuildFlags = nil // This is not a build command. inv.ModFlag = "" - var buildFlags []string - for _, flag := range inv.BuildFlags { - // Flags can be prefixed by one or two dashes. 
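// [Editor's aside — illustrative, not part of this patch.] The GoVersion
// change below appends through a full slice expression,
// inv.Env[:len(inv.Env):len(inv.Env)], so the append can never scribble on
// the caller's backing array. The idiom in isolation:
package main

import "fmt"

func main() {
	env := []string{"A=1", "B=2"}
	// cap == len forces append to allocate a fresh array.
	child := append(env[:len(env):len(env)], "GO111MODULE=off")
	fmt.Println(env)   // [A=1 B=2]
	fmt.Println(child) // [A=1 B=2 GO111MODULE=off]
}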
- f := strings.TrimPrefix(strings.TrimPrefix(flag, "-"), "-") - if strings.HasPrefix(f, "mod=") || strings.HasPrefix(f, "modfile=") { - continue - } - buildFlags = append(buildFlags, flag) - } - inv.BuildFlags = buildFlags + inv.ModFile = "" + inv.Env = append(inv.Env[:len(inv.Env):len(inv.Env)], "GO111MODULE=off") + stdoutBytes, err := r.Run(ctx, inv) if err != nil { return 0, err diff --git a/tools/vendor/golang.org/x/tools/internal/imports/fix.go b/tools/vendor/golang.org/x/tools/internal/imports/fix.go index 642a5ac2d7..d4f1b4e8a0 100644 --- a/tools/vendor/golang.org/x/tools/internal/imports/fix.go +++ b/tools/vendor/golang.org/x/tools/internal/imports/fix.go @@ -26,6 +26,7 @@ import ( "unicode/utf8" "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/gocommand" "golang.org/x/tools/internal/gopathwalk" ) @@ -414,9 +415,16 @@ func (p *pass) fix() ([]*ImportFix, bool) { }) } } + // Collecting fixes involved map iteration, so sort for stability. See + // golang/go#59976. + sortFixes(fixes) + // collect selected fixes in a separate slice, so that it can be sorted + // separately. Note that these fixes must occur after fixes to existing + // imports. TODO(rfindley): figure out why. + var selectedFixes []*ImportFix for _, imp := range selected { - fixes = append(fixes, &ImportFix{ + selectedFixes = append(selectedFixes, &ImportFix{ StmtInfo: ImportInfo{ Name: p.importSpecName(imp), ImportPath: imp.ImportPath, @@ -425,8 +433,25 @@ func (p *pass) fix() ([]*ImportFix, bool) { FixType: AddImport, }) } + sortFixes(selectedFixes) - return fixes, true + return append(fixes, selectedFixes...), true +} + +func sortFixes(fixes []*ImportFix) { + sort.Slice(fixes, func(i, j int) bool { + fi, fj := fixes[i], fixes[j] + if fi.StmtInfo.ImportPath != fj.StmtInfo.ImportPath { + return fi.StmtInfo.ImportPath < fj.StmtInfo.ImportPath + } + if fi.StmtInfo.Name != fj.StmtInfo.Name { + return fi.StmtInfo.Name < fj.StmtInfo.Name + } + if fi.IdentName != fj.IdentName { + return fi.IdentName < fj.IdentName + } + return fi.FixType < fj.FixType + }) } // importSpecName gets the import name of imp in the import spec. @@ -519,7 +544,7 @@ func (p *pass) addCandidate(imp *ImportInfo, pkg *packageInfo) { var fixImports = fixImportsDefault func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) error { - fixes, err := getFixes(fset, f, filename, env) + fixes, err := getFixes(context.Background(), fset, f, filename, env) if err != nil { return err } @@ -529,7 +554,7 @@ func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *P // getFixes gets the import fixes that need to be made to f in order to fix the imports. // It does not modify the ast. -func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) ([]*ImportFix, error) { +func getFixes(ctx context.Context, fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) ([]*ImportFix, error) { abs, err := filepath.Abs(filename) if err != nil { return nil, err @@ -583,7 +608,7 @@ func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv // Go look for candidates in $GOPATH, etc. We don't necessarily load // the real exports of sibling imports, so keep assuming their contents. 
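The GoVersion change above builds the child environment with append(inv.Env[:len(inv.Env):len(inv.Env)], "GO111MODULE=off"). The full slice expression caps the capacity at the length, which forces append to allocate a new backing array instead of writing into spare capacity shared with the caller. A small self-contained illustration of that idiom (variable names invented):

package main

import "fmt"

func main() {
	base := make([]string, 2, 4) // spare capacity, like a caller-owned Env slice
	base[0], base[1] = "GOFLAGS=", "GOWORK=off"

	// Capping capacity at the length forces the append below to copy into a
	// fresh backing array rather than scribble into base's spare capacity.
	env := append(base[:len(base):len(base)], "GO111MODULE=off")

	fmt.Println(len(base), cap(base)) // 2 4: caller's slice is untouched
	fmt.Println(env[len(env)-1])      // GO111MODULE=off
	fmt.Println(&base[0] != &env[0])  // true: different backing arrays
}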
- if err := addExternalCandidates(p, p.missingRefs, filename); err != nil { + if err := addExternalCandidates(ctx, p, p.missingRefs, filename); err != nil { return nil, err } @@ -1031,7 +1056,10 @@ type scanCallback struct { exportsLoaded func(pkg *pkg, exports []string) } -func addExternalCandidates(pass *pass, refs references, filename string) error { +func addExternalCandidates(ctx context.Context, pass *pass, refs references, filename string) error { + ctx, done := event.Start(ctx, "imports.addExternalCandidates") + defer done() + var mu sync.Mutex found := make(map[string][]pkgDistance) callback := &scanCallback{ diff --git a/tools/vendor/golang.org/x/tools/internal/imports/imports.go b/tools/vendor/golang.org/x/tools/internal/imports/imports.go index 95a88383a7..58e637b90f 100644 --- a/tools/vendor/golang.org/x/tools/internal/imports/imports.go +++ b/tools/vendor/golang.org/x/tools/internal/imports/imports.go @@ -11,6 +11,7 @@ package imports import ( "bufio" "bytes" + "context" "fmt" "go/ast" "go/format" @@ -23,6 +24,7 @@ import ( "strings" "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/internal/event" ) // Options is golang.org/x/tools/imports.Options with extra internal-only options. @@ -66,14 +68,17 @@ func Process(filename string, src []byte, opt *Options) (formatted []byte, err e // // Note that filename's directory influences which imports can be chosen, // so it is important that filename be accurate. -func FixImports(filename string, src []byte, opt *Options) (fixes []*ImportFix, err error) { +func FixImports(ctx context.Context, filename string, src []byte, opt *Options) (fixes []*ImportFix, err error) { + ctx, done := event.Start(ctx, "imports.FixImports") + defer done() + fileSet := token.NewFileSet() file, _, err := parse(fileSet, filename, src, opt) if err != nil { return nil, err } - return getFixes(fileSet, file, filename, opt.Env) + return getFixes(ctx, fileSet, file, filename, opt.Env) } // ApplyFixes applies all of the fixes to the file and formats it. extraMode diff --git a/tools/vendor/golang.org/x/tools/internal/imports/mod.go b/tools/vendor/golang.org/x/tools/internal/imports/mod.go index 7d99d04ca8..977d2389da 100644 --- a/tools/vendor/golang.org/x/tools/internal/imports/mod.go +++ b/tools/vendor/golang.org/x/tools/internal/imports/mod.go @@ -19,6 +19,7 @@ import ( "strings" "golang.org/x/mod/module" + "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/gocommand" "golang.org/x/tools/internal/gopathwalk" ) @@ -37,7 +38,7 @@ type ModuleResolver struct { mains []*gocommand.ModuleJSON mainByDir map[string]*gocommand.ModuleJSON modsByModPath []*gocommand.ModuleJSON // All modules, ordered by # of path components in module Path... - modsByDir []*gocommand.ModuleJSON // ...or Dir. + modsByDir []*gocommand.ModuleJSON // ...or number of path components in their Dir. // moduleCacheCache stores information about the module cache. moduleCacheCache *dirInfoCache @@ -123,7 +124,7 @@ func (r *ModuleResolver) init() error { }) sort.Slice(r.modsByDir, func(i, j int) bool { count := func(x int) int { - return strings.Count(r.modsByDir[x].Dir, "/") + return strings.Count(r.modsByDir[x].Dir, string(filepath.Separator)) } return count(j) < count(i) // descending order }) @@ -327,6 +328,10 @@ func (r *ModuleResolver) findModuleByDir(dir string) *gocommand.ModuleJSON { // - in /vendor/ in -mod=vendor mode. // - nested module? Dunno. // Rumor has it that replace targets cannot contain other replace targets. 
+ // + // Note that it is critical here that modsByDir is sorted to have deeper dirs + // first. This ensures that findModuleByDir finds the innermost module. + // See also golang/go#56291. for _, m := range r.modsByDir { if !strings.HasPrefix(dir, m.Dir) { continue @@ -424,6 +429,9 @@ func (r *ModuleResolver) loadPackageNames(importPaths []string, srcDir string) ( } func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error { + ctx, done := event.Start(ctx, "imports.ModuleResolver.scan") + defer done() + if err := r.init(); err != nil { return err } diff --git a/tools/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go b/tools/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go index a3fb2d4f29..7e638ec24f 100644 --- a/tools/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go +++ b/tools/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go @@ -7,7 +7,9 @@ package tokeninternal import ( + "fmt" "go/token" + "sort" "sync" "unsafe" ) @@ -57,3 +59,93 @@ func GetLines(file *token.File) []int { panic("unexpected token.File size") } } + +// AddExistingFiles adds the specified files to the FileSet if they +// are not already present. It panics if any pair of files in the +// resulting FileSet would overlap. +func AddExistingFiles(fset *token.FileSet, files []*token.File) { + // Punch through the FileSet encapsulation. + type tokenFileSet struct { + // This type remained essentially consistent from go1.16 to go1.21. + mutex sync.RWMutex + base int + files []*token.File + _ *token.File // changed to atomic.Pointer[token.File] in go1.19 + } + + // If the size of token.FileSet changes, this will fail to compile. + const delta = int64(unsafe.Sizeof(tokenFileSet{})) - int64(unsafe.Sizeof(token.FileSet{})) + var _ [-delta * delta]int + + type uP = unsafe.Pointer + var ptr *tokenFileSet + *(*uP)(uP(&ptr)) = uP(fset) + ptr.mutex.Lock() + defer ptr.mutex.Unlock() + + // Merge and sort. + newFiles := append(ptr.files, files...) + sort.Slice(newFiles, func(i, j int) bool { + return newFiles[i].Base() < newFiles[j].Base() + }) + + // Reject overlapping files. + // Discard adjacent identical files. + out := newFiles[:0] + for i, file := range newFiles { + if i > 0 { + prev := newFiles[i-1] + if file == prev { + continue + } + if prev.Base()+prev.Size()+1 > file.Base() { + panic(fmt.Sprintf("file %s (%d-%d) overlaps with file %s (%d-%d)", + prev.Name(), prev.Base(), prev.Base()+prev.Size(), + file.Name(), file.Base(), file.Base()+file.Size())) + } + } + out = append(out, file) + } + newFiles = out + + ptr.files = newFiles + + // Advance FileSet.Base(). + if len(newFiles) > 0 { + last := newFiles[len(newFiles)-1] + newBase := last.Base() + last.Size() + 1 + if ptr.base < newBase { + ptr.base = newBase + } + } +} + +// FileSetFor returns a new FileSet containing a sequence of new Files with +// the same base, size, and line as the input files, for use in APIs that +// require a FileSet. +// +// Precondition: the input files must be non-overlapping, and sorted in order +// of their Base. +func FileSetFor(files ...*token.File) *token.FileSet { + fset := token.NewFileSet() + for _, f := range files { + f2 := fset.AddFile(f.Name(), f.Base(), f.Size()) + lines := GetLines(f) + f2.SetLines(lines) + } + return fset +} + +// CloneFileSet creates a new FileSet holding all files in fset. It does not +// create copies of the token.Files in fset: they are added to the resulting +// FileSet unmodified. 
+func CloneFileSet(fset *token.FileSet) *token.FileSet { + var files []*token.File + fset.Iterate(func(f *token.File) bool { + files = append(files, f) + return true + }) + newFileSet := token.NewFileSet() + AddExistingFiles(newFileSet, files) + return newFileSet +} diff --git a/tools/vendor/golang.org/x/tools/internal/typeparams/common.go b/tools/vendor/golang.org/x/tools/internal/typeparams/common.go index 25a1426d30..b9e87c691a 100644 --- a/tools/vendor/golang.org/x/tools/internal/typeparams/common.go +++ b/tools/vendor/golang.org/x/tools/internal/typeparams/common.go @@ -87,7 +87,6 @@ func IsTypeParam(t types.Type) bool { func OriginMethod(fn *types.Func) *types.Func { recv := fn.Type().(*types.Signature).Recv() if recv == nil { - return fn } base := recv.Type() @@ -106,6 +105,26 @@ func OriginMethod(fn *types.Func) *types.Func { } orig := NamedTypeOrigin(named) gfn, _, _ := types.LookupFieldOrMethod(orig, true, fn.Pkg(), fn.Name()) + + // This is a fix for a gopls crash (#60628) due to a go/types bug (#60634). In: + // package p + // type T *int + // func (*T) f() {} + // LookupFieldOrMethod(T, true, p, f)=nil, but NewMethodSet(*T)={(*T).f}. + // Here we make them consistent by force. + // (The go/types bug is general, but this workaround is reached only + // for generic T thanks to the early return above.) + if gfn == nil { + mset := types.NewMethodSet(types.NewPointer(orig)) + for i := 0; i < mset.Len(); i++ { + m := mset.At(i) + if m.Obj().Id() == fn.Id() { + gfn = m.Obj() + break + } + } + } + return gfn.(*types.Func) } diff --git a/tools/vendor/honnef.co/go/tools/unused/unused.go b/tools/vendor/honnef.co/go/tools/unused/unused.go index 614d567537..fdf6d32cb2 100644 --- a/tools/vendor/honnef.co/go/tools/unused/unused.go +++ b/tools/vendor/honnef.co/go/tools/unused/unused.go @@ -1394,9 +1394,9 @@ func (g *graph) stmt(stmt ast.Stmt, by types.Object) { // embeddedField sees the field declared by the embedded field node, and marks the type as used by the field. // // Embedded fields are special in two ways: they don't have names, so we don't have immediate access to an ast.Ident to -// resolve to the field's types.Var, and we cannot use g.read on the type because eventually we do get to an ast.Ident, -// and ObjectOf resolves embedded fields to the field they declare, not the type. That's why we have code specially for -// handling embedded fields. +// resolve to the field's types.Var and need to instead walk the AST, and we cannot use g.read on the type because +// eventually we do get to an ast.Ident, and ObjectOf resolves embedded fields to the field they declare, not the type. +// That's why we have code specially for handling embedded fields. func (g *graph) embeddedField(node ast.Node, by types.Object) *types.Var { // We need to traverse the tree to find the ast.Ident, but all the nodes we traverse should be used by the object we // get once we resolve the ident. Collect the nodes and process them once we've found the ident. 
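The OriginMethod change above works around golang/go#60634 by consulting types.NewMethodSet when types.LookupFieldOrMethod comes back empty. A hedged sketch of that lookup-with-fallback, exercised on an ordinary type where both paths agree (the helper and the test source are invented for illustration):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package p

type T struct{}

func (*T) F() {}
`

// lookupMethod asks LookupFieldOrMethod first and, if it finds nothing,
// falls back to searching the pointer method set, mirroring the patched
// OriginMethod.
func lookupMethod(typ types.Type, pkg *types.Package, name string) types.Object {
	obj, _, _ := types.LookupFieldOrMethod(typ, true, pkg, name)
	if obj != nil {
		return obj
	}
	mset := types.NewMethodSet(types.NewPointer(typ))
	for i := 0; i < mset.Len(); i++ {
		if m := mset.At(i); m.Obj().Name() == name {
			return m.Obj()
		}
	}
	return nil
}

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := new(types.Config).Check("p", fset, []*ast.File{file}, nil)
	if err != nil {
		panic(err)
	}
	T := pkg.Scope().Lookup("T").Type()
	fmt.Println(lookupMethod(T, pkg, "F")) // e.g. func (*p.T).F()
}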
@@ -1404,18 +1404,28 @@ func (g *graph) embeddedField(node ast.Node, by types.Object) *types.Var { for { switch node_ := node.(type) { case *ast.Ident: + // obj is the field obj := g.info.ObjectOf(node_).(*types.Var) + // the field is declared by the enclosing type g.see(obj, by) for _, n := range nodes { g.read(n, obj) } - switch typ := typeutil.Dereference(g.info.TypeOf(node_)).(type) { - case *types.Named: - g.use(typ.Obj(), obj) - case *types.Basic: - // Nothing to do - default: - lint.ExhaustiveTypeSwitch(typ) + + if tname, ok := g.info.Uses[node_].(*types.TypeName); ok && tname.IsAlias() { + // When embedding an alias we want to use the alias, not what the alias points to. + g.use(tname, obj) + } else { + switch typ := typeutil.Dereference(g.info.TypeOf(node_)).(type) { + case *types.Named: + // (7.2) fields use their types + g.use(typ.Obj(), obj) + case *types.Basic: + // Nothing to do + default: + // Other types are only possible for aliases, which we've already handled + lint.ExhaustiveTypeSwitch(typ) + } } return obj case *ast.StarExpr: @@ -1518,6 +1528,9 @@ func (g *graph) namedType(typ *types.TypeName, spec ast.Expr) { obj := g.info.ObjectOf(name) g.see(obj, typ) // (7.2) fields use their types + // + // This handles aliases correctly because ObjectOf(alias) returns the TypeName of the alias, not + // what the alias points to. g.read(field.Type, obj) if name.Name == "_" { // (9.9) objects named the blank identifier are used diff --git a/tools/vendor/modules.txt b/tools/vendor/modules.txt index 6234e43cc9..d245f0647a 100644 --- a/tools/vendor/modules.txt +++ b/tools/vendor/modules.txt @@ -4,16 +4,19 @@ # 4d63.com/gochecknoglobals v0.2.1 ## explicit; go 1.15 4d63.com/gochecknoglobals/checknoglobals -# github.com/Abirdcfly/dupword v0.0.9 +# github.com/4meepo/tagalign v1.2.2 ## explicit; go 1.19 +github.com/4meepo/tagalign +# github.com/Abirdcfly/dupword v0.0.11 +## explicit; go 1.20 github.com/Abirdcfly/dupword -# github.com/Antonboom/errname v0.1.7 -## explicit; go 1.18 +# github.com/Antonboom/errname v0.1.10 +## explicit; go 1.20 github.com/Antonboom/errname/pkg/analyzer -# github.com/Antonboom/nilnil v0.1.1 -## explicit; go 1.18 +# github.com/Antonboom/nilnil v0.1.5 +## explicit; go 1.20 github.com/Antonboom/nilnil/pkg/analyzer -# github.com/BurntSushi/toml v1.2.1 +# github.com/BurntSushi/toml v1.3.2 ## explicit; go 1.16 github.com/BurntSushi/toml github.com/BurntSushi/toml/internal @@ -26,17 +29,21 @@ github.com/GaijinEntertainment/go-exhaustruct/v2/pkg/analyzer # github.com/Masterminds/semver v1.5.0 ## explicit github.com/Masterminds/semver -# github.com/OpenPeeDeeP/depguard v1.1.1 -## explicit; go 1.13 -github.com/OpenPeeDeeP/depguard +# github.com/OpenPeeDeeP/depguard/v2 v2.1.0 +## explicit; go 1.20 +github.com/OpenPeeDeeP/depguard/v2 +github.com/OpenPeeDeeP/depguard/v2/internal/utils +# github.com/alexkohler/nakedret/v2 v2.0.2 +## explicit; go 1.18 +github.com/alexkohler/nakedret/v2 # github.com/alexkohler/prealloc v1.0.0 ## explicit; go 1.15 github.com/alexkohler/prealloc/pkg # github.com/alingse/asasalint v0.0.11 ## explicit; go 1.18 github.com/alingse/asasalint -# github.com/ashanbrown/forbidigo v1.4.0 -## explicit; go 1.12 +# github.com/ashanbrown/forbidigo v1.5.3 +## explicit; go 1.13 github.com/ashanbrown/forbidigo/forbidigo # github.com/ashanbrown/makezero v1.1.1 ## explicit; go 1.12 @@ -44,8 +51,8 @@ github.com/ashanbrown/makezero/makezero # github.com/beorn7/perks v1.0.1 ## explicit; go 1.11 github.com/beorn7/perks/quantile -# github.com/bkielbasa/cyclop v1.2.0 
-## explicit; go 1.15 +# github.com/bkielbasa/cyclop v1.2.1 +## explicit; go 1.20 github.com/bkielbasa/cyclop/pkg/analyzer # github.com/blizzy78/varnamelen v0.8.0 ## explicit; go 1.16 @@ -53,34 +60,38 @@ github.com/blizzy78/varnamelen # github.com/bombsimon/wsl/v3 v3.4.0 ## explicit; go 1.19 github.com/bombsimon/wsl/v3 -# github.com/breml/bidichk v0.2.3 -## explicit; go 1.17 +# github.com/breml/bidichk v0.2.4 +## explicit; go 1.19 github.com/breml/bidichk/pkg/bidichk -# github.com/breml/errchkjson v0.3.0 +# github.com/breml/errchkjson v0.3.1 ## explicit; go 1.17 github.com/breml/errchkjson -# github.com/butuzov/ireturn v0.1.1 +# github.com/butuzov/ireturn v0.2.0 ## explicit; go 1.15 github.com/butuzov/ireturn/analyzer -github.com/butuzov/ireturn/config -github.com/butuzov/ireturn/types +github.com/butuzov/ireturn/analyzer/internal/config +github.com/butuzov/ireturn/analyzer/internal/types +# github.com/butuzov/mirror v1.1.0 +## explicit; go 1.19 +github.com/butuzov/mirror +github.com/butuzov/mirror/internal/checker # github.com/cespare/xxhash/v2 v2.1.2 ## explicit; go 1.11 github.com/cespare/xxhash/v2 # github.com/cfergeau/gomod2rpmdeps v0.0.0-20210223144124-2042c4850ca8 ## explicit github.com/cfergeau/gomod2rpmdeps/cmd/gomod2rpmdeps -# github.com/charithe/durationcheck v0.0.9 +# github.com/charithe/durationcheck v0.0.10 ## explicit; go 1.14 github.com/charithe/durationcheck -# github.com/chavacava/garif v0.0.0-20221024190013-b3ef35877348 +# github.com/chavacava/garif v0.0.0-20230227094218-b8c73b2037b8 ## explicit; go 1.16 github.com/chavacava/garif # github.com/curioswitch/go-reassign v0.2.0 ## explicit; go 1.18 github.com/curioswitch/go-reassign github.com/curioswitch/go-reassign/internal/analyzer -# github.com/daixiang0/gci v0.9.1 +# github.com/daixiang0/gci v0.10.1 ## explicit; go 1.18 github.com/daixiang0/gci/pkg/config github.com/daixiang0/gci/pkg/format @@ -103,7 +114,7 @@ github.com/esimonov/ifshort/pkg/analyzer # github.com/ettle/strcase v0.1.1 ## explicit; go 1.12 github.com/ettle/strcase -# github.com/fatih/color v1.14.1 +# github.com/fatih/color v1.15.0 ## explicit; go 1.17 github.com/fatih/color # github.com/fatih/structtag v1.2.0 @@ -118,17 +129,17 @@ github.com/fsnotify/fsnotify # github.com/fzipp/gocyclo v0.6.0 ## explicit; go 1.18 github.com/fzipp/gocyclo -# github.com/go-critic/go-critic v0.6.7 +# github.com/go-critic/go-critic v0.8.1 ## explicit; go 1.18 github.com/go-critic/go-critic/checkers github.com/go-critic/go-critic/checkers/internal/astwalk github.com/go-critic/go-critic/checkers/internal/lintutil github.com/go-critic/go-critic/checkers/rulesdata -github.com/go-critic/go-critic/framework/linter +github.com/go-critic/go-critic/linter # github.com/go-toolsmith/astcast v1.1.0 ## explicit; go 1.16 github.com/go-toolsmith/astcast -# github.com/go-toolsmith/astcopy v1.0.3 +# github.com/go-toolsmith/astcopy v1.1.0 ## explicit; go 1.16 github.com/go-toolsmith/astcopy # github.com/go-toolsmith/astequal v1.1.0 @@ -190,7 +201,7 @@ github.com/golangci/gofmt/gofmt github.com/golangci/gofmt/gofmt/internal/diff github.com/golangci/gofmt/gofmt/internal/execabs github.com/golangci/gofmt/goimports -# github.com/golangci/golangci-lint v1.51.2 +# github.com/golangci/golangci-lint v1.53.3 ## explicit; go 1.19 github.com/golangci/golangci-lint/cmd/golangci-lint github.com/golangci/golangci-lint/internal/cache @@ -216,7 +227,6 @@ github.com/golangci/golangci-lint/pkg/printers github.com/golangci/golangci-lint/pkg/report github.com/golangci/golangci-lint/pkg/result 
github.com/golangci/golangci-lint/pkg/result/processors -github.com/golangci/golangci-lint/pkg/sliceutil github.com/golangci/golangci-lint/pkg/timeutils # github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 ## explicit @@ -240,7 +250,7 @@ github.com/google/go-cmp/cmp/internal/diff github.com/google/go-cmp/cmp/internal/flags github.com/google/go-cmp/cmp/internal/function github.com/google/go-cmp/cmp/internal/value -# github.com/gordonklaus/ineffassign v0.0.0-20230107090616-13ace0543b28 +# github.com/gordonklaus/ineffassign v0.0.0-20230610083614-0e73809eb601 ## explicit; go 1.14 github.com/gordonklaus/ineffassign/pkg/ineffassign # github.com/gostaticanalysis/analysisutil v0.7.1 @@ -282,7 +292,7 @@ github.com/hashicorp/hcl/json/token github.com/hexops/gotextdiff github.com/hexops/gotextdiff/myers github.com/hexops/gotextdiff/span -# github.com/inconshreveable/mousetrap v1.0.1 +# github.com/inconshreveable/mousetrap v1.1.0 ## explicit; go 1.18 github.com/inconshreveable/mousetrap # github.com/jgautheron/goconst v1.5.1 @@ -297,9 +307,6 @@ github.com/jirfag/go-printf-func-name/pkg/analyzer # github.com/julz/importas v0.1.0 ## explicit; go 1.15 github.com/julz/importas -# github.com/junk1tm/musttag v0.4.5 -## explicit; go 1.18 -github.com/junk1tm/musttag # github.com/kisielk/errcheck v1.6.3 ## explicit; go 1.14 github.com/kisielk/errcheck/errcheck @@ -307,13 +314,13 @@ github.com/kisielk/errcheck/errcheck ## explicit github.com/kisielk/gotool github.com/kisielk/gotool/internal/load -# github.com/kkHAIKE/contextcheck v1.1.3 -## explicit; go 1.15 +# github.com/kkHAIKE/contextcheck v1.1.4 +## explicit; go 1.20 github.com/kkHAIKE/contextcheck # github.com/kulti/thelper v0.6.3 ## explicit; go 1.18 github.com/kulti/thelper/pkg/analyzer -# github.com/kunwardeep/paralleltest v1.0.6 +# github.com/kunwardeep/paralleltest v1.0.7 ## explicit; go 1.17 github.com/kunwardeep/paralleltest/pkg/paralleltest # github.com/kyoh86/exportloopref v0.1.11 @@ -322,7 +329,7 @@ github.com/kyoh86/exportloopref # github.com/ldez/gomoddirectives v0.2.3 ## explicit; go 1.16 github.com/ldez/gomoddirectives -# github.com/ldez/tagliatelle v0.4.0 +# github.com/ldez/tagliatelle v0.5.0 ## explicit; go 1.19 github.com/ldez/tagliatelle # github.com/leonklingele/grouper v1.1.1 @@ -342,10 +349,10 @@ github.com/magiconair/properties # github.com/maratori/testableexamples v1.0.0 ## explicit; go 1.19 github.com/maratori/testableexamples/pkg/testableexamples -# github.com/maratori/testpackage v1.1.0 -## explicit; go 1.18 +# github.com/maratori/testpackage v1.1.1 +## explicit; go 1.20 github.com/maratori/testpackage/pkg/testpackage -# github.com/matoous/godox v0.0.0-20210227103229-6504466cf951 +# github.com/matoous/godox v0.0.0-20230222163458-006bad1f9d26 ## explicit; go 1.13 github.com/matoous/godox # github.com/mattn/go-colorable v0.1.13 @@ -363,7 +370,7 @@ github.com/matttproud/golang_protobuf_extensions/pbutil # github.com/mbilski/exhaustivestruct v1.2.0 ## explicit; go 1.15 github.com/mbilski/exhaustivestruct/pkg/analyzer -# github.com/mgechev/revive v1.2.5 +# github.com/mgechev/revive v1.3.2 ## explicit; go 1.19 github.com/mgechev/revive/config github.com/mgechev/revive/formatter @@ -376,8 +383,8 @@ github.com/mitchellh/go-homedir # github.com/mitchellh/mapstructure v1.5.0 ## explicit; go 1.14 github.com/mitchellh/mapstructure -# github.com/moricho/tparallel v0.2.1 -## explicit; go 1.15 +# github.com/moricho/tparallel v0.3.1 +## explicit; go 1.20 github.com/moricho/tparallel github.com/moricho/tparallel/pkg/ssafunc 
github.com/moricho/tparallel/pkg/ssainstr @@ -395,18 +402,19 @@ github.com/nbutton23/zxcvbn-go/match github.com/nbutton23/zxcvbn-go/matching github.com/nbutton23/zxcvbn-go/scoring github.com/nbutton23/zxcvbn-go/utils/math -# github.com/nishanths/exhaustive v0.9.5 -## explicit; go 1.14 +# github.com/nishanths/exhaustive v0.11.0 +## explicit; go 1.18 github.com/nishanths/exhaustive # github.com/nishanths/predeclared v0.2.2 ## explicit; go 1.14 github.com/nishanths/predeclared/passes/predeclared -# github.com/nunnatsa/ginkgolinter v0.8.1 +# github.com/nunnatsa/ginkgolinter v0.12.1 ## explicit; go 1.19 github.com/nunnatsa/ginkgolinter github.com/nunnatsa/ginkgolinter/gomegahandler github.com/nunnatsa/ginkgolinter/reverseassertion github.com/nunnatsa/ginkgolinter/types +github.com/nunnatsa/ginkgolinter/version # github.com/olekukonko/tablewriter v0.0.5 ## explicit; go 1.12 github.com/olekukonko/tablewriter @@ -419,14 +427,11 @@ github.com/pelletier/go-toml/v2 github.com/pelletier/go-toml/v2/internal/ast github.com/pelletier/go-toml/v2/internal/danger github.com/pelletier/go-toml/v2/internal/tracker -# github.com/pkg/errors v0.9.1 -## explicit -github.com/pkg/errors # github.com/pmezard/go-difflib v1.0.0 ## explicit github.com/pmezard/go-difflib/difflib -# github.com/polyfloyd/go-errorlint v1.1.0 -## explicit; go 1.13 +# github.com/polyfloyd/go-errorlint v1.4.2 +## explicit; go 1.20 github.com/polyfloyd/go-errorlint/errorlint # github.com/prometheus/client_golang v1.12.1 ## explicit; go 1.13 @@ -468,7 +473,7 @@ github.com/quasilyte/go-ruleguard/ruleguard/typematch github.com/quasilyte/gogrep github.com/quasilyte/gogrep/internal/stdinfo github.com/quasilyte/gogrep/nodetag -# github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95 +# github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 ## explicit; go 1.14 github.com/quasilyte/regex/syntax # github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 @@ -493,18 +498,20 @@ github.com/sashamelentyev/interfacebloat/pkg/analyzer ## explicit; go 1.19 github.com/sashamelentyev/usestdlibvars/pkg/analyzer github.com/sashamelentyev/usestdlibvars/pkg/analyzer/internal/mapping -# github.com/securego/gosec/v2 v2.15.0 +# github.com/securego/gosec/v2 v2.16.0 ## explicit; go 1.19 github.com/securego/gosec/v2 +github.com/securego/gosec/v2/analyzers github.com/securego/gosec/v2/cwe +github.com/securego/gosec/v2/issue github.com/securego/gosec/v2/rules # github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c ## explicit github.com/shazow/go-diff/difflib -# github.com/sirupsen/logrus v1.9.0 +# github.com/sirupsen/logrus v1.9.3 ## explicit; go 1.13 github.com/sirupsen/logrus -# github.com/sivchari/containedctx v1.0.2 +# github.com/sivchari/containedctx v1.0.3 ## explicit; go 1.17 github.com/sivchari/containedctx # github.com/sivchari/nosnakecase v1.7.0 @@ -513,8 +520,8 @@ github.com/sivchari/nosnakecase # github.com/sivchari/tenv v1.7.1 ## explicit; go 1.18 github.com/sivchari/tenv -# github.com/sonatard/noctx v0.0.1 -## explicit; go 1.13 +# github.com/sonatard/noctx v0.0.2 +## explicit; go 1.20 github.com/sonatard/noctx github.com/sonatard/noctx/ngfunc github.com/sonatard/noctx/reqwithoutctx @@ -528,7 +535,7 @@ github.com/spf13/afero/mem # github.com/spf13/cast v1.5.0 ## explicit; go 1.18 github.com/spf13/cast -# github.com/spf13/cobra v1.6.1 +# github.com/spf13/cobra v1.7.0 ## explicit; go 1.15 github.com/spf13/cobra # github.com/spf13/jwalterweatherman v1.1.0 @@ -557,8 +564,8 @@ 
github.com/stbenjam/no-sprintf-host-port/pkg/analyzer # github.com/stretchr/objx v0.5.0 ## explicit; go 1.12 github.com/stretchr/objx -# github.com/stretchr/testify v1.8.1 -## explicit; go 1.13 +# github.com/stretchr/testify v1.8.4 +## explicit; go 1.20 github.com/stretchr/testify/assert github.com/stretchr/testify/mock # github.com/subosito/gotenv v1.4.1 @@ -567,16 +574,16 @@ github.com/subosito/gotenv # github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c ## explicit github.com/t-yuki/gocover-cobertura -# github.com/tdakkota/asciicheck v0.1.1 -## explicit; go 1.13 +# github.com/tdakkota/asciicheck v0.2.0 +## explicit; go 1.18 github.com/tdakkota/asciicheck # github.com/tetafro/godot v1.4.11 ## explicit; go 1.16 github.com/tetafro/godot -# github.com/timakin/bodyclose v0.0.0-20221125081123-e39cf3fc478e +# github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966 ## explicit; go 1.12 github.com/timakin/bodyclose/passes/bodyclose -# github.com/timonwong/loggercheck v0.9.3 +# github.com/timonwong/loggercheck v0.9.4 ## explicit; go 1.18 github.com/timonwong/loggercheck github.com/timonwong/loggercheck/internal/bytebufferpool @@ -585,7 +592,7 @@ github.com/timonwong/loggercheck/internal/checkers/printf github.com/timonwong/loggercheck/internal/rules github.com/timonwong/loggercheck/internal/sets github.com/timonwong/loggercheck/internal/stringutil -# github.com/tomarrell/wrapcheck/v2 v2.8.0 +# github.com/tomarrell/wrapcheck/v2 v2.8.1 ## explicit; go 1.18 github.com/tomarrell/wrapcheck/v2/wrapcheck # github.com/tommy-muehle/go-mnd/v2 v2.5.1 @@ -602,6 +609,9 @@ github.com/ultraware/whitespace # github.com/uudashr/gocognit v1.0.6 ## explicit; go 1.16 github.com/uudashr/gocognit +# github.com/xen0n/gosmopolitan v1.2.1 +## explicit; go 1.19 +github.com/xen0n/gosmopolitan # github.com/yagipy/maintidx v1.0.0 ## explicit; go 1.17 github.com/yagipy/maintidx @@ -610,54 +620,60 @@ github.com/yagipy/maintidx/pkg/halstvol # github.com/yeya24/promlinter v0.2.0 ## explicit; go 1.16 github.com/yeya24/promlinter +# github.com/ykadowak/zerologlint v0.1.2 +## explicit; go 1.19 +github.com/ykadowak/zerologlint # gitlab.com/bosi/decorder v0.2.3 ## explicit; go 1.17 gitlab.com/bosi/decorder +# go.tmz.dev/musttag v0.7.0 +## explicit; go 1.19 +go.tmz.dev/musttag # go.uber.org/atomic v1.7.0 ## explicit; go 1.13 go.uber.org/atomic # go.uber.org/multierr v1.6.0 ## explicit; go 1.12 go.uber.org/multierr -# go.uber.org/zap v1.17.0 -## explicit; go 1.13 +# go.uber.org/zap v1.24.0 +## explicit; go 1.19 go.uber.org/zap go.uber.org/zap/buffer +go.uber.org/zap/internal go.uber.org/zap/internal/bufferpool go.uber.org/zap/internal/color go.uber.org/zap/internal/exit go.uber.org/zap/zapcore -# golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e -## explicit; go 1.18 +# golang.org/x/exp v0.0.0-20230510235704-dd950f8aeaea +## explicit; go 1.20 golang.org/x/exp/constraints golang.org/x/exp/slices -# golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9 +# golang.org/x/exp/typeparams v0.0.0-20230224173230-c95f2b4c22f2 ## explicit; go 1.18 golang.org/x/exp/typeparams -# golang.org/x/mod v0.8.0 +# golang.org/x/mod v0.12.0 ## explicit; go 1.17 golang.org/x/mod/internal/lazyregexp golang.org/x/mod/modfile golang.org/x/mod/module golang.org/x/mod/semver -# golang.org/x/net v0.7.0 +# golang.org/x/sync v0.3.0 ## explicit; go 1.17 -# golang.org/x/sync v0.1.0 -## explicit golang.org/x/sync/errgroup golang.org/x/sync/semaphore -# golang.org/x/sys v0.5.0 +# golang.org/x/sys v0.10.0 ## explicit; go 1.17 
golang.org/x/sys/execabs golang.org/x/sys/internal/unsafeheader golang.org/x/sys/unix golang.org/x/sys/windows -# golang.org/x/text v0.7.0 +# golang.org/x/text v0.11.0 ## explicit; go 1.17 +golang.org/x/text/runes golang.org/x/text/transform golang.org/x/text/unicode/norm golang.org/x/text/width -# golang.org/x/tools v0.6.0 +# golang.org/x/tools v0.11.0 ## explicit; go 1.18 golang.org/x/tools/cmd/goimports golang.org/x/tools/go/analysis @@ -717,11 +733,11 @@ golang.org/x/tools/go/ssa/ssautil golang.org/x/tools/go/types/objectpath golang.org/x/tools/go/types/typeutil golang.org/x/tools/imports -golang.org/x/tools/internal/analysisinternal golang.org/x/tools/internal/event golang.org/x/tools/internal/event/core golang.org/x/tools/internal/event/keys golang.org/x/tools/internal/event/label +golang.org/x/tools/internal/event/tag golang.org/x/tools/internal/fastwalk golang.org/x/tools/internal/gcimporter golang.org/x/tools/internal/gocommand @@ -773,7 +789,7 @@ gopkg.in/yaml.v2 # gopkg.in/yaml.v3 v3.0.1 ## explicit gopkg.in/yaml.v3 -# honnef.co/go/tools v0.4.2 +# honnef.co/go/tools v0.4.3 ## explicit; go 1.19 honnef.co/go/tools/analysis/code honnef.co/go/tools/analysis/edit @@ -803,9 +819,12 @@ honnef.co/go/tools/staticcheck/fakereflect honnef.co/go/tools/staticcheck/fakexml honnef.co/go/tools/stylecheck honnef.co/go/tools/unused -# mvdan.cc/gofumpt v0.4.0 -## explicit; go 1.18 +# mvdan.cc/gofumpt v0.5.0 +## explicit; go 1.19 mvdan.cc/gofumpt/format +mvdan.cc/gofumpt/internal/govendor/go/doc/comment +mvdan.cc/gofumpt/internal/govendor/go/format +mvdan.cc/gofumpt/internal/govendor/go/printer mvdan.cc/gofumpt/internal/version # mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed ## explicit diff --git a/tools/vendor/mvdan.cc/gofumpt/format/format.go b/tools/vendor/mvdan.cc/gofumpt/format/format.go index fa4f2fcb6d..787561781e 100644 --- a/tools/vendor/mvdan.cc/gofumpt/format/format.go +++ b/tools/vendor/mvdan.cc/gofumpt/format/format.go @@ -10,7 +10,6 @@ import ( "bytes" "fmt" "go/ast" - "go/format" "go/parser" "go/token" "os" @@ -26,6 +25,7 @@ import ( "golang.org/x/mod/semver" "golang.org/x/tools/go/ast/astutil" + "mvdan.cc/gofumpt/internal/govendor/go/format" "mvdan.cc/gofumpt/internal/version" ) @@ -395,7 +395,7 @@ func (f *fumpter) applyPre(c *astutil.Cursor) { slc := []string{ "//gofumpt:diagnose", "version:", - version.String(), + version.String(""), "flags:", "-lang=" + f.LangVersion, "-modpath=" + f.ModulePath, @@ -467,13 +467,19 @@ func (f *fumpter) applyPre(c *astutil.Cursor) { specEnd := node.Specs[0].End() if len(f.commentsBetween(node.TokPos, specPos)) > 0 { - // If the single spec has any comment, it must - // go before the entire declaration now. + // If the single spec has a comment on the line above, + // the comment must go before the entire declaration now. node.TokPos = specPos } else { f.removeLines(f.Line(node.TokPos), f.Line(specPos)) } - f.removeLines(f.Line(specEnd), f.Line(node.Rparen)) + if len(f.commentsBetween(specEnd, node.Rparen)) > 0 { + // Leave one newline to not force a comment on the next line to + // become an inline comment. + f.removeLines(f.Line(specEnd)+1, f.Line(node.Rparen)) + } else { + f.removeLines(f.Line(specEnd), f.Line(node.Rparen)) + } // Remove the parentheses. go/printer will automatically // get rid of the newlines. 
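The format.go hunk above changes how gofumpt treats comments when it collapses a single-spec var or const group. To see the effect on a concrete input, a small driver against gofumpt's public entry point can be used (assuming the format.Source API exported by mvdan.cc/gofumpt/format; the input snippet is invented):

package main

import (
	"fmt"

	"mvdan.cc/gofumpt/format"
)

func main() {
	src := []byte(`package demo

var (
	// x is the only spec in this group.
	x = 1
	// A comment between the spec and the closing paren.
)
`)
	out, err := format.Source(src, format.Options{})
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out)) // inspect how the parentheses and comments end up
}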
@@ -546,12 +552,19 @@ func (f *fumpter) applyPre(c *astutil.Cursor) { if f.Line(sign.Pos()) != endLine { handleMultiLine := func(fl *ast.FieldList) { + // Refuse to insert a newline before the closing token + // if the list is empty or all in one line. if fl == nil || len(fl.List) == 0 { return } + fieldOpeningLine := f.Line(fl.Opening) + fieldClosingLine := f.Line(fl.Closing) + if fieldOpeningLine == fieldClosingLine { + return + } + lastFieldEnd := fl.List[len(fl.List)-1].End() lastFieldLine := f.Line(lastFieldEnd) - fieldClosingLine := f.Line(fl.Closing) isLastFieldOnFieldClosingLine := lastFieldLine == fieldClosingLine isLastFieldOnSigClosingLine := lastFieldLine == endLine diff --git a/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/doc.go b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/doc.go new file mode 100644 index 0000000000..45a476aa9a --- /dev/null +++ b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/doc.go @@ -0,0 +1,36 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package comment implements parsing and reformatting of Go doc comments, +(documentation comments), which are comments that immediately precede +a top-level declaration of a package, const, func, type, or var. + +Go doc comment syntax is a simplified subset of Markdown that supports +links, headings, paragraphs, lists (without nesting), and preformatted text blocks. +The details of the syntax are documented at https://go.dev/doc/comment. + +To parse the text associated with a doc comment (after removing comment markers), +use a [Parser]: + + var p comment.Parser + doc := p.Parse(text) + +The result is a [*Doc]. +To reformat it as a doc comment, HTML, Markdown, or plain text, +use a [Printer]: + + var pr comment.Printer + os.Stdout.Write(pr.Text(doc)) + +The [Parser] and [Printer] types are structs whose fields can be +modified to customize the operations. +For details, see the documentation for those types. + +Use cases that need additional control over reformatting can +implement their own logic by inspecting the parsed syntax itself. +See the documentation for [Doc], [Block], [Text] for an overview +and links to additional types. +*/ +package comment diff --git a/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/html.go b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/html.go new file mode 100644 index 0000000000..bc076f6a58 --- /dev/null +++ b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/html.go @@ -0,0 +1,169 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package comment + +import ( + "bytes" + "fmt" + "strconv" +) + +// An htmlPrinter holds the state needed for printing a Doc as HTML. +type htmlPrinter struct { + *Printer + tight bool +} + +// HTML returns an HTML formatting of the Doc. +// See the [Printer] documentation for ways to customize the HTML output. +func (p *Printer) HTML(d *Doc) []byte { + hp := &htmlPrinter{Printer: p} + var out bytes.Buffer + for _, x := range d.Content { + hp.block(&out, x) + } + return out.Bytes() +} + +// block prints the block x to out. +func (p *htmlPrinter) block(out *bytes.Buffer, x Block) { + switch x := x.(type) { + default: + fmt.Fprintf(out, "?%T", x) + + case *Paragraph: + if !p.tight { + out.WriteString("

") + } + p.text(out, x.Text) + out.WriteString("\n") + + case *Heading: + out.WriteString("") + p.text(out, x.Text) + out.WriteString("\n") + + case *Code: + out.WriteString("

")
+		p.escape(out, x.Text)
+		out.WriteString("
\n") + + case *List: + kind := "ol>\n" + if x.Items[0].Number == "" { + kind = "ul>\n" + } + out.WriteString("<") + out.WriteString(kind) + next := "1" + for _, item := range x.Items { + out.WriteString("") + p.tight = !x.BlankBetween() + for _, blk := range item.Content { + p.block(out, blk) + } + p.tight = false + } + out.WriteString("= 0; i-- { + if b[i] < '9' { + b[i]++ + return string(b) + } + b[i] = '0' + } + return "1" + string(b) +} + +// text prints the text sequence x to out. +func (p *htmlPrinter) text(out *bytes.Buffer, x []Text) { + for _, t := range x { + switch t := t.(type) { + case Plain: + p.escape(out, string(t)) + case Italic: + out.WriteString("") + p.escape(out, string(t)) + out.WriteString("") + case *Link: + out.WriteString(``) + p.text(out, t.Text) + out.WriteString("") + case *DocLink: + url := p.docLinkURL(t) + if url != "" { + out.WriteString(``) + } + p.text(out, t.Text) + if url != "" { + out.WriteString("") + } + } + } +} + +// escape prints s to out as plain text, +// escaping < & " ' and > to avoid being misinterpreted +// in larger HTML constructs. +func (p *htmlPrinter) escape(out *bytes.Buffer, s string) { + start := 0 + for i := 0; i < len(s); i++ { + switch s[i] { + case '<': + out.WriteString(s[start:i]) + out.WriteString("<") + start = i + 1 + case '&': + out.WriteString(s[start:i]) + out.WriteString("&") + start = i + 1 + case '"': + out.WriteString(s[start:i]) + out.WriteString(""") + start = i + 1 + case '\'': + out.WriteString(s[start:i]) + out.WriteString("'") + start = i + 1 + case '>': + out.WriteString(s[start:i]) + out.WriteString(">") + start = i + 1 + } + } + out.WriteString(s[start:]) +} diff --git a/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/markdown.go b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/markdown.go new file mode 100644 index 0000000000..d8550f2e39 --- /dev/null +++ b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/markdown.go @@ -0,0 +1,188 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package comment + +import ( + "bytes" + "fmt" + "strings" +) + +// An mdPrinter holds the state needed for printing a Doc as Markdown. +type mdPrinter struct { + *Printer + headingPrefix string + raw bytes.Buffer +} + +// Markdown returns a Markdown formatting of the Doc. +// See the [Printer] documentation for ways to customize the Markdown output. +func (p *Printer) Markdown(d *Doc) []byte { + mp := &mdPrinter{ + Printer: p, + headingPrefix: strings.Repeat("#", p.headingLevel()) + " ", + } + + var out bytes.Buffer + for i, x := range d.Content { + if i > 0 { + out.WriteByte('\n') + } + mp.block(&out, x) + } + return out.Bytes() +} + +// block prints the block x to out. 
+func (p *mdPrinter) block(out *bytes.Buffer, x Block) { + switch x := x.(type) { + default: + fmt.Fprintf(out, "?%T", x) + + case *Paragraph: + p.text(out, x.Text) + out.WriteString("\n") + + case *Heading: + out.WriteString(p.headingPrefix) + p.text(out, x.Text) + if id := p.headingID(x); id != "" { + out.WriteString(" {#") + out.WriteString(id) + out.WriteString("}") + } + out.WriteString("\n") + + case *Code: + md := x.Text + for md != "" { + var line string + line, md, _ = strings.Cut(md, "\n") + if line != "" { + out.WriteString("\t") + out.WriteString(line) + } + out.WriteString("\n") + } + + case *List: + loose := x.BlankBetween() + for i, item := range x.Items { + if i > 0 && loose { + out.WriteString("\n") + } + if n := item.Number; n != "" { + out.WriteString(" ") + out.WriteString(n) + out.WriteString(". ") + } else { + out.WriteString(" - ") // SP SP - SP + } + for i, blk := range item.Content { + const fourSpace = " " + if i > 0 { + out.WriteString("\n" + fourSpace) + } + p.text(out, blk.(*Paragraph).Text) + out.WriteString("\n") + } + } + } +} + +// text prints the text sequence x to out. +func (p *mdPrinter) text(out *bytes.Buffer, x []Text) { + p.raw.Reset() + p.rawText(&p.raw, x) + line := bytes.TrimSpace(p.raw.Bytes()) + if len(line) == 0 { + return + } + switch line[0] { + case '+', '-', '*', '#': + // Escape what would be the start of an unordered list or heading. + out.WriteByte('\\') + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + i := 1 + for i < len(line) && '0' <= line[i] && line[i] <= '9' { + i++ + } + if i < len(line) && (line[i] == '.' || line[i] == ')') { + // Escape what would be the start of an ordered list. + out.Write(line[:i]) + out.WriteByte('\\') + line = line[i:] + } + } + out.Write(line) +} + +// rawText prints the text sequence x to out, +// without worrying about escaping characters +// that have special meaning at the start of a Markdown line. +func (p *mdPrinter) rawText(out *bytes.Buffer, x []Text) { + for _, t := range x { + switch t := t.(type) { + case Plain: + p.escape(out, string(t)) + case Italic: + out.WriteString("*") + p.escape(out, string(t)) + out.WriteString("*") + case *Link: + out.WriteString("[") + p.rawText(out, t.Text) + out.WriteString("](") + out.WriteString(t.URL) + out.WriteString(")") + case *DocLink: + url := p.docLinkURL(t) + if url != "" { + out.WriteString("[") + } + p.rawText(out, t.Text) + if url != "" { + out.WriteString("](") + url = strings.ReplaceAll(url, "(", "%28") + url = strings.ReplaceAll(url, ")", "%29") + out.WriteString(url) + out.WriteString(")") + } + } + } +} + +// escape prints s to out as plain text, +// escaping special characters to avoid being misinterpreted +// as Markdown markup sequences. +func (p *mdPrinter) escape(out *bytes.Buffer, s string) { + start := 0 + for i := 0; i < len(s); i++ { + switch s[i] { + case '\n': + // Turn all \n into spaces, for a few reasons: + // - Avoid introducing paragraph breaks accidentally. + // - Avoid the need to reindent after the newline. + // - Avoid problems with Markdown renderers treating + // every mid-paragraph newline as a
. + out.WriteString(s[start:i]) + out.WriteByte(' ') + start = i + 1 + continue + case '`', '_', '*', '[', '<', '\\': + // Not all of these need to be escaped all the time, + // but is valid and easy to do so. + // We assume the Markdown is being passed to a + // Markdown renderer, not edited by a person, + // so it's fine to have escapes that are not strictly + // necessary in some cases. + out.WriteString(s[start:i]) + out.WriteByte('\\') + out.WriteByte(s[i]) + start = i + 1 + } + } + out.WriteString(s[start:]) +} diff --git a/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/parse.go b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/parse.go new file mode 100644 index 0000000000..372577b2b3 --- /dev/null +++ b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/parse.go @@ -0,0 +1,1262 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package comment + +import ( + "sort" + "strings" + "unicode" + "unicode/utf8" +) + +// A Doc is a parsed Go doc comment. +type Doc struct { + // Content is the sequence of content blocks in the comment. + Content []Block + + // Links is the link definitions in the comment. + Links []*LinkDef +} + +// A LinkDef is a single link definition. +type LinkDef struct { + Text string // the link text + URL string // the link URL + Used bool // whether the comment uses the definition +} + +// A Block is block-level content in a doc comment, +// one of [*Code], [*Heading], [*List], or [*Paragraph]. +type Block interface { + block() +} + +// A Heading is a doc comment heading. +type Heading struct { + Text []Text // the heading text +} + +func (*Heading) block() {} + +// A List is a numbered or bullet list. +// Lists are always non-empty: len(Items) > 0. +// In a numbered list, every Items[i].Number is a non-empty string. +// In a bullet list, every Items[i].Number is an empty string. +type List struct { + // Items is the list items. + Items []*ListItem + + // ForceBlankBefore indicates that the list must be + // preceded by a blank line when reformatting the comment, + // overriding the usual conditions. See the BlankBefore method. + // + // The comment parser sets ForceBlankBefore for any list + // that is preceded by a blank line, to make sure + // the blank line is preserved when printing. + ForceBlankBefore bool + + // ForceBlankBetween indicates that list items must be + // separated by blank lines when reformatting the comment, + // overriding the usual conditions. See the BlankBetween method. + // + // The comment parser sets ForceBlankBetween for any list + // that has a blank line between any two of its items, to make sure + // the blank lines are preserved when printing. + ForceBlankBetween bool +} + +func (*List) block() {} + +// BlankBefore reports whether a reformatting of the comment +// should include a blank line before the list. +// The default rule is the same as for [BlankBetween]: +// if the list item content contains any blank lines +// (meaning at least one item has multiple paragraphs) +// then the list itself must be preceded by a blank line. +// A preceding blank line can be forced by setting [List].ForceBlankBefore. +func (l *List) BlankBefore() bool { + return l.ForceBlankBefore || l.BlankBetween() +} + +// BlankBetween reports whether a reformatting of the comment +// should include a blank line between each pair of list items. 
+// The default rule is that if the list item content contains any blank lines +// (meaning at least one item has multiple paragraphs) +// then list items must themselves be separated by blank lines. +// Blank line separators can be forced by setting [List].ForceBlankBetween. +func (l *List) BlankBetween() bool { + if l.ForceBlankBetween { + return true + } + for _, item := range l.Items { + if len(item.Content) != 1 { + // Unreachable for parsed comments today, + // since the only way to get multiple item.Content + // is multiple paragraphs, which must have been + // separated by a blank line. + return true + } + } + return false +} + +// A ListItem is a single item in a numbered or bullet list. +type ListItem struct { + // Number is a decimal string in a numbered list + // or an empty string in a bullet list. + Number string // "1", "2", ...; "" for bullet list + + // Content is the list content. + // Currently, restrictions in the parser and printer + // require every element of Content to be a *Paragraph. + Content []Block // Content of this item. +} + +// A Paragraph is a paragraph of text. +type Paragraph struct { + Text []Text +} + +func (*Paragraph) block() {} + +// A Code is a preformatted code block. +type Code struct { + // Text is the preformatted text, ending with a newline character. + // It may be multiple lines, each of which ends with a newline character. + // It is never empty, nor does it start or end with a blank line. + Text string +} + +func (*Code) block() {} + +// A Text is text-level content in a doc comment, +// one of [Plain], [Italic], [*Link], or [*DocLink]. +type Text interface { + text() +} + +// A Plain is a string rendered as plain text (not italicized). +type Plain string + +func (Plain) text() {} + +// An Italic is a string rendered as italicized text. +type Italic string + +func (Italic) text() {} + +// A Link is a link to a specific URL. +type Link struct { + Auto bool // is this an automatic (implicit) link of a literal URL? + Text []Text // text of link + URL string // target URL of link +} + +func (*Link) text() {} + +// A DocLink is a link to documentation for a Go package or symbol. +type DocLink struct { + Text []Text // text of link + + // ImportPath, Recv, and Name identify the Go package or symbol + // that is the link target. The potential combinations of + // non-empty fields are: + // - ImportPath: a link to another package + // - ImportPath, Name: a link to a const, func, type, or var in another package + // - ImportPath, Recv, Name: a link to a method in another package + // - Name: a link to a const, func, type, or var in this package + // - Recv, Name: a link to a method in this package + ImportPath string // import path + Recv string // receiver type, without any pointer star, for methods + Name string // const, func, type, var, or method name +} + +func (*DocLink) text() {} + +// A Parser is a doc comment parser. +// The fields in the struct can be filled in before calling Parse +// in order to customize the details of the parsing process. +type Parser struct { + // Words is a map of Go identifier words that + // should be italicized and potentially linked. + // If Words[w] is the empty string, then the word w + // is only italicized. Otherwise it is linked, using + // Words[w] as the link target. + // Words corresponds to the [go/doc.ToHTML] words parameter. + Words map[string]string + + // LookupPackage resolves a package name to an import path. 
+ // + // If LookupPackage(name) returns ok == true, then [name] + // (or [name.Sym] or [name.Sym.Method]) + // is considered a documentation link to importPath's package docs. + // It is valid to return "", true, in which case name is considered + // to refer to the current package. + // + // If LookupPackage(name) returns ok == false, + // then [name] (or [name.Sym] or [name.Sym.Method]) + // will not be considered a documentation link, + // except in the case where name is the full (but single-element) import path + // of a package in the standard library, such as in [math] or [io.Reader]. + // LookupPackage is still called for such names, + // in order to permit references to imports of other packages + // with the same package names. + // + // Setting LookupPackage to nil is equivalent to setting it to + // a function that always returns "", false. + LookupPackage func(name string) (importPath string, ok bool) + + // LookupSym reports whether a symbol name or method name + // exists in the current package. + // + // If LookupSym("", "Name") returns true, then [Name] + // is considered a documentation link for a const, func, type, or var. + // + // Similarly, if LookupSym("Recv", "Name") returns true, + // then [Recv.Name] is considered a documentation link for + // type Recv's method Name. + // + // Setting LookupSym to nil is equivalent to setting it to a function + // that always returns false. + LookupSym func(recv, name string) (ok bool) +} + +// parseDoc is parsing state for a single doc comment. +type parseDoc struct { + *Parser + *Doc + links map[string]*LinkDef + lines []string + lookupSym func(recv, name string) bool +} + +// lookupPkg is called to look up the pkg in [pkg], [pkg.Name], and [pkg.Name.Recv]. +// If pkg has a slash, it is assumed to be the full import path and is returned with ok = true. +// +// Otherwise, pkg is probably a simple package name like "rand" (not "crypto/rand" or "math/rand"). +// d.LookupPackage provides a way for the caller to allow resolving such names with reference +// to the imports in the surrounding package. +// +// There is one collision between these two cases: single-element standard library names +// like "math" are full import paths but don't contain slashes. We let d.LookupPackage have +// the first chance to resolve it, in case there's a different package imported as math, +// and otherwise we refer to a built-in list of single-element standard library package names. +func (d *parseDoc) lookupPkg(pkg string) (importPath string, ok bool) { + if strings.Contains(pkg, "/") { // assume a full import path + if validImportPath(pkg) { + return pkg, true + } + return "", false + } + if d.LookupPackage != nil { + // Give LookupPackage a chance. + if path, ok := d.LookupPackage(pkg); ok { + return path, true + } + } + return DefaultLookupPackage(pkg) +} + +func isStdPkg(path string) bool { + // TODO(rsc): Use sort.Find once we don't have to worry about + // copying this code into older Go environments. + i := sort.Search(len(stdPkgs), func(i int) bool { return stdPkgs[i] >= path }) + return i < len(stdPkgs) && stdPkgs[i] == path +} + +// DefaultLookupPackage is the default package lookup +// function, used when [Parser].LookupPackage is nil. +// It recognizes names of the packages from the standard +// library with single-element import paths, such as math, +// which would otherwise be impossible to name. +// +// Note that the go/doc package provides a more sophisticated +// lookup based on the imports used in the current package. 
+func DefaultLookupPackage(name string) (importPath string, ok bool) { + if isStdPkg(name) { + return name, true + } + return "", false +} + +// Parse parses the doc comment text and returns the *Doc form. +// Comment markers (/* // and */) in the text must have already been removed. +func (p *Parser) Parse(text string) *Doc { + lines := unindent(strings.Split(text, "\n")) + d := &parseDoc{ + Parser: p, + Doc: new(Doc), + links: make(map[string]*LinkDef), + lines: lines, + lookupSym: func(recv, name string) bool { return false }, + } + if p.LookupSym != nil { + d.lookupSym = p.LookupSym + } + + // First pass: break into block structure and collect known links. + // The text is all recorded as Plain for now. + var prev span + for _, s := range parseSpans(lines) { + var b Block + switch s.kind { + default: + panic("mvdan.cc/gofumpt/internal/govendor/go/doc/comment: internal error: unknown span kind") + case spanList: + b = d.list(lines[s.start:s.end], prev.end < s.start) + case spanCode: + b = d.code(lines[s.start:s.end]) + case spanOldHeading: + b = d.oldHeading(lines[s.start]) + case spanHeading: + b = d.heading(lines[s.start]) + case spanPara: + b = d.paragraph(lines[s.start:s.end]) + } + if b != nil { + d.Content = append(d.Content, b) + } + prev = s + } + + // Second pass: interpret all the Plain text now that we know the links. + for _, b := range d.Content { + switch b := b.(type) { + case *Paragraph: + b.Text = d.parseLinkedText(string(b.Text[0].(Plain))) + case *List: + for _, i := range b.Items { + for _, c := range i.Content { + p := c.(*Paragraph) + p.Text = d.parseLinkedText(string(p.Text[0].(Plain))) + } + } + } + } + + return d.Doc +} + +// A span represents a single span of comment lines (lines[start:end]) +// of an identified kind (code, heading, paragraph, and so on). +type span struct { + start int + end int + kind spanKind +} + +// A spanKind describes the kind of span. +type spanKind int + +const ( + _ spanKind = iota + spanCode + spanHeading + spanList + spanOldHeading + spanPara +) + +func parseSpans(lines []string) []span { + var spans []span + + // The loop may process a line twice: once as unindented + // and again forced indented. So the maximum expected + // number of iterations is 2*len(lines). The repeating logic + // can be subtle, though, and to protect against introduction + // of infinite loops in future changes, we watch to see that + // we are not looping too much. A panic is better than a + // quiet infinite loop. + watchdog := 2 * len(lines) + + i := 0 + forceIndent := 0 +Spans: + for { + // Skip blank lines. + for i < len(lines) && lines[i] == "" { + i++ + } + if i >= len(lines) { + break + } + if watchdog--; watchdog < 0 { + panic("mvdan.cc/gofumpt/internal/govendor/go/doc/comment: internal error: not making progress") + } + + var kind spanKind + start := i + end := i + if i < forceIndent || indented(lines[i]) { + // Indented (or force indented). + // Ends before next unindented. (Blank lines are OK.) + // If this is an unindented list that we are heuristically treating as indented, + // then accept unindented list item lines up to the first blank lines. + // The heuristic is disabled at blank lines to contain its effect + // to non-gofmt'ed sections of the comment. + unindentedListOK := isList(lines[i]) && i < forceIndent + i++ + for i < len(lines) && (lines[i] == "" || i < forceIndent || indented(lines[i]) || (unindentedListOK && isList(lines[i]))) { + if lines[i] == "" { + unindentedListOK = false + } + i++ + } + + // Drop trailing blank lines. 
+ end = i + for end > start && lines[end-1] == "" { + end-- + } + + // If indented lines are followed (without a blank line) + // by an unindented line ending in a brace, + // take that one line too. This fixes the common mistake + // of pasting in something like + // + // func main() { + // fmt.Println("hello, world") + // } + // + // and forgetting to indent it. + // The heuristic will never trigger on a gofmt'ed comment, + // because any gofmt'ed code block or list would be + // followed by a blank line or end of comment. + if end < len(lines) && strings.HasPrefix(lines[end], "}") { + end++ + } + + if isList(lines[start]) { + kind = spanList + } else { + kind = spanCode + } + } else { + // Unindented. Ends at next blank or indented line. + i++ + for i < len(lines) && lines[i] != "" && !indented(lines[i]) { + i++ + } + end = i + + // If unindented lines are followed (without a blank line) + // by an indented line that would start a code block, + // check whether the final unindented lines + // should be left for the indented section. + // This can happen for the common mistakes of + // unindented code or unindented lists. + // The heuristic will never trigger on a gofmt'ed comment, + // because any gofmt'ed code block would have a blank line + // preceding it after the unindented lines. + if i < len(lines) && lines[i] != "" && !isList(lines[i]) { + switch { + case isList(lines[i-1]): + // If the final unindented line looks like a list item, + // this may be the first indented line wrap of + // a mistakenly unindented list. + // Leave all the unindented list items. + forceIndent = end + end-- + for end > start && isList(lines[end-1]) { + end-- + } + + case strings.HasSuffix(lines[i-1], "{") || strings.HasSuffix(lines[i-1], `\`): + // If the final unindented line ended in { or \ + // it is probably the start of a misindented code block. + // Give the user a single line fix. + // Often that's enough; if not, the user can fix the others themselves. + forceIndent = end + end-- + } + + if start == end && forceIndent > start { + i = start + continue Spans + } + } + + // Span is either paragraph or heading. + if end-start == 1 && isHeading(lines[start]) { + kind = spanHeading + } else if end-start == 1 && isOldHeading(lines[start], lines, start) { + kind = spanOldHeading + } else { + kind = spanPara + } + } + + spans = append(spans, span{start, end, kind}) + i = end + } + + return spans +} + +// indented reports whether line is indented +// (starts with a leading space or tab). +func indented(line string) bool { + return line != "" && (line[0] == ' ' || line[0] == '\t') +} + +// unindent removes any common space/tab prefix +// from each line in lines, returning a copy of lines in which +// those prefixes have been trimmed from each line. +// It also replaces any lines containing only spaces with blank lines (empty strings). +func unindent(lines []string) []string { + // Trim leading and trailing blank lines. + for len(lines) > 0 && isBlank(lines[0]) { + lines = lines[1:] + } + for len(lines) > 0 && isBlank(lines[len(lines)-1]) { + lines = lines[:len(lines)-1] + } + if len(lines) == 0 { + return nil + } + + // Compute and remove common indentation. 
+ prefix := leadingSpace(lines[0]) + for _, line := range lines[1:] { + if !isBlank(line) { + prefix = commonPrefix(prefix, leadingSpace(line)) + } + } + + out := make([]string, len(lines)) + for i, line := range lines { + line = strings.TrimPrefix(line, prefix) + if strings.TrimSpace(line) == "" { + line = "" + } + out[i] = line + } + for len(out) > 0 && out[0] == "" { + out = out[1:] + } + for len(out) > 0 && out[len(out)-1] == "" { + out = out[:len(out)-1] + } + return out +} + +// isBlank reports whether s is a blank line. +func isBlank(s string) bool { + return len(s) == 0 || (len(s) == 1 && s[0] == '\n') +} + +// commonPrefix returns the longest common prefix of a and b. +func commonPrefix(a, b string) string { + i := 0 + for i < len(a) && i < len(b) && a[i] == b[i] { + i++ + } + return a[0:i] +} + +// leadingSpace returns the longest prefix of s consisting of spaces and tabs. +func leadingSpace(s string) string { + i := 0 + for i < len(s) && (s[i] == ' ' || s[i] == '\t') { + i++ + } + return s[:i] +} + +// isOldHeading reports whether line is an old-style section heading. +// line is all[off]. +func isOldHeading(line string, all []string, off int) bool { + if off <= 0 || all[off-1] != "" || off+2 >= len(all) || all[off+1] != "" || leadingSpace(all[off+2]) != "" { + return false + } + + line = strings.TrimSpace(line) + + // a heading must start with an uppercase letter + r, _ := utf8.DecodeRuneInString(line) + if !unicode.IsLetter(r) || !unicode.IsUpper(r) { + return false + } + + // it must end in a letter or digit: + r, _ = utf8.DecodeLastRuneInString(line) + if !unicode.IsLetter(r) && !unicode.IsDigit(r) { + return false + } + + // exclude lines with illegal characters. we allow "()," + if strings.ContainsAny(line, ";:!?+*/=[]{}_^°&§~%#@<\">\\") { + return false + } + + // allow "'" for possessive "'s" only + for b := line; ; { + var ok bool + if _, b, ok = strings.Cut(b, "'"); !ok { + break + } + if b != "s" && !strings.HasPrefix(b, "s ") { + return false // ' not followed by s and then end-of-word + } + } + + // allow "." when followed by non-space + for b := line; ; { + var ok bool + if _, b, ok = strings.Cut(b, "."); !ok { + break + } + if b == "" || strings.HasPrefix(b, " ") { + return false // not followed by non-space + } + } + + return true +} + +// oldHeading returns the *Heading for the given old-style section heading line. +func (d *parseDoc) oldHeading(line string) Block { + return &Heading{Text: []Text{Plain(strings.TrimSpace(line))}} +} + +// isHeading reports whether line is a new-style section heading. +func isHeading(line string) bool { + return len(line) >= 2 && + line[0] == '#' && + (line[1] == ' ' || line[1] == '\t') && + strings.TrimSpace(line) != "#" +} + +// heading returns the *Heading for the given new-style section heading line. +func (d *parseDoc) heading(line string) Block { + return &Heading{Text: []Text{Plain(strings.TrimSpace(line[1:]))}} +} + +// code returns a code block built from the lines. +func (d *parseDoc) code(lines []string) *Code { + body := unindent(lines) + body = append(body, "") // to get final \n from Join + return &Code{Text: strings.Join(body, "\n")} +} + +// paragraph returns a paragraph block built from the lines. +// If the lines are link definitions, paragraph adds them to d and returns nil. +func (d *parseDoc) paragraph(lines []string) Block { + // Is this a block of known links? Handle. 
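To see the span classification above in action, here is an assumed round trip through the standard go/doc/comment package (which this file tracks): a paragraph, a new-style heading, and an indented code block, re-printed in canonical form.

	package main

	import (
		"fmt"
		"go/doc/comment"
	)

	func main() {
		var p comment.Parser
		d := p.Parse("Package demo runs demos.\n\n# Usage\n\n\tdemo.Run()\n")

		var pr comment.Printer
		// Comment re-prints the doc in canonical gofmt form:
		// the heading keeps its "# " marker and the code block keeps its tab indent.
		fmt.Printf("%s", pr.Comment(d))
	}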
+ var defs []*LinkDef + for _, line := range lines { + def, ok := parseLink(line) + if !ok { + goto NoDefs + } + defs = append(defs, def) + } + for _, def := range defs { + d.Links = append(d.Links, def) + if d.links[def.Text] == nil { + d.links[def.Text] = def + } + } + return nil +NoDefs: + + return &Paragraph{Text: []Text{Plain(strings.Join(lines, "\n"))}} +} + +// parseLink parses a single link definition line: +// +// [text]: url +// +// It returns the link definition and whether the line was well formed. +func parseLink(line string) (*LinkDef, bool) { + if line == "" || line[0] != '[' { + return nil, false + } + i := strings.Index(line, "]:") + if i < 0 || i+3 >= len(line) || (line[i+2] != ' ' && line[i+2] != '\t') { + return nil, false + } + + text := line[1:i] + url := strings.TrimSpace(line[i+3:]) + j := strings.Index(url, "://") + if j < 0 || !isScheme(url[:j]) { + return nil, false + } + + // Line has right form and has valid scheme://. + // That's good enough for us - we are not as picky + // about the characters beyond the :// as we are + // when extracting inline URLs from text. + return &LinkDef{Text: text, URL: url}, true +} + +// list returns a list built from the indented lines, +// using forceBlankBefore as the value of the List's ForceBlankBefore field. +func (d *parseDoc) list(lines []string, forceBlankBefore bool) *List { + num, _, _ := listMarker(lines[0]) + var ( + list *List = &List{ForceBlankBefore: forceBlankBefore} + item *ListItem + text []string + ) + flush := func() { + if item != nil { + if para := d.paragraph(text); para != nil { + item.Content = append(item.Content, para) + } + } + text = nil + } + + for _, line := range lines { + if n, after, ok := listMarker(line); ok && (n != "") == (num != "") { + // start new list item + flush() + + item = &ListItem{Number: n} + list.Items = append(list.Items, item) + line = after + } + line = strings.TrimSpace(line) + if line == "" { + list.ForceBlankBetween = true + flush() + continue + } + text = append(text, strings.TrimSpace(line)) + } + flush() + return list +} + +// listMarker parses the line as beginning with a list marker. +// If it can do that, it returns the numeric marker ("" for a bullet list), +// the rest of the line, and ok == true. +// Otherwise, it returns "", "", false. +func listMarker(line string) (num, rest string, ok bool) { + line = strings.TrimSpace(line) + if line == "" { + return "", "", false + } + + // Can we find a marker? + if r, n := utf8.DecodeRuneInString(line); r == '•' || r == '*' || r == '+' || r == '-' { + num, rest = "", line[n:] + } else if '0' <= line[0] && line[0] <= '9' { + n := 1 + for n < len(line) && '0' <= line[n] && line[n] <= '9' { + n++ + } + if n >= len(line) || (line[n] != '.' && line[n] != ')') { + return "", "", false + } + num, rest = line[:n], line[n+1:] + } else { + return "", "", false + } + + if !indented(rest) || strings.TrimSpace(rest) == "" { + return "", "", false + } + + return num, rest, true +} + +// isList reports whether the line is the first line of a list, +// meaning starts with a list marker after any indentation. +// (The caller is responsible for checking the line is indented, as appropriate.) +func isList(line string) bool { + _, _, ok := listMarker(line) + return ok +} + +// parseLinkedText parses text that is allowed to contain explicit links, +// such as [math.Sin] or [Go home page], into a slice of Text items. 
+// +// A “pkg” is only assumed to be a full import path if it starts with +// a domain name (a path element with a dot) or is one of the packages +// from the standard library (“[os]”, “[encoding/json]”, and so on). +// To avoid problems with maps, generics, and array types, doc links +// must be both preceded and followed by punctuation, spaces, tabs, +// or the start or end of a line. An example problem would be treating +// map[ast.Expr]TypeAndValue as containing a link. +func (d *parseDoc) parseLinkedText(text string) []Text { + var out []Text + wrote := 0 + flush := func(i int) { + if wrote < i { + out = d.parseText(out, text[wrote:i], true) + wrote = i + } + } + + start := -1 + var buf []byte + for i := 0; i < len(text); i++ { + c := text[i] + if c == '\n' || c == '\t' { + c = ' ' + } + switch c { + case '[': + start = i + case ']': + if start >= 0 { + if def, ok := d.links[string(buf)]; ok { + def.Used = true + flush(start) + out = append(out, &Link{ + Text: d.parseText(nil, text[start+1:i], false), + URL: def.URL, + }) + wrote = i + 1 + } else if link, ok := d.docLink(text[start+1:i], text[:start], text[i+1:]); ok { + flush(start) + link.Text = d.parseText(nil, text[start+1:i], false) + out = append(out, link) + wrote = i + 1 + } + } + start = -1 + buf = buf[:0] + } + if start >= 0 && i != start { + buf = append(buf, c) + } + } + + flush(len(text)) + return out +} + +// docLink parses text, which was found inside [ ] brackets, +// as a doc link if possible, returning the DocLink and ok == true +// or else nil, false. +// The before and after strings are the text before the [ and after the ] +// on the same line. Doc links must be preceded and followed by +// punctuation, spaces, tabs, or the start or end of a line. +func (d *parseDoc) docLink(text, before, after string) (link *DocLink, ok bool) { + if before != "" { + r, _ := utf8.DecodeLastRuneInString(before) + if !unicode.IsPunct(r) && r != ' ' && r != '\t' && r != '\n' { + return nil, false + } + } + if after != "" { + r, _ := utf8.DecodeRuneInString(after) + if !unicode.IsPunct(r) && r != ' ' && r != '\t' && r != '\n' { + return nil, false + } + } + text = strings.TrimPrefix(text, "*") + pkg, name, ok := splitDocName(text) + var recv string + if ok { + pkg, recv, _ = splitDocName(pkg) + } + if pkg != "" { + if pkg, ok = d.lookupPkg(pkg); !ok { + return nil, false + } + } else { + if ok = d.lookupSym(recv, name); !ok { + return nil, false + } + } + link = &DocLink{ + ImportPath: pkg, + Recv: recv, + Name: name, + } + return link, true +} + +// If text is of the form before.Name, where Name is a capitalized Go identifier, +// then splitDocName returns before, name, true. +// Otherwise it returns text, "", false. +func splitDocName(text string) (before, name string, foundDot bool) { + i := strings.LastIndex(text, ".") + name = text[i+1:] + if !isName(name) { + return text, "", false + } + if i >= 0 { + before = text[:i] + } + return before, name, true +} + +// parseText parses s as text and returns the result of appending +// those parsed Text elements to out. +// parseText does not handle explicit links like [math.Sin] or [Go home page]: +// those are handled by parseLinkedText. +// If autoLink is true, then parseText recognizes URLs and words from d.Words +// and converts those to links as appropriate. 
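As a concrete illustration of the linking rules described above, the following assumed snippet (standard go/doc/comment, invented comment text) shows an explicit doc link, a named link backed by a link definition, and a bracketed expression that is deliberately not treated as a link.

	package main

	import (
		"fmt"
		"go/doc/comment"
	)

	func main() {
		src := "Use [math.Sqrt] or see the [Go home page].\n" +
			"A type like map[ast.Expr]TypeAndValue is left alone,\n" +
			"because its bracket is not preceded by a space or punctuation.\n" +
			"\n" +
			"[Go home page]: https://go.dev\n"

		var p comment.Parser
		d := p.Parse(src)

		var pr comment.Printer
		// The text rendering drops the brackets of resolved links and
		// reprints the used link definition at the end.
		fmt.Printf("%s", pr.Text(d))
	}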
+func (d *parseDoc) parseText(out []Text, s string, autoLink bool) []Text { + var w strings.Builder + wrote := 0 + writeUntil := func(i int) { + w.WriteString(s[wrote:i]) + wrote = i + } + flush := func(i int) { + writeUntil(i) + if w.Len() > 0 { + out = append(out, Plain(w.String())) + w.Reset() + } + } + for i := 0; i < len(s); { + t := s[i:] + if autoLink { + if url, ok := autoURL(t); ok { + flush(i) + // Note: The old comment parser would look up the URL in words + // and replace the target with words[URL] if it was non-empty. + // That would allow creating links that display as one URL but + // when clicked go to a different URL. Not sure what the point + // of that is, so we're not doing that lookup here. + out = append(out, &Link{Auto: true, Text: []Text{Plain(url)}, URL: url}) + i += len(url) + wrote = i + continue + } + if id, ok := ident(t); ok { + url, italics := d.Words[id] + if !italics { + i += len(id) + continue + } + flush(i) + if url == "" { + out = append(out, Italic(id)) + } else { + out = append(out, &Link{Auto: true, Text: []Text{Italic(id)}, URL: url}) + } + i += len(id) + wrote = i + continue + } + } + switch { + case strings.HasPrefix(t, "``"): + if len(t) >= 3 && t[2] == '`' { + // Do not convert `` inside ```, in case people are mistakenly writing Markdown. + i += 3 + for i < len(t) && t[i] == '`' { + i++ + } + break + } + writeUntil(i) + w.WriteRune('“') + i += 2 + wrote = i + case strings.HasPrefix(t, "''"): + writeUntil(i) + w.WriteRune('”') + i += 2 + wrote = i + default: + i++ + } + } + flush(len(s)) + return out +} + +// autoURL checks whether s begins with a URL that should be hyperlinked. +// If so, it returns the URL, which is a prefix of s, and ok == true. +// Otherwise it returns "", false. +// The caller should skip over the first len(url) bytes of s +// before further processing. +func autoURL(s string) (url string, ok bool) { + // Find the ://. Fast path to pick off non-URL, + // since we call this at every position in the string. + // The shortest possible URL is ftp://x, 7 bytes. + var i int + switch { + case len(s) < 7: + return "", false + case s[3] == ':': + i = 3 + case s[4] == ':': + i = 4 + case s[5] == ':': + i = 5 + case s[6] == ':': + i = 6 + default: + return "", false + } + if i+3 > len(s) || s[i:i+3] != "://" { + return "", false + } + + // Check valid scheme. + if !isScheme(s[:i]) { + return "", false + } + + // Scan host part. Must have at least one byte, + // and must start and end in non-punctuation. + i += 3 + if i >= len(s) || !isHost(s[i]) || isPunct(s[i]) { + return "", false + } + i++ + end := i + for i < len(s) && isHost(s[i]) { + if !isPunct(s[i]) { + end = i + 1 + } + i++ + } + i = end + + // At this point we are definitely returning a URL (scheme://host). + // We just have to find the longest path we can add to it. + // Heuristics abound. + // We allow parens, braces, and brackets, + // but only if they match (#5043, #22285). + // We allow .,:;?! in the path but not at the end, + // to avoid end-of-sentence punctuation (#18139, #16565). 
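The isHost, isPunct and isPath helpers below classify bytes with a 128-bit bitmap, so a membership test is a couple of shifts and masks rather than a loop or a lookup table. The masks differ per helper; this standalone sketch shows the technique with an illustrative allowed-set, not the exact upstream constants.

	package main

	import "fmt"

	// allowed reports whether c is in the illustrative set [a-z0-9-._~].
	// The 128-bit bitmap is an untyped constant; its low and high 64 bits
	// are tested separately. For c >= 128 both shifts yield zero,
	// so the function returns false.
	func allowed(c byte) bool {
		const mask = 0 |
			(1<<26-1)<<'a' | // 'a' through 'z'
			(1<<10-1)<<'0' | // '0' through '9'
			1<<'-' | 1<<'.' | 1<<'_' | 1<<'~'

		return ((uint64(1)<<c)&(mask&(1<<64-1)) |
			(uint64(1)<<(c-64))&(mask>>64)) != 0
	}

	func main() {
		fmt.Println(allowed('x'), allowed('~'), allowed(' ')) // true true false
	}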
+ stk := []byte{} + end = i +Path: + for ; i < len(s); i++ { + if isPunct(s[i]) { + continue + } + if !isPath(s[i]) { + break + } + switch s[i] { + case '(': + stk = append(stk, ')') + case '{': + stk = append(stk, '}') + case '[': + stk = append(stk, ']') + case ')', '}', ']': + if len(stk) == 0 || stk[len(stk)-1] != s[i] { + break Path + } + stk = stk[:len(stk)-1] + } + if len(stk) == 0 { + end = i + 1 + } + } + + return s[:end], true +} + +// isScheme reports whether s is a recognized URL scheme. +// Note that if strings of new length (beyond 3-7) +// are added here, the fast path at the top of autoURL will need updating. +func isScheme(s string) bool { + switch s { + case "file", + "ftp", + "gopher", + "http", + "https", + "mailto", + "nntp": + return true + } + return false +} + +// isHost reports whether c is a byte that can appear in a URL host, +// like www.example.com or user@[::1]:8080 +func isHost(c byte) bool { + // mask is a 128-bit bitmap with 1s for allowed bytes, + // so that the byte c can be tested with a shift and an and. + // If c > 128, then 1<>64)) != 0 +} + +// isPunct reports whether c is a punctuation byte that can appear +// inside a path but not at the end. +func isPunct(c byte) bool { + // mask is a 128-bit bitmap with 1s for allowed bytes, + // so that the byte c can be tested with a shift and an and. + // If c > 128, then 1<>64)) != 0 +} + +// isPath reports whether c is a (non-punctuation) path byte. +func isPath(c byte) bool { + // mask is a 128-bit bitmap with 1s for allowed bytes, + // so that the byte c can be tested with a shift and an and. + // If c > 128, then 1<>64)) != 0 +} + +// isName reports whether s is a capitalized Go identifier (like Name). +func isName(s string) bool { + t, ok := ident(s) + if !ok || t != s { + return false + } + r, _ := utf8.DecodeRuneInString(s) + return unicode.IsUpper(r) +} + +// ident checks whether s begins with a Go identifier. +// If so, it returns the identifier, which is a prefix of s, and ok == true. +// Otherwise it returns "", false. +// The caller should skip over the first len(id) bytes of s +// before further processing. +func ident(s string) (id string, ok bool) { + // Scan [\pL_][\pL_0-9]* + n := 0 + for n < len(s) { + if c := s[n]; c < utf8.RuneSelf { + if isIdentASCII(c) && (n > 0 || c < '0' || c > '9') { + n++ + continue + } + break + } + r, nr := utf8.DecodeRuneInString(s[n:]) + if unicode.IsLetter(r) { + n += nr + continue + } + break + } + return s[:n], n > 0 +} + +// isIdentASCII reports whether c is an ASCII identifier byte. +func isIdentASCII(c byte) bool { + // mask is a 128-bit bitmap with 1s for allowed bytes, + // so that the byte c can be tested with a shift and an and. + // If c > 128, then 1<>64)) != 0 +} + +// validImportPath reports whether path is a valid import path. +// It is a lightly edited copy of golang.org/x/mod/module.CheckImportPath. +func validImportPath(path string) bool { + if !utf8.ValidString(path) { + return false + } + if path == "" { + return false + } + if path[0] == '-' { + return false + } + if strings.Contains(path, "//") { + return false + } + if path[len(path)-1] == '/' { + return false + } + elemStart := 0 + for i, r := range path { + if r == '/' { + if !validImportPathElem(path[elemStart:i]) { + return false + } + elemStart = i + 1 + } + } + return validImportPathElem(path[elemStart:]) +} + +func validImportPathElem(elem string) bool { + if elem == "" || elem[0] == '.' || elem[len(elem)-1] == '.' 
{ + return false + } + for i := 0; i < len(elem); i++ { + if !importPathOK(elem[i]) { + return false + } + } + return true +} + +func importPathOK(c byte) bool { + // mask is a 128-bit bitmap with 1s for allowed bytes, + // so that the byte c can be tested with a shift and an and. + // If c > 128, then 1<>64)) != 0 +} diff --git a/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/print.go b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/print.go new file mode 100644 index 0000000000..4e9da3d1e8 --- /dev/null +++ b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/print.go @@ -0,0 +1,290 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package comment + +import ( + "bytes" + "fmt" + "strings" +) + +// A Printer is a doc comment printer. +// The fields in the struct can be filled in before calling +// any of the printing methods +// in order to customize the details of the printing process. +type Printer struct { + // HeadingLevel is the nesting level used for + // HTML and Markdown headings. + // If HeadingLevel is zero, it defaults to level 3, + // meaning to use
<h3>
and ###. + HeadingLevel int + + // HeadingID is a function that computes the heading ID + // (anchor tag) to use for the heading h when generating + // HTML and Markdown. If HeadingID returns an empty string, + // then the heading ID is omitted. + // If HeadingID is nil, h.DefaultID is used. + HeadingID func(h *Heading) string + + // DocLinkURL is a function that computes the URL for the given DocLink. + // If DocLinkURL is nil, then link.DefaultURL(p.DocLinkBaseURL) is used. + DocLinkURL func(link *DocLink) string + + // DocLinkBaseURL is used when DocLinkURL is nil, + // passed to [DocLink.DefaultURL] to construct a DocLink's URL. + // See that method's documentation for details. + DocLinkBaseURL string + + // TextPrefix is a prefix to print at the start of every line + // when generating text output using the Text method. + TextPrefix string + + // TextCodePrefix is the prefix to print at the start of each + // preformatted (code block) line when generating text output, + // instead of (not in addition to) TextPrefix. + // If TextCodePrefix is the empty string, it defaults to TextPrefix+"\t". + TextCodePrefix string + + // TextWidth is the maximum width text line to generate, + // measured in Unicode code points, + // excluding TextPrefix and the newline character. + // If TextWidth is zero, it defaults to 80 minus the number of code points in TextPrefix. + // If TextWidth is negative, there is no limit. + TextWidth int +} + +func (p *Printer) headingLevel() int { + if p.HeadingLevel <= 0 { + return 3 + } + return p.HeadingLevel +} + +func (p *Printer) headingID(h *Heading) string { + if p.HeadingID == nil { + return h.DefaultID() + } + return p.HeadingID(h) +} + +func (p *Printer) docLinkURL(link *DocLink) string { + if p.DocLinkURL != nil { + return p.DocLinkURL(link) + } + return link.DefaultURL(p.DocLinkBaseURL) +} + +// DefaultURL constructs and returns the documentation URL for l, +// using baseURL as a prefix for links to other packages. +// +// The possible forms returned by DefaultURL are: +// - baseURL/ImportPath, for a link to another package +// - baseURL/ImportPath#Name, for a link to a const, func, type, or var in another package +// - baseURL/ImportPath#Recv.Name, for a link to a method in another package +// - #Name, for a link to a const, func, type, or var in this package +// - #Recv.Name, for a link to a method in this package +// +// If baseURL ends in a trailing slash, then DefaultURL inserts +// a slash between ImportPath and # in the anchored forms. +// For example, here are some baseURL values and URLs they can generate: +// +// "/pkg/" → "/pkg/math/#Sqrt" +// "/pkg" → "/pkg/math#Sqrt" +// "/" → "/math/#Sqrt" +// "" → "/math#Sqrt" +func (l *DocLink) DefaultURL(baseURL string) string { + if l.ImportPath != "" { + slash := "" + if strings.HasSuffix(baseURL, "/") { + slash = "/" + } else { + baseURL += "/" + } + switch { + case l.Name == "": + return baseURL + l.ImportPath + slash + case l.Recv != "": + return baseURL + l.ImportPath + slash + "#" + l.Recv + "." + l.Name + default: + return baseURL + l.ImportPath + slash + "#" + l.Name + } + } + if l.Recv != "" { + return "#" + l.Recv + "." + l.Name + } + return "#" + l.Name +} + +// DefaultID returns the default anchor ID for the heading h. +// +// The default anchor ID is constructed by converting every +// rune that is not alphanumeric ASCII to an underscore +// and then adding the prefix “hdr-”. +// For example, if the heading text is “Go Doc Comments”, +// the default ID is “hdr-Go_Doc_Comments”. 
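A small, self-contained check of the URL forms documented above, written against the standard go/doc/comment package that this file mirrors:

	package main

	import (
		"fmt"
		"go/doc/comment"
	)

	func main() {
		l := &comment.DocLink{ImportPath: "math", Name: "Sqrt"}
		fmt.Println(l.DefaultURL("https://pkg.go.dev/")) // https://pkg.go.dev/math/#Sqrt
		fmt.Println(l.DefaultURL(""))                    // /math#Sqrt

		m := &comment.DocLink{Recv: "Buffer", Name: "Write"} // a link within the current package
		fmt.Println(m.DefaultURL("https://pkg.go.dev/"))     // #Buffer.Write
	}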
+func (h *Heading) DefaultID() string { + // Note: The “hdr-” prefix is important to avoid DOM clobbering attacks. + // See https://pkg.go.dev/github.com/google/safehtml#Identifier. + var out strings.Builder + var p textPrinter + p.oneLongLine(&out, h.Text) + s := strings.TrimSpace(out.String()) + if s == "" { + return "" + } + out.Reset() + out.WriteString("hdr-") + for _, r := range s { + if r < 0x80 && isIdentASCII(byte(r)) { + out.WriteByte(byte(r)) + } else { + out.WriteByte('_') + } + } + return out.String() +} + +type commentPrinter struct { + *Printer + headingPrefix string + needDoc map[string]bool +} + +// Comment returns the standard Go formatting of the Doc, +// without any comment markers. +func (p *Printer) Comment(d *Doc) []byte { + cp := &commentPrinter{Printer: p} + var out bytes.Buffer + for i, x := range d.Content { + if i > 0 && blankBefore(x) { + out.WriteString("\n") + } + cp.block(&out, x) + } + + // Print one block containing all the link definitions that were used, + // and then a second block containing all the unused ones. + // This makes it easy to clean up the unused ones: gofmt and + // delete the final block. And it's a nice visual signal without + // affecting the way the comment formats for users. + for i := 0; i < 2; i++ { + used := i == 0 + first := true + for _, def := range d.Links { + if def.Used == used { + if first { + out.WriteString("\n") + first = false + } + out.WriteString("[") + out.WriteString(def.Text) + out.WriteString("]: ") + out.WriteString(def.URL) + out.WriteString("\n") + } + } + } + + return out.Bytes() +} + +// blankBefore reports whether the block x requires a blank line before it. +// All blocks do, except for Lists that return false from x.BlankBefore(). +func blankBefore(x Block) bool { + if x, ok := x.(*List); ok { + return x.BlankBefore() + } + return true +} + +// block prints the block x to out. +func (p *commentPrinter) block(out *bytes.Buffer, x Block) { + switch x := x.(type) { + default: + fmt.Fprintf(out, "?%T", x) + + case *Paragraph: + p.text(out, "", x.Text) + out.WriteString("\n") + + case *Heading: + out.WriteString("# ") + p.text(out, "", x.Text) + out.WriteString("\n") + + case *Code: + md := x.Text + for md != "" { + var line string + line, md, _ = strings.Cut(md, "\n") + if line != "" { + out.WriteString("\t") + out.WriteString(line) + } + out.WriteString("\n") + } + + case *List: + loose := x.BlankBetween() + for i, item := range x.Items { + if i > 0 && loose { + out.WriteString("\n") + } + out.WriteString(" ") + if item.Number == "" { + out.WriteString(" - ") + } else { + out.WriteString(item.Number) + out.WriteString(". ") + } + for i, blk := range item.Content { + const fourSpace = " " + if i > 0 { + out.WriteString("\n" + fourSpace) + } + p.text(out, fourSpace, blk.(*Paragraph).Text) + out.WriteString("\n") + } + } + } +} + +// text prints the text sequence x to out. +func (p *commentPrinter) text(out *bytes.Buffer, indent string, x []Text) { + for _, t := range x { + switch t := t.(type) { + case Plain: + p.indent(out, indent, string(t)) + case Italic: + p.indent(out, indent, string(t)) + case *Link: + if t.Auto { + p.text(out, indent, t.Text) + } else { + out.WriteString("[") + p.text(out, indent, t.Text) + out.WriteString("]") + } + case *DocLink: + out.WriteString("[") + p.text(out, indent, t.Text) + out.WriteString("]") + } + } +} + +// indent prints s to out, indenting with the indent string +// after each newline in s. 
+func (p *commentPrinter) indent(out *bytes.Buffer, indent, s string) { + for s != "" { + line, rest, ok := strings.Cut(s, "\n") + out.WriteString(line) + if ok { + out.WriteString("\n") + out.WriteString(indent) + } + s = rest + } +} diff --git a/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/std.go b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/std.go new file mode 100644 index 0000000000..6786e4d7a1 --- /dev/null +++ b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/std.go @@ -0,0 +1,44 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by 'go generate' DO NOT EDIT. +//disabled go:generate ./mkstd.sh + +package comment + +var stdPkgs = []string{ + "bufio", + "bytes", + "context", + "crypto", + "embed", + "encoding", + "errors", + "expvar", + "flag", + "fmt", + "hash", + "html", + "image", + "io", + "log", + "math", + "mime", + "net", + "os", + "path", + "plugin", + "reflect", + "regexp", + "runtime", + "sort", + "strconv", + "strings", + "sync", + "syscall", + "testing", + "time", + "unicode", + "unsafe", +} diff --git a/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/text.go b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/text.go new file mode 100644 index 0000000000..6f9c2e201d --- /dev/null +++ b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/doc/comment/text.go @@ -0,0 +1,337 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package comment + +import ( + "bytes" + "fmt" + "sort" + "strings" + "unicode/utf8" +) + +// A textPrinter holds the state needed for printing a Doc as plain text. +type textPrinter struct { + *Printer + long strings.Builder + prefix string + codePrefix string + width int +} + +// Text returns a textual formatting of the Doc. +// See the [Printer] documentation for ways to customize the text output. +func (p *Printer) Text(d *Doc) []byte { + tp := &textPrinter{ + Printer: p, + prefix: p.TextPrefix, + codePrefix: p.TextCodePrefix, + width: p.TextWidth, + } + if tp.codePrefix == "" { + tp.codePrefix = p.TextPrefix + "\t" + } + if tp.width == 0 { + tp.width = 80 - utf8.RuneCountInString(tp.prefix) + } + + var out bytes.Buffer + for i, x := range d.Content { + if i > 0 && blankBefore(x) { + out.WriteString(tp.prefix) + writeNL(&out) + } + tp.block(&out, x) + } + anyUsed := false + for _, def := range d.Links { + if def.Used { + anyUsed = true + break + } + } + if anyUsed { + writeNL(&out) + for _, def := range d.Links { + if def.Used { + fmt.Fprintf(&out, "[%s]: %s\n", def.Text, def.URL) + } + } + } + return out.Bytes() +} + +// writeNL calls out.WriteByte('\n') +// but first trims trailing spaces on the previous line. +func writeNL(out *bytes.Buffer) { + // Trim trailing spaces. + data := out.Bytes() + n := 0 + for n < len(data) && (data[len(data)-n-1] == ' ' || data[len(data)-n-1] == '\t') { + n++ + } + if n > 0 { + out.Truncate(len(data) - n) + } + out.WriteByte('\n') +} + +// block prints the block x to out. 
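To make the text-mode knobs above concrete, here is an assumed round trip through Parse and Text (standard go/doc/comment; the comment text is invented):

	package main

	import (
		"fmt"
		"go/doc/comment"
	)

	func main() {
		var p comment.Parser
		d := p.Parse("Deprecated: use the v2 client instead, which retries transient errors and is safe for concurrent use.")

		pr := &comment.Printer{
			TextPrefix: "// ", // every non-code output line starts with "// "
			TextWidth:  40,    // wrap at roughly 40 runes, not counting the prefix
		}
		fmt.Printf("%s", pr.Text(d))
	}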
+func (p *textPrinter) block(out *bytes.Buffer, x Block) { + switch x := x.(type) { + default: + fmt.Fprintf(out, "?%T\n", x) + + case *Paragraph: + out.WriteString(p.prefix) + p.text(out, "", x.Text) + + case *Heading: + out.WriteString(p.prefix) + out.WriteString("# ") + p.text(out, "", x.Text) + + case *Code: + text := x.Text + for text != "" { + var line string + line, text, _ = strings.Cut(text, "\n") + if line != "" { + out.WriteString(p.codePrefix) + out.WriteString(line) + } + writeNL(out) + } + + case *List: + loose := x.BlankBetween() + for i, item := range x.Items { + if i > 0 && loose { + out.WriteString(p.prefix) + writeNL(out) + } + out.WriteString(p.prefix) + out.WriteString(" ") + if item.Number == "" { + out.WriteString(" - ") + } else { + out.WriteString(item.Number) + out.WriteString(". ") + } + for i, blk := range item.Content { + const fourSpace = " " + if i > 0 { + writeNL(out) + out.WriteString(p.prefix) + out.WriteString(fourSpace) + } + p.text(out, fourSpace, blk.(*Paragraph).Text) + } + } + } +} + +// text prints the text sequence x to out. +func (p *textPrinter) text(out *bytes.Buffer, indent string, x []Text) { + p.oneLongLine(&p.long, x) + words := strings.Fields(p.long.String()) + p.long.Reset() + + var seq []int + if p.width < 0 || len(words) == 0 { + seq = []int{0, len(words)} // one long line + } else { + seq = wrap(words, p.width-utf8.RuneCountInString(indent)) + } + for i := 0; i+1 < len(seq); i++ { + if i > 0 { + out.WriteString(p.prefix) + out.WriteString(indent) + } + for j, w := range words[seq[i]:seq[i+1]] { + if j > 0 { + out.WriteString(" ") + } + out.WriteString(w) + } + writeNL(out) + } +} + +// oneLongLine prints the text sequence x to out as one long line, +// without worrying about line wrapping. +// Explicit links have the [ ] dropped to improve readability. +func (p *textPrinter) oneLongLine(out *strings.Builder, x []Text) { + for _, t := range x { + switch t := t.(type) { + case Plain: + out.WriteString(string(t)) + case Italic: + out.WriteString(string(t)) + case *Link: + p.oneLongLine(out, t.Text) + case *DocLink: + p.oneLongLine(out, t.Text) + } + } +} + +// wrap wraps words into lines of at most max runes, +// minimizing the sum of the squares of the leftover lengths +// at the end of each line (except the last, of course), +// with a preference for ending lines at punctuation (.,:;). +// +// The returned slice gives the indexes of the first words +// on each line in the wrapped text with a final entry of len(words). +// Thus the lines are words[seq[0]:seq[1]], words[seq[1]:seq[2]], +// ..., words[seq[len(seq)-2]:seq[len(seq)-1]]. +// +// The implementation runs in O(n log n) time, where n = len(words), +// using the algorithm described in D. S. Hirschberg and L. L. Larmore, +// “[The least weight subsequence problem],” FOCS 1985, pp. 137-143. +// +// [The least weight subsequence problem]: https://doi.org/10.1109/SFCS.1985.60 +func wrap(words []string, max int) (seq []int) { + // The algorithm requires that our scoring function be concave, + // meaning that for all i₀ ≤ i₁ < j₀ ≤ j₁, + // weight(i₀, j₀) + weight(i₁, j₁) ≤ weight(i₀, j₁) + weight(i₁, j₀). + // + // Our weights are two-element pairs [hi, lo] + // ordered by elementwise comparison. + // The hi entry counts the weight for lines that are longer than max, + // and the lo entry counts the weight for lines that are not. + // This forces the algorithm to first minimize the number of lines + // that are longer than max, which correspond to lines with + // single very long words. 
Having done that, it can move on to + // minimizing the lo score, which is more interesting. + // + // The lo score is the sum for each line of the square of the + // number of spaces remaining at the end of the line and a + // penalty of 64 given out for not ending the line in a + // punctuation character (.,:;). + // The penalty is somewhat arbitrarily chosen by trying + // different amounts and judging how nice the wrapped text looks. + // Roughly speaking, using 64 means that we are willing to + // end a line with eight blank spaces in order to end at a + // punctuation character, even if the next word would fit in + // those spaces. + // + // We care about ending in punctuation characters because + // it makes the text easier to skim if not too many sentences + // or phrases begin with a single word on the previous line. + + // A score is the score (also called weight) for a given line. + // add and cmp add and compare scores. + type score struct { + hi int64 + lo int64 + } + add := func(s, t score) score { return score{s.hi + t.hi, s.lo + t.lo} } + cmp := func(s, t score) int { + switch { + case s.hi < t.hi: + return -1 + case s.hi > t.hi: + return +1 + case s.lo < t.lo: + return -1 + case s.lo > t.lo: + return +1 + } + return 0 + } + + // total[j] is the total number of runes + // (including separating spaces) in words[:j]. + total := make([]int, len(words)+1) + total[0] = 0 + for i, s := range words { + total[1+i] = total[i] + utf8.RuneCountInString(s) + 1 + } + + // weight returns weight(i, j). + weight := func(i, j int) score { + // On the last line, there is zero weight for being too short. + n := total[j] - 1 - total[i] + if j == len(words) && n <= max { + return score{0, 0} + } + + // Otherwise the weight is the penalty plus the square of the number of + // characters remaining on the line or by which the line goes over. + // In the latter case, that value goes in the hi part of the score. + // (See note above.) + p := wrapPenalty(words[j-1]) + v := int64(max-n) * int64(max-n) + if n > max { + return score{v, p} + } + return score{0, v + p} + } + + // The rest of this function is “The Basic Algorithm” from + // Hirschberg and Larmore's conference paper, + // using the same names as in the paper. + f := []score{{0, 0}} + g := func(i, j int) score { return add(f[i], weight(i, j)) } + + bridge := func(a, b, c int) bool { + k := c + sort.Search(len(words)+1-c, func(k int) bool { + k += c + return cmp(g(a, k), g(b, k)) > 0 + }) + if k > len(words) { + return true + } + return cmp(g(c, k), g(b, k)) <= 0 + } + + // d is a one-ended deque implemented as a slice. + d := make([]int, 1, len(words)) + d[0] = 0 + bestleft := make([]int, 1, len(words)) + bestleft[0] = -1 + for m := 1; m < len(words); m++ { + f = append(f, g(d[0], m)) + bestleft = append(bestleft, d[0]) + for len(d) > 1 && cmp(g(d[1], m+1), g(d[0], m+1)) <= 0 { + d = d[1:] // “Retire” + } + for len(d) > 1 && bridge(d[len(d)-2], d[len(d)-1], m) { + d = d[:len(d)-1] // “Fire” + } + if cmp(g(m, len(words)), g(d[len(d)-1], len(words))) < 0 { + d = append(d, m) // “Hire” + // The next few lines are not in the paper but are necessary + // to handle two-word inputs correctly. It appears to be + // just a bug in the paper's pseudocode. + if len(d) == 2 && cmp(g(d[1], m+1), g(d[0], m+1)) <= 0 { + d = d[1:] + } + } + } + bestleft = append(bestleft, d[0]) + + // Recover least weight sequence from bestleft. 
+ n := 1 + for m := len(words); m > 0; m = bestleft[m] { + n++ + } + seq = make([]int, n) + for m := len(words); m > 0; m = bestleft[m] { + n-- + seq[n] = m + } + return seq +} + +// wrapPenalty is the penalty for inserting a line break after word s. +func wrapPenalty(s string) int64 { + switch s[len(s)-1] { + case '.', ',', ':', ';': + return 0 + } + return 64 +} diff --git a/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/format/format.go b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/format/format.go new file mode 100644 index 0000000000..5540686ed0 --- /dev/null +++ b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/format/format.go @@ -0,0 +1,134 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package format implements standard formatting of Go source. +// +// Note that formatting of Go source code changes over time, so tools relying on +// consistent formatting should execute a specific version of the gofmt binary +// instead of using this package. That way, the formatting will be stable, and +// the tools won't need to be recompiled each time gofmt changes. +// +// For example, pre-submit checks that use this package directly would behave +// differently depending on what Go version each developer uses, causing the +// check to be inherently fragile. +package format + +import ( + "bytes" + "fmt" + "go/ast" + "go/parser" + "go/token" + "io" + + "mvdan.cc/gofumpt/internal/govendor/go/printer" +) + +// Keep these in sync with cmd/gofmt/gofmt.go. +const ( + tabWidth = 8 + printerMode = printer.UseSpaces | printer.TabIndent | printerNormalizeNumbers + + // printerNormalizeNumbers means to canonicalize number literal prefixes + // and exponents while printing. See https://golang.org/doc/go1.13#gofmt. + // + // This value is defined in mvdan.cc/gofumpt/internal/govendor/go/printer specifically for mvdan.cc/gofumpt/internal/govendor/go/format and cmd/gofmt. + printerNormalizeNumbers = 1 << 30 +) + +var config = printer.Config{Mode: printerMode, Tabwidth: tabWidth} + +const parserMode = parser.ParseComments | parser.SkipObjectResolution + +// Node formats node in canonical gofmt style and writes the result to dst. +// +// The node type must be *ast.File, *printer.CommentedNode, []ast.Decl, +// []ast.Stmt, or assignment-compatible to ast.Expr, ast.Decl, ast.Spec, +// or ast.Stmt. Node does not modify node. Imports are not sorted for +// nodes representing partial source files (for instance, if the node is +// not an *ast.File or a *printer.CommentedNode not wrapping an *ast.File). +// +// The function may return early (before the entire result is written) +// and return a formatting error, for instance due to an incorrect AST. +func Node(dst io.Writer, fset *token.FileSet, node any) error { + // Determine if we have a complete source file (file != nil). + var file *ast.File + var cnode *printer.CommentedNode + switch n := node.(type) { + case *ast.File: + file = n + case *printer.CommentedNode: + if f, ok := n.Node.(*ast.File); ok { + file = f + cnode = n + } + } + + // Sort imports if necessary. + if file != nil && hasUnsortedImports(file) { + // Make a copy of the AST because ast.SortImports is destructive. + // TODO(gri) Do this more efficiently. + var buf bytes.Buffer + err := config.Fprint(&buf, fset, file) + if err != nil { + return err + } + file, err = parser.ParseFile(fset, "", buf.Bytes(), parserMode) + if err != nil { + // We should never get here. 
If we do, provide good diagnostic. + return fmt.Errorf("format.Node internal error (%s)", err) + } + ast.SortImports(fset, file) + + // Use new file with sorted imports. + node = file + if cnode != nil { + node = &printer.CommentedNode{Node: file, Comments: cnode.Comments} + } + } + + return config.Fprint(dst, fset, node) +} + +// Source formats src in canonical gofmt style and returns the result +// or an (I/O or syntax) error. src is expected to be a syntactically +// correct Go source file, or a list of Go declarations or statements. +// +// If src is a partial source file, the leading and trailing space of src +// is applied to the result (such that it has the same leading and trailing +// space as src), and the result is indented by the same amount as the first +// line of src containing code. Imports are not sorted for partial source files. +func Source(src []byte) ([]byte, error) { + fset := token.NewFileSet() + file, sourceAdj, indentAdj, err := parse(fset, "", src, true) + if err != nil { + return nil, err + } + + if sourceAdj == nil { + // Complete source file. + // TODO(gri) consider doing this always. + ast.SortImports(fset, file) + } + + return format(fset, file, sourceAdj, indentAdj, src, config) +} + +func hasUnsortedImports(file *ast.File) bool { + for _, d := range file.Decls { + d, ok := d.(*ast.GenDecl) + if !ok || d.Tok != token.IMPORT { + // Not an import declaration, so we're done. + // Imports are always first. + return false + } + if d.Lparen.IsValid() { + // For now assume all grouped imports are unsorted. + // TODO(gri) Should check if they are sorted already. + return true + } + // Ungrouped imports are sorted by default. + } + return false +} diff --git a/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/format/internal.go b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/format/internal.go new file mode 100644 index 0000000000..df03587143 --- /dev/null +++ b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/format/internal.go @@ -0,0 +1,177 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// TODO(gri): This file and the file src/cmd/gofmt/internal.go are +// the same (but for this comment and the package name). Do not modify +// one without the other. Determine if we can factor out functionality +// in a public API. See also #11844 for context. + +package format + +import ( + "bytes" + "go/ast" + "go/parser" + "go/token" + "strings" + + "mvdan.cc/gofumpt/internal/govendor/go/printer" +) + +// parse parses src, which was read from the named file, +// as a Go source file, declaration, or statement list. +func parse(fset *token.FileSet, filename string, src []byte, fragmentOk bool) ( + file *ast.File, + sourceAdj func(src []byte, indent int) []byte, + indentAdj int, + err error, +) { + // Try as whole source file. + file, err = parser.ParseFile(fset, filename, src, parserMode) + // If there's no error, return. If the error is that the source file didn't begin with a + // package line and source fragments are ok, fall through to + // try as a source fragment. Stop and return on any other error. + if err == nil || !fragmentOk || !strings.Contains(err.Error(), "expected 'package'") { + return + } + + // If this is a declaration list, make it a source file + // by inserting a package clause. + // Insert using a ';', not a newline, so that the line numbers + // in psrc match the ones in src. + psrc := append([]byte("package p;"), src...) 
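For reference, the exported Source helper above behaves like the standard library's go/format.Source, which this vendored package shadows; a minimal use with an invented input:

	package main

	import (
		"fmt"
		"go/format"
		"log"
	)

	func main() {
		src := []byte("package demo\nfunc  add(a ,b int)int{ return a+b }")
		out, err := format.Source(src)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%s", out)
		// Prints the gofmt-normalized source:
		// package demo
		//
		// func add(a, b int) int { return a + b }
	}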
+ file, err = parser.ParseFile(fset, filename, psrc, parserMode) + if err == nil { + sourceAdj = func(src []byte, indent int) []byte { + // Remove the package clause. + // Gofmt has turned the ';' into a '\n'. + src = src[indent+len("package p\n"):] + return bytes.TrimSpace(src) + } + return + } + // If the error is that the source file didn't begin with a + // declaration, fall through to try as a statement list. + // Stop and return on any other error. + if !strings.Contains(err.Error(), "expected declaration") { + return + } + + // If this is a statement list, make it a source file + // by inserting a package clause and turning the list + // into a function body. This handles expressions too. + // Insert using a ';', not a newline, so that the line numbers + // in fsrc match the ones in src. Add an extra '\n' before the '}' + // to make sure comments are flushed before the '}'. + fsrc := append(append([]byte("package p; func _() {"), src...), '\n', '\n', '}') + file, err = parser.ParseFile(fset, filename, fsrc, parserMode) + if err == nil { + sourceAdj = func(src []byte, indent int) []byte { + // Cap adjusted indent to zero. + if indent < 0 { + indent = 0 + } + // Remove the wrapping. + // Gofmt has turned the "; " into a "\n\n". + // There will be two non-blank lines with indent, hence 2*indent. + src = src[2*indent+len("package p\n\nfunc _() {"):] + // Remove only the "}\n" suffix: remaining whitespaces will be trimmed anyway + src = src[:len(src)-len("}\n")] + return bytes.TrimSpace(src) + } + // Gofmt has also indented the function body one level. + // Adjust that with indentAdj. + indentAdj = -1 + } + + // Succeeded, or out of options. + return +} + +// format formats the given package file originally obtained from src +// and adjusts the result based on the original source via sourceAdj +// and indentAdj. +func format( + fset *token.FileSet, + file *ast.File, + sourceAdj func(src []byte, indent int) []byte, + indentAdj int, + src []byte, + cfg printer.Config, +) ([]byte, error) { + if sourceAdj == nil { + // Complete source file. + var buf bytes.Buffer + err := cfg.Fprint(&buf, fset, file) + if err != nil { + return nil, err + } + return buf.Bytes(), nil + } + + // Partial source file. + // Determine and prepend leading space. + i, j := 0, 0 + for j < len(src) && isSpace(src[j]) { + if src[j] == '\n' { + i = j + 1 // byte offset of last line in leading space + } + j++ + } + var res []byte + res = append(res, src[:i]...) + + // Determine and prepend indentation of first code line. + // Spaces are ignored unless there are no tabs, + // in which case spaces count as one tab. + indent := 0 + hasSpace := false + for _, b := range src[i:j] { + switch b { + case ' ': + hasSpace = true + case '\t': + indent++ + } + } + if indent == 0 && hasSpace { + indent = 1 + } + for i := 0; i < indent; i++ { + res = append(res, '\t') + } + + // Format the source. + // Write it without any leading and trailing space. + cfg.Indent = indent + indentAdj + var buf bytes.Buffer + err := cfg.Fprint(&buf, fset, file) + if err != nil { + return nil, err + } + out := sourceAdj(buf.Bytes(), cfg.Indent) + + // If the adjusted output is empty, the source + // was empty but (possibly) for white space. + // The result is the incoming source. + if len(out) == 0 { + return src, nil + } + + // Otherwise, append output to leading space. + res = append(res, out...) + + // Determine and append trailing space. 
+ i = len(src) + for i > 0 && isSpace(src[i-1]) { + i-- + } + return append(res, src[i:]...), nil +} + +// isSpace reports whether the byte is a space character. +// isSpace defines a space as being among the following bytes: ' ', '\t', '\n' and '\r'. +func isSpace(b byte) bool { + return b == ' ' || b == '\t' || b == '\n' || b == '\r' +} diff --git a/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/comment.go b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/comment.go new file mode 100644 index 0000000000..5b9775dc84 --- /dev/null +++ b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/comment.go @@ -0,0 +1,155 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package printer + +import ( + "go/ast" + "strings" + + "mvdan.cc/gofumpt/internal/govendor/go/doc/comment" +) + +// formatDocComment reformats the doc comment list, +// returning the canonical formatting. +func formatDocComment(list []*ast.Comment) []*ast.Comment { + // Extract comment text (removing comment markers). + var kind, text string + var directives []*ast.Comment + if len(list) == 1 && strings.HasPrefix(list[0].Text, "/*") { + kind = "/*" + text = list[0].Text + if !strings.Contains(text, "\n") || allStars(text) { + // Single-line /* .. */ comment in doc comment position, + // or multiline old-style comment like + // /* + // * Comment + // * text here. + // */ + // Should not happen, since it will not work well as a + // doc comment, but if it does, just ignore: + // reformatting it will only make the situation worse. + return list + } + text = text[2 : len(text)-2] // cut /* and */ + } else if strings.HasPrefix(list[0].Text, "//") { + kind = "//" + var b strings.Builder + for _, c := range list { + if !strings.HasPrefix(c.Text, "//") { + return list + } + // Accumulate //go:build etc lines separately. + if isDirective(c.Text[2:]) { + directives = append(directives, c) + continue + } + b.WriteString(strings.TrimPrefix(c.Text[2:], " ")) + b.WriteString("\n") + } + text = b.String() + } else { + // Not sure what this is, so leave alone. + return list + } + + if text == "" { + return list + } + + // Parse comment and reformat as text. + var p comment.Parser + d := p.Parse(text) + + var pr comment.Printer + text = string(pr.Comment(d)) + + // For /* */ comment, return one big comment with text inside. + slash := list[0].Slash + if kind == "/*" { + c := &ast.Comment{ + Slash: slash, + Text: "/*\n" + text + "*/", + } + return []*ast.Comment{c} + } + + // For // comment, return sequence of // lines. + var out []*ast.Comment + for text != "" { + var line string + line, text, _ = strings.Cut(text, "\n") + if line == "" { + line = "//" + } else if strings.HasPrefix(line, "\t") { + line = "//" + line + } else { + line = "// " + line + } + out = append(out, &ast.Comment{ + Slash: slash, + Text: line, + }) + } + if len(directives) > 0 { + out = append(out, &ast.Comment{ + Slash: slash, + Text: "//", + }) + for _, c := range directives { + out = append(out, &ast.Comment{ + Slash: slash, + Text: c.Text, + }) + } + } + return out +} + +// isDirective reports whether c is a comment directive. +// See go.dev/issue/37974. +// This code is also in go/ast. +func isDirective(c string) bool { + // "//line " is a line directive. + // "//extern " is for gccgo. + // "//export " is for cgo. + // (The // has been removed.) 
+ if strings.HasPrefix(c, "line ") || strings.HasPrefix(c, "extern ") || strings.HasPrefix(c, "export ") { + return true + } + + // "//[a-z0-9]+:[a-z0-9]" + // (The // has been removed.) + colon := strings.Index(c, ":") + if colon <= 0 || colon+1 >= len(c) { + return false + } + for i := 0; i <= colon+1; i++ { + if i == colon { + continue + } + b := c[i] + if !('a' <= b && b <= 'z' || '0' <= b && b <= '9') { + return false + } + } + return true +} + +// allStars reports whether text is the interior of an +// old-style /* */ comment with a star at the start of each line. +func allStars(text string) bool { + for i := 0; i < len(text); i++ { + if text[i] == '\n' { + j := i + 1 + for j < len(text) && (text[j] == ' ' || text[j] == '\t') { + j++ + } + if j < len(text) && text[j] != '*' { + return false + } + } + } + return true +} diff --git a/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/gobuild.go b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/gobuild.go new file mode 100644 index 0000000000..f00492d077 --- /dev/null +++ b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/gobuild.go @@ -0,0 +1,170 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package printer + +import ( + "go/build/constraint" + "sort" + "text/tabwriter" +) + +func (p *printer) fixGoBuildLines() { + if len(p.goBuild)+len(p.plusBuild) == 0 { + return + } + + // Find latest possible placement of //go:build and // +build comments. + // That's just after the last blank line before we find a non-comment. + // (We'll add another blank line after our comment block.) + // When we start dropping // +build comments, we can skip over /* */ comments too. + // Note that we are processing tabwriter input, so every comment + // begins and ends with a tabwriter.Escape byte. + // And some newlines have turned into \f bytes. + insert := 0 + for pos := 0; ; { + // Skip leading space at beginning of line. + blank := true + for pos < len(p.output) && (p.output[pos] == ' ' || p.output[pos] == '\t') { + pos++ + } + // Skip over // comment if any. + if pos+3 < len(p.output) && p.output[pos] == tabwriter.Escape && p.output[pos+1] == '/' && p.output[pos+2] == '/' { + blank = false + for pos < len(p.output) && !isNL(p.output[pos]) { + pos++ + } + } + // Skip over \n at end of line. + if pos >= len(p.output) || !isNL(p.output[pos]) { + break + } + pos++ + + if blank { + insert = pos + } + } + + // If there is a //go:build comment before the place we identified, + // use that point instead. (Earlier in the file is always fine.) + if len(p.goBuild) > 0 && p.goBuild[0] < insert { + insert = p.goBuild[0] + } else if len(p.plusBuild) > 0 && p.plusBuild[0] < insert { + insert = p.plusBuild[0] + } + + var x constraint.Expr + switch len(p.goBuild) { + case 0: + // Synthesize //go:build expression from // +build lines. + for _, pos := range p.plusBuild { + y, err := constraint.Parse(p.commentTextAt(pos)) + if err != nil { + x = nil + break + } + if x == nil { + x = y + } else { + x = &constraint.AndExpr{X: x, Y: y} + } + } + case 1: + // Parse //go:build expression. + x, _ = constraint.Parse(p.commentTextAt(p.goBuild[0])) + } + + var block []byte + if x == nil { + // Don't have a valid //go:build expression to treat as truth. + // Bring all the lines together but leave them alone. + // Note that these are already tabwriter-escaped. + for _, pos := range p.goBuild { + block = append(block, p.lineAt(pos)...) 
+ } + for _, pos := range p.plusBuild { + block = append(block, p.lineAt(pos)...) + } + } else { + block = append(block, tabwriter.Escape) + block = append(block, "//go:build "...) + block = append(block, x.String()...) + block = append(block, tabwriter.Escape, '\n') + if len(p.plusBuild) > 0 { + lines, err := constraint.PlusBuildLines(x) + if err != nil { + lines = []string{"// +build error: " + err.Error()} + } + for _, line := range lines { + block = append(block, tabwriter.Escape) + block = append(block, line...) + block = append(block, tabwriter.Escape, '\n') + } + } + } + block = append(block, '\n') + + // Build sorted list of lines to delete from remainder of output. + toDelete := append(p.goBuild, p.plusBuild...) + sort.Ints(toDelete) + + // Collect output after insertion point, with lines deleted, into after. + var after []byte + start := insert + for _, end := range toDelete { + if end < start { + continue + } + after = appendLines(after, p.output[start:end]) + start = end + len(p.lineAt(end)) + } + after = appendLines(after, p.output[start:]) + if n := len(after); n >= 2 && isNL(after[n-1]) && isNL(after[n-2]) { + after = after[:n-1] + } + + p.output = p.output[:insert] + p.output = append(p.output, block...) + p.output = append(p.output, after...) +} + +// appendLines is like append(x, y...) +// but it avoids creating doubled blank lines, +// which would not be gofmt-standard output. +// It assumes that only whole blocks of lines are being appended, +// not line fragments. +func appendLines(x, y []byte) []byte { + if len(y) > 0 && isNL(y[0]) && // y starts in blank line + (len(x) == 0 || len(x) >= 2 && isNL(x[len(x)-1]) && isNL(x[len(x)-2])) { // x is empty or ends in blank line + y = y[1:] // delete y's leading blank line + } + return append(x, y...) +} + +func (p *printer) lineAt(start int) []byte { + pos := start + for pos < len(p.output) && !isNL(p.output[pos]) { + pos++ + } + if pos < len(p.output) { + pos++ + } + return p.output[start:pos] +} + +func (p *printer) commentTextAt(start int) string { + if start < len(p.output) && p.output[start] == tabwriter.Escape { + start++ + } + pos := start + for pos < len(p.output) && p.output[pos] != tabwriter.Escape && !isNL(p.output[pos]) { + pos++ + } + return string(p.output[start:pos]) +} + +func isNL(b byte) bool { + return b == '\n' || b == '\f' +} diff --git a/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/nodes.go b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/nodes.go new file mode 100644 index 0000000000..a58525b855 --- /dev/null +++ b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/nodes.go @@ -0,0 +1,2001 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file implements printing of AST nodes; specifically +// expressions, statements, declarations, and files. It uses +// the print functionality implemented in printer.go. + +package printer + +import ( + "go/ast" + "go/token" + "math" + "strconv" + "strings" + "unicode" + "unicode/utf8" +) + +// Formatting issues: +// - better comment formatting for /*-style comments at the end of a line (e.g. 
a declaration) +// when the comment spans multiple lines; if such a comment is just two lines, formatting is +// not idempotent +// - formatting of expression lists +// - should use blank instead of tab to separate one-line function bodies from +// the function header unless there is a group of consecutive one-liners + +// ---------------------------------------------------------------------------- +// Common AST nodes. + +// Print as many newlines as necessary (but at least min newlines) to get to +// the current line. ws is printed before the first line break. If newSection +// is set, the first line break is printed as formfeed. Returns 0 if no line +// breaks were printed, returns 1 if there was exactly one newline printed, +// and returns a value > 1 if there was a formfeed or more than one newline +// printed. +// +// TODO(gri): linebreak may add too many lines if the next statement at "line" +// is preceded by comments because the computation of n assumes +// the current position before the comment and the target position +// after the comment. Thus, after interspersing such comments, the +// space taken up by them is not considered to reduce the number of +// linebreaks. At the moment there is no easy way to know about +// future (not yet interspersed) comments in this function. +func (p *printer) linebreak(line, min int, ws whiteSpace, newSection bool) (nbreaks int) { + n := nlimit(line - p.pos.Line) + if n < min { + n = min + } + if n > 0 { + p.print(ws) + if newSection { + p.print(formfeed) + n-- + nbreaks = 2 + } + nbreaks += n + for ; n > 0; n-- { + p.print(newline) + } + } + return +} + +// setComment sets g as the next comment if g != nil and if node comments +// are enabled - this mode is used when printing source code fragments such +// as exports only. It assumes that there is no pending comment in p.comments +// and at most one pending comment in the p.comment cache. +func (p *printer) setComment(g *ast.CommentGroup) { + if g == nil || !p.useNodeComments { + return + } + if p.comments == nil { + // initialize p.comments lazily + p.comments = make([]*ast.CommentGroup, 1) + } else if p.cindex < len(p.comments) { + // for some reason there are pending comments; this + // should never happen - handle gracefully and flush + // all comments up to g, ignore anything after that + p.flush(p.posFor(g.List[0].Pos()), token.ILLEGAL) + p.comments = p.comments[0:1] + // in debug mode, report error + p.internalError("setComment found pending comments") + } + p.comments[0] = g + p.cindex = 0 + // don't overwrite any pending comment in the p.comment cache + // (there may be a pending comment when a line comment is + // immediately followed by a lead comment with no other + // tokens between) + if p.commentOffset == infinity { + p.nextComment() // get comment ready for use + } +} + +type exprListMode uint + +const ( + commaTerm exprListMode = 1 << iota // list is optionally terminated by a comma + noIndent // no extra indentation in multi-line lists +) + +// If indent is set, a multi-line identifier list is indented after the +// first linebreak encountered. 
+func (p *printer) identList(list []*ast.Ident, indent bool) { + // convert into an expression list so we can re-use exprList formatting + xlist := make([]ast.Expr, len(list)) + for i, x := range list { + xlist[i] = x + } + var mode exprListMode + if !indent { + mode = noIndent + } + p.exprList(token.NoPos, xlist, 1, mode, token.NoPos, false) +} + +const filteredMsg = "contains filtered or unexported fields" + +// Print a list of expressions. If the list spans multiple +// source lines, the original line breaks are respected between +// expressions. +// +// TODO(gri) Consider rewriting this to be independent of []ast.Expr +// so that we can use the algorithm for any kind of list +// +// (e.g., pass list via a channel over which to range). +func (p *printer) exprList(prev0 token.Pos, list []ast.Expr, depth int, mode exprListMode, next0 token.Pos, isIncomplete bool) { + if len(list) == 0 { + if isIncomplete { + prev := p.posFor(prev0) + next := p.posFor(next0) + if prev.IsValid() && prev.Line == next.Line { + p.print("/* " + filteredMsg + " */") + } else { + p.print(newline) + p.print(indent, "// "+filteredMsg, unindent, newline) + } + } + return + } + + prev := p.posFor(prev0) + next := p.posFor(next0) + line := p.lineFor(list[0].Pos()) + endLine := p.lineFor(list[len(list)-1].End()) + + if prev.IsValid() && prev.Line == line && line == endLine { + // all list entries on a single line + for i, x := range list { + if i > 0 { + // use position of expression following the comma as + // comma position for correct comment placement + p.setPos(x.Pos()) + p.print(token.COMMA, blank) + } + p.expr0(x, depth) + } + if isIncomplete { + p.print(token.COMMA, blank, "/* "+filteredMsg+" */") + } + return + } + + // list entries span multiple lines; + // use source code positions to guide line breaks + + // Don't add extra indentation if noIndent is set; + // i.e., pretend that the first line is already indented. + ws := ignore + if mode&noIndent == 0 { + ws = indent + } + + // The first linebreak is always a formfeed since this section must not + // depend on any previous formatting. + prevBreak := -1 // index of last expression that was followed by a linebreak + if prev.IsValid() && prev.Line < line && p.linebreak(line, 0, ws, true) > 0 { + ws = ignore + prevBreak = 0 + } + + // initialize expression/key size: a zero value indicates expr/key doesn't fit on a single line + size := 0 + + // We use the ratio between the geometric mean of the previous key sizes and + // the current size to determine if there should be a break in the alignment. + // To compute the geometric mean we accumulate the ln(size) values (lnsum) + // and the number of sizes included (count). + lnsum := 0.0 + count := 0 + + // print all list elements + prevLine := prev.Line + for i, x := range list { + line = p.lineFor(x.Pos()) + + // Determine if the next linebreak, if any, needs to use formfeed: + // in general, use the entire node size to make the decision; for + // key:value expressions, use the key size. + // TODO(gri) for a better result, should probably incorporate both + // the key and the node size into the decision process + useFF := true + + // Determine element size: All bets are off if we don't have + // position information for the previous and next token (likely + // generated code - simply ignore the size in this case by setting + // it to 0). 
+ prevSize := size + const infinity = 1e6 // larger than any source line + size = p.nodeSize(x, infinity) + pair, isPair := x.(*ast.KeyValueExpr) + if size <= infinity && prev.IsValid() && next.IsValid() { + // x fits on a single line + if isPair { + size = p.nodeSize(pair.Key, infinity) // size <= infinity + } + } else { + // size too large or we don't have good layout information + size = 0 + } + + // If the previous line and the current line had single- + // line-expressions and the key sizes are small or the + // ratio between the current key and the geometric mean + // if the previous key sizes does not exceed a threshold, + // align columns and do not use formfeed. + if prevSize > 0 && size > 0 { + const smallSize = 40 + if count == 0 || prevSize <= smallSize && size <= smallSize { + useFF = false + } else { + const r = 2.5 // threshold + geomean := math.Exp(lnsum / float64(count)) // count > 0 + ratio := float64(size) / geomean + useFF = r*ratio <= 1 || r <= ratio + } + } + + needsLinebreak := 0 < prevLine && prevLine < line + if i > 0 { + // Use position of expression following the comma as + // comma position for correct comment placement, but + // only if the expression is on the same line. + if !needsLinebreak { + p.setPos(x.Pos()) + } + p.print(token.COMMA) + needsBlank := true + if needsLinebreak { + // Lines are broken using newlines so comments remain aligned + // unless useFF is set or there are multiple expressions on + // the same line in which case formfeed is used. + nbreaks := p.linebreak(line, 0, ws, useFF || prevBreak+1 < i) + if nbreaks > 0 { + ws = ignore + prevBreak = i + needsBlank = false // we got a line break instead + } + // If there was a new section or more than one new line + // (which means that the tabwriter will implicitly break + // the section), reset the geomean variables since we are + // starting a new group of elements with the next element. + if nbreaks > 1 { + lnsum = 0 + count = 0 + } + } + if needsBlank { + p.print(blank) + } + } + + if len(list) > 1 && isPair && size > 0 && needsLinebreak { + // We have a key:value expression that fits onto one line + // and it's not on the same line as the prior expression: + // Use a column for the key such that consecutive entries + // can align if possible. + // (needsLinebreak is set if we started a new line before) + p.expr(pair.Key) + p.setPos(pair.Colon) + p.print(token.COLON, vtab) + p.expr(pair.Value) + } else { + p.expr0(x, depth) + } + + if size > 0 { + lnsum += math.Log(float64(size)) + count++ + } + + prevLine = line + } + + if mode&commaTerm != 0 && next.IsValid() && p.pos.Line < next.Line { + // Print a terminating comma if the next token is on a new line. 
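The useFF computation above is the alignment heuristic for key:value lists: columns stay aligned while entry sizes remain comparable, and the run is broken once an entry strays too far from the running geometric mean. A self-contained restatement of just that arithmetic, with names of my own choosing, for illustration only:

package main

import (
	"fmt"
	"math"
)

// breakAlignment restates the useFF decision from exprList: given the running
// sum of ln(size) over count previous single-line entries, the previous entry
// size, and the next entry size, report whether the column run should end.
// The function name and signature are illustrative, not part of the printer.
func breakAlignment(lnsum float64, count, prevSize, size int) bool {
	if prevSize <= 0 || size <= 0 {
		return true // no reliable layout information: start a new section
	}
	const smallSize = 40
	if count == 0 || (prevSize <= smallSize && size <= smallSize) {
		return false
	}
	const r = 2.5 // threshold
	geomean := math.Exp(lnsum / float64(count))
	ratio := float64(size) / geomean
	return r*ratio <= 1 || r <= ratio
}

func main() {
	// Three previous entries of sizes 10, 12, 11; their geometric mean is ~11.
	lnsum := math.Log(10) + math.Log(12) + math.Log(11)
	fmt.Println(breakAlignment(lnsum, 3, 11, 60)) // true: 60 is far above the mean
	fmt.Println(breakAlignment(lnsum, 3, 11, 14)) // false: still comparable sizes
}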
+ p.print(token.COMMA) + if isIncomplete { + p.print(newline) + p.print("// " + filteredMsg) + } + if ws == ignore && mode&noIndent == 0 { + // unindent if we indented + p.print(unindent) + } + p.print(formfeed) // terminating comma needs a line break to look good + return + } + + if isIncomplete { + p.print(token.COMMA, newline) + p.print("// "+filteredMsg, newline) + } + + if ws == ignore && mode&noIndent == 0 { + // unindent if we indented + p.print(unindent) + } +} + +type paramMode int + +const ( + funcParam paramMode = iota + funcTParam + typeTParam +) + +func (p *printer) parameters(fields *ast.FieldList, mode paramMode) { + openTok, closeTok := token.LPAREN, token.RPAREN + if mode != funcParam { + openTok, closeTok = token.LBRACK, token.RBRACK + } + p.setPos(fields.Opening) + p.print(openTok) + if len(fields.List) > 0 { + prevLine := p.lineFor(fields.Opening) + ws := indent + for i, par := range fields.List { + // determine par begin and end line (may be different + // if there are multiple parameter names for this par + // or the type is on a separate line) + parLineBeg := p.lineFor(par.Pos()) + parLineEnd := p.lineFor(par.End()) + // separating "," if needed + needsLinebreak := 0 < prevLine && prevLine < parLineBeg + if i > 0 { + // use position of parameter following the comma as + // comma position for correct comma placement, but + // only if the next parameter is on the same line + if !needsLinebreak { + p.setPos(par.Pos()) + } + p.print(token.COMMA) + } + // separator if needed (linebreak or blank) + if needsLinebreak && p.linebreak(parLineBeg, 0, ws, true) > 0 { + // break line if the opening "(" or previous parameter ended on a different line + ws = ignore + } else if i > 0 { + p.print(blank) + } + // parameter names + if len(par.Names) > 0 { + // Very subtle: If we indented before (ws == ignore), identList + // won't indent again. If we didn't (ws == indent), identList will + // indent if the identList spans multiple lines, and it will outdent + // again at the end (and still ws == indent). Thus, a subsequent indent + // by a linebreak call after a type, or in the next multi-line identList + // will do the right thing. + p.identList(par.Names, ws == indent) + p.print(blank) + } + // parameter type + p.expr(stripParensAlways(par.Type)) + prevLine = parLineEnd + } + + // if the closing ")" is on a separate line from the last parameter, + // print an additional "," and line break + if closing := p.lineFor(fields.Closing); 0 < prevLine && prevLine < closing { + p.print(token.COMMA) + p.linebreak(closing, 0, ignore, true) + } else if mode == typeTParam && fields.NumFields() == 1 && combinesWithName(fields.List[0].Type) { + // A type parameter list [P T] where the name P and the type expression T syntactically + // combine to another valid (value) expression requires a trailing comma, as in [P *T,] + // (or an enclosing interface as in [P interface(*T)]), so that the type parameter list + // is not parsed as an array length [P*T]. + p.print(token.COMMA) + } + + // unindent if we indented + if ws == ignore { + p.print(unindent) + } + } + + p.setPos(fields.Closing) + p.print(closeTok) +} + +// combinesWithName reports whether a name followed by the expression x +// syntactically combines to another valid (value) expression. For instance +// using *T for x, "name *T" syntactically appears as the expression x*T. +// On the other hand, using P|Q or *P|~Q for x, "name P|Q" or name *P|~Q" +// cannot be combined into a valid (value) expression. 
+func combinesWithName(x ast.Expr) bool { + switch x := x.(type) { + case *ast.StarExpr: + // name *x.X combines to name*x.X if x.X is not a type element + return !isTypeElem(x.X) + case *ast.BinaryExpr: + return combinesWithName(x.X) && !isTypeElem(x.Y) + case *ast.ParenExpr: + // name(x) combines but we are making sure at + // the call site that x is never parenthesized. + panic("unexpected parenthesized expression") + } + return false +} + +// isTypeElem reports whether x is a (possibly parenthesized) type element expression. +// The result is false if x could be a type element OR an ordinary (value) expression. +func isTypeElem(x ast.Expr) bool { + switch x := x.(type) { + case *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType: + return true + case *ast.UnaryExpr: + return x.Op == token.TILDE + case *ast.BinaryExpr: + return isTypeElem(x.X) || isTypeElem(x.Y) + case *ast.ParenExpr: + return isTypeElem(x.X) + } + return false +} + +func (p *printer) signature(sig *ast.FuncType) { + if sig.TypeParams != nil { + p.parameters(sig.TypeParams, funcTParam) + } + if sig.Params != nil { + p.parameters(sig.Params, funcParam) + } else { + p.print(token.LPAREN, token.RPAREN) + } + res := sig.Results + n := res.NumFields() + if n > 0 { + // res != nil + p.print(blank) + if n == 1 && res.List[0].Names == nil { + // single anonymous res; no ()'s + p.expr(stripParensAlways(res.List[0].Type)) + return + } + p.parameters(res, funcParam) + } +} + +func identListSize(list []*ast.Ident, maxSize int) (size int) { + for i, x := range list { + if i > 0 { + size += len(", ") + } + size += utf8.RuneCountInString(x.Name) + if size >= maxSize { + break + } + } + return +} + +func (p *printer) isOneLineFieldList(list []*ast.Field) bool { + if len(list) != 1 { + return false // allow only one field + } + f := list[0] + if f.Tag != nil || f.Comment != nil { + return false // don't allow tags or comments + } + // only name(s) and type + const maxSize = 30 // adjust as appropriate, this is an approximate value + namesSize := identListSize(f.Names, maxSize) + if namesSize > 0 { + namesSize = 1 // blank between names and types + } + typeSize := p.nodeSize(f.Type, maxSize) + return namesSize+typeSize <= maxSize +} + +func (p *printer) setLineComment(text string) { + p.setComment(&ast.CommentGroup{List: []*ast.Comment{{Slash: token.NoPos, Text: text}}}) +} + +func (p *printer) fieldList(fields *ast.FieldList, isStruct, isIncomplete bool) { + lbrace := fields.Opening + list := fields.List + rbrace := fields.Closing + hasComments := isIncomplete || p.commentBefore(p.posFor(rbrace)) + srcIsOneLine := lbrace.IsValid() && rbrace.IsValid() && p.lineFor(lbrace) == p.lineFor(rbrace) + + if !hasComments && srcIsOneLine { + // possibly a one-line struct/interface + if len(list) == 0 { + // no blank between keyword and {} in this case + p.setPos(lbrace) + p.print(token.LBRACE) + p.setPos(rbrace) + p.print(token.RBRACE) + return + } else if p.isOneLineFieldList(list) { + // small enough - print on one line + // (don't use identList and ignore source line breaks) + p.setPos(lbrace) + p.print(token.LBRACE, blank) + f := list[0] + if isStruct { + for i, x := range f.Names { + if i > 0 { + // no comments so no need for comma position + p.print(token.COMMA, blank) + } + p.expr(x) + } + if len(f.Names) > 0 { + p.print(blank) + } + p.expr(f.Type) + } else { // interface + if len(f.Names) > 0 { + name := f.Names[0] // method name + p.expr(name) + p.signature(f.Type.(*ast.FuncType)) // don't print 
"func" + } else { + // embedded interface + p.expr(f.Type) + } + } + p.print(blank) + p.setPos(rbrace) + p.print(token.RBRACE) + return + } + } + // hasComments || !srcIsOneLine + + p.print(blank) + p.setPos(lbrace) + p.print(token.LBRACE, indent) + if hasComments || len(list) > 0 { + p.print(formfeed) + } + + if isStruct { + + sep := vtab + if len(list) == 1 { + sep = blank + } + var line int + for i, f := range list { + if i > 0 { + p.linebreak(p.lineFor(f.Pos()), 1, ignore, p.linesFrom(line) > 0) + } + extraTabs := 0 + p.setComment(f.Doc) + p.recordLine(&line) + if len(f.Names) > 0 { + // named fields + p.identList(f.Names, false) + p.print(sep) + p.expr(f.Type) + extraTabs = 1 + } else { + // anonymous field + p.expr(f.Type) + extraTabs = 2 + } + if f.Tag != nil { + if len(f.Names) > 0 && sep == vtab { + p.print(sep) + } + p.print(sep) + p.expr(f.Tag) + extraTabs = 0 + } + if f.Comment != nil { + for ; extraTabs > 0; extraTabs-- { + p.print(sep) + } + p.setComment(f.Comment) + } + } + if isIncomplete { + if len(list) > 0 { + p.print(formfeed) + } + p.flush(p.posFor(rbrace), token.RBRACE) // make sure we don't lose the last line comment + p.setLineComment("// " + filteredMsg) + } + + } else { // interface + + var line int + var prev *ast.Ident // previous "type" identifier + for i, f := range list { + var name *ast.Ident // first name, or nil + if len(f.Names) > 0 { + name = f.Names[0] + } + if i > 0 { + // don't do a line break (min == 0) if we are printing a list of types + // TODO(gri) this doesn't work quite right if the list of types is + // spread across multiple lines + min := 1 + if prev != nil && name == prev { + min = 0 + } + p.linebreak(p.lineFor(f.Pos()), min, ignore, p.linesFrom(line) > 0) + } + p.setComment(f.Doc) + p.recordLine(&line) + if name != nil { + // method + p.expr(name) + p.signature(f.Type.(*ast.FuncType)) // don't print "func" + prev = nil + } else { + // embedded interface + p.expr(f.Type) + prev = nil + } + p.setComment(f.Comment) + } + if isIncomplete { + if len(list) > 0 { + p.print(formfeed) + } + p.flush(p.posFor(rbrace), token.RBRACE) // make sure we don't lose the last line comment + p.setLineComment("// contains filtered or unexported methods") + } + + } + p.print(unindent, formfeed) + p.setPos(rbrace) + p.print(token.RBRACE) +} + +// ---------------------------------------------------------------------------- +// Expressions + +func walkBinary(e *ast.BinaryExpr) (has4, has5 bool, maxProblem int) { + switch e.Op.Precedence() { + case 4: + has4 = true + case 5: + has5 = true + } + + switch l := e.X.(type) { + case *ast.BinaryExpr: + if l.Op.Precedence() < e.Op.Precedence() { + // parens will be inserted. + // pretend this is an *ast.ParenExpr and do nothing. + break + } + h4, h5, mp := walkBinary(l) + has4 = has4 || h4 + has5 = has5 || h5 + if maxProblem < mp { + maxProblem = mp + } + } + + switch r := e.Y.(type) { + case *ast.BinaryExpr: + if r.Op.Precedence() <= e.Op.Precedence() { + // parens will be inserted. + // pretend this is an *ast.ParenExpr and do nothing. 
+ break + } + h4, h5, mp := walkBinary(r) + has4 = has4 || h4 + has5 = has5 || h5 + if maxProblem < mp { + maxProblem = mp + } + + case *ast.StarExpr: + if e.Op == token.QUO { // `*/` + maxProblem = 5 + } + + case *ast.UnaryExpr: + switch e.Op.String() + r.Op.String() { + case "/*", "&&", "&^": + maxProblem = 5 + case "++", "--": + if maxProblem < 4 { + maxProblem = 4 + } + } + } + return +} + +func cutoff(e *ast.BinaryExpr, depth int) int { + has4, has5, maxProblem := walkBinary(e) + if maxProblem > 0 { + return maxProblem + 1 + } + if has4 && has5 { + if depth == 1 { + return 5 + } + return 4 + } + if depth == 1 { + return 6 + } + return 4 +} + +func diffPrec(expr ast.Expr, prec int) int { + x, ok := expr.(*ast.BinaryExpr) + if !ok || prec != x.Op.Precedence() { + return 1 + } + return 0 +} + +func reduceDepth(depth int) int { + depth-- + if depth < 1 { + depth = 1 + } + return depth +} + +// Format the binary expression: decide the cutoff and then format. +// Let's call depth == 1 Normal mode, and depth > 1 Compact mode. +// (Algorithm suggestion by Russ Cox.) +// +// The precedences are: +// +// 5 * / % << >> & &^ +// 4 + - | ^ +// 3 == != < <= > >= +// 2 && +// 1 || +// +// The only decision is whether there will be spaces around levels 4 and 5. +// There are never spaces at level 6 (unary), and always spaces at levels 3 and below. +// +// To choose the cutoff, look at the whole expression but excluding primary +// expressions (function calls, parenthesized exprs), and apply these rules: +// +// 1. If there is a binary operator with a right side unary operand +// that would clash without a space, the cutoff must be (in order): +// +// /* 6 +// && 6 +// &^ 6 +// ++ 5 +// -- 5 +// +// (Comparison operators always have spaces around them.) +// +// 2. If there is a mix of level 5 and level 4 operators, then the cutoff +// is 5 (use spaces to distinguish precedence) in Normal mode +// and 4 (never use spaces) in Compact mode. +// +// 3. If there are no level 4 operators or no level 5 operators, then the +// cutoff is 6 (always use spaces) in Normal mode +// and 4 (never use spaces) in Compact mode. +func (p *printer) binaryExpr(x *ast.BinaryExpr, prec1, cutoff, depth int) { + prec := x.Op.Precedence() + if prec < prec1 { + // parenthesis needed + // Note: The parser inserts an ast.ParenExpr node; thus this case + // can only occur if the AST is created in a different way. + p.print(token.LPAREN) + p.expr0(x, reduceDepth(depth)) // parentheses undo one level of depth + p.print(token.RPAREN) + return + } + + printBlank := prec < cutoff + + ws := indent + p.expr1(x.X, prec, depth+diffPrec(x.X, prec)) + if printBlank { + p.print(blank) + } + xline := p.pos.Line // before the operator (it may be on the next line!) 
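The cutoff rules documented above decide where blanks go around binary operators. They can be observed through the standard library's go/format, which drives a copy of this printer; the expected results in the comments follow from the stated rules rather than from running this exact vendored copy:

package main

import (
	"fmt"
	"go/format"
)

func main() {
	// Formatting input only; it is never executed.
	src := []byte(`package p

var (
	a, c int
	b    *int
	_    = a * c + a
	_    = a + c + a
	_    = a / *b
)
`)
	out, err := format.Source(src)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
	// Mixed levels 4 and 5 should come back as "a*c + a", the uniform sum
	// as "a + c + a", and "a / *b" should keep its blank so that it does
	// not turn into the start of a comment.
}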
+ yline := p.lineFor(x.Y.Pos()) + p.setPos(x.OpPos) + p.print(x.Op) + if xline != yline && xline > 0 && yline > 0 { + // at least one line break, but respect an extra empty line + // in the source + if p.linebreak(yline, 1, ws, true) > 0 { + ws = ignore + printBlank = false // no blank after line break + } + } + if printBlank { + p.print(blank) + } + p.expr1(x.Y, prec+1, depth+1) + if ws == ignore { + p.print(unindent) + } +} + +func isBinary(expr ast.Expr) bool { + _, ok := expr.(*ast.BinaryExpr) + return ok +} + +func (p *printer) expr1(expr ast.Expr, prec1, depth int) { + p.setPos(expr.Pos()) + + switch x := expr.(type) { + case *ast.BadExpr: + p.print("BadExpr") + + case *ast.Ident: + p.print(x) + + case *ast.BinaryExpr: + if depth < 1 { + p.internalError("depth < 1:", depth) + depth = 1 + } + p.binaryExpr(x, prec1, cutoff(x, depth), depth) + + case *ast.KeyValueExpr: + p.expr(x.Key) + p.setPos(x.Colon) + p.print(token.COLON, blank) + p.expr(x.Value) + + case *ast.StarExpr: + const prec = token.UnaryPrec + if prec < prec1 { + // parenthesis needed + p.print(token.LPAREN) + p.print(token.MUL) + p.expr(x.X) + p.print(token.RPAREN) + } else { + // no parenthesis needed + p.print(token.MUL) + p.expr(x.X) + } + + case *ast.UnaryExpr: + const prec = token.UnaryPrec + if prec < prec1 { + // parenthesis needed + p.print(token.LPAREN) + p.expr(x) + p.print(token.RPAREN) + } else { + // no parenthesis needed + p.print(x.Op) + if x.Op == token.RANGE { + // TODO(gri) Remove this code if it cannot be reached. + p.print(blank) + } + p.expr1(x.X, prec, depth) + } + + case *ast.BasicLit: + if p.Config.Mode&normalizeNumbers != 0 { + x = normalizedNumber(x) + } + p.print(x) + + case *ast.FuncLit: + p.setPos(x.Type.Pos()) + p.print(token.FUNC) + // See the comment in funcDecl about how the header size is computed. 
+ startCol := p.out.Column - len("func") + p.signature(x.Type) + p.funcBody(p.distanceFrom(x.Type.Pos(), startCol), blank, x.Body) + + case *ast.ParenExpr: + if _, hasParens := x.X.(*ast.ParenExpr); hasParens { + // don't print parentheses around an already parenthesized expression + // TODO(gri) consider making this more general and incorporate precedence levels + p.expr0(x.X, depth) + } else { + p.print(token.LPAREN) + p.expr0(x.X, reduceDepth(depth)) // parentheses undo one level of depth + p.setPos(x.Rparen) + p.print(token.RPAREN) + } + + case *ast.SelectorExpr: + p.selectorExpr(x, depth, false) + + case *ast.TypeAssertExpr: + p.expr1(x.X, token.HighestPrec, depth) + p.print(token.PERIOD) + p.setPos(x.Lparen) + p.print(token.LPAREN) + if x.Type != nil { + p.expr(x.Type) + } else { + p.print(token.TYPE) + } + p.setPos(x.Rparen) + p.print(token.RPAREN) + + case *ast.IndexExpr: + // TODO(gri): should treat[] like parentheses and undo one level of depth + p.expr1(x.X, token.HighestPrec, 1) + p.setPos(x.Lbrack) + p.print(token.LBRACK) + p.expr0(x.Index, depth+1) + p.setPos(x.Rbrack) + p.print(token.RBRACK) + + case *ast.IndexListExpr: + // TODO(gri): as for IndexExpr, should treat [] like parentheses and undo + // one level of depth + p.expr1(x.X, token.HighestPrec, 1) + p.setPos(x.Lbrack) + p.print(token.LBRACK) + p.exprList(x.Lbrack, x.Indices, depth+1, commaTerm, x.Rbrack, false) + p.setPos(x.Rbrack) + p.print(token.RBRACK) + + case *ast.SliceExpr: + // TODO(gri): should treat[] like parentheses and undo one level of depth + p.expr1(x.X, token.HighestPrec, 1) + p.setPos(x.Lbrack) + p.print(token.LBRACK) + indices := []ast.Expr{x.Low, x.High} + if x.Max != nil { + indices = append(indices, x.Max) + } + // determine if we need extra blanks around ':' + var needsBlanks bool + if depth <= 1 { + var indexCount int + var hasBinaries bool + for _, x := range indices { + if x != nil { + indexCount++ + if isBinary(x) { + hasBinaries = true + } + } + } + if indexCount > 1 && hasBinaries { + needsBlanks = true + } + } + for i, x := range indices { + if i > 0 { + if indices[i-1] != nil && needsBlanks { + p.print(blank) + } + p.print(token.COLON) + if x != nil && needsBlanks { + p.print(blank) + } + } + if x != nil { + p.expr0(x, depth+1) + } + } + p.setPos(x.Rbrack) + p.print(token.RBRACK) + + case *ast.CallExpr: + if len(x.Args) > 1 { + depth++ + } + var wasIndented bool + if _, ok := x.Fun.(*ast.FuncType); ok { + // conversions to literal function types require parentheses around the type + p.print(token.LPAREN) + wasIndented = p.possibleSelectorExpr(x.Fun, token.HighestPrec, depth) + p.print(token.RPAREN) + } else { + wasIndented = p.possibleSelectorExpr(x.Fun, token.HighestPrec, depth) + } + p.setPos(x.Lparen) + p.print(token.LPAREN) + if x.Ellipsis.IsValid() { + p.exprList(x.Lparen, x.Args, depth, 0, x.Ellipsis, false) + p.setPos(x.Ellipsis) + p.print(token.ELLIPSIS) + if x.Rparen.IsValid() && p.lineFor(x.Ellipsis) < p.lineFor(x.Rparen) { + p.print(token.COMMA, formfeed) + } + } else { + p.exprList(x.Lparen, x.Args, depth, commaTerm, x.Rparen, false) + } + p.setPos(x.Rparen) + p.print(token.RPAREN) + if wasIndented { + p.print(unindent) + } + + case *ast.CompositeLit: + // composite literal elements that are composite literals themselves may have the type omitted + if x.Type != nil { + p.expr1(x.Type, token.HighestPrec, depth) + } + p.level++ + p.setPos(x.Lbrace) + p.print(token.LBRACE) + p.exprList(x.Lbrace, x.Elts, 1, commaTerm, x.Rbrace, x.Incomplete) + // do not insert extra line break 
following a /*-style comment + // before the closing '}' as it might break the code if there + // is no trailing ',' + mode := noExtraLinebreak + // do not insert extra blank following a /*-style comment + // before the closing '}' unless the literal is empty + if len(x.Elts) > 0 { + mode |= noExtraBlank + } + // need the initial indent to print lone comments with + // the proper level of indentation + p.print(indent, unindent, mode) + p.setPos(x.Rbrace) + p.print(token.RBRACE, mode) + p.level-- + + case *ast.Ellipsis: + p.print(token.ELLIPSIS) + if x.Elt != nil { + p.expr(x.Elt) + } + + case *ast.ArrayType: + p.print(token.LBRACK) + if x.Len != nil { + p.expr(x.Len) + } + p.print(token.RBRACK) + p.expr(x.Elt) + + case *ast.StructType: + p.print(token.STRUCT) + p.fieldList(x.Fields, true, x.Incomplete) + + case *ast.FuncType: + p.print(token.FUNC) + p.signature(x) + + case *ast.InterfaceType: + p.print(token.INTERFACE) + p.fieldList(x.Methods, false, x.Incomplete) + + case *ast.MapType: + p.print(token.MAP, token.LBRACK) + p.expr(x.Key) + p.print(token.RBRACK) + p.expr(x.Value) + + case *ast.ChanType: + switch x.Dir { + case ast.SEND | ast.RECV: + p.print(token.CHAN) + case ast.RECV: + p.print(token.ARROW, token.CHAN) // x.Arrow and x.Pos() are the same + case ast.SEND: + p.print(token.CHAN) + p.setPos(x.Arrow) + p.print(token.ARROW) + } + p.print(blank) + p.expr(x.Value) + + default: + panic("unreachable") + } +} + +// normalizedNumber rewrites base prefixes and exponents +// of numbers to use lower-case letters (0X123 to 0x123 and 1.2E3 to 1.2e3), +// and removes leading 0's from integer imaginary literals (0765i to 765i). +// It leaves hexadecimal digits alone. +// +// normalizedNumber doesn't modify the ast.BasicLit value lit points to. +// If lit is not a number or a number in canonical format already, +// lit is returned as is. Otherwise a new ast.BasicLit is created. +func normalizedNumber(lit *ast.BasicLit) *ast.BasicLit { + if lit.Kind != token.INT && lit.Kind != token.FLOAT && lit.Kind != token.IMAG { + return lit // not a number - nothing to do + } + if len(lit.Value) < 2 { + return lit // only one digit (common case) - nothing to do + } + // len(lit.Value) >= 2 + + // We ignore lit.Kind because for lit.Kind == token.IMAG the literal may be an integer + // or floating-point value, decimal or not. Instead, just consider the literal pattern. 
+ x := lit.Value + switch x[:2] { + default: + // 0-prefix octal, decimal int, or float (possibly with 'i' suffix) + if i := strings.LastIndexByte(x, 'E'); i >= 0 { + x = x[:i] + "e" + x[i+1:] + break + } + // remove leading 0's from integer (but not floating-point) imaginary literals + if x[len(x)-1] == 'i' && !strings.ContainsAny(x, ".e") { + x = strings.TrimLeft(x, "0_") + if x == "i" { + x = "0i" + } + } + case "0X": + x = "0x" + x[2:] + // possibly a hexadecimal float + if i := strings.LastIndexByte(x, 'P'); i >= 0 { + x = x[:i] + "p" + x[i+1:] + } + case "0x": + // possibly a hexadecimal float + i := strings.LastIndexByte(x, 'P') + if i == -1 { + return lit // nothing to do + } + x = x[:i] + "p" + x[i+1:] + case "0O": + x = "0o" + x[2:] + case "0o": + return lit // nothing to do + case "0B": + x = "0b" + x[2:] + case "0b": + return lit // nothing to do + } + + return &ast.BasicLit{ValuePos: lit.ValuePos, Kind: lit.Kind, Value: x} +} + +func (p *printer) possibleSelectorExpr(expr ast.Expr, prec1, depth int) bool { + if x, ok := expr.(*ast.SelectorExpr); ok { + return p.selectorExpr(x, depth, true) + } + p.expr1(expr, prec1, depth) + return false +} + +// selectorExpr handles an *ast.SelectorExpr node and reports whether x spans +// multiple lines. +func (p *printer) selectorExpr(x *ast.SelectorExpr, depth int, isMethod bool) bool { + p.expr1(x.X, token.HighestPrec, depth) + p.print(token.PERIOD) + if line := p.lineFor(x.Sel.Pos()); p.pos.IsValid() && p.pos.Line < line { + p.print(indent, newline) + p.setPos(x.Sel.Pos()) + p.print(x.Sel) + if !isMethod { + p.print(unindent) + } + return true + } + p.setPos(x.Sel.Pos()) + p.print(x.Sel) + return false +} + +func (p *printer) expr0(x ast.Expr, depth int) { + p.expr1(x, token.LowestPrec, depth) +} + +func (p *printer) expr(x ast.Expr) { + const depth = 1 + p.expr1(x, token.LowestPrec, depth) +} + +// ---------------------------------------------------------------------------- +// Statements + +// Print the statement list indented, but without a newline after the last statement. +// Extra line breaks between statements in the source are respected but at most one +// empty line is printed between statements. +func (p *printer) stmtList(list []ast.Stmt, nindent int, nextIsRBrace bool) { + if nindent > 0 { + p.print(indent) + } + var line int + i := 0 + for _, s := range list { + // ignore empty statements (was issue 3466) + if _, isEmpty := s.(*ast.EmptyStmt); !isEmpty { + // nindent == 0 only for lists of switch/select case clauses; + // in those cases each clause is a new section + if len(p.output) > 0 { + // only print line break if we are not at the beginning of the output + // (i.e., we are not printing only a partial program) + p.linebreak(p.lineFor(s.Pos()), 1, ignore, i == 0 || nindent == 0 || p.linesFrom(line) > 0) + } + p.recordLine(&line) + p.stmt(s, nextIsRBrace && i == len(list)-1) + // labeled statements put labels on a separate line, but here + // we only care about the start line of the actual statement + // without label - correct line for each label + for t := s; ; { + lt, _ := t.(*ast.LabeledStmt) + if lt == nil { + break + } + line++ + t = lt.Stmt + } + i++ + } + } + if nindent > 0 { + p.print(unindent) + } +} + +// block prints an *ast.BlockStmt; it always spans at least two lines. 
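normalizedNumber above is what makes gofmt lower-case number prefixes and exponents while leaving hexadecimal digits untouched; go/format enables the corresponding mode. A small demonstration against the stdlib entry point (not this vendored copy):

package main

import (
	"fmt"
	"go/format"
)

func main() {
	src := []byte(`package p

const (
	hex = 0XABCDEF
	exp = 1.2E3
	im  = 0765i
)
`)
	out, err := format.Source(src)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
	// Expected: 0xABCDEF (prefix lowered, hex digits untouched), 1.2e3,
	// and 765i (leading zero dropped from the integer imaginary literal).
}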
+func (p *printer) block(b *ast.BlockStmt, nindent int) { + p.setPos(b.Lbrace) + p.print(token.LBRACE) + p.stmtList(b.List, nindent, true) + p.linebreak(p.lineFor(b.Rbrace), 1, ignore, true) + p.setPos(b.Rbrace) + p.print(token.RBRACE) +} + +func isTypeName(x ast.Expr) bool { + switch t := x.(type) { + case *ast.Ident: + return true + case *ast.SelectorExpr: + return isTypeName(t.X) + } + return false +} + +func stripParens(x ast.Expr) ast.Expr { + if px, strip := x.(*ast.ParenExpr); strip { + // parentheses must not be stripped if there are any + // unparenthesized composite literals starting with + // a type name + ast.Inspect(px.X, func(node ast.Node) bool { + switch x := node.(type) { + case *ast.ParenExpr: + // parentheses protect enclosed composite literals + return false + case *ast.CompositeLit: + if isTypeName(x.Type) { + strip = false // do not strip parentheses + } + return false + } + // in all other cases, keep inspecting + return true + }) + if strip { + return stripParens(px.X) + } + } + return x +} + +func stripParensAlways(x ast.Expr) ast.Expr { + if x, ok := x.(*ast.ParenExpr); ok { + return stripParensAlways(x.X) + } + return x +} + +func (p *printer) controlClause(isForStmt bool, init ast.Stmt, expr ast.Expr, post ast.Stmt) { + p.print(blank) + needsBlank := false + if init == nil && post == nil { + // no semicolons required + if expr != nil { + p.expr(stripParens(expr)) + needsBlank = true + } + } else { + // all semicolons required + // (they are not separators, print them explicitly) + if init != nil { + p.stmt(init, false) + } + p.print(token.SEMICOLON, blank) + if expr != nil { + p.expr(stripParens(expr)) + needsBlank = true + } + if isForStmt { + p.print(token.SEMICOLON, blank) + needsBlank = false + if post != nil { + p.stmt(post, false) + needsBlank = true + } + } + } + if needsBlank { + p.print(blank) + } +} + +// indentList reports whether an expression list would look better if it +// were indented wholesale (starting with the very first element, rather +// than starting at the first line break). +func (p *printer) indentList(list []ast.Expr) bool { + // Heuristic: indentList reports whether there are more than one multi- + // line element in the list, or if there is any element that is not + // starting on the same line as the previous one ends. 
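stripParens and controlClause above are the reason gofmt drops redundant parentheses around if/for/switch conditions, while keeping them when an enclosed composite literal starting with a type name would otherwise be misparsed. A quick illustration of the simple case, again via the public go/format:

package main

import (
	"fmt"
	"go/format"
)

func main() {
	src := []byte(`package p

func f(x bool) {
	if (x) {
		println("yes")
	}
}
`)
	out, err := format.Source(src)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
	// The condition should come back as "if x {": the parentheses are redundant.
}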
+ if len(list) >= 2 { + b := p.lineFor(list[0].Pos()) + e := p.lineFor(list[len(list)-1].End()) + if 0 < b && b < e { + // list spans multiple lines + n := 0 // multi-line element count + line := b + for _, x := range list { + xb := p.lineFor(x.Pos()) + xe := p.lineFor(x.End()) + if line < xb { + // x is not starting on the same + // line as the previous one ended + return true + } + if xb < xe { + // x is a multi-line element + n++ + } + line = xe + } + return n > 1 + } + } + return false +} + +func (p *printer) stmt(stmt ast.Stmt, nextIsRBrace bool) { + p.setPos(stmt.Pos()) + + switch s := stmt.(type) { + case *ast.BadStmt: + p.print("BadStmt") + + case *ast.DeclStmt: + p.decl(s.Decl) + + case *ast.EmptyStmt: + // nothing to do + + case *ast.LabeledStmt: + // a "correcting" unindent immediately following a line break + // is applied before the line break if there is no comment + // between (see writeWhitespace) + p.print(unindent) + p.expr(s.Label) + p.setPos(s.Colon) + p.print(token.COLON, indent) + if e, isEmpty := s.Stmt.(*ast.EmptyStmt); isEmpty { + if !nextIsRBrace { + p.print(newline) + p.setPos(e.Pos()) + p.print(token.SEMICOLON) + break + } + } else { + p.linebreak(p.lineFor(s.Stmt.Pos()), 1, ignore, true) + } + p.stmt(s.Stmt, nextIsRBrace) + + case *ast.ExprStmt: + const depth = 1 + p.expr0(s.X, depth) + + case *ast.SendStmt: + const depth = 1 + p.expr0(s.Chan, depth) + p.print(blank) + p.setPos(s.Arrow) + p.print(token.ARROW, blank) + p.expr0(s.Value, depth) + + case *ast.IncDecStmt: + const depth = 1 + p.expr0(s.X, depth+1) + p.setPos(s.TokPos) + p.print(s.Tok) + + case *ast.AssignStmt: + depth := 1 + if len(s.Lhs) > 1 && len(s.Rhs) > 1 { + depth++ + } + p.exprList(s.Pos(), s.Lhs, depth, 0, s.TokPos, false) + p.print(blank) + p.setPos(s.TokPos) + p.print(s.Tok, blank) + p.exprList(s.TokPos, s.Rhs, depth, 0, token.NoPos, false) + + case *ast.GoStmt: + p.print(token.GO, blank) + p.expr(s.Call) + + case *ast.DeferStmt: + p.print(token.DEFER, blank) + p.expr(s.Call) + + case *ast.ReturnStmt: + p.print(token.RETURN) + if s.Results != nil { + p.print(blank) + // Use indentList heuristic to make corner cases look + // better (issue 1207). A more systematic approach would + // always indent, but this would cause significant + // reformatting of the code base and not necessarily + // lead to more nicely formatted code in general. + if p.indentList(s.Results) { + p.print(indent) + // Use NoPos so that a newline never goes before + // the results (see issue #32854). + p.exprList(token.NoPos, s.Results, 1, noIndent, token.NoPos, false) + p.print(unindent) + } else { + p.exprList(token.NoPos, s.Results, 1, 0, token.NoPos, false) + } + } + + case *ast.BranchStmt: + p.print(s.Tok) + if s.Label != nil { + p.print(blank) + p.expr(s.Label) + } + + case *ast.BlockStmt: + p.block(s, 1) + + case *ast.IfStmt: + p.print(token.IF) + p.controlClause(false, s.Init, s.Cond, nil) + p.block(s.Body, 1) + if s.Else != nil { + p.print(blank, token.ELSE, blank) + switch s.Else.(type) { + case *ast.BlockStmt, *ast.IfStmt: + p.stmt(s.Else, nextIsRBrace) + default: + // This can only happen with an incorrectly + // constructed AST. Permit it but print so + // that it can be parsed without errors. 
+ p.print(token.LBRACE, indent, formfeed) + p.stmt(s.Else, true) + p.print(unindent, formfeed, token.RBRACE) + } + } + + case *ast.CaseClause: + if s.List != nil { + p.print(token.CASE, blank) + p.exprList(s.Pos(), s.List, 1, 0, s.Colon, false) + } else { + p.print(token.DEFAULT) + } + p.setPos(s.Colon) + p.print(token.COLON) + p.stmtList(s.Body, 1, nextIsRBrace) + + case *ast.SwitchStmt: + p.print(token.SWITCH) + p.controlClause(false, s.Init, s.Tag, nil) + p.block(s.Body, 0) + + case *ast.TypeSwitchStmt: + p.print(token.SWITCH) + if s.Init != nil { + p.print(blank) + p.stmt(s.Init, false) + p.print(token.SEMICOLON) + } + p.print(blank) + p.stmt(s.Assign, false) + p.print(blank) + p.block(s.Body, 0) + + case *ast.CommClause: + if s.Comm != nil { + p.print(token.CASE, blank) + p.stmt(s.Comm, false) + } else { + p.print(token.DEFAULT) + } + p.setPos(s.Colon) + p.print(token.COLON) + p.stmtList(s.Body, 1, nextIsRBrace) + + case *ast.SelectStmt: + p.print(token.SELECT, blank) + body := s.Body + if len(body.List) == 0 && !p.commentBefore(p.posFor(body.Rbrace)) { + // print empty select statement w/o comments on one line + p.setPos(body.Lbrace) + p.print(token.LBRACE) + p.setPos(body.Rbrace) + p.print(token.RBRACE) + } else { + p.block(body, 0) + } + + case *ast.ForStmt: + p.print(token.FOR) + p.controlClause(true, s.Init, s.Cond, s.Post) + p.block(s.Body, 1) + + case *ast.RangeStmt: + p.print(token.FOR, blank) + if s.Key != nil { + p.expr(s.Key) + if s.Value != nil { + // use position of value following the comma as + // comma position for correct comment placement + p.setPos(s.Value.Pos()) + p.print(token.COMMA, blank) + p.expr(s.Value) + } + p.print(blank) + p.setPos(s.TokPos) + p.print(s.Tok, blank) + } + p.print(token.RANGE, blank) + p.expr(stripParens(s.X)) + p.print(blank) + p.block(s.Body, 1) + + default: + panic("unreachable") + } +} + +// ---------------------------------------------------------------------------- +// Declarations + +// The keepTypeColumn function determines if the type column of a series of +// consecutive const or var declarations must be kept, or if initialization +// values (V) can be placed in the type column (T) instead. The i'th entry +// in the result slice is true if the type column in spec[i] must be kept. +// +// For example, the declaration: +// +// const ( +// foobar int = 42 // comment +// x = 7 // comment +// foo +// bar = 991 +// ) +// +// leads to the type/values matrix below. A run of value columns (V) can +// be moved into the type column if there is no type for any of the values +// in that column (we only move entire columns so that they align properly). 
+// +// matrix formatted result +// matrix +// T V -> T V -> true there is a T and so the type +// - V - V true column must be kept +// - - - - false +// - V V - false V is moved into T column +func keepTypeColumn(specs []ast.Spec) []bool { + m := make([]bool, len(specs)) + + populate := func(i, j int, keepType bool) { + if keepType { + for ; i < j; i++ { + m[i] = true + } + } + } + + i0 := -1 // if i0 >= 0 we are in a run and i0 is the start of the run + var keepType bool + for i, s := range specs { + t := s.(*ast.ValueSpec) + if t.Values != nil { + if i0 < 0 { + // start of a run of ValueSpecs with non-nil Values + i0 = i + keepType = false + } + } else { + if i0 >= 0 { + // end of a run + populate(i0, i, keepType) + i0 = -1 + } + } + if t.Type != nil { + keepType = true + } + } + if i0 >= 0 { + // end of a run + populate(i0, len(specs), keepType) + } + + return m +} + +func (p *printer) valueSpec(s *ast.ValueSpec, keepType bool) { + p.setComment(s.Doc) + p.identList(s.Names, false) // always present + extraTabs := 3 + if s.Type != nil || keepType { + p.print(vtab) + extraTabs-- + } + if s.Type != nil { + p.expr(s.Type) + } + if s.Values != nil { + p.print(vtab, token.ASSIGN, blank) + p.exprList(token.NoPos, s.Values, 1, 0, token.NoPos, false) + extraTabs-- + } + if s.Comment != nil { + for ; extraTabs > 0; extraTabs-- { + p.print(vtab) + } + p.setComment(s.Comment) + } +} + +func sanitizeImportPath(lit *ast.BasicLit) *ast.BasicLit { + // Note: An unmodified AST generated by go/parser will already + // contain a backward- or double-quoted path string that does + // not contain any invalid characters, and most of the work + // here is not needed. However, a modified or generated AST + // may possibly contain non-canonical paths. Do the work in + // all cases since it's not too hard and not speed-critical. + + // if we don't have a proper string, be conservative and return whatever we have + if lit.Kind != token.STRING { + return lit + } + s, err := strconv.Unquote(lit.Value) + if err != nil { + return lit + } + + // if the string is an invalid path, return whatever we have + // + // spec: "Implementation restriction: A compiler may restrict + // ImportPaths to non-empty strings using only characters belonging + // to Unicode's L, M, N, P, and S general categories (the Graphic + // characters without spaces) and may also exclude the characters + // !"#$%&'()*,:;<=>?[\]^`{|} and the Unicode replacement character + // U+FFFD." + if s == "" { + return lit + } + const illegalChars = `!"#$%&'()*,:;<=>?[\]^{|}` + "`\uFFFD" + for _, r := range s { + if !unicode.IsGraphic(r) || unicode.IsSpace(r) || strings.ContainsRune(illegalChars, r) { + return lit + } + } + + // otherwise, return the double-quoted path + s = strconv.Quote(s) + if s == lit.Value { + return lit // nothing wrong with lit + } + return &ast.BasicLit{ValuePos: lit.ValuePos, Kind: token.STRING, Value: s} +} + +// The parameter n is the number of specs in the group. If doIndent is set, +// multi-line identifier lists in the spec are indented when the first +// linebreak is encountered. 
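sanitizeImportPath amounts to a strconv.Unquote/Quote round trip plus a character check, so back-quoted (or otherwise non-canonical but valid) import paths come out double-quoted. The core round trip in isolation, as a sketch that omits the character whitelist:

package main

import (
	"fmt"
	"strconv"
)

func main() {
	lit := "`fmt`" // a raw-string import path, as hand-written source might have it

	s, err := strconv.Unquote(lit) // accepts both "..." and `...` literals
	if err != nil {
		fmt.Println("not a valid string literal; the printer would leave it alone")
		return
	}
	fmt.Println(strconv.Quote(s)) // "fmt": the double-quoted form the printer emits
}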
+func (p *printer) spec(spec ast.Spec, n int, doIndent bool) { + switch s := spec.(type) { + case *ast.ImportSpec: + p.setComment(s.Doc) + if s.Name != nil { + p.expr(s.Name) + p.print(blank) + } + p.expr(sanitizeImportPath(s.Path)) + p.setComment(s.Comment) + p.setPos(s.EndPos) + + case *ast.ValueSpec: + if n != 1 { + p.internalError("expected n = 1; got", n) + } + p.setComment(s.Doc) + p.identList(s.Names, doIndent) // always present + if s.Type != nil { + p.print(blank) + p.expr(s.Type) + } + if s.Values != nil { + p.print(blank, token.ASSIGN, blank) + p.exprList(token.NoPos, s.Values, 1, 0, token.NoPos, false) + } + p.setComment(s.Comment) + + case *ast.TypeSpec: + p.setComment(s.Doc) + p.expr(s.Name) + if s.TypeParams != nil { + p.parameters(s.TypeParams, typeTParam) + } + if n == 1 { + p.print(blank) + } else { + p.print(vtab) + } + if s.Assign.IsValid() { + p.print(token.ASSIGN, blank) + } + p.expr(s.Type) + p.setComment(s.Comment) + + default: + panic("unreachable") + } +} + +func (p *printer) genDecl(d *ast.GenDecl) { + p.setComment(d.Doc) + p.setPos(d.Pos()) + p.print(d.Tok, blank) + + if d.Lparen.IsValid() || len(d.Specs) > 1 { + // group of parenthesized declarations + p.setPos(d.Lparen) + p.print(token.LPAREN) + if n := len(d.Specs); n > 0 { + p.print(indent, formfeed) + if n > 1 && (d.Tok == token.CONST || d.Tok == token.VAR) { + // two or more grouped const/var declarations: + // determine if the type column must be kept + keepType := keepTypeColumn(d.Specs) + var line int + for i, s := range d.Specs { + if i > 0 { + p.linebreak(p.lineFor(s.Pos()), 1, ignore, p.linesFrom(line) > 0) + } + p.recordLine(&line) + p.valueSpec(s.(*ast.ValueSpec), keepType[i]) + } + } else { + var line int + for i, s := range d.Specs { + if i > 0 { + p.linebreak(p.lineFor(s.Pos()), 1, ignore, p.linesFrom(line) > 0) + } + p.recordLine(&line) + p.spec(s, n, false) + } + } + p.print(unindent, formfeed) + } + p.setPos(d.Rparen) + p.print(token.RPAREN) + + } else if len(d.Specs) > 0 { + // single declaration + p.spec(d.Specs[0], 1, true) + } +} + +// sizeCounter is an io.Writer which counts the number of bytes written, +// as well as whether a newline character was seen. +type sizeCounter struct { + hasNewline bool + size int +} + +func (c *sizeCounter) Write(p []byte) (int, error) { + if !c.hasNewline { + for _, b := range p { + if b == '\n' || b == '\f' { + c.hasNewline = true + break + } + } + } + c.size += len(p) + return len(p), nil +} + +// nodeSize determines the size of n in chars after formatting. +// The result is <= maxSize if the node fits on one line with at +// most maxSize chars and the formatted output doesn't contain +// any control chars. Otherwise, the result is > maxSize. +func (p *printer) nodeSize(n ast.Node, maxSize int) (size int) { + // nodeSize invokes the printer, which may invoke nodeSize + // recursively. For deep composite literal nests, this can + // lead to an exponential algorithm. Remember previous + // results to prune the recursion (was issue 1628). 
+ if size, found := p.nodeSizes[n]; found { + return size + } + + size = maxSize + 1 // assume n doesn't fit + p.nodeSizes[n] = size + + // nodeSize computation must be independent of particular + // style so that we always get the same decision; print + // in RawFormat + cfg := Config{Mode: RawFormat} + var counter sizeCounter + if err := cfg.fprint(&counter, p.fset, n, p.nodeSizes); err != nil { + return + } + if counter.size <= maxSize && !counter.hasNewline { + // n fits in a single line + size = counter.size + p.nodeSizes[n] = size + } + return +} + +// numLines returns the number of lines spanned by node n in the original source. +func (p *printer) numLines(n ast.Node) int { + if from := n.Pos(); from.IsValid() { + if to := n.End(); to.IsValid() { + return p.lineFor(to) - p.lineFor(from) + 1 + } + } + return infinity +} + +// bodySize is like nodeSize but it is specialized for *ast.BlockStmt's. +func (p *printer) bodySize(b *ast.BlockStmt, maxSize int) int { + pos1 := b.Pos() + pos2 := b.Rbrace + if pos1.IsValid() && pos2.IsValid() && p.lineFor(pos1) != p.lineFor(pos2) { + // opening and closing brace are on different lines - don't make it a one-liner + return maxSize + 1 + } + if len(b.List) > 5 { + // too many statements - don't make it a one-liner + return maxSize + 1 + } + // otherwise, estimate body size + bodySize := p.commentSizeBefore(p.posFor(pos2)) + for i, s := range b.List { + if bodySize > maxSize { + break // no need to continue + } + if i > 0 { + bodySize += 2 // space for a semicolon and blank + } + bodySize += p.nodeSize(s, maxSize) + } + return bodySize +} + +// funcBody prints a function body following a function header of given headerSize. +// If the header's and block's size are "small enough" and the block is "simple enough", +// the block is printed on the current line, without line breaks, spaced from the header +// by sep. Otherwise the block's opening "{" is printed on the current line, followed by +// lines for the block's statements and its closing "}". +func (p *printer) funcBody(headerSize int, sep whiteSpace, b *ast.BlockStmt) { + if b == nil { + return + } + + // save/restore composite literal nesting level + defer func(level int) { + p.level = level + }(p.level) + p.level = 0 + + const maxSize = 100 + if headerSize+p.bodySize(b, maxSize) <= maxSize { + p.print(sep) + p.setPos(b.Lbrace) + p.print(token.LBRACE) + if len(b.List) > 0 { + p.print(blank) + for i, s := range b.List { + if i > 0 { + p.print(token.SEMICOLON, blank) + } + p.stmt(s, i == len(b.List)-1) + } + p.print(blank) + } + p.print(noExtraLinebreak) + p.setPos(b.Rbrace) + p.print(token.RBRACE, noExtraLinebreak) + return + } + + if sep != ignore { + p.print(blank) // always use blank + } + p.block(b, 1) +} + +// distanceFrom returns the column difference between p.out (the current output +// position) and startOutCol. If the start position is on a different line from +// the current position (or either is unknown), the result is infinity. +func (p *printer) distanceFrom(startPos token.Pos, startOutCol int) int { + if startPos.IsValid() && p.pos.IsValid() && p.posFor(startPos).Line == p.pos.Line { + return p.out.Column - startOutCol + } + return infinity +} + +func (p *printer) funcDecl(d *ast.FuncDecl) { + p.setComment(d.Doc) + p.setPos(d.Pos()) + p.print(token.FUNC, blank) + // We have to save startCol only after emitting FUNC; otherwise it can be on a + // different line (all whitespace preceding the FUNC is emitted only when the + // FUNC is emitted). 
+ startCol := p.out.Column - len("func ") + if d.Recv != nil { + p.parameters(d.Recv, funcParam) // method: print receiver + p.print(blank) + } + p.expr(d.Name) + p.signature(d.Type) + p.funcBody(p.distanceFrom(d.Pos(), startCol), vtab, d.Body) +} + +func (p *printer) decl(decl ast.Decl) { + switch d := decl.(type) { + case *ast.BadDecl: + p.setPos(d.Pos()) + p.print("BadDecl") + case *ast.GenDecl: + p.genDecl(d) + case *ast.FuncDecl: + p.funcDecl(d) + default: + panic("unreachable") + } +} + +// ---------------------------------------------------------------------------- +// Files + +func declToken(decl ast.Decl) (tok token.Token) { + tok = token.ILLEGAL + switch d := decl.(type) { + case *ast.GenDecl: + tok = d.Tok + case *ast.FuncDecl: + tok = token.FUNC + } + return +} + +func (p *printer) declList(list []ast.Decl) { + tok := token.ILLEGAL + for _, d := range list { + prev := tok + tok = declToken(d) + // If the declaration token changed (e.g., from CONST to TYPE) + // or the next declaration has documentation associated with it, + // print an empty line between top-level declarations. + // (because p.linebreak is called with the position of d, which + // is past any documentation, the minimum requirement is satisfied + // even w/o the extra getDoc(d) nil-check - leave it in case the + // linebreak logic improves - there's already a TODO). + if len(p.output) > 0 { + // only print line break if we are not at the beginning of the output + // (i.e., we are not printing only a partial program) + min := 1 + if prev != tok || getDoc(d) != nil { + min = 2 + } + // start a new section if the next declaration is a function + // that spans multiple lines (see also issue #19544) + p.linebreak(p.lineFor(d.Pos()), min, ignore, tok == token.FUNC && p.numLines(d) > 1) + } + p.decl(d) + } +} + +func (p *printer) file(src *ast.File) { + p.setComment(src.Doc) + p.setPos(src.Pos()) + p.print(token.PACKAGE, blank) + p.expr(src.Name) + p.declList(src.Decls) + p.print(newline) +} diff --git a/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/printer.go b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/printer.go new file mode 100644 index 0000000000..576f9690d7 --- /dev/null +++ b/tools/vendor/mvdan.cc/gofumpt/internal/govendor/go/printer/printer.go @@ -0,0 +1,1427 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package printer implements printing of AST nodes. +package printer + +import ( + "fmt" + "go/ast" + "go/build/constraint" + "go/token" + "io" + "os" + "strings" + "sync" + "text/tabwriter" + "unicode" +) + +const ( + maxNewlines = 2 // max. number of newlines between source text + debug = false // enable for debugging + infinity = 1 << 30 +) + +type whiteSpace byte + +const ( + ignore = whiteSpace(0) + blank = whiteSpace(' ') + vtab = whiteSpace('\v') + newline = whiteSpace('\n') + formfeed = whiteSpace('\f') + indent = whiteSpace('>') + unindent = whiteSpace('<') +) + +// A pmode value represents the current printer mode. 
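For orientation: this file begins the vendored copy of the standard library's go/printer, whose exported surface is driven through Config.Fprint. A typical caller, shown here against the public stdlib package rather than this internal copy, looks roughly like:

package main

import (
	"go/parser"
	"go/printer"
	"go/token"
	"os"
)

func main() {
	src := `package p

func add(a,b int)int{return a+b}
`
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	// gofmt-style settings: tabs for indentation, blanks for alignment.
	cfg := printer.Config{Mode: printer.UseSpaces | printer.TabIndent, Tabwidth: 8}
	if err := cfg.Fprint(os.Stdout, fset, f); err != nil {
		panic(err)
	}
}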
+type pmode int + +const ( + noExtraBlank pmode = 1 << iota // disables extra blank after /*-style comment + noExtraLinebreak // disables extra line break after /*-style comment +) + +type commentInfo struct { + cindex int // current comment index + comment *ast.CommentGroup // = printer.comments[cindex]; or nil + commentOffset int // = printer.posFor(printer.comments[cindex].List[0].Pos()).Offset; or infinity + commentNewline bool // true if the comment group contains newlines +} + +type printer struct { + // Configuration (does not change after initialization) + Config + fset *token.FileSet + + // Current state + output []byte // raw printer result + indent int // current indentation + level int // level == 0: outside composite literal; level > 0: inside composite literal + mode pmode // current printer mode + endAlignment bool // if set, terminate alignment immediately + impliedSemi bool // if set, a linebreak implies a semicolon + lastTok token.Token // last token printed (token.ILLEGAL if it's whitespace) + prevOpen token.Token // previous non-brace "open" token (, [, or token.ILLEGAL + wsbuf []whiteSpace // delayed white space + goBuild []int // start index of all //go:build comments in output + plusBuild []int // start index of all // +build comments in output + + // Positions + // The out position differs from the pos position when the result + // formatting differs from the source formatting (in the amount of + // white space). If there's a difference and SourcePos is set in + // ConfigMode, //line directives are used in the output to restore + // original source positions for a reader. + pos token.Position // current position in AST (source) space + out token.Position // current position in output space + last token.Position // value of pos after calling writeString + linePtr *int // if set, record out.Line for the next token in *linePtr + + // The list of all source comments, in order of appearance. + comments []*ast.CommentGroup // may be nil + useNodeComments bool // if not set, ignore lead and line comments of nodes + + // Information about p.comments[p.cindex]; set up by nextComment. + commentInfo + + // Cache of already computed node sizes. + nodeSizes map[ast.Node]int + + // Cache of most recently computed line position. + cachedPos token.Pos + cachedLine int // line corresponding to cachedPos +} + +func (p *printer) internalError(msg ...any) { + if debug { + fmt.Print(p.pos.String() + ": ") + fmt.Println(msg...) + panic("mvdan.cc/gofumpt/internal/govendor/go/printer") + } +} + +// commentsHaveNewline reports whether a list of comments belonging to +// an *ast.CommentGroup contains newlines. Because the position information +// may only be partially correct, we also have to read the comment text. 
+func (p *printer) commentsHaveNewline(list []*ast.Comment) bool { + // len(list) > 0 + line := p.lineFor(list[0].Pos()) + for i, c := range list { + if i > 0 && p.lineFor(list[i].Pos()) != line { + // not all comments on the same line + return true + } + if t := c.Text; len(t) >= 2 && (t[1] == '/' || strings.Contains(t, "\n")) { + return true + } + } + _ = line + return false +} + +func (p *printer) nextComment() { + for p.cindex < len(p.comments) { + c := p.comments[p.cindex] + p.cindex++ + if list := c.List; len(list) > 0 { + p.comment = c + p.commentOffset = p.posFor(list[0].Pos()).Offset + p.commentNewline = p.commentsHaveNewline(list) + return + } + // we should not reach here (correct ASTs don't have empty + // ast.CommentGroup nodes), but be conservative and try again + } + // no more comments + p.commentOffset = infinity +} + +// commentBefore reports whether the current comment group occurs +// before the next position in the source code and printing it does +// not introduce implicit semicolons. +func (p *printer) commentBefore(next token.Position) bool { + return p.commentOffset < next.Offset && (!p.impliedSemi || !p.commentNewline) +} + +// commentSizeBefore returns the estimated size of the +// comments on the same line before the next position. +func (p *printer) commentSizeBefore(next token.Position) int { + // save/restore current p.commentInfo (p.nextComment() modifies it) + defer func(info commentInfo) { + p.commentInfo = info + }(p.commentInfo) + + size := 0 + for p.commentBefore(next) { + for _, c := range p.comment.List { + size += len(c.Text) + } + p.nextComment() + } + return size +} + +// recordLine records the output line number for the next non-whitespace +// token in *linePtr. It is used to compute an accurate line number for a +// formatted construct, independent of pending (not yet emitted) whitespace +// or comments. +func (p *printer) recordLine(linePtr *int) { + p.linePtr = linePtr +} + +// linesFrom returns the number of output lines between the current +// output line and the line argument, ignoring any pending (not yet +// emitted) whitespace or comments. It is used to compute an accurate +// size (in number of lines) for a formatted construct. +func (p *printer) linesFrom(line int) int { + return p.out.Line - line +} + +func (p *printer) posFor(pos token.Pos) token.Position { + // not used frequently enough to cache entire token.Position + return p.fset.PositionFor(pos, false /* absolute position */) +} + +func (p *printer) lineFor(pos token.Pos) int { + if pos != p.cachedPos { + p.cachedPos = pos + p.cachedLine = p.fset.PositionFor(pos, false /* absolute position */).Line + } + return p.cachedLine +} + +// writeLineDirective writes a //line directive if necessary. +func (p *printer) writeLineDirective(pos token.Position) { + if pos.IsValid() && (p.out.Line != pos.Line || p.out.Filename != pos.Filename) { + p.output = append(p.output, tabwriter.Escape) // protect '\n' in //line from tabwriter interpretation + p.output = append(p.output, fmt.Sprintf("//line %s:%d\n", pos.Filename, pos.Line)...) + p.output = append(p.output, tabwriter.Escape) + // p.out must match the //line directive + p.out.Filename = pos.Filename + p.out.Line = pos.Line + } +} + +// writeIndent writes indentation. 
+func (p *printer) writeIndent() { + // use "hard" htabs - indentation columns + // must not be discarded by the tabwriter + n := p.Config.Indent + p.indent // include base indentation + for i := 0; i < n; i++ { + p.output = append(p.output, '\t') + } + + // update positions + p.pos.Offset += n + p.pos.Column += n + p.out.Column += n +} + +// writeByte writes ch n times to p.output and updates p.pos. +// Only used to write formatting (white space) characters. +func (p *printer) writeByte(ch byte, n int) { + if p.endAlignment { + // Ignore any alignment control character; + // and at the end of the line, break with + // a formfeed to indicate termination of + // existing columns. + switch ch { + case '\t', '\v': + ch = ' ' + case '\n', '\f': + ch = '\f' + p.endAlignment = false + } + } + + if p.out.Column == 1 { + // no need to write line directives before white space + p.writeIndent() + } + + for i := 0; i < n; i++ { + p.output = append(p.output, ch) + } + + // update positions + p.pos.Offset += n + if ch == '\n' || ch == '\f' { + p.pos.Line += n + p.out.Line += n + p.pos.Column = 1 + p.out.Column = 1 + return + } + p.pos.Column += n + p.out.Column += n +} + +// writeString writes the string s to p.output and updates p.pos, p.out, +// and p.last. If isLit is set, s is escaped w/ tabwriter.Escape characters +// to protect s from being interpreted by the tabwriter. +// +// Note: writeString is only used to write Go tokens, literals, and +// comments, all of which must be written literally. Thus, it is correct +// to always set isLit = true. However, setting it explicitly only when +// needed (i.e., when we don't know that s contains no tabs or line breaks) +// avoids processing extra escape characters and reduces run time of the +// printer benchmark by up to 10%. +func (p *printer) writeString(pos token.Position, s string, isLit bool) { + if p.out.Column == 1 { + if p.Config.Mode&SourcePos != 0 { + p.writeLineDirective(pos) + } + p.writeIndent() + } + + if pos.IsValid() { + // update p.pos (if pos is invalid, continue with existing p.pos) + // Note: Must do this after handling line beginnings because + // writeIndent updates p.pos if there's indentation, but p.pos + // is the position of s. + p.pos = pos + } + + if isLit { + // Protect s such that is passes through the tabwriter + // unchanged. Note that valid Go programs cannot contain + // tabwriter.Escape bytes since they do not appear in legal + // UTF-8 sequences. + p.output = append(p.output, tabwriter.Escape) + } + + if debug { + p.output = append(p.output, fmt.Sprintf("/*%s*/", pos)...) // do not update p.pos! + } + p.output = append(p.output, s...) + + // update positions + nlines := 0 + var li int // index of last newline; valid if nlines > 0 + for i := 0; i < len(s); i++ { + // Raw string literals may contain any character except back quote (`). + if ch := s[i]; ch == '\n' || ch == '\f' { + // account for line break + nlines++ + li = i + // A line break inside a literal will break whatever column + // formatting is in place; ignore any further alignment through + // the end of the line. + p.endAlignment = true + } + } + p.pos.Offset += len(s) + if nlines > 0 { + p.pos.Line += nlines + p.out.Line += nlines + c := len(s) - li + p.pos.Column = c + p.out.Column = c + } else { + p.pos.Column += len(s) + p.out.Column += len(s) + } + + if isLit { + p.output = append(p.output, tabwriter.Escape) + } + + p.last = p.pos +} + +// writeCommentPrefix writes the whitespace before a comment. 
+// If there is any pending whitespace, it consumes as much of +// it as is likely to help position the comment nicely. +// pos is the comment position, next the position of the item +// after all pending comments, prev is the previous comment in +// a group of comments (or nil), and tok is the next token. +func (p *printer) writeCommentPrefix(pos, next token.Position, prev *ast.Comment, tok token.Token) { + if len(p.output) == 0 { + // the comment is the first item to be printed - don't write any whitespace + return + } + + if pos.IsValid() && pos.Filename != p.last.Filename { + // comment in a different file - separate with newlines + p.writeByte('\f', maxNewlines) + return + } + + if pos.Line == p.last.Line && (prev == nil || prev.Text[1] != '/') { + // comment on the same line as last item: + // separate with at least one separator + hasSep := false + if prev == nil { + // first comment of a comment group + j := 0 + for i, ch := range p.wsbuf { + switch ch { + case blank: + // ignore any blanks before a comment + p.wsbuf[i] = ignore + continue + case vtab: + // respect existing tabs - important + // for proper formatting of commented structs + hasSep = true + continue + case indent: + // apply pending indentation + continue + } + j = i + break + } + p.writeWhitespace(j) + } + // make sure there is at least one separator + if !hasSep { + sep := byte('\t') + if pos.Line == next.Line { + // next item is on the same line as the comment + // (which must be a /*-style comment): separate + // with a blank instead of a tab + sep = ' ' + } + p.writeByte(sep, 1) + } + + } else { + // comment on a different line: + // separate with at least one line break + droppedLinebreak := false + j := 0 + for i, ch := range p.wsbuf { + switch ch { + case blank, vtab: + // ignore any horizontal whitespace before line breaks + p.wsbuf[i] = ignore + continue + case indent: + // apply pending indentation + continue + case unindent: + // if this is not the last unindent, apply it + // as it is (likely) belonging to the last + // construct (e.g., a multi-line expression list) + // and is not part of closing a block + if i+1 < len(p.wsbuf) && p.wsbuf[i+1] == unindent { + continue + } + // if the next token is not a closing }, apply the unindent + // if it appears that the comment is aligned with the + // token; otherwise assume the unindent is part of a + // closing block and stop (this scenario appears with + // comments before a case label where the comments + // apply to the next case instead of the current one) + if tok != token.RBRACE && pos.Column == next.Column { + continue + } + case newline, formfeed: + p.wsbuf[i] = ignore + droppedLinebreak = prev == nil // record only if first comment of a group + } + j = i + break + } + p.writeWhitespace(j) + + // determine number of linebreaks before the comment + n := 0 + if pos.IsValid() && p.last.IsValid() { + n = pos.Line - p.last.Line + if n < 0 { // should never happen + n = 0 + } + } + + // at the package scope level only (p.indent == 0), + // add an extra newline if we dropped one before: + // this preserves a blank line before documentation + // comments at the package scope level (issue 2570) + if p.indent == 0 && droppedLinebreak { + n++ + } + + // make sure there is at least one line break + // if the previous comment was a line comment + if n == 0 && prev != nil && prev.Text[1] == '/' { + n = 1 + } + + if n > 0 { + // use formfeeds to break columns before a comment; + // this is analogous to using formfeeds to separate + // individual lines of /*-style 
comments + p.writeByte('\f', nlimit(n)) + } + } +} + +// Returns true if s contains only white space +// (only tabs and blanks can appear in the printer's context). +func isBlank(s string) bool { + for i := 0; i < len(s); i++ { + if s[i] > ' ' { + return false + } + } + return true +} + +// commonPrefix returns the common prefix of a and b. +func commonPrefix(a, b string) string { + i := 0 + for i < len(a) && i < len(b) && a[i] == b[i] && (a[i] <= ' ' || a[i] == '*') { + i++ + } + return a[0:i] +} + +// trimRight returns s with trailing whitespace removed. +func trimRight(s string) string { + return strings.TrimRightFunc(s, unicode.IsSpace) +} + +// stripCommonPrefix removes a common prefix from /*-style comment lines (unless no +// comment line is indented, all but the first line have some form of space prefix). +// The prefix is computed using heuristics such that is likely that the comment +// contents are nicely laid out after re-printing each line using the printer's +// current indentation. +func stripCommonPrefix(lines []string) { + if len(lines) <= 1 { + return // at most one line - nothing to do + } + // len(lines) > 1 + + // The heuristic in this function tries to handle a few + // common patterns of /*-style comments: Comments where + // the opening /* and closing */ are aligned and the + // rest of the comment text is aligned and indented with + // blanks or tabs, cases with a vertical "line of stars" + // on the left, and cases where the closing */ is on the + // same line as the last comment text. + + // Compute maximum common white prefix of all but the first, + // last, and blank lines, and replace blank lines with empty + // lines (the first line starts with /* and has no prefix). + // In cases where only the first and last lines are not blank, + // such as two-line comments, or comments where all inner lines + // are blank, consider the last line for the prefix computation + // since otherwise the prefix would be empty. + // + // Note that the first and last line are never empty (they + // contain the opening /* and closing */ respectively) and + // thus they can be ignored by the blank line check. + prefix := "" + prefixSet := false + if len(lines) > 2 { + for i, line := range lines[1 : len(lines)-1] { + if isBlank(line) { + lines[1+i] = "" // range starts with lines[1] + } else { + if !prefixSet { + prefix = line + prefixSet = true + } + prefix = commonPrefix(prefix, line) + } + } + } + // If we don't have a prefix yet, consider the last line. + if !prefixSet { + line := lines[len(lines)-1] + prefix = commonPrefix(line, line) + } + + /* + * Check for vertical "line of stars" and correct prefix accordingly. + */ + lineOfStars := false + if p, _, ok := strings.Cut(prefix, "*"); ok { + // remove trailing blank from prefix so stars remain aligned + prefix = strings.TrimSuffix(p, " ") + lineOfStars = true + } else { + // No line of stars present. + // Determine the white space on the first line after the /* + // and before the beginning of the comment text, assume two + // blanks instead of the /* unless the first character after + // the /* is a tab. If the first comment line is empty but + // for the opening /*, assume up to 3 blanks or a tab. This + // whitespace may be found as suffix in the common prefix. 
+ first := lines[0] + if isBlank(first[2:]) { + // no comment text on the first line: + // reduce prefix by up to 3 blanks or a tab + // if present - this keeps comment text indented + // relative to the /* and */'s if it was indented + // in the first place + i := len(prefix) + for n := 0; n < 3 && i > 0 && prefix[i-1] == ' '; n++ { + i-- + } + if i == len(prefix) && i > 0 && prefix[i-1] == '\t' { + i-- + } + prefix = prefix[0:i] + } else { + // comment text on the first line + suffix := make([]byte, len(first)) + n := 2 // start after opening /* + for n < len(first) && first[n] <= ' ' { + suffix[n] = first[n] + n++ + } + if n > 2 && suffix[2] == '\t' { + // assume the '\t' compensates for the /* + suffix = suffix[2:n] + } else { + // otherwise assume two blanks + suffix[0], suffix[1] = ' ', ' ' + suffix = suffix[0:n] + } + // Shorten the computed common prefix by the length of + // suffix, if it is found as suffix of the prefix. + prefix = strings.TrimSuffix(prefix, string(suffix)) + } + } + + // Handle last line: If it only contains a closing */, align it + // with the opening /*, otherwise align the text with the other + // lines. + last := lines[len(lines)-1] + closing := "*/" + before, _, _ := strings.Cut(last, closing) // closing always present + if isBlank(before) { + // last line only contains closing */ + if lineOfStars { + closing = " */" // add blank to align final star + } + lines[len(lines)-1] = prefix + closing + } else { + // last line contains more comment text - assume + // it is aligned like the other lines and include + // in prefix computation + prefix = commonPrefix(prefix, last) + } + + // Remove the common prefix from all but the first and empty lines. + for i, line := range lines { + if i > 0 && line != "" { + lines[i] = line[len(prefix):] + } + } +} + +func (p *printer) writeComment(comment *ast.Comment) { + text := comment.Text + pos := p.posFor(comment.Pos()) + + const linePrefix = "//line " + if strings.HasPrefix(text, linePrefix) && (!pos.IsValid() || pos.Column == 1) { + // Possibly a //-style line directive. + // Suspend indentation temporarily to keep line directive valid. + defer func(indent int) { p.indent = indent }(p.indent) + p.indent = 0 + } + + // shortcut common case of //-style comments + if text[1] == '/' { + if constraint.IsGoBuild(text) { + p.goBuild = append(p.goBuild, len(p.output)) + } else if constraint.IsPlusBuild(text) { + p.plusBuild = append(p.plusBuild, len(p.output)) + } + p.writeString(pos, trimRight(text), true) + return + } + + // for /*-style comments, print line by line and let the + // write function take care of the proper indentation + lines := strings.Split(text, "\n") + + // The comment started in the first column but is going + // to be indented. For an idempotent result, add indentation + // to all lines such that they look like they were indented + // before - this will make sure the common prefix computation + // is the same independent of how many times formatting is + // applied (was issue 1835). 
+ if pos.IsValid() && pos.Column == 1 && p.indent > 0 { + for i, line := range lines[1:] { + lines[1+i] = " " + line + } + } + + stripCommonPrefix(lines) + + // write comment lines, separated by formfeed, + // without a line break after the last line + for i, line := range lines { + if i > 0 { + p.writeByte('\f', 1) + pos = p.pos + } + if len(line) > 0 { + p.writeString(pos, trimRight(line), true) + } + } +} + +// writeCommentSuffix writes a line break after a comment if indicated +// and processes any leftover indentation information. If a line break +// is needed, the kind of break (newline vs formfeed) depends on the +// pending whitespace. The writeCommentSuffix result indicates if a +// newline was written or if a formfeed was dropped from the whitespace +// buffer. +func (p *printer) writeCommentSuffix(needsLinebreak bool) (wroteNewline, droppedFF bool) { + for i, ch := range p.wsbuf { + switch ch { + case blank, vtab: + // ignore trailing whitespace + p.wsbuf[i] = ignore + case indent, unindent: + // don't lose indentation information + case newline, formfeed: + // if we need a line break, keep exactly one + // but remember if we dropped any formfeeds + if needsLinebreak { + needsLinebreak = false + wroteNewline = true + } else { + if ch == formfeed { + droppedFF = true + } + p.wsbuf[i] = ignore + } + } + } + p.writeWhitespace(len(p.wsbuf)) + + // make sure we have a line break + if needsLinebreak { + p.writeByte('\n', 1) + wroteNewline = true + } + + return +} + +// containsLinebreak reports whether the whitespace buffer contains any line breaks. +func (p *printer) containsLinebreak() bool { + for _, ch := range p.wsbuf { + if ch == newline || ch == formfeed { + return true + } + } + return false +} + +// intersperseComments consumes all comments that appear before the next token +// tok and prints it together with the buffered whitespace (i.e., the whitespace +// that needs to be written before the next token). A heuristic is used to mix +// the comments and whitespace. The intersperseComments result indicates if a +// newline was written or if a formfeed was dropped from the whitespace buffer. +func (p *printer) intersperseComments(next token.Position, tok token.Token) (wroteNewline, droppedFF bool) { + var last *ast.Comment + for p.commentBefore(next) { + list := p.comment.List + changed := false + if p.lastTok != token.IMPORT && // do not rewrite cgo's import "C" comments + p.posFor(p.comment.Pos()).Column == 1 && + p.posFor(p.comment.End()+1) == next { + // Unindented comment abutting next token position: + // a top-level doc comment. + list = formatDocComment(list) + changed = true + + if len(p.comment.List) > 0 && len(list) == 0 { + // The doc comment was removed entirely. + // Keep preceding whitespace. + p.writeCommentPrefix(p.posFor(p.comment.Pos()), next, last, tok) + // Change print state to continue at next. + p.pos = next + p.last = next + // There can't be any more comments. + p.nextComment() + return p.writeCommentSuffix(false) + } + } + for _, c := range list { + p.writeCommentPrefix(p.posFor(c.Pos()), next, last, tok) + p.writeComment(c) + last = c + } + // In case list was rewritten, change print state to where + // the original list would have ended. 
+ if len(p.comment.List) > 0 && changed { + last = p.comment.List[len(p.comment.List)-1] + p.pos = p.posFor(last.End()) + p.last = p.pos + } + p.nextComment() + } + + if last != nil { + // If the last comment is a /*-style comment and the next item + // follows on the same line but is not a comma, and not a "closing" + // token immediately following its corresponding "opening" token, + // add an extra separator unless explicitly disabled. Use a blank + // as separator unless we have pending linebreaks, they are not + // disabled, and we are outside a composite literal, in which case + // we want a linebreak (issue 15137). + // TODO(gri) This has become overly complicated. We should be able + // to track whether we're inside an expression or statement and + // use that information to decide more directly. + needsLinebreak := false + if p.mode&noExtraBlank == 0 && + last.Text[1] == '*' && p.lineFor(last.Pos()) == next.Line && + tok != token.COMMA && + (tok != token.RPAREN || p.prevOpen == token.LPAREN) && + (tok != token.RBRACK || p.prevOpen == token.LBRACK) { + if p.containsLinebreak() && p.mode&noExtraLinebreak == 0 && p.level == 0 { + needsLinebreak = true + } else { + p.writeByte(' ', 1) + } + } + // Ensure that there is a line break after a //-style comment, + // before EOF, and before a closing '}' unless explicitly disabled. + if last.Text[1] == '/' || + tok == token.EOF || + tok == token.RBRACE && p.mode&noExtraLinebreak == 0 { + needsLinebreak = true + } + return p.writeCommentSuffix(needsLinebreak) + } + + // no comment was written - we should never reach here since + // intersperseComments should not be called in that case + p.internalError("intersperseComments called without pending comments") + return +} + +// whiteWhitespace writes the first n whitespace entries. +func (p *printer) writeWhitespace(n int) { + // write entries + for i := 0; i < n; i++ { + switch ch := p.wsbuf[i]; ch { + case ignore: + // ignore! + case indent: + p.indent++ + case unindent: + p.indent-- + if p.indent < 0 { + p.internalError("negative indentation:", p.indent) + p.indent = 0 + } + case newline, formfeed: + // A line break immediately followed by a "correcting" + // unindent is swapped with the unindent - this permits + // proper label positioning. If a comment is between + // the line break and the label, the unindent is not + // part of the comment whitespace prefix and the comment + // will be positioned correctly indented. + if i+1 < n && p.wsbuf[i+1] == unindent { + // Use a formfeed to terminate the current section. + // Otherwise, a long label name on the next line leading + // to a wide column may increase the indentation column + // of lines before the label; effectively leading to wrong + // indentation. + p.wsbuf[i], p.wsbuf[i+1] = unindent, formfeed + i-- // do it again + continue + } + fallthrough + default: + p.writeByte(byte(ch), 1) + } + } + + // shift remaining entries down + l := copy(p.wsbuf, p.wsbuf[n:]) + p.wsbuf = p.wsbuf[:l] +} + +// ---------------------------------------------------------------------------- +// Printing interface + +// nlimit limits n to maxNewlines. +func nlimit(n int) int { + if n > maxNewlines { + n = maxNewlines + } + return n +} + +func mayCombine(prev token.Token, next byte) (b bool) { + switch prev { + case token.INT: + b = next == '.' // 1. 
+ case token.ADD: + b = next == '+' // ++ + case token.SUB: + b = next == '-' // -- + case token.QUO: + b = next == '*' // /* + case token.LSS: + b = next == '-' || next == '<' // <- or << + case token.AND: + b = next == '&' || next == '^' // && or &^ + } + return +} + +func (p *printer) setPos(pos token.Pos) { + if pos.IsValid() { + p.pos = p.posFor(pos) // accurate position of next item + } +} + +// print prints a list of "items" (roughly corresponding to syntactic +// tokens, but also including whitespace and formatting information). +// It is the only print function that should be called directly from +// any of the AST printing functions in nodes.go. +// +// Whitespace is accumulated until a non-whitespace token appears. Any +// comments that need to appear before that token are printed first, +// taking into account the amount and structure of any pending white- +// space for best comment placement. Then, any leftover whitespace is +// printed, followed by the actual token. +func (p *printer) print(args ...any) { + for _, arg := range args { + // information about the current arg + var data string + var isLit bool + var impliedSemi bool // value for p.impliedSemi after this arg + + // record previous opening token, if any + switch p.lastTok { + case token.ILLEGAL: + // ignore (white space) + case token.LPAREN, token.LBRACK: + p.prevOpen = p.lastTok + default: + // other tokens followed any opening token + p.prevOpen = token.ILLEGAL + } + + switch x := arg.(type) { + case pmode: + // toggle printer mode + p.mode ^= x + continue + + case whiteSpace: + if x == ignore { + // don't add ignore's to the buffer; they + // may screw up "correcting" unindents (see + // LabeledStmt) + continue + } + i := len(p.wsbuf) + if i == cap(p.wsbuf) { + // Whitespace sequences are very short so this should + // never happen. Handle gracefully (but possibly with + // bad comment placement) if it does happen. + p.writeWhitespace(i) + i = 0 + } + p.wsbuf = p.wsbuf[0 : i+1] + p.wsbuf[i] = x + if x == newline || x == formfeed { + // newlines affect the current state (p.impliedSemi) + // and not the state after printing arg (impliedSemi) + // because comments can be interspersed before the arg + // in this case + p.impliedSemi = false + } + p.lastTok = token.ILLEGAL + continue + + case *ast.Ident: + data = x.Name + impliedSemi = true + p.lastTok = token.IDENT + + case *ast.BasicLit: + data = x.Value + isLit = true + impliedSemi = true + p.lastTok = x.Kind + + case token.Token: + s := x.String() + if mayCombine(p.lastTok, s[0]) { + // the previous and the current token must be + // separated by a blank otherwise they combine + // into a different incorrect token sequence + // (except for token.INT followed by a '.' 
this + // should never happen because it is taken care + // of via binary expression formatting) + if len(p.wsbuf) != 0 { + p.internalError("whitespace buffer not empty") + } + p.wsbuf = p.wsbuf[0:1] + p.wsbuf[0] = ' ' + } + data = s + // some keywords followed by a newline imply a semicolon + switch x { + case token.BREAK, token.CONTINUE, token.FALLTHROUGH, token.RETURN, + token.INC, token.DEC, token.RPAREN, token.RBRACK, token.RBRACE: + impliedSemi = true + } + p.lastTok = x + + case string: + // incorrect AST - print error message + data = x + isLit = true + impliedSemi = true + p.lastTok = token.STRING + + default: + fmt.Fprintf(os.Stderr, "print: unsupported argument %v (%T)\n", arg, arg) + panic("mvdan.cc/gofumpt/internal/govendor/go/printer type") + } + // data != "" + + next := p.pos // estimated/accurate position of next item + wroteNewline, droppedFF := p.flush(next, p.lastTok) + + // intersperse extra newlines if present in the source and + // if they don't cause extra semicolons (don't do this in + // flush as it will cause extra newlines at the end of a file) + if !p.impliedSemi { + n := nlimit(next.Line - p.pos.Line) + // don't exceed maxNewlines if we already wrote one + if wroteNewline && n == maxNewlines { + n = maxNewlines - 1 + } + if n > 0 { + ch := byte('\n') + if droppedFF { + ch = '\f' // use formfeed since we dropped one before + } + p.writeByte(ch, n) + impliedSemi = false + } + } + + // the next token starts now - record its line number if requested + if p.linePtr != nil { + *p.linePtr = p.out.Line + p.linePtr = nil + } + + p.writeString(next, data, isLit) + p.impliedSemi = impliedSemi + } +} + +// flush prints any pending comments and whitespace occurring textually +// before the position of the next token tok. The flush result indicates +// if a newline was written or if a formfeed was dropped from the whitespace +// buffer. +func (p *printer) flush(next token.Position, tok token.Token) (wroteNewline, droppedFF bool) { + if p.commentBefore(next) { + // if there are comments before the next item, intersperse them + wroteNewline, droppedFF = p.intersperseComments(next, tok) + } else { + // otherwise, write any leftover whitespace + p.writeWhitespace(len(p.wsbuf)) + } + return +} + +// getDoc returns the ast.CommentGroup associated with n, if any. 
+func getDoc(n ast.Node) *ast.CommentGroup { + switch n := n.(type) { + case *ast.Field: + return n.Doc + case *ast.ImportSpec: + return n.Doc + case *ast.ValueSpec: + return n.Doc + case *ast.TypeSpec: + return n.Doc + case *ast.GenDecl: + return n.Doc + case *ast.FuncDecl: + return n.Doc + case *ast.File: + return n.Doc + } + return nil +} + +func getLastComment(n ast.Node) *ast.CommentGroup { + switch n := n.(type) { + case *ast.Field: + return n.Comment + case *ast.ImportSpec: + return n.Comment + case *ast.ValueSpec: + return n.Comment + case *ast.TypeSpec: + return n.Comment + case *ast.GenDecl: + if len(n.Specs) > 0 { + return getLastComment(n.Specs[len(n.Specs)-1]) + } + case *ast.File: + if len(n.Comments) > 0 { + return n.Comments[len(n.Comments)-1] + } + } + return nil +} + +func (p *printer) printNode(node any) error { + // unpack *CommentedNode, if any + var comments []*ast.CommentGroup + if cnode, ok := node.(*CommentedNode); ok { + node = cnode.Node + comments = cnode.Comments + } + + if comments != nil { + // commented node - restrict comment list to relevant range + n, ok := node.(ast.Node) + if !ok { + goto unsupported + } + beg := n.Pos() + end := n.End() + // if the node has associated documentation, + // include that commentgroup in the range + // (the comment list is sorted in the order + // of the comment appearance in the source code) + if doc := getDoc(n); doc != nil { + beg = doc.Pos() + } + if com := getLastComment(n); com != nil { + if e := com.End(); e > end { + end = e + } + } + // token.Pos values are global offsets, we can + // compare them directly + i := 0 + for i < len(comments) && comments[i].End() < beg { + i++ + } + j := i + for j < len(comments) && comments[j].Pos() < end { + j++ + } + if i < j { + p.comments = comments[i:j] + } + } else if n, ok := node.(*ast.File); ok { + // use ast.File comments, if any + p.comments = n.Comments + } + + // if there are no comments, use node comments + p.useNodeComments = p.comments == nil + + // get comments ready for use + p.nextComment() + + p.print(pmode(0)) + + // format node + switch n := node.(type) { + case ast.Expr: + p.expr(n) + case ast.Stmt: + // A labeled statement will un-indent to position the label. + // Set p.indent to 1 so we don't get indent "underflow". + if _, ok := n.(*ast.LabeledStmt); ok { + p.indent = 1 + } + p.stmt(n, false) + case ast.Decl: + p.decl(n) + case ast.Spec: + p.spec(n, 1, false) + case []ast.Stmt: + // A labeled statement will un-indent to position the label. + // Set p.indent to 1 so we don't get indent "underflow". + for _, s := range n { + if _, ok := s.(*ast.LabeledStmt); ok { + p.indent = 1 + } + } + p.stmtList(n, 0, false) + case []ast.Decl: + p.declList(n) + case *ast.File: + p.file(n) + default: + goto unsupported + } + + return nil + +unsupported: + return fmt.Errorf("mvdan.cc/gofumpt/internal/govendor/go/printer: unsupported node type %T", node) +} + +// ---------------------------------------------------------------------------- +// Trimmer + +// A trimmer is an io.Writer filter for stripping tabwriter.Escape +// characters, trailing blanks and tabs, and for converting formfeed +// and vtab characters into newlines and htabs (in case no tabwriter +// is used). Text bracketed by tabwriter.Escape characters is passed +// through unchanged. +type trimmer struct { + output io.Writer + state int + space []byte +} + +// trimmer is implemented as a state machine. 
+// It can be in one of the following states: +const ( + inSpace = iota // inside space + inEscape // inside text bracketed by tabwriter.Escapes + inText // inside text +) + +func (p *trimmer) resetSpace() { + p.state = inSpace + p.space = p.space[0:0] +} + +// Design note: It is tempting to eliminate extra blanks occurring in +// whitespace in this function as it could simplify some +// of the blanks logic in the node printing functions. +// However, this would mess up any formatting done by +// the tabwriter. + +var aNewline = []byte("\n") + +func (p *trimmer) Write(data []byte) (n int, err error) { + // invariants: + // p.state == inSpace: + // p.space is unwritten + // p.state == inEscape, inText: + // data[m:n] is unwritten + m := 0 + var b byte + for n, b = range data { + if b == '\v' { + b = '\t' // convert to htab + } + switch p.state { + case inSpace: + switch b { + case '\t', ' ': + p.space = append(p.space, b) + case '\n', '\f': + p.resetSpace() // discard trailing space + _, err = p.output.Write(aNewline) + case tabwriter.Escape: + _, err = p.output.Write(p.space) + p.state = inEscape + m = n + 1 // +1: skip tabwriter.Escape + default: + _, err = p.output.Write(p.space) + p.state = inText + m = n + } + case inEscape: + if b == tabwriter.Escape { + _, err = p.output.Write(data[m:n]) + p.resetSpace() + } + case inText: + switch b { + case '\t', ' ': + _, err = p.output.Write(data[m:n]) + p.resetSpace() + p.space = append(p.space, b) + case '\n', '\f': + _, err = p.output.Write(data[m:n]) + p.resetSpace() + if err == nil { + _, err = p.output.Write(aNewline) + } + case tabwriter.Escape: + _, err = p.output.Write(data[m:n]) + p.state = inEscape + m = n + 1 // +1: skip tabwriter.Escape + } + default: + panic("unreachable") + } + if err != nil { + return + } + } + n = len(data) + + switch p.state { + case inEscape, inText: + _, err = p.output.Write(data[m:n]) + p.resetSpace() + } + + return +} + +// ---------------------------------------------------------------------------- +// Public interface + +// A Mode value is a set of flags (or 0). They control printing. +type Mode uint + +const ( + RawFormat Mode = 1 << iota // do not use a tabwriter; if set, UseSpaces is ignored + TabIndent // use tabs for indentation independent of UseSpaces + UseSpaces // use spaces instead of tabs for alignment + SourcePos // emit //line directives to preserve original source positions +) + +// The mode below is not included in printer's public API because +// editing code text is deemed out of scope. Because this mode is +// unexported, it's also possible to modify or remove it based on +// the evolving needs of mvdan.cc/gofumpt/internal/govendor/go/format and cmd/gofmt without breaking +// users. See discussion in CL 240683. +const ( + // normalizeNumbers means to canonicalize number + // literal prefixes and exponents while printing. + // + // This value is known in and used by mvdan.cc/gofumpt/internal/govendor/go/format and cmd/gofmt. + // It is currently more convenient and performant for those + // packages to apply number normalization during printing, + // rather than by modifying the AST in advance. + normalizeNumbers Mode = 1 << 30 +) + +// A Config node controls the output of Fprint. +type Config struct { + Mode Mode // default: 0 + Tabwidth int // default: 8 + Indent int // default: 0 (all code is indented at least by this much) +} + +var printerPool = sync.Pool{ + New: func() any { + return &printer{ + // Whitespace sequences are short. 
+ wsbuf: make([]whiteSpace, 0, 16), + // We start the printer with a 16K output buffer, which is currently + // larger than about 80% of Go files in the standard library. + output: make([]byte, 0, 16<<10), + } + }, +} + +func newPrinter(cfg *Config, fset *token.FileSet, nodeSizes map[ast.Node]int) *printer { + p := printerPool.Get().(*printer) + *p = printer{ + Config: *cfg, + fset: fset, + pos: token.Position{Line: 1, Column: 1}, + out: token.Position{Line: 1, Column: 1}, + wsbuf: p.wsbuf[:0], + nodeSizes: nodeSizes, + cachedPos: -1, + output: p.output[:0], + } + return p +} + +func (p *printer) free() { + // Hard limit on buffer size; see https://golang.org/issue/23199. + if cap(p.output) > 64<<10 { + return + } + + printerPool.Put(p) +} + +// fprint implements Fprint and takes a nodesSizes map for setting up the printer state. +func (cfg *Config) fprint(output io.Writer, fset *token.FileSet, node any, nodeSizes map[ast.Node]int) (err error) { + // print node + p := newPrinter(cfg, fset, nodeSizes) + defer p.free() + if err = p.printNode(node); err != nil { + return + } + // print outstanding comments + p.impliedSemi = false // EOF acts like a newline + p.flush(token.Position{Offset: infinity, Line: infinity}, token.EOF) + + // output is buffered in p.output now. + // fix //go:build and // +build comments if needed. + p.fixGoBuildLines() + + // redirect output through a trimmer to eliminate trailing whitespace + // (Input to a tabwriter must be untrimmed since trailing tabs provide + // formatting information. The tabwriter could provide trimming + // functionality but no tabwriter is used when RawFormat is set.) + output = &trimmer{output: output} + + // redirect output through a tabwriter if necessary + if cfg.Mode&RawFormat == 0 { + minwidth := cfg.Tabwidth + + padchar := byte('\t') + if cfg.Mode&UseSpaces != 0 { + padchar = ' ' + } + + twmode := tabwriter.DiscardEmptyColumns + if cfg.Mode&TabIndent != 0 { + minwidth = 0 + twmode |= tabwriter.TabIndent + } + + output = tabwriter.NewWriter(output, minwidth, cfg.Tabwidth, 1, padchar, twmode) + } + + // write printer result via tabwriter/trimmer to output + if _, err = output.Write(p.output); err != nil { + return + } + + // flush tabwriter, if any + if tw, _ := output.(*tabwriter.Writer); tw != nil { + err = tw.Flush() + } + + return +} + +// A CommentedNode bundles an AST node and corresponding comments. +// It may be provided as argument to any of the Fprint functions. +type CommentedNode struct { + Node any // *ast.File, or ast.Expr, ast.Decl, ast.Spec, or ast.Stmt + Comments []*ast.CommentGroup +} + +// Fprint "pretty-prints" an AST node to output for a given configuration cfg. +// Position information is interpreted relative to the file set fset. +// The node type must be *ast.File, *CommentedNode, []ast.Decl, []ast.Stmt, +// or assignment-compatible to ast.Expr, ast.Decl, ast.Spec, or ast.Stmt. +func (cfg *Config) Fprint(output io.Writer, fset *token.FileSet, node any) error { + return cfg.fprint(output, fset, node, make(map[ast.Node]int)) +} + +// Fprint "pretty-prints" an AST node to output. +// It calls Config.Fprint with default settings. +// Note that gofmt uses tabs for indentation but spaces for alignment; +// use format.Node (package mvdan.cc/gofumpt/internal/govendor/go/format) for output that matches gofmt. 
+func Fprint(output io.Writer, fset *token.FileSet, node any) error { + return (&Config{Tabwidth: 8}).Fprint(output, fset, node) +} diff --git a/tools/vendor/mvdan.cc/gofumpt/internal/version/version.go b/tools/vendor/mvdan.cc/gofumpt/internal/version/version.go index 992930480f..785b6b317d 100644 --- a/tools/vendor/mvdan.cc/gofumpt/internal/version/version.go +++ b/tools/vendor/mvdan.cc/gofumpt/internal/version/version.go @@ -97,6 +97,9 @@ func goVersion() string { return runtime.Version() } -func String() string { +func String(injected string) string { + if injected != "" { + return fmt.Sprintf("%s (%s)", injected, goVersion()) + } return fmt.Sprintf("%s (%s)", gofumptVersion(), goVersion()) }